diff -Nru meson-0.53.2/contributing.md meson-0.57.0+really0.56.2/contributing.md --- meson-0.53.2/contributing.md 1970-01-01 00:00:00.000000000 +0000 +++ meson-0.57.0+really0.56.2/contributing.md 2020-08-15 16:27:05.000000000 +0000 @@ -0,0 +1,8 @@ +## Contributing to the Meson build system + +Thank you for your interest in participating in the development! +A large fraction of Meson is contributed by people outside +the core team and we are *excited* to see what you do. + +**Contribution instructions can be found on the website** + @ https://mesonbuild.com/Contributing.html diff -Nru meson-0.53.2/cross/arm64cl.txt meson-0.57.0+really0.56.2/cross/arm64cl.txt --- meson-0.53.2/cross/arm64cl.txt 1970-01-01 00:00:00.000000000 +0000 +++ meson-0.57.0+really0.56.2/cross/arm64cl.txt 2020-09-20 22:51:33.000000000 +0000 @@ -0,0 +1,17 @@ +[binaries] +c = 'cl' +cpp = 'cl' +ar = 'lib' +windres = 'rc' + +[built-in options] +c_args = ['-DWINAPI_FAMILY=WINAPI_FAMILY_APP'] +c_link_args = ['-APPCONTAINER', 'WindowsApp.lib'] +cpp_args = ['-DWINAPI_FAMILY=WINAPI_FAMILY_APP'] +cpp_link_args = ['-APPCONTAINER', 'WindowsApp.lib'] + +[host_machine] +system = 'windows' +cpu_family = 'aarch64' +cpu = 'armv8' +endian = 'little' diff -Nru meson-0.53.2/cross/armcc.txt meson-0.57.0+really0.56.2/cross/armcc.txt --- meson-0.53.2/cross/armcc.txt 2018-08-25 08:05:43.000000000 +0000 +++ meson-0.57.0+really0.56.2/cross/armcc.txt 2020-09-17 22:00:44.000000000 +0000 @@ -7,7 +7,7 @@ ar = 'armar' strip = 'armar' -[properties] +[built-in options] # The '--cpu' option with the appropriate target type should be mentioned # to cross compile c/c++ code with armcc. c_args = ['--cpu=Cortex-M0plus'] diff -Nru meson-0.53.2/cross/armclang-linux.txt meson-0.57.0+really0.56.2/cross/armclang-linux.txt --- meson-0.53.2/cross/armclang-linux.txt 2020-01-07 19:29:59.000000000 +0000 +++ meson-0.57.0+really0.56.2/cross/armclang-linux.txt 2020-09-17 22:00:44.000000000 +0000 @@ -12,7 +12,7 @@ # Armcc is only available in toolchain version 5. # Armclang is only available in toolchain version 6. # Start shell with /opt/arm/developmentstudio-2019.0/bin/suite_exec zsh -# Now the compilers will work. +# Now the compilers will work. [binaries] # we could set exe_wrapper = qemu-arm-static but to test the case @@ -24,8 +24,7 @@ #strip = '/usr/arm-linux-gnueabihf/bin/strip' #pkgconfig = '/usr/bin/arm-linux-gnueabihf-pkg-config' -[properties] - +[built-in options] c_args = ['--target=aarch64-arm-none-eabi'] [host_machine] diff -Nru meson-0.53.2/cross/armclang.txt meson-0.57.0+really0.56.2/cross/armclang.txt --- meson-0.53.2/cross/armclang.txt 2018-08-25 08:05:43.000000000 +0000 +++ meson-0.57.0+really0.56.2/cross/armclang.txt 2020-09-17 22:00:44.000000000 +0000 @@ -7,7 +7,7 @@ ar = 'armar' strip = 'armar' -[properties] +[built-in options] # The '--target', '-mcpu' options with the appropriate values should be mentioned # to cross compile c/c++ code with armclang. c_args = ['--target=arm-arm-none-eabi', '-mcpu=cortex-m0plus'] diff -Nru meson-0.53.2/cross/c2000.txt meson-0.57.0+really0.56.2/cross/c2000.txt --- meson-0.53.2/cross/c2000.txt 1970-01-01 00:00:00.000000000 +0000 +++ meson-0.57.0+really0.56.2/cross/c2000.txt 2020-09-17 22:00:44.000000000 +0000 @@ -0,0 +1,28 @@ +# This file assumes that path to the Texas Instruments C2000 toolchain is added +# to the environment(PATH) variable, so that Meson can find +# cl2000 and ar2000 while building. 
+[binaries] +c = 'cl2000' +ar = 'ar2000' +strip = 'cl2000' + +[host_machine] +system = 'bare metal' +cpu_family = 'c2000' +cpu = 'c28x' +endian = 'little' + +[built-in options] +c_args = [ + '-v28', + '-ml', + '-mt'] +c_link_args = [ + '-z', + '--rom_model', + '\f28004x_flash.cmd'] +cpp_args = [] +cpp_link_args = [] + +[properties] +needs_exe_wrapper = true diff -Nru meson-0.53.2/cross/ccomp-armv7a.txt meson-0.57.0+really0.56.2/cross/ccomp-armv7a.txt --- meson-0.53.2/cross/ccomp-armv7a.txt 1970-01-01 00:00:00.000000000 +0000 +++ meson-0.57.0+really0.56.2/cross/ccomp-armv7a.txt 2020-09-17 22:00:51.000000000 +0000 @@ -0,0 +1,13 @@ +[binaries] +c = 'ccomp' +ar = 'ccomp' +strip = 'strip' + +[built-in options] +c_args = ['-target', 'armv7a-eabi', '-fall'] + +[host_machine] +system = 'bare metal' # Update with your system name - bare metal/OS. +cpu_family = 'arm' +cpu = 'Cortex-A9' +endian = 'little' diff -Nru meson-0.53.2/cross/ccrx.txt meson-0.57.0+really0.56.2/cross/ccrx.txt --- meson-0.53.2/cross/ccrx.txt 2020-02-25 18:00:46.000000000 +0000 +++ meson-0.57.0+really0.56.2/cross/ccrx.txt 2020-09-17 22:00:44.000000000 +0000 @@ -7,7 +7,7 @@ ar = 'rlink' strip = 'rlink' -[properties] +[built-in options] # The '--cpu' option with the appropriate target type should be mentioned # to cross compile c/c++ code with ccrx,. c_args = ['-cpu=rx600'] diff -Nru meson-0.53.2/cross/iphone.txt meson-0.57.0+really0.56.2/cross/iphone.txt --- meson-0.53.2/cross/iphone.txt 2017-05-24 16:55:14.000000000 +0000 +++ meson-0.57.0+really0.56.2/cross/iphone.txt 2020-09-17 22:00:44.000000000 +0000 @@ -8,14 +8,14 @@ ar = 'ar' strip = 'strip' -[properties] -root = '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer' - +[built-in options] c_args = ['-arch', 'armv7', '-miphoneos-version-min=8.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk'] cpp_args = ['-arch', 'armv7', '-miphoneos-version-min=8.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk'] c_link_args = ['-arch', 'armv7', '-miphoneos-version-min=8.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk'] cpp_link_args = ['-arch', 'armv7', '-miphoneos-version-min=8.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk'] +[properties] +root = '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer' has_function_printf = true has_function_hfkerhisadf = false diff -Nru meson-0.53.2/cross/linux-mingw-w64-32bit.json meson-0.57.0+really0.56.2/cross/linux-mingw-w64-32bit.json --- meson-0.53.2/cross/linux-mingw-w64-32bit.json 1970-01-01 00:00:00.000000000 +0000 +++ meson-0.57.0+really0.56.2/cross/linux-mingw-w64-32bit.json 2020-10-18 21:29:13.000000000 +0000 @@ -0,0 +1,7 @@ +{ + "file": "linux-mingw-w64-32bit.txt", + "tests": ["common", "cmake"], + "env": { + "WINEPATH": "/usr/lib/gcc/i686-w64-mingw32/9.2-posix;/usr/i686-w64-mingw32/bin;/usr/i686-w64-mingw32/lib" + } +} diff -Nru meson-0.53.2/cross/linux-mingw-w64-32bit.txt meson-0.57.0+really0.56.2/cross/linux-mingw-w64-32bit.txt --- meson-0.53.2/cross/linux-mingw-w64-32bit.txt 2020-01-23 22:34:28.000000000 +0000 +++ meson-0.57.0+really0.56.2/cross/linux-mingw-w64-32bit.txt 2021-01-05 23:18:35.000000000 +0000 @@ -1,12 +1,14 @@ [binaries] c = '/usr/bin/i686-w64-mingw32-gcc' cpp = 
'/usr/bin/i686-w64-mingw32-g++' +objc = '/usr/bin/i686-w64-mingw32-gcc' ar = '/usr/bin/i686-w64-mingw32-ar' strip = '/usr/bin/i686-w64-mingw32-strip' pkgconfig = '/usr/bin/i686-w64-mingw32-pkg-config' windres = '/usr/bin/i686-w64-mingw32-windres' exe_wrapper = 'wine' ld = '/usr/bin/i686-w64-mingw32-ld' +cmake = '/usr/bin/cmake' [properties] # Directory that contains 'bin', 'lib', etc @@ -19,3 +21,11 @@ cpu_family = 'x86' cpu = 'i686' endian = 'little' + +[cmake] + +CMAKE_BUILD_WITH_INSTALL_RPATH = 'ON' +CMAKE_FIND_ROOT_PATH_MODE_PROGRAM = 'NEVER' +CMAKE_FIND_ROOT_PATH_MODE_LIBRARY = 'ONLY' +CMAKE_FIND_ROOT_PATH_MODE_INCLUDE = 'ONLY' +CMAKE_FIND_ROOT_PATH_MODE_PACKAGE = 'ONLY' diff -Nru meson-0.53.2/cross/linux-mingw-w64-64bit.json meson-0.57.0+really0.56.2/cross/linux-mingw-w64-64bit.json --- meson-0.53.2/cross/linux-mingw-w64-64bit.json 1970-01-01 00:00:00.000000000 +0000 +++ meson-0.57.0+really0.56.2/cross/linux-mingw-w64-64bit.json 2020-10-18 21:29:13.000000000 +0000 @@ -0,0 +1,7 @@ +{ + "file": "linux-mingw-w64-64bit.txt", + "tests": ["common", "cmake"], + "env": { + "WINEPATH": "/usr/lib/gcc/x86_64-w64-mingw32/9.2-posix;/usr/x86_64-w64-mingw32/bin;/usr/x86_64-w64-mingw32/lib" + } +} diff -Nru meson-0.53.2/cross/linux-mingw-w64-64bit.txt meson-0.57.0+really0.56.2/cross/linux-mingw-w64-64bit.txt --- meson-0.53.2/cross/linux-mingw-w64-64bit.txt 2018-08-25 08:05:43.000000000 +0000 +++ meson-0.57.0+really0.56.2/cross/linux-mingw-w64-64bit.txt 2021-01-05 23:18:35.000000000 +0000 @@ -1,11 +1,13 @@ [binaries] c = '/usr/bin/x86_64-w64-mingw32-gcc' cpp = '/usr/bin/x86_64-w64-mingw32-g++' +objc = '/usr/bin/x86_64-w64-mingw32-gcc' ar = '/usr/bin/x86_64-w64-mingw32-ar' strip = '/usr/bin/x86_64-w64-mingw32-strip' pkgconfig = '/usr/bin/x86_64-w64-mingw32-pkg-config' windres = '/usr/bin/x86_64-w64-mingw32-windres' exe_wrapper = 'wine64' +cmake = '/usr/bin/cmake' [properties] # Directory that contains 'bin', 'lib', etc @@ -18,3 +20,11 @@ cpu_family = 'x86_64' cpu = 'x86_64' endian = 'little' + +[cmake] + +CMAKE_BUILD_WITH_INSTALL_RPATH = 'ON' +CMAKE_FIND_ROOT_PATH_MODE_PROGRAM = 'NEVER' +CMAKE_FIND_ROOT_PATH_MODE_LIBRARY = 'ONLY' +CMAKE_FIND_ROOT_PATH_MODE_INCLUDE = 'ONLY' +CMAKE_FIND_ROOT_PATH_MODE_PACKAGE = 'ONLY' diff -Nru meson-0.53.2/cross/none.txt meson-0.57.0+really0.56.2/cross/none.txt --- meson-0.53.2/cross/none.txt 1970-01-01 00:00:00.000000000 +0000 +++ meson-0.57.0+really0.56.2/cross/none.txt 2020-09-17 22:00:51.000000000 +0000 @@ -0,0 +1,18 @@ +# native file used to make the build machine compiler unusable + +[host_machine] +system = 'none' +cpu_family = 'none' +cpu = 'none' +endian = 'little' + +[properties] + +[binaries] +c = ['false'] +cpp = ['false'] +objc = ['false'] +objcpp = ['false'] +ar = ['false'] +pkgconfig = ['false'] +cmake = ['false'] diff -Nru meson-0.53.2/cross/ownstdlib.txt meson-0.57.0+really0.56.2/cross/ownstdlib.txt --- meson-0.53.2/cross/ownstdlib.txt 2016-05-28 14:39:38.000000000 +0000 +++ meson-0.57.0+really0.56.2/cross/ownstdlib.txt 2020-09-17 22:00:44.000000000 +0000 @@ -10,4 +10,4 @@ [properties] -c_stdlib = ['mylibc', 'mylibc_dep'] # Subproject name, dependency name +c_stdlib = 'mylibc' # Subproject name diff -Nru meson-0.53.2/cross/tvos.txt meson-0.57.0+really0.56.2/cross/tvos.txt --- meson-0.53.2/cross/tvos.txt 2019-06-16 18:54:18.000000000 +0000 +++ meson-0.57.0+really0.56.2/cross/tvos.txt 2020-09-17 22:00:44.000000000 +0000 @@ -8,14 +8,15 @@ ar = 'ar' strip = 'strip' -[properties] -root = 
'/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer' - +[built-in options] c_args = ['-arch', 'arm64', '-mtvos-version-min=12.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk'] cpp_args = ['-arch', 'arm64', '-mtvos-version-min=12.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk'] c_link_args = ['-arch', 'arm64', '-mtvos-version-min=12.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk'] cpp_link_args = ['-arch', 'arm64', '-mtvos-version-min=12.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk'] +[properties] +root = '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer' + has_function_printf = true has_function_hfkerhisadf = false diff -Nru meson-0.53.2/cross/ubuntu-armhf.json meson-0.57.0+really0.56.2/cross/ubuntu-armhf.json --- meson-0.53.2/cross/ubuntu-armhf.json 1970-01-01 00:00:00.000000000 +0000 +++ meson-0.57.0+really0.56.2/cross/ubuntu-armhf.json 2020-10-18 21:29:13.000000000 +0000 @@ -0,0 +1,5 @@ +{ + "file": "ubuntu-armhf.txt", + "tests": ["common"], + "env": {} +} diff -Nru meson-0.53.2/cross/ubuntu-armhf.txt meson-0.57.0+really0.56.2/cross/ubuntu-armhf.txt --- meson-0.53.2/cross/ubuntu-armhf.txt 2020-01-23 22:34:28.000000000 +0000 +++ meson-0.57.0+really0.56.2/cross/ubuntu-armhf.txt 2020-09-17 22:00:44.000000000 +0000 @@ -9,15 +9,19 @@ pkgconfig = '/usr/bin/arm-linux-gnueabihf-pkg-config' ld = '/usr/bin/arm-linux/gnueabihf-ld' -[properties] -root = '/usr/arm-linux-gnueabihf' +[built-in options] # Used in unit test '140 get define' c_args = ['-DMESON_TEST_ISSUE_1665=1'] cpp_args = '-DMESON_TEST_ISSUE_1665=1' +[properties] +root = '/usr/arm-linux-gnueabihf' + has_function_printf = true has_function_hfkerhisadf = false +skip_sanity_check = true + [host_machine] system = 'linux' cpu_family = 'arm' diff -Nru meson-0.53.2/cross/wasm.txt meson-0.57.0+really0.56.2/cross/wasm.txt --- meson-0.53.2/cross/wasm.txt 2019-08-28 17:15:38.000000000 +0000 +++ meson-0.57.0+really0.56.2/cross/wasm.txt 2020-09-17 22:00:44.000000000 +0000 @@ -3,8 +3,7 @@ cpp = '/home/jpakkane/emsdk/fastcomp/emscripten/em++' ar = '/home/jpakkane/emsdk/fastcomp/emscripten/emar' -[properties] - +[built-in options] c_args = ['-s', 'WASM=1', '-s', 'EXPORT_ALL=1'] c_link_args = ['-s','EXPORT_ALL=1'] cpp_args = ['-s', 'WASM=1', '-s', 'EXPORT_ALL=1'] diff -Nru meson-0.53.2/cross/xc16.txt meson-0.57.0+really0.56.2/cross/xc16.txt --- meson-0.53.2/cross/xc16.txt 1970-01-01 00:00:00.000000000 +0000 +++ meson-0.57.0+really0.56.2/cross/xc16.txt 2020-09-17 22:00:44.000000000 +0000 @@ -0,0 +1,26 @@ +# This file assumes that path to the Microchip xc16 toolchain is added +# to the environment(PATH) variable, so that Meson can find +# xc16-gcc and xc16-ar while building. 
+[binaries] +c = 'xc16-gcc' +ar = 'xc16-ar' +strip = 'xc16-gcc' + +[host_machine] +system = 'bare metal' +cpu_family = 'dspic' +cpu = '33ep64mc203' +endian = 'little' + +[properties] +needs_exe_wrapper = true + +[built-in options] +c_args = [ + '-c', + '-mcpu=33EP64MC203', + '-omf=elf'] +c_link_args = [ + '-mcpu=33EP64MC203', + '-omf=elf', + '-Wl,--script=p33EP64MC203.gld,'] diff -Nru meson-0.53.2/data/macros.meson meson-0.57.0+really0.56.2/data/macros.meson --- meson-0.53.2/data/macros.meson 2019-05-02 18:59:50.000000000 +0000 +++ meson-0.57.0+really0.56.2/data/macros.meson 2020-08-15 16:27:05.000000000 +0000 @@ -2,12 +2,6 @@ %__meson_wrap_mode nodownload %__meson_auto_features enabled -%_smp_mesonflags %([ -z "$MESON_BUILD_NCPUS" ] \\\ - && MESON_BUILD_NCPUS="`/usr/bin/getconf _NPROCESSORS_ONLN`"; \\\ - ncpus_max=%{?_smp_ncpus_max}; \\\ - if [ -n "$ncpus_max" ] && [ "$ncpus_max" -gt 0 ] && [ "$MESON_BUILD_NCPUS" -gt "$ncpus_max" ]; then MESON_BUILD_NCPUS="$ncpus_max"; fi; \\\ - if [ "$MESON_BUILD_NCPUS" -gt 1 ]; then echo "--num-processes $MESON_BUILD_NCPUS"; fi) - %meson \ %set_build_flags \ %{shrink:%{__meson} \ @@ -28,17 +22,24 @@ --wrap-mode=%{__meson_wrap_mode} \ --auto-features=%{__meson_auto_features} \ %{_vpath_srcdir} %{_vpath_builddir} \ - %{nil}} + %{nil}} %meson_build \ - %ninja_build -C %{_vpath_builddir} + %{shrink:%{__meson} compile \ + -C %{_vpath_builddir} \ + -j %{_smp_build_ncpus} \ + --verbose \ + %{nil}} %meson_install \ - %ninja_install -C %{_vpath_builddir} + %{shrink:DESTDIR=%{buildroot} %{__meson} install \ + -C %{_vpath_builddir} \ + --no-rebuild \ + %{nil}} %meson_test \ - %{shrink: %{__meson} test \ + %{shrink:%{__meson} test \ -C %{_vpath_builddir} \ - %{?_smp_mesonflags} \ + --num-processes %{_smp_build_ncpus} \ --print-errorlogs \ - %{nil}} + %{nil}} diff -Nru meson-0.53.2/data/schema.xsd meson-0.57.0+really0.56.2/data/schema.xsd --- meson-0.53.2/data/schema.xsd 1970-01-01 00:00:00.000000000 +0000 +++ meson-0.57.0+really0.56.2/data/schema.xsd 2020-08-15 16:27:05.000000000 +0000 @@ -0,0 +1,96 @@ + [96 lines of XML Schema markup; the element tags were not preserved in this plain-text rendering] diff -Nru meson-0.53.2/data/shell-completions/zsh/_meson meson-0.57.0+really0.56.2/data/shell-completions/zsh/_meson --- meson-0.53.2/data/shell-completions/zsh/_meson 2019-03-10 17:10:57.000000000 +0000 +++ meson-0.57.0+really0.56.2/data/shell-completions/zsh/_meson 2020-10-18 21:29:13.000000000 +0000 @@ -1,4 +1,4 @@ -#compdef meson mesonconf=meson-configure mesontest=meson-test mesonintrospect=meson-introspect +#compdef meson # vim:ts=2 sw=2 @@ -32,13 +32,55 @@ local __meson_backends="(ninja xcode ${(j. 
.)${:-vs{,2010,2015,2017}}})" local __meson_build_types="(plain debug debugoptimized minsize release)" local __meson_wrap_modes="(WrapMode.{default,nofallback,nodownload,forcefallback})" +local __meson_dist_formats=("xztar" "gztar" "zip") +local __meson_cd='-C[change into this directory before running]:target dir:_directories' +local -a __meson_common=( + '--prefix=[installation prefix]: :_directories' + '--bindir=[executable directory]: :_directories' + '--datadir=[data file directory]: :_directories' + '--includedir=[header file directory]: :_directories' + '--infodir=[info page directory]: :_directories' + '--libdir=[library directory]: :_directories' + '--libexecdir=[library executable directory]: :_directories' + '--localedir=[locale data directory]: :_directories' + '--localstatedir=[local state data directory]: :_directories' + '--mandir=[manual page directory]: :_directories' + '--sbindir=[system executable directory]: :_directories' + '--sharedstatedir=[arch-independent data directory]: :_directories' + '--sysconfdir=[system configuration directory]: :_directories' + '--auto-features=[default value for auto features]:auto features types:(auto disabled enabled)' + '--backend=[backend to use]:Meson backend:'"$__meson_backends" + '--buildtype=[build type to use]:Meson build type:'"$__meson_build_types" + '--debug[turn on building with debug]' + '--default-library=[default library type]:default library type:(shared static both)' + '--errorlogs[prints the logs from failing tests]' + '--install-umask=[default umask for permissions of all installed files]' + '--layout=[build directory layout]:build directory layout:(flat mirror)' + '--optimization=[optimization level for compiled targets]:optimization:(0 g 1 2 3 s)' + '--stdsplit=[split stdout and stderr in test logs]' + '--strip[strip targets on install]' + '--unity=[unity builds on/off]:whether to do unity builds:(on off subprojects)' + '--warnlevel=[compiler warning level]:compiler warning level:warning level:(1 2 3)' + '--werror[treat warnings as errors]' + '--wrap-mode=[special wrap mode]:wrap mode:'"$__meson_wrap_modes" + '--force-fallback-for=[force fallback for listed subprojects]' + '--pkg-config-path=[extra paths for HOST pkg-config to search]:paths:_dir_list -s ,' + '--build.pkg-config-path=[extra paths for BUILD pkg-config to search]:paths:_dir_list -s ,' + '--cmake-prefix-path=[extra prefixes for HOST cmake to search]:paths:_dir_list -s ,' + '--build.cmake-prefix-path=[extra prefix for BUILD cmake to search]:paths:_dir_list -s ,' +) local -a meson_commands=( -'setup:set up a build directory' 'configure:configure a project' -'test:run tests' +'dist:generate release archive' +'init:create a new project' +'install:install one or more targets' 'introspect:query project properties' +'setup:set up a build directory' +'test:run tests' 'wrap:manage source dependencies' +'subprojects:manage subprojects' +'compile:Build the project' ) (( $+functions[__meson_is_build_dir] )) || __meson_is_build_dir() { @@ -68,6 +110,21 @@ fi } +(( $+functions[__meson_wrap_names] )) || __meson_wrap_names() { + local rwraps + rwraps="$(_call_program meson meson wrap list)" + local -a wraps=(${(@f)rwraps}) + _describe -t wraps "Meson wraps" wraps +} + +(( $+functions[__meson_installed_wraps] )) || __meson_installed_wraps() { + local rwraps + if rwraps="$(ls subprojects/ | grep '\.wrap$' | cut -d . 
-f 1)"; then + local -a wraps=(${(@f)rwraps}) + _describe -t wraps "Meson wraps" wraps + fi +} + (( $+functions[_meson_commands] )) || _meson_commands() { _describe -t commands "Meson subcommands" meson_commands } @@ -89,48 +146,30 @@ _arguments \ '*-D-[set the value of a build option]:build option:__meson_build_options' \ - '--prefix=[installation prefix]: :_directories' \ - '--libdir=[library directory]: :_directories' \ - '--libexecdir=[library executable directory]: :_directories' \ - '--bindir=[executable directory]: :_directories' \ - '--sbindir=[system executable directory]: :_directories' \ - '--includedir=[header file directory]: :_directories' \ - '--datadir=[data file directory]: :_directories' \ - '--mandir=[manual page directory]: :_directories' \ - '--infodir=[info page directory]: :_directories' \ - '--localedir=[locale data directory]: :_directories' \ - '--sysconfdir=[system configuration directory]: :_directories' \ - '--localstatedir=[local state data directory]: :_directories' \ - '--sharedstatedir=[arch-independent data directory]: :_directories' \ - '--backend=[backend to use]:Meson backend:'"$__meson_backends" \ - '--buildtype=[build type to use]:Meson build type:'"$__meson_build_types" \ - '--strip[strip targets on install]' \ - '--unity=[unity builds on/off]:whether to do unity builds:(on off subprojects)' \ - '--werror[treat warnings as errors]' \ - '--layout=[build directory layout]:build directory layout:(flat mirror)' \ - '--default-library=[default library type]:default library type:(shared static)' \ - '--warnlevel=[compiler warning level]:compiler warning level:warning level:(1 2 3)' \ - '--stdsplit=[split stdout and stderr in test logs]' \ - '--errorlogs=[prints the logs from failing tests]' \ '--cross-file=[cross-compilation environment description]:cross file:_files' \ - '--wrap-mode=[special wrap mode]:wrap mode:'"$__meson_wrap_modes" \ + '--native-file=[build machine compilation environment description]:native file:_files' \ + '--clearcache[clear cached state]' \ + '--fatal-meson-warnings=[exit when any meson warnings are encountered]' \ + '(-v --version)'{'-v','--version'}'[print the meson version and exit]' \ + '--reconfigure=[re-run build configuration]' \ + '--wipe=[delete saved state and restart using saved command line options]' \ ":$firstd directory:_directories" \ "::$secondd directory:_directories" \ - # + "${(@)__meson_common}" } (( $+functions[_meson-configure] )) || _meson-configure() { local curcontext="$curcontext" # TODO: implement 'mesonconf @file' local -a specs=( - '--clearcache[clear cached state]' '*-D-[set the value of a build option]:build option:__meson_build_options' '::build directory:_directories' ) _arguments \ '(: -)'{'--help','-h'}'[show a help message and quit]' \ - "${(@)specs}" + "${(@)specs}" \ + "${(@)__meson_common}" } (( $+functions[_meson-test] )) || _meson-test() { @@ -138,22 +177,23 @@ # TODO: complete test suites local -a specs=( - '(--quiet -q)'{'--quiet','-q'}'[produce less output to the terminal]' - '(--verbose -v)'{'--verbose','-v'}'[do not redirect stdout and stderr]' - '(--timeout-multiplier -t)'{'--timeout-multiplier','-t'}'[a multiplier for test timeouts]:Python floating-point number: ' - '-C[directory to cd into]: :_directories' '--repeat[number of times to run the tests]:number of times to repeat: ' '--no-rebuild[do not rebuild before running tests]' '--gdb[run tests under gdb]' + '--gdb-path=[program to run for gdb (can be wrapper or compaitble program)]:program:_path_commands' '--list[list available 
tests]' '(--wrapper --wrap)'{'--wrapper=','--wrap='}'[wrapper to run tests with]:wrapper program:_path_commands' - '(--no-suite)--suite[only run tests from this suite]:test suite: ' + "$__meson_cd" '(--suite)--no-suite[do not run tests from this suite]:test suite: ' + '(--no-suite)--suite[only run tests from this suite]:test suite: ' '--no-stdsplit[do not split stderr and stdout in logs]' '--print-errorlogs[print logs for failing tests]' '--benchmark[run benchmarks instead of tests]' '--logbase[base name for log file]:filename: ' '--num-processes[how many threads to use]:number of processes: ' + '(--verbose -v)'{'--verbose','-v'}'[do not redirect stdout and stderr]' + '(--quiet -q)'{'--quiet','-q'}'[produce less output to the terminal]' + '(--timeout-multiplier -t)'{'--timeout-multiplier','-t'}'[a multiplier for test timeouts]:Python floating-point number: ' '--setup[which test setup to use]:test setup: ' '--test-args[arguments to pass to the tests]: : ' '*:Meson tests:__meson_test_names' @@ -164,17 +204,32 @@ "${(@)specs}" } +(( $+functions[_meson-install] )) || _meson-install() { + local curcontext="$curcontext" + local -a specs=( + "$__meson_cd" + '--no-rebuild[Do not rebuild before installing]' + '--only-changed[Do not overwrite files that are older than the copied file]' + '--quiet[Do not print every file that was installed]' + ) +_arguments \ + '(: -)'{'--help','-h'}'[show a help message and quit]' \ + "${(@)specs}" +} + (( $+functions[_meson-introspect] )) || _meson-introspect() { local curcontext="$curcontext" local -a specs=( - '--targets[list top level targets]' - '--installed[list all installed files and directories]' - '--buildsystem-files[list files that belong to the build system]' - '--buildoptions[list all build options]' - '--tests[list all unit tests]' + '--ast[dump the AST of the meson file]' '--benchmarks[list all benchmarks]' + '--buildoptions[list all build options]' + '--buildsystem-files[list files that belong to the build system]' '--dependencies[list external dependencies]' + '--installed[list all installed files and directories]' '--projectinfo[show project information]' + '--targets[list top level targets]' + '--tests[list all unit tests]' + '--backend=[backend to use]:Meson backend:'"$__meson_backends" '::build directory:_directories' ) _arguments \ @@ -182,8 +237,167 @@ "${(@)specs}" } +(( $+functions[_meson-init] )) || _meson-init() { + local curcontext="$curcontext" + local -a specs=( + "$__meson_cd" + '(-n --name)'{'-n','--name'}'=[the name of the project (defaults to directory name)]' + '(-e --executable)'{'-e','--executable'}'=[the name of the executable target to create (defaults to project name)]' + '(-d --deps)'{'-d','--deps'}'=[comma separated list of dependencies]' + '(-l --language)'{'-l','--language'}'=[comma separated list of languages (autodetected based on sources if unset)]:languages:_values , (c cpp cs cuda d fortran java objc objcpp rust)' + '(-b --build)'{'-b','--build'}'[build the project immediately after generation]' + '--builddir=[directory for building]:directory:_directories' + '(-f --force)'{'-f','--force'}'[overwrite any existing files and directories]' + '(-t --type)'{'-t','--type'}'=[project type, defaults to executable]:type:(executable library)' + '(-v --version)'{'-v','--version'}'[print the meson version and exit]' + ) +_arguments \ + '(: -)'{'--help','-h'}'[show a help message and quit]' \ + "${(@)specs}" +} + (( $+functions[_meson-wrap] )) || _meson-wrap() { - # TODO + local -a commands=( + 'list:list all available wraps' + 
'search:search the db by name' + 'install:install the specified project' + 'update:Update a project to its newest available version' + 'info:Show info about a wrap' + 'status:Show the status of your subprojects' + ) + + if (( CURRENT == 2 )); then + _describe -t commands "Meson wrap subcommands" commands + else + local curcontext="$curcontext" + cmd="${${commands[(r)$words[2]:*]%%:*}}" + if (( $#cmd )); then + if [[ $cmd == status ]]; then + _message "no options" + elif [[ $cmd == "list" ]]; then + _arguments '*:meson wraps' + elif [[ $cmd == "search" ]]; then + _arguments '*:meson wraps' + elif [[ $cmd == "install" ]]; then + _arguments '*:meson wraps:__meson_wrap_names' + elif [[ $cmd == "update" ]]; then + _arguments '*:meson wraps:__meson_installed_wraps' + elif [[ $cmd == "info" ]]; then + _arguments '*:meson wraps:__meson_wrap_name' + elif [[ $cmd == "status" ]]; then + _arguments '*:' + elif [[ $cmd == "promote" ]]; then + # TODO: how do you figure out what wraps are provided by subprojects if + # they haven't been fetched yet? + _arguments '*:' + fi + else + _message "unknown meson wrap command: $words[2]" + fi + fi + +} + +(( $+functions[_meson-dist] )) || _meson-dist() { + local curcontext="$curcontext" + local -a specs=( + '--formats=[comma separated list of archive types to create]:archive formats:_values -s , format '"$__meson_dist_formats" + '--include-subprojects[Include source code of subprojects that have been used for the build]' + '--no-tests[Do not build and test generated packages]' + "$__meson_cd" + ) +_arguments \ + '(: -)'{'--help','-h'}'[show a help message and quit]' \ + "${(@)specs}" +} + +(( $+functions[_meson-subprojects-update] )) || _meson-subprojects-update() { + local curcontext="$curcontext" + local -a specs=( + "--rebase[rebase your branch on top of wrap's revision (git only)]" + '--sourcedir=[path to source directory]:_directories' + '*:subprojects:__meson_installed_wraps' + ) +_arguments \ + '(: -)'{'--help','-h'}'[show a help message and quit]' \ + "${(@)specs}" +} + +(( $+functions[_meson-subprojects-checkout] )) || _meson-subprojects-checkout() { + local curcontext="$curcontext" + local -a specs=( + '-b[create a new branch]' + '--sourcedir=[path to source directory]:_directories' + # FIXME: this doesn't work exactly right, but I can't figure it out + ':branch name' + '*:subprojects:__meson_installed_wraps' + ) +_arguments \ + '(: -)'{'--help','-h'}'[show a help message and quit]' \ + "${(@)specs}" +} + +(( $+functions[_meson-subprojects-download] )) || _meson-subprojects-download() { + local curcontext="$curcontext" + local -a specs=( + '--sourcedir=[path to source directory]:_directories' + ) +_arguments \ + '(: -)'{'--help','-h'}'[show a help message and quit]' \ + "${(@)specs}" +} + +(( $+functions[_meson-subprojects-foreach] )) || _meson-subprojects-foreach() { + local curcontext="$curcontext" + local -a specs=( + '--sourcedir=[path to source directory]:_directories' + '*:command:_command_names -e' + ) +_arguments \ + '(: -)'{'--help','-h'}'[show a help message and quit]' \ + "${(@)specs}" +} + +(( $+functions[_meson-subprojects] )) || _meson-subprojects() { + local -a commands=( + 'update:update all subprojects from wrap files' + 'checkout:checkout a branch (git only)' + 'download:ensure subprojects are fetched, even if not in use. Already downloaded subprojects are not modified.' 
+ 'foreach:execute a command in each subproject directory' + ) + + if (( CURRENT == 2 )); then + _describe -t commands "Meson subproject subcommands" commands + else + local curcontext="$curcontext" + cmd="${${commands[(r)$words[2]:*]%%:*}}" + if (( $#cmd )); then + if [[ $cmd == status ]]; then + _message "no options" + else + _meson-subprojects-$cmd + fi + else + _message "unknown meson subproject command: $words[2]" + fi + fi + +} + +(( $+functions[_meson-compile] )) || _meson-compile() { + local curcontext="$curcontext" + local -a specs=( + "$__meson_cd" + '--clean[Clean the build directory]' + '(-j --jobs)'{'-j','--jobs'}'=[the number of work jobs to run (if supported)]:_guard "[0-9]#" "number of jobs"' + '(-l --load-average)'{'-l','--load-average'}'=[the system load average to try to maintain (if supported)]:_guard "[0-9]#" "load average"' + '(-v --verbose)'{'-v','--verbose'}'[Show more output]' + '--ninja-args=[Arguments to pass to ninja (only when using ninja)]' + '--vs-args=[Arguments to pass to vs (only when using msbuild)]' + ) +_arguments \ + '(: -)'{'--help','-h'}'[show a help message and quit]' \ + "${(@)specs}" } if [[ $service != meson ]]; then diff -Nru meson-0.53.2/data/syntax-highlighting/vim/ftdetect/meson.vim meson-0.57.0+really0.56.2/data/syntax-highlighting/vim/ftdetect/meson.vim --- meson-0.53.2/data/syntax-highlighting/vim/ftdetect/meson.vim 2020-01-23 22:29:05.000000000 +0000 +++ meson-0.57.0+really0.56.2/data/syntax-highlighting/vim/ftdetect/meson.vim 2020-08-15 16:27:05.000000000 +0000 @@ -1,2 +1,3 @@ au BufNewFile,BufRead meson.build set filetype=meson au BufNewFile,BufRead meson_options.txt set filetype=meson +au BufNewFile,BufRead *.wrap set filetype=dosini diff -Nru meson-0.53.2/data/syntax-highlighting/vim/syntax/meson.vim meson-0.57.0+really0.56.2/data/syntax-highlighting/vim/syntax/meson.vim --- meson-0.53.2/data/syntax-highlighting/vim/syntax/meson.vim 2019-12-29 22:47:27.000000000 +0000 +++ meson-0.57.0+really0.56.2/data/syntax-highlighting/vim/syntax/meson.vim 2021-01-06 10:39:48.000000000 +0000 @@ -32,8 +32,9 @@ " http://mesonbuild.com/Syntax.html syn keyword mesonConditional elif else if endif -syn keyword mesonRepeat foreach endforeach -syn keyword mesonOperator and not or +syn keyword mesonRepeat foreach endforeach +syn keyword mesonOperator and not or in +syn keyword mesonStatement continue break syn match mesonComment "#.*$" contains=mesonTodo,@Spell syn keyword mesonTodo FIXME NOTE NOTES TODO XXX contained diff -Nru meson-0.53.2/data/test.schema.json meson-0.57.0+really0.56.2/data/test.schema.json --- meson-0.53.2/data/test.schema.json 1970-01-01 00:00:00.000000000 +0000 +++ meson-0.57.0+really0.56.2/data/test.schema.json 2020-09-10 16:39:24.000000000 +0000 @@ -0,0 +1,131 @@ +{ + "type": "object", + "additionalProperties": false, + "properties": { + "env": { + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "installed": { + "type": "array", + "items": { + "type": "object", + "properties": { + "file": { + "type": "string" + }, + "type": { + "type": "string", + "enum": [ + "file", + "dir", + "exe", + "shared_lib", + "pdb", + "implib", + "implibempty", + "expr" + ] + }, + "platform": { + "type": "string", + "enum": [ + "msvc", + "gcc", + "cygwin", + "!cygwin" + ] + }, + "version": { + "type": "string" + }, + "language": { + "type": "string" + } + }, + "required": [ + "file", + "type" + ] + } + }, + "matrix": { + "type": "object", + "additionalProperties": { + "properties": { + "options": { + "type": "array", + "items": { 
+ "type": "object", + "properties": { + "val": { + "type": "string" + }, + "compilers": { + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "skip_on_env": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "val" + ] + } + }, + "exclude": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "do_not_set_opts": { + "type": "array", + "items": { + "type": "string", + "enum": [ + "libdir", + "prefix" + ] + } + }, + "tools": { + "type": "object" + }, + "stdout": { + "type": "array", + "items": { + "type": "object", + "properties": { + "line": { + "type": "string" + }, + "match": { + "type": "string", + "enum": [ + "literal", + "re" + ] + } + }, + "required": [ + "line" + ] + } + } + } +} diff -Nru meson-0.53.2/debian/changelog meson-0.57.0+really0.56.2/debian/changelog --- meson-0.53.2/debian/changelog 2020-03-03 07:59:51.000000000 +0000 +++ meson-0.57.0+really0.56.2/debian/changelog 2022-02-05 08:58:33.000000000 +0000 @@ -1,50 +1,170 @@ -meson (0.53.2-2ubuntu2) focal; urgency=medium +meson (0.57.0+really0.56.2-0.1ubuntu1~ubuntu20.04.1~ppa1) focal; urgency=medium - * Skip test cross on s390x, because arm toolchain is not available there. + * No-change backport to focal - -- Gianfranco Costamagna Tue, 03 Mar 2020 08:59:51 +0100 + -- Gianfranco Costamagna Sat, 05 Feb 2022 09:58:33 +0100 -meson (0.53.2-2ubuntu1) focal; urgency=medium +meson (0.57.0+really0.56.2-0.1ubuntu1) impish; urgency=medium - * Merge from Debian unstable. Remaining changes: - - add debian/patches/6703.patch - - Allow stderr for new test - - depend on rustc and valac, so autopkgtests can pick them up + * debian/patches/git_gnome_warning.patch: + - backport an upstream fix to resolve warnings with the newer gcc, + it fixes the nautilus build - -- Gianfranco Costamagna Mon, 02 Mar 2020 12:05:06 +0100 + -- Sebastien Bacher Tue, 14 Sep 2021 16:31:07 +0200 -meson (0.53.2-2) unstable; urgency=medium +meson (0.57.0+really0.56.2-0.1) unstable; urgency=medium - * Fix autopkgtest dependencies. Closes: #952610 + * Revert to 0.56.2 (Closes: #982865) + 0.57.0 no longer honors CPPFLAGS and therefore silently drops + -D_FORTIFY_SOURCE=2 during package builds. - -- Jussi Pakkanen Sat, 29 Feb 2020 00:20:11 +0200 + -- Sebastian Ramacher Mon, 15 Feb 2021 21:17:44 +0100 + +meson (0.57.0-1) unstable; urgency=medium + + * New upstream release. + + -- Jussi Pakkanen Sun, 14 Feb 2021 21:29:36 +0200 + +meson (0.57.0~rc1-1) experimental; urgency=medium + + * New upstream release. + * Add recommends to dpkg-dev. Closes: #977051. + + -- Jussi Pakkanen Tue, 09 Feb 2021 01:21:53 +0200 + +meson (0.56.2-1) unstable; urgency=medium + + * New upstream release + * Updated standards version. + + -- Jussi Pakkanen Sun, 10 Jan 2021 14:51:13 +0200 + +meson (0.56.1-1) unstable; urgency=medium + + * New upstream release. + * Add cups-config to debcrossgen. Closes: #977633. + * Remove aarch64 patch as the underlying issue has been fixed. + + -- Jussi Pakkanen Wed, 06 Jan 2021 12:43:02 +0200 + +meson (0.56.0-1.1) unstable; urgency=medium + + * Non-maintainer upload. + * Skip unimplmented test on arm64. Closes: #975411. + + -- Matthias Klose Sat, 28 Nov 2020 15:30:07 +0100 + +meson (0.56.0-1) unstable; urgency=medium + + * New upstream release. + + -- Jussi Pakkanen Fri, 30 Oct 2020 10:28:21 +0200 + +meson (0.56.0~rc2-1) experimental; urgency=medium + + * New upstream release. 
+ + -- Jussi Pakkanen Mon, 26 Oct 2020 20:20:13 +0200 + +meson (0.56.0~rc1-1) experimental; urgency=medium + + * New upstream release. + + -- Jussi Pakkanen Mon, 19 Oct 2020 00:35:48 +0300 + +meson (0.55.3-1) unstable; urgency=medium + + * New upstream release. + + -- Jussi Pakkanen Fri, 11 Sep 2020 18:38:53 +0300 + +meson (0.55.2-1) unstable; urgency=medium -meson (0.53.2-1ubuntu4) focal; urgency=medium + * New upstream release. + * Install Bash completion files. Closes: #968462. + * Remove TAP test distro patch that is now upstream. + * Do not install openmpi packages as the tests don't run under pbuilder. - * Also allow stderr + -- Jussi Pakkanen Thu, 10 Sep 2020 19:44:56 +0300 - -- Gianfranco Costamagna Mon, 02 Mar 2020 11:58:04 +0100 +meson (0.55.1-1) unstable; urgency=medium -meson (0.53.2-1ubuntu3) focal; urgency=medium + * New upstream release. + * Remove patches no longer needed in 0.55.1. - * debian/patches/6703.patch - - use upstream proposed approach instead of use_python3_tests.patch - * Tweak debian tests/control to fix another test failure (Closes: #952610) - - patch taken from bug report + -- Jussi Pakkanen Fri, 21 Aug 2020 00:08:05 +0300 - -- Gianfranco Costamagna Mon, 02 Mar 2020 07:24:25 +0100 +meson (0.55.0-2.1) unstable; urgency=medium -meson (0.53.2-1ubuntu1) focal; urgency=medium + * Non-maintainer upload. + * Don't consider skipped tests as failures. Closes: #966923 + * Fix test with setuptools 49. Closes: #968704 - [ Stefano Rivera ] - * debian/patches/use_python3_tests.patch: - - Update use_python3_tests.patch to catch one more python binary use. + -- Marco Trevisan (Treviño) Thu, 20 Aug 2020 18:10:34 +0100 - [ Rico Tzschichholz ] - * debian/tests/control: - - Explicit depends on rustc and valac, so autopkgtests can pick it up +meson (0.55.0-2) unstable; urgency=medium - -- Rico Tzschichholz Wed, 26 Feb 2020 12:42:13 +0100 + * Fix crossbuild test from Gianfranco Costamagna. Closes: #963546 + * Add dep to Python pkg resources from Kunal Mehta. Closes: #965033. + + -- Jussi Pakkanen Thu, 16 Jul 2020 22:15:06 +0300 + +meson (0.55.0-1) unstable; urgency=medium + + * New upstream release. + + -- Jussi Pakkanen Sun, 12 Jul 2020 17:29:15 +0300 + +meson (0.55.0~rc2-1) experimental; urgency=medium + + * New upstream release. + * Fix test dependency setup to work on all arches. Closes: #963546. + + -- Jussi Pakkanen Wed, 08 Jul 2020 10:51:14 +0300 + +meson (0.54.3-1) unstable; urgency=medium + + * New upstream release. Closes: #960877. + + -- Jussi Pakkanen Mon, 15 Jun 2020 19:48:23 +0300 + +meson (0.54.2-1) unstable; urgency=medium + + * New upstream release. + + -- Jussi Pakkanen Fri, 15 May 2020 10:26:05 +0300 + +meson (0.54.1-2) unstable; urgency=medium + + * Add libdir regression fix. Closes: #959708. + + -- Jussi Pakkanen Wed, 06 May 2020 23:25:28 +0300 + +meson (0.54.1-1) unstable; urgency=medium + + * New upstream release. + + -- Jussi Pakkanen Sun, 26 Apr 2020 12:10:21 +0300 + +meson (0.54.0-1) unstable; urgency=medium + + * New upstream release. + + -- Jussi Pakkanen Sun, 29 Mar 2020 20:03:27 +0300 + +meson (0.54.0~rc1-1) experimental; urgency=medium + + * New upstream rc release. + * Updated debcrossgen to not write to stderr. Closes: #952610. + + -- Jussi Pakkanen Tue, 24 Mar 2020 11:32:25 +0200 + +meson (0.53.2-2) unstable; urgency=medium + + * Fix autopkgtest dependencies. 
Closes: #952610 + + -- Jussi Pakkanen Sat, 29 Feb 2020 00:20:11 +0200 meson (0.53.2-1) unstable; urgency=medium diff -Nru meson-0.53.2/debian/compat meson-0.57.0+really0.56.2/debian/compat --- meson-0.53.2/debian/compat 2019-08-26 17:40:34.000000000 +0000 +++ meson-0.57.0+really0.56.2/debian/compat 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -12 diff -Nru meson-0.53.2/debian/control meson-0.57.0+really0.56.2/debian/control --- meson-0.53.2/debian/control 2020-03-02 11:02:28.000000000 +0000 +++ meson-0.57.0+really0.56.2/debian/control 2021-01-10 12:51:13.000000000 +0000 @@ -2,16 +2,17 @@ Maintainer: Jussi Pakkanen Section: devel Priority: optional -Standards-Version: 4.5.0 +Standards-Version: 4.5.1 Homepage: https://mesonbuild.com X-Python3-Version: >= 3.7 Rules-Requires-Root: no -Build-Depends: debhelper (>= 12), +Build-Depends: debhelper-compat (= 13), python3:any (>= 3.5), dh-exec, dh-python, python3-setuptools, ninja-build (>= 1.6), + bash-completion, # The following are needed for the unit test suite zlib1g-dev , pkg-config , @@ -20,6 +21,7 @@ libboost-test-dev , libboost-log-dev , libboost-python-dev , + libboost-regex-dev , gobjc , gobjc++ , gnustep-make , @@ -62,9 +64,10 @@ valgrind [amd64 i386] , llvm-dev , libsdl2-dev , -# OpenMPI packages do not install currently (2018/04/23) +# OpenMPI tests do not run under pbuilder and the packages are currently +# (2020-09-10) broken and uninstallable so disable this. openmpi-bin , - libopenmpi-dev , +# libopenmpi-dev , # Running OpenMPI executables requires openssh. Yes, really. openssh-client , libvulkan-dev , @@ -93,7 +96,7 @@ ${misc:Depends}, ${python3:Depends}, ninja-build(>=1.6), -# python3-pkg-resources, + python3-pkg-resources, Description: high-productivity build system Meson is a build system designed to increase programmer productivity. 
It does this by providing a fast, simple and easy to diff -Nru meson-0.53.2/debian/debcrossgen meson-0.57.0+really0.56.2/debian/debcrossgen --- meson-0.53.2/debian/debcrossgen 2019-07-01 18:33:01.000000000 +0000 +++ meson-0.57.0+really0.56.2/debian/debcrossgen 2021-01-05 23:23:23.000000000 +0000 @@ -80,7 +80,8 @@ cmd = ['dpkg-architecture'] else: cmd = ['dpkg-architecture', '-a' + options.arch] - output = subprocess.check_output(cmd, universal_newlines=True) + output = subprocess.check_output(cmd, universal_newlines=True, + stderr=subprocess.DEVNULL) data = {} for line in output.split('\n'): line = line.strip() @@ -109,6 +110,10 @@ ofile.write("pkgconfig = '%s'\n" % locate_path("%s-pkg-config" % host_arch)) except ValueError: pass # pkg-config is optional + try: + ofile.write("cups-config = '%s'\n" % locate_path("cups-config")) + except ValueError: + pass # cups-config is optional ofile.write('\n[properties]\n') write_args_from_envvars(ofile) ofile.write('\n[host_machine]\n') diff -Nru meson-0.53.2/debian/install meson-0.57.0+really0.56.2/debian/install --- meson-0.53.2/debian/install 2019-11-23 20:10:34.000000000 +0000 +++ meson-0.57.0+really0.56.2/debian/install 2020-10-30 08:28:05.000000000 +0000 @@ -1,4 +1,4 @@ #!/usr/bin/dh-exec __main__.py => /usr/bin/meson debian/debcrossgen /usr/share/meson -data/shell-completions/zsh/_meson /usr/share/zsh/vendor-completions \ No newline at end of file +data/shell-completions/zsh/_meson /usr/share/zsh/vendor-completions diff -Nru meson-0.53.2/debian/meson.bash-completion meson-0.57.0+really0.56.2/debian/meson.bash-completion --- meson-0.53.2/debian/meson.bash-completion 1970-01-01 00:00:00.000000000 +0000 +++ meson-0.57.0+really0.56.2/debian/meson.bash-completion 2020-10-30 08:28:05.000000000 +0000 @@ -0,0 +1 @@ +data/shell-completions/bash/meson diff -Nru meson-0.53.2/debian/patches/1-disable-openmpi.patch meson-0.57.0+really0.56.2/debian/patches/1-disable-openmpi.patch --- meson-0.53.2/debian/patches/1-disable-openmpi.patch 2020-01-07 19:00:50.000000000 +0000 +++ meson-0.57.0+really0.56.2/debian/patches/1-disable-openmpi.patch 2020-10-30 08:28:05.000000000 +0000 @@ -1,12 +1,13 @@ diff --git a/test cases/frameworks/17 mpi/meson.build b/test cases/frameworks/17 mpi/meson.build -index 2102b817..cd250b58 100644 +index 75b463cc..7bf4e7ae 100644 --- a/test cases/frameworks/17 mpi/meson.build +++ b/test cases/frameworks/17 mpi/meson.build -@@ -1,5 +1,7 @@ - project('mpi', 'c', 'cpp', default_options: ['b_asneeded=false']) +@@ -2,6 +2,8 @@ project('mpi', 'c', 'cpp', default_options: ['b_asneeded=false']) + + method = get_option('method') +error('MESON_SKIP_TEST openmpi binaries do not work when run in pbuilder for some reason.') + cc = meson.get_compiler('c') - mpic = dependency('mpi', language : 'c', required : false) + mpic = dependency('mpi', language : 'c', required : false, method : method) if not mpic.found() diff -Nru meson-0.53.2/debian/patches/2-disable-rootdir-test.patch meson-0.57.0+really0.56.2/debian/patches/2-disable-rootdir-test.patch --- meson-0.53.2/debian/patches/2-disable-rootdir-test.patch 2020-01-07 19:00:50.000000000 +0000 +++ meson-0.57.0+really0.56.2/debian/patches/2-disable-rootdir-test.patch 2020-10-30 08:28:05.000000000 +0000 @@ -1,18 +1,27 @@ diff --git a/test cases/common/227 fs module/meson.build b/test cases/common/227 fs module/meson.build -index 8795ee00..e34572ef 100644 +index a7327682..c5f90bbb 100644 --- a/test cases/common/227 fs module/meson.build +++ b/test cases/common/227 fs module/meson.build -@@ -30,8 +30,11 @@ 
assert(fs.is_dir('subprojects'), 'Dir not detected correctly.') +@@ -30,12 +30,16 @@ assert(fs.is_dir('subprojects'), 'Dir not detected correctly.') assert(not fs.is_dir('meson.build'), 'File detected as a dir.') assert(not fs.is_dir('nonexisting'), 'Bad path detected as a dir.') --assert(fs.is_dir('~'), 'expanduser not working') --assert(not fs.is_file('~'), 'expanduser not working') -+# These don't work under pbuilder for some reason. +-assert(fs.is_dir('~'), 'home directory not detected') +-assert(not fs.is_file('~'), 'home directory detected as file') +- +-# -- expanduser +-assert(fs.expanduser('~') != '~','expanduser failed') +-assert(fs.expanduser('~/foo').endswith('foo'), 'expanduser with tail failed') ++# These do not work with pbuilder for some reason. +# I have not been able to replicate this manually, -+# even with 'pbuilder login' -+#assert(fs.is_dir('~'), 'expanduser not working') -+#assert(not fs.is_file('~'), 'expanduser not working') ++# even with 'pbuilder login'. ++# ++#assert(fs.is_dir('~'), 'home directory not detected') ++#assert(not fs.is_file('~'), 'home directory detected as file') ++# ++## -- expanduser ++#assert(fs.expanduser('~') != '~','expanduser failed') ++#assert(fs.expanduser('~/foo').endswith('foo'), 'expanduser with tail failed') - original = 'foo.txt' - new = fs.replace_suffix(original, '.ini') + # -- as_posix + assert(fs.as_posix('/') == '/', 'as_posix idempotent') diff -Nru meson-0.53.2/debian/patches/6703.patch meson-0.57.0+really0.56.2/debian/patches/6703.patch --- meson-0.53.2/debian/patches/6703.patch 2020-03-02 06:44:52.000000000 +0000 +++ meson-0.57.0+really0.56.2/debian/patches/6703.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,46 +0,0 @@ -Origin: https://github.com/mesonbuild/meson/pull/6703 ---- meson-0.53.2.orig/run_unittests.py -+++ meson-0.53.2/run_unittests.py -@@ -6650,9 +6650,9 @@ class NativeFileTests(BasePlatformTests) - '--native-file', config, '--native-file', config2, - '-Dcase=find_program']) - -- def _simple_test(self, case, binary): -+ def _simple_test(self, case, binary, entry=None): - wrapper = self.helper_create_binary_wrapper(binary, version='12345') -- config = self.helper_create_native_file({'binaries': {binary: wrapper}}) -+ config = self.helper_create_native_file({'binaries': {entry or binary: wrapper}}) - self.init(self.testcase, extra_args=['--native-file', config, '-Dcase={}'.format(case)]) - - def test_find_program(self): -@@ -6675,16 +6675,21 @@ class NativeFileTests(BasePlatformTests) - # python module breaks. This is fine on other OSes because they - # don't need the extra indirection. - raise unittest.SkipTest('bat indirection breaks internal sanity checks.') -- if os.path.exists('/etc/debian_version'): -- rc = subprocess.call(['pkg-config', '--cflags', 'python2'], -- stdout=subprocess.DEVNULL, -- stderr=subprocess.DEVNULL) -- if rc != 0: -- # Python 2 will be removed in Debian Bullseye, thus we must -- # remove the build dependency on python2-dev. Keep the tests -- # but only run them if dev packages are available. 
-+ elif is_osx(): -+ binary = 'python' -+ else: -+ binary = 'python2' -+ -+ # We not have python2, check for it -+ for v in ['2', '2.7', '-2.7']: -+ rc = subprocess.call(['pkg-config', '--cflags', 'python{}'.format(v)], -+ stdout=subprocess.DEVNULL, -+ stderr=subprocess.DEVNULL) -+ if rc == 0: -+ break -+ else: - raise unittest.SkipTest('Not running Python 2 tests because dev packages not installed.') -- self._simple_test('python', 'python') -+ self._simple_test('python', binary, entry='python') - - @unittest.skipIf(is_windows(), 'Setting up multiple compilers on windows is hard') - @skip_if_env_set('CC') diff -Nru meson-0.53.2/debian/patches/git_gnome_warning.patch meson-0.57.0+really0.56.2/debian/patches/git_gnome_warning.patch --- meson-0.53.2/debian/patches/git_gnome_warning.patch 1970-01-01 00:00:00.000000000 +0000 +++ meson-0.57.0+really0.56.2/debian/patches/git_gnome_warning.patch 2021-09-14 10:35:37.000000000 +0000 @@ -0,0 +1,159 @@ +From 42ba8efaf220bceca7850f2ffe0d84ee426f7b34 Mon Sep 17 00:00:00 2001 +From: Philip Withnall +Date: Mon, 16 Nov 2020 18:38:39 +0000 +Subject: [PATCH] gnome: Drop use of volatile in GLib type functions +MIME-Version: 1.0 +Content-Type: text/plain; charset=UTF-8 +Content-Transfer-Encoding: 8bit + +See https://gitlab.gnome.org/GNOME/glib/-/issues/600 + +`volatile` was previously mistakenly used in GLib to indicate that a +variable was accessed atomically or otherwise multi-threaded. It’s not +meant for that, and up to date compilers (like gcc-11) will rightly warn +about it. + +Drop the `volatile` qualifiers. + +Based on a patch by Jeff Law. + +See also http://isvolatileusefulwiththreads.in/c/. + +Signed-off-by: Philip Withnall +--- + mesonbuild/modules/gnome.py | 2 +- + test cases/frameworks/7 gnome/mkenums/enums.c.in | 8 ++++---- + test cases/frameworks/7 gnome/mkenums/enums2.c.in | 8 ++++---- + test cases/frameworks/7 gnome/mkenums/meson.build | 8 ++++---- + .../dependency-generated/enum-types.c.template | 8 ++++---- + 5 files changed, 17 insertions(+), 17 deletions(-) + +diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py +index 9fd31c73885..547aff17cd4 100644 +--- a/mesonbuild/modules/gnome.py ++++ b/mesonbuild/modules/gnome.py +@@ -1430,7 +1430,7 @@ def mkenums_simple(self, state, args, kwargs): + GType + %s@enum_name@_get_type (void) + { +- static volatile gsize gtype_id = 0; ++ static gsize gtype_id = 0; + static const G@Type@Value values[] = {''' % func_prefix + + c_file_kwargs['vprod'] = ' { C_@TYPE@(@VALUENAME@), "@VALUENAME@", "@valuenick@" },' +diff --git a/test cases/frameworks/7 gnome/mkenums/enums.c.in b/test cases/frameworks/7 gnome/mkenums/enums.c.in +index 62e1adcb462..1c19d8ffaa8 100644 +--- a/test cases/frameworks/7 gnome/mkenums/enums.c.in ++++ b/test cases/frameworks/7 gnome/mkenums/enums.c.in +@@ -13,9 +13,9 @@ + /*** BEGIN value-header ***/ + GType + @enum_name@_get_type(void) { +- static volatile gsize g_define_type_id__volatile = 0; ++ static gsize static_g_define_type_id = 0; + +- if(g_once_init_enter(&g_define_type_id__volatile)) { ++ if(g_once_init_enter(&static_g_define_type_id)) { + static const G@Type@Value values [] = { + /*** END value-header ***/ + +@@ -29,10 +29,10 @@ GType + + GType g_define_type_id = + g_@type@_register_static(g_intern_static_string("@EnumName@"), values); +- g_once_init_leave(&g_define_type_id__volatile, g_define_type_id); ++ g_once_init_leave(&static_g_define_type_id, g_define_type_id); + } + +- return g_define_type_id__volatile; ++ return static_g_define_type_id; + } + + /*** 
END value-tail ***/ +diff --git a/test cases/frameworks/7 gnome/mkenums/enums2.c.in b/test cases/frameworks/7 gnome/mkenums/enums2.c.in +index 62e1adcb462..1c19d8ffaa8 100644 +--- a/test cases/frameworks/7 gnome/mkenums/enums2.c.in ++++ b/test cases/frameworks/7 gnome/mkenums/enums2.c.in +@@ -13,9 +13,9 @@ + /*** BEGIN value-header ***/ + GType + @enum_name@_get_type(void) { +- static volatile gsize g_define_type_id__volatile = 0; ++ static gsize static_g_define_type_id = 0; + +- if(g_once_init_enter(&g_define_type_id__volatile)) { ++ if(g_once_init_enter(&static_g_define_type_id)) { + static const G@Type@Value values [] = { + /*** END value-header ***/ + +@@ -29,10 +29,10 @@ GType + + GType g_define_type_id = + g_@type@_register_static(g_intern_static_string("@EnumName@"), values); +- g_once_init_leave(&g_define_type_id__volatile, g_define_type_id); ++ g_once_init_leave(&static_g_define_type_id, g_define_type_id); + } + +- return g_define_type_id__volatile; ++ return static_g_define_type_id; + } + + /*** END value-tail ***/ +diff --git a/test cases/frameworks/7 gnome/mkenums/meson.build b/test cases/frameworks/7 gnome/mkenums/meson.build +index 3d7adf052aa..8ff05ba5ea2 100644 +--- a/test cases/frameworks/7 gnome/mkenums/meson.build ++++ b/test cases/frameworks/7 gnome/mkenums/meson.build +@@ -89,9 +89,9 @@ enums_c3 = gnome.mkenums('enums3.c', + vhead : ''' + GType + @enum_name@_get_type(void) { +- static volatile gsize g_define_type_id__volatile = 0; ++ static gsize static_g_define_type_id = 0; + +- if(g_once_init_enter(&g_define_type_id__volatile)) { ++ if(g_once_init_enter(&static_g_define_type_id)) { + static const G@Type@Value values [] = { + ''', + vprod : ''' { @VALUENAME@, "@VALUENAME@", "@valuenick@" },''', +@@ -100,10 +100,10 @@ GType + + GType g_define_type_id = + g_@type@_register_static(g_intern_static_string("@EnumName@"), values); +- g_once_init_leave(&g_define_type_id__volatile, g_define_type_id); ++ g_once_init_leave(&static_g_define_type_id, g_define_type_id); + } + +- return g_define_type_id__volatile; ++ return static_g_define_type_id; + } + ''') + +diff --git a/test cases/vala/8 generated sources/dependency-generated/enum-types.c.template b/test cases/vala/8 generated sources/dependency-generated/enum-types.c.template +index 5ecdd2df234..85d74d11c5c 100644 +--- a/test cases/vala/8 generated sources/dependency-generated/enum-types.c.template ++++ b/test cases/vala/8 generated sources/dependency-generated/enum-types.c.template +@@ -14,9 +14,9 @@ + GType + @enum_name@_get_type (void) + { +- static volatile gsize g_define_type_id__volatile = 0; ++ static gsize static_g_define_type_id = 0; + +- if (g_once_init_enter (&g_define_type_id__volatile)) { ++ if (g_once_init_enter (&static_g_define_type_id)) { + static const G@Type@Value values[] = { + /*** END value-header ***/ + +@@ -30,10 +30,10 @@ GType + GType g_define_type_id = + g_@type@_register_static (g_intern_static_string ("@EnumName@"), values); + +- g_once_init_leave (&g_define_type_id__volatile, g_define_type_id); ++ g_once_init_leave (&static_g_define_type_id, g_define_type_id); + } + +- return g_define_type_id__volatile; ++ return static_g_define_type_id; + } + + /*** END value-tail ***/ diff -Nru meson-0.53.2/debian/patches/series meson-0.57.0+really0.56.2/debian/patches/series --- meson-0.53.2/debian/patches/series 2020-03-02 06:30:20.000000000 +0000 +++ meson-0.57.0+really0.56.2/debian/patches/series 2021-09-14 10:33:25.000000000 +0000 @@ -1,3 +1,3 @@ 1-disable-openmpi.patch 2-disable-rootdir-test.patch 
-6703.patch +git_gnome_warning.patch diff -Nru meson-0.53.2/debian/rules meson-0.57.0+really0.56.2/debian/rules --- meson-0.53.2/debian/rules 2019-11-23 20:11:19.000000000 +0000 +++ meson-0.57.0+really0.56.2/debian/rules 2020-10-30 08:28:05.000000000 +0000 @@ -12,7 +12,7 @@ export PYBUILD_NAME=meson %: - dh $@ --with python3 --buildsystem=pybuild + dh $@ --with python3 --buildsystem=pybuild --with bash-completion ifeq (,$(filter nocheck,$(DEB_BUILD_OPTIONS))) override_dh_auto_test: diff -Nru meson-0.53.2/debian/tests/control meson-0.57.0+really0.56.2/debian/tests/control --- meson-0.53.2/debian/tests/control 2020-03-03 07:59:49.000000000 +0000 +++ meson-0.57.0+really0.56.2/debian/tests/control 2020-10-30 08:28:05.000000000 +0000 @@ -4,9 +4,12 @@ Tests: clangmeson Depends: meson, clang +# At the time of writing the packaging machinery does not put @builddeps@ into +# autopkg deps. The tests are only automatically run when packages explicitly +# listed as Depends are uploaded. List a few major ones to make sure those +# tests are run and thus block broken uploads. Tests: exhaustive -Depends: meson, @builddeps@, rustc, valac +Depends: meson, @builddeps@, valac, rustc, ldc [!s390x !ppc64el] Tests: crossbuild -Depends: meson, g++, g++-arm-linux-gnueabihf -Restrictions: allow-stderr, skip-not-installable +Depends: meson, g++, g++-arm-linux-gnueabihf [!s390x] diff -Nru meson-0.53.2/debian/tests/crossbuild meson-0.57.0+really0.56.2/debian/tests/crossbuild --- meson-0.53.2/debian/tests/crossbuild 2020-03-02 11:05:06.000000000 +0000 +++ meson-0.57.0+really0.56.2/debian/tests/crossbuild 2020-10-30 08:28:05.000000000 +0000 @@ -2,6 +2,11 @@ set -e +if ! which g++-arm-linux-gnueabihf > /dev/null 2> /dev/null; then + echo Arm cross compiler not found, not running test. + exit 0 +fi + cd "${AUTOPKGTEST_TMP:-/tmp}" mkdir testproject cd testproject diff -Nru meson-0.53.2/ghwt.py meson-0.57.0+really0.56.2/ghwt.py --- meson-0.53.2/ghwt.py 2019-12-29 22:47:27.000000000 +0000 +++ meson-0.57.0+really0.56.2/ghwt.py 2020-08-15 16:27:05.000000000 +0000 @@ -24,6 +24,7 @@ req_timeout = 600.0 private_repos = {'meson', 'wrapweb', 'meson-ci'} +spdir = 'subprojects' def gh_get(url): r = urllib.request.urlopen(url, timeout=req_timeout) @@ -39,12 +40,21 @@ print(i) return 0 -def unpack(sproj, branch, outdir): - subprocess.check_call(['git', 'clone', '-b', branch, 'https://github.com/mesonbuild/{}.git'.format(sproj), outdir]) - usfile = os.path.join(outdir, 'upstream.wrap') +def unpack(sproj, branch): + tmpdir = os.path.join(spdir, sproj + '_ghwt') + shutil.rmtree(tmpdir, ignore_errors=True) + subprocess.check_call(['git', 'clone', '-b', branch, 'https://github.com/mesonbuild/{}.git'.format(sproj), tmpdir]) + usfile = os.path.join(tmpdir, 'upstream.wrap') assert(os.path.isfile(usfile)) - config = configparser.ConfigParser() + config = configparser.ConfigParser(interpolation=None) config.read(usfile) + outdir = os.path.join(spdir, sproj) + if 'directory' in config['wrap-file']: + outdir = os.path.join(spdir, config['wrap-file']['directory']) + if os.path.isdir(outdir): + print('Subproject is already there. 
To update, nuke the {} dir and reinstall.'.format(outdir)) + shutil.rmtree(tmpdir) + return 1 us_url = config['wrap-file']['source_url'] us = urllib.request.urlopen(us_url, timeout=req_timeout).read() h = hashlib.sha256() @@ -53,10 +63,9 @@ should = config['wrap-file']['source_hash'] if dig != should: print('Incorrect hash on download.') - print(' expected:', dig) - print(' obtained:', should) + print(' expected:', should) + print(' obtained:', dig) return 1 - spdir = os.path.dirname(outdir) ofilename = os.path.join(spdir, config['wrap-file']['source_filename']) with open(ofilename, 'wb') as ofile: ofile.write(us) @@ -65,34 +74,41 @@ shutil.unpack_archive(ofilename, outdir) else: shutil.unpack_archive(ofilename, spdir) - extdir = os.path.join(spdir, config['wrap-file']['directory']) - assert(os.path.isdir(extdir)) - shutil.move(os.path.join(outdir, '.git'), extdir) - subprocess.check_call(['git', 'reset', '--hard'], cwd=extdir) - shutil.rmtree(outdir) - shutil.move(extdir, outdir) + assert(os.path.isdir(outdir)) + shutil.move(os.path.join(tmpdir, '.git'), outdir) + subprocess.check_call(['git', 'reset', '--hard'], cwd=outdir) + shutil.rmtree(tmpdir) shutil.rmtree(os.path.join(outdir, '.git')) os.unlink(ofilename) -def install(sproj): - sproj_dir = os.path.join('subprojects', sproj) - if not os.path.isdir('subprojects'): +def install(sproj, requested_branch=None): + if not os.path.isdir(spdir): print('Run this in your source root and make sure there is a subprojects directory in it.') return 1 - if os.path.isdir(sproj_dir): - print('Subproject is already there. To update, nuke the dir and reinstall.') - return 1 blist = gh_get('https://api.github.com/repos/mesonbuild/{}/branches'.format(sproj)) blist = [b['name'] for b in blist] blist = [b for b in blist if b != 'master'] blist.sort() branch = blist[-1] + if requested_branch is not None: + if requested_branch in blist: + branch = requested_branch + else: + print('Could not find user-requested branch', requested_branch) + print('Available branches for', sproj, ':') + print(blist) + return 1 print('Using branch', branch) - return unpack(sproj, branch, sproj_dir) + return unpack(sproj, branch) + +def print_help(): + print('Usage:') + print(sys.argv[0], 'list') + print(sys.argv[0], 'install', 'package_name', '[branch_name]') def run(args): if not args or args[0] == '-h' or args[0] == '--help': - print(sys.argv[0], 'list/install', 'package_name') + print_help() return 1 command = args[0] args = args[1:] @@ -100,10 +116,13 @@ list_projects() return 0 elif command == 'install': - if len(args) != 1: - print('Install requires exactly one argument.') + if len(args) == 1: + return install(args[0]) + elif len(args) == 2: + return install(args[0], args[1]) + else: + print_help() return 1 - return install(args[0]) else: print('Unknown command') return 1 diff -Nru meson-0.53.2/man/meson.1 meson-0.57.0+really0.56.2/man/meson.1 --- meson-0.53.2/man/meson.1 2020-02-25 18:00:46.000000000 +0000 +++ meson-0.57.0+really0.56.2/man/meson.1 2021-01-09 22:56:47.000000000 +0000 @@ -1,4 +1,4 @@ -.TH MESON "1" "February 2020" "meson 0.53.2" "User Commands" +.TH MESON "1" "January 2021" "meson 0.56.2" "User Commands" .SH NAME meson - a high productivity build system .SH DESCRIPTION diff -Nru meson-0.53.2/MANIFEST.in meson-0.57.0+really0.56.2/MANIFEST.in --- meson-0.53.2/MANIFEST.in 2019-09-16 21:20:45.000000000 +0000 +++ meson-0.57.0+really0.56.2/MANIFEST.in 2020-08-15 16:27:05.000000000 +0000 @@ -4,10 +4,9 @@ graft data graft graphics graft man -graft 
syntax-highlighting graft tools -include authors.txt -include contributing.txt + +include contributing.md include COPYING include README.md include run_cross_test.py @@ -15,7 +14,6 @@ include run_unittests.py include run_meson_command_tests.py include run_project_tests.py -include mesonrewriter.py include ghwt.py include __main__.py include meson.py diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/manual tests/9 nostdlib/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/manual tests/9 nostdlib/meson.build" --- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/manual tests/9 nostdlib/meson.build" 2016-07-11 18:35:27.000000000 +0000 +++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/manual tests/9 nostdlib/meson.build" 1970-01-01 00:00:00.000000000 +0000 @@ -1,14 +0,0 @@ -project('own libc', 'c') - -# Not related to this test, but could not find a better place for this test. -assert(meson.get_cross_property('nonexisting', 'defaultvalue') == 'defaultvalue', - 'Cross prop getting is broken.') - -# A simple project that uses its own libc. - -# Note that we don't need to specify anything, the flags to use -# stdlib come from the cross file. - -exe = executable('selfcontained', 'prog.c') - -test('standalone test', exe) diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/manual tests/9 nostdlib/prog.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/manual tests/9 nostdlib/prog.c" --- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/manual tests/9 nostdlib/prog.c" 2019-12-04 18:45:50.000000000 +0000 +++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/manual tests/9 nostdlib/prog.c" 1970-01-01 00:00:00.000000000 +0000 @@ -1,7 +0,0 @@ - -#include - -int main(void) { - const char *message = "Hello without stdlib.\n"; - return simple_print(message, simple_strlen(message)); -} diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/manual tests/9 nostdlib/subprojects/mylibc/libc.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/manual tests/9 nostdlib/subprojects/mylibc/libc.c" --- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/manual tests/9 nostdlib/subprojects/mylibc/libc.c" 2016-05-28 14:39:38.000000000 +0000 +++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/manual tests/9 nostdlib/subprojects/mylibc/libc.c" 1970-01-01 00:00:00.000000000 +0000 @@ -1,35 +0,0 @@ -/* Do not use this as the basis of your own libc. - * The code is probably unoptimal or wonky, as I - * had no prior experience with this, but instead - * just fiddled with the code until it worked. 
- */ - -#include - -#define STDOUT 1 -#define SYS_WRITE 4 - -int simple_print(const char *msg, const long bufsize) { - int count; - long total_written = 0; - while(total_written < bufsize) { - asm( - "int $0x80\n\t" - : "=a"(count) - : "0"(SYS_WRITE), "b"(STDOUT), "c"(msg+total_written), "d"(bufsize-total_written) - :); - if(count == 0) { - return 1; - } - total_written += count; - } - return 0; -} - -int simple_strlen(const char *str) { - int len = 0; - while(str[len] != '\0') { - len++; - } - return len; -} diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/manual tests/9 nostdlib/subprojects/mylibc/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/manual tests/9 nostdlib/subprojects/mylibc/meson.build" --- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/manual tests/9 nostdlib/subprojects/mylibc/meson.build" 2016-05-28 14:39:38.000000000 +0000 +++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/manual tests/9 nostdlib/subprojects/mylibc/meson.build" 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -project('own libc', 'c') - -# A very simple libc implementation - -# Do not specify -nostdlib & co. They come from cross specifications. - -libc = static_library('c', 'libc.c', 'stubstart.s') - -mylibc_dep = declare_dependency(link_with : libc, - include_directories : include_directories('.') -) diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/manual tests/9 nostdlib/subprojects/mylibc/stdio.h" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/manual tests/9 nostdlib/subprojects/mylibc/stdio.h" --- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/manual tests/9 nostdlib/subprojects/mylibc/stdio.h" 2016-05-28 14:39:38.000000000 +0000 +++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/manual tests/9 nostdlib/subprojects/mylibc/stdio.h" 1970-01-01 00:00:00.000000000 +0000 @@ -1,5 +0,0 @@ -#pragma once - -int simple_print(const char *msg, const long bufsize); - -int simple_strlen(const char *str); diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/manual tests/9 nostdlib/subprojects/mylibc/stubstart.s" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/manual tests/9 nostdlib/subprojects/mylibc/stubstart.s" --- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/manual tests/9 nostdlib/subprojects/mylibc/stubstart.s" 2016-05-28 14:39:38.000000000 +0000 +++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/manual tests/9 nostdlib/subprojects/mylibc/stubstart.s" 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -.globl _start - -_start: - - call main - movl %eax, %ebx - movl $1, %eax - int $0x80 diff -Nru meson-0.53.2/mesonbuild/arglist.py meson-0.57.0+really0.56.2/mesonbuild/arglist.py --- meson-0.53.2/mesonbuild/arglist.py 1970-01-01 00:00:00.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/arglist.py 2020-09-17 22:00:44.000000000 +0000 @@ -0,0 +1,334 @@ +# Copyright 2012-2020 The Meson development team +# Copyright © 2020 Intel Corporation + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from functools import lru_cache +import collections +import enum +import os +import re +import typing as T + +from . import mesonlib + +if T.TYPE_CHECKING: + from .linkers import StaticLinker + from .compilers import Compiler + +UNIXY_COMPILER_INTERNAL_LIBS = ['m', 'c', 'pthread', 'dl', 'rt'] # type: T.List[str] +# execinfo is a compiler lib on FreeBSD and NetBSD +if mesonlib.is_freebsd() or mesonlib.is_netbsd(): + UNIXY_COMPILER_INTERNAL_LIBS.append('execinfo') + + +class Dedup(enum.Enum): + + """What kind of deduplication can be done to compiler args. + + OVERRIDEN - Whether an argument can be 'overridden' by a later argument. + For example, -DFOO defines FOO and -UFOO undefines FOO. In this case, + we can safely remove the previous occurrence and add a new one. The + same is true for include paths and library paths with -I and -L. + UNIQUE - Arguments that once specified cannot be undone, such as `-c` or + `-pipe`. New instances of these can be completely skipped. + NO_DEDUP - Whether it matters where or how many times on the command-line + a particular argument is present. This can matter for symbol + resolution in static or shared libraries, so we cannot de-dup or + reorder them. + """ + + NO_DEDUP = 0 + UNIQUE = 1 + OVERRIDEN = 2 + + +class CompilerArgs(collections.abc.MutableSequence): + ''' + List-like class that manages a list of compiler arguments. Should be used + while constructing compiler arguments from various sources. Can be + operated with ordinary lists, so this does not need to be used + everywhere. + + All arguments must be inserted and stored in GCC-style (-lfoo, -Idir, etc) + and can converted to the native type of each compiler by using the + .to_native() method to which you must pass an instance of the compiler or + the compiler class. + + New arguments added to this class (either with .append(), .extend(), or +=) + are added in a way that ensures that they override previous arguments. + For example: + + >>> a = ['-Lfoo', '-lbar'] + >>> a += ['-Lpho', '-lbaz'] + >>> print(a) + ['-Lpho', '-Lfoo', '-lbar', '-lbaz'] + + Arguments will also be de-duped if they can be de-duped safely. + + Note that because of all this, this class is not commutative and does not + preserve the order of arguments if it is safe to not. For example: + >>> ['-Ifoo', '-Ibar'] + ['-Ifez', '-Ibaz', '-Werror'] + ['-Ifez', '-Ibaz', '-Ifoo', '-Ibar', '-Werror'] + >>> ['-Ifez', '-Ibaz', '-Werror'] + ['-Ifoo', '-Ibar'] + ['-Ifoo', '-Ibar', '-Ifez', '-Ibaz', '-Werror'] + + ''' + # Arg prefixes that override by prepending instead of appending + prepend_prefixes = () # type: T.Tuple[str, ...] + + # Arg prefixes and args that must be de-duped by returning 2 + dedup2_prefixes = () # type: T.Tuple[str, ...] + dedup2_suffixes = () # type: T.Tuple[str, ...] + dedup2_args = () # type: T.Tuple[str, ...] + + # Arg prefixes and args that must be de-duped by returning 1 + # + # NOTE: not thorough. A list of potential corner cases can be found in + # https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038 + dedup1_prefixes = () # type: T.Tuple[str, ...] + dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a') # type: T.Tuple[str, ...] + # Match a .so of the form path/to/libfoo.so.0.1.0 + # Only UNIX shared libraries require this. Others have a fixed extension. + dedup1_regex = re.compile(r'([\/\\]|\A)lib.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$') + dedup1_args = () # type: T.Tuple[str, ...] 
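As a concrete illustration of the override and de-dup behaviour described in the CompilerArgs docstring above, a minimal hypothetical sketch (not part of this patch) might look as follows; the ExampleArgs class and its prefix tuples are assumptions chosen only for illustration, the snippet assumes the patched mesonbuild package is importable, and a real Compiler instance would only be needed once to_native() is called.

from mesonbuild.arglist import CompilerArgs

class ExampleArgs(CompilerArgs):
    # Illustrative values only; upstream compiler classes define their own tuples.
    prepend_prefixes = ('-I', '-L')             # later search paths win, so new ones are prepended
    dedup2_prefixes = ('-I', '-D', '-U', '-L')  # overridable: older copies may be dropped
    dedup1_prefixes = ('-l',)                   # keep only a single copy of each -lfoo

args = ExampleArgs(None, ['-Lfoo', '-lbar'])    # no compiler object needed for pure list behaviour
args += ['-Lpho', '-lbaz']                      # '-Lpho' is prepended, '-lbaz' appended once
print(list(args))                               # ['-Lpho', '-Lfoo', '-lbar', '-lbaz'], matching the docstring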
+ # In generate_link() we add external libs without de-dup, but we must + # *always* de-dup these because they're special arguments to the linker + # TODO: these should probably move too + always_dedup_args = tuple('-l' + lib for lib in UNIXY_COMPILER_INTERNAL_LIBS) # type : T.Tuple[str, ...] + + def __init__(self, compiler: T.Union['Compiler', 'StaticLinker'], + iterable: T.Optional[T.Iterable[str]] = None): + self.compiler = compiler + self._container = list(iterable) if iterable is not None else [] # type: T.List[str] + self.pre = collections.deque() # type: T.Deque[str] + self.post = collections.deque() # type: T.Deque[str] + + # Flush the saved pre and post list into the _container list + # + # This correctly deduplicates the entries after _can_dedup definition + # Note: This function is designed to work without delete operations, as deletions are worsening the performance a lot. + def flush_pre_post(self) -> None: + new = list() # type: T.List[str] + pre_flush_set = set() # type: T.Set[str] + post_flush = collections.deque() # type: T.Deque[str] + post_flush_set = set() # type: T.Set[str] + + #The two lists are here walked from the front to the back, in order to not need removals for deduplication + for a in self.pre: + dedup = self._can_dedup(a) + if a not in pre_flush_set: + new.append(a) + if dedup is Dedup.OVERRIDEN: + pre_flush_set.add(a) + for a in reversed(self.post): + dedup = self._can_dedup(a) + if a not in post_flush_set: + post_flush.appendleft(a) + if dedup is Dedup.OVERRIDEN: + post_flush_set.add(a) + + #pre and post will overwrite every element that is in the container + #only copy over args that are in _container but not in the post flush or pre flush set + if pre_flush_set or post_flush_set: + for a in self._container: + if a not in post_flush_set and a not in pre_flush_set: + new.append(a) + else: + new.extend(self._container) + new.extend(post_flush) + + self._container = new + self.pre.clear() + self.post.clear() + + def __iter__(self) -> T.Iterator[str]: + self.flush_pre_post() + return iter(self._container) + + @T.overload # noqa: F811 + def __getitem__(self, index: int) -> str: # noqa: F811 + pass + + @T.overload # noqa: F811 + def __getitem__(self, index: slice) -> T.MutableSequence[str]: # noqa: F811 + pass + + def __getitem__(self, index: T.Union[int, slice]) -> T.Union[str, T.MutableSequence[str]]: # noqa: F811 + self.flush_pre_post() + return self._container[index] + + @T.overload # noqa: F811 + def __setitem__(self, index: int, value: str) -> None: # noqa: F811 + pass + + @T.overload # noqa: F811 + def __setitem__(self, index: slice, value: T.Iterable[str]) -> None: # noqa: F811 + pass + + def __setitem__(self, index: T.Union[int, slice], value: T.Union[str, T.Iterable[str]]) -> None: # noqa: F811 + self.flush_pre_post() + self._container[index] = value # type: ignore # TODO: fix 'Invalid index type' and 'Incompatible types in assignment' erros + + def __delitem__(self, index: T.Union[int, slice]) -> None: + self.flush_pre_post() + del self._container[index] + + def __len__(self) -> int: + return len(self._container) + len(self.pre) + len(self.post) + + def insert(self, index: int, value: str) -> None: + self.flush_pre_post() + self._container.insert(index, value) + + def copy(self) -> 'CompilerArgs': + self.flush_pre_post() + return type(self)(self.compiler, self._container.copy()) + + @classmethod + @lru_cache(maxsize=None) + def _can_dedup(cls, arg: str) -> Dedup: + """Returns whether the argument can be safely de-duped. 
+ + In addition to these, we handle library arguments specially. + With GNU ld, we surround library arguments with -Wl,--start/end-group + to recursively search for symbols in the libraries. This is not needed + with other linkers. + """ + + # A standalone argument must never be deduplicated because it is + # defined by what comes _after_ it. Thus dedupping this: + # -D FOO -D BAR + # would yield either + # -D FOO BAR + # or + # FOO -D BAR + # both of which are invalid. + if arg in cls.dedup2_prefixes: + return Dedup.NO_DEDUP + if arg in cls.dedup2_args or \ + arg.startswith(cls.dedup2_prefixes) or \ + arg.endswith(cls.dedup2_suffixes): + return Dedup.OVERRIDEN + if arg in cls.dedup1_args or \ + arg.startswith(cls.dedup1_prefixes) or \ + arg.endswith(cls.dedup1_suffixes) or \ + re.search(cls.dedup1_regex, arg): + return Dedup.UNIQUE + return Dedup.NO_DEDUP + + @classmethod + @lru_cache(maxsize=None) + def _should_prepend(cls, arg: str) -> bool: + return arg.startswith(cls.prepend_prefixes) + + def to_native(self, copy: bool = False) -> T.List[str]: + # Check if we need to add --start/end-group for circular dependencies + # between static libraries, and for recursively searching for symbols + # needed by static libraries that are provided by object files or + # shared libraries. + self.flush_pre_post() + if copy: + new = self.copy() + else: + new = self + return self.compiler.unix_args_to_native(new._container) + + def append_direct(self, arg: str) -> None: + ''' + Append the specified argument without any reordering or de-dup except + for absolute paths to libraries, etc, which can always be de-duped + safely. + ''' + self.flush_pre_post() + if os.path.isabs(arg): + self.append(arg) + else: + self._container.append(arg) + + def extend_direct(self, iterable: T.Iterable[str]) -> None: + ''' + Extend using the elements in the specified iterable without any + reordering or de-dup except for absolute paths where the order of + include search directories is not relevant + ''' + self.flush_pre_post() + for elem in iterable: + self.append_direct(elem) + + def extend_preserving_lflags(self, iterable: T.Iterable[str]) -> None: + normal_flags = [] + lflags = [] + for i in iterable: + if i not in self.always_dedup_args and (i.startswith('-l') or i.startswith('-L')): + lflags.append(i) + else: + normal_flags.append(i) + self.extend(normal_flags) + self.extend_direct(lflags) + + def __add__(self, args: T.Iterable[str]) -> 'CompilerArgs': + self.flush_pre_post() + new = self.copy() + new += args + return new + + def __iadd__(self, args: T.Iterable[str]) -> 'CompilerArgs': + ''' + Add two CompilerArgs while taking into account overriding of arguments + and while preserving the order of arguments as much as possible + ''' + tmp_pre = collections.deque() # type: T.Deque[str] + if not isinstance(args, collections.abc.Iterable): + raise TypeError('can only concatenate Iterable[str] (not "{}") to CompilerArgs'.format(args)) + for arg in args: + # If the argument can be de-duped, do it either by removing the + # previous occurrence of it and adding a new one, or not adding the + # new occurrence.
+ dedup = self._can_dedup(arg) + if dedup is Dedup.UNIQUE: + # Argument already exists and adding a new instance is useless + if arg in self._container or arg in self.pre or arg in self.post: + continue + if self._should_prepend(arg): + tmp_pre.appendleft(arg) + else: + self.post.append(arg) + self.pre.extendleft(tmp_pre) + #pre and post is going to be merged later before a iter call + return self + + def __radd__(self, args: T.Iterable[str]) -> 'CompilerArgs': + self.flush_pre_post() + new = type(self)(self.compiler, args) + new += self + return new + + def __eq__(self, other: object) -> T.Union[bool]: + self.flush_pre_post() + # Only allow equality checks against other CompilerArgs and lists instances + if isinstance(other, CompilerArgs): + return self.compiler == other.compiler and self._container == other._container + elif isinstance(other, list): + return self._container == other + return NotImplemented + + def append(self, arg: str) -> None: + self.__iadd__([arg]) + + def extend(self, args: T.Iterable[str]) -> None: + self.__iadd__(args) + + def __repr__(self) -> str: + self.flush_pre_post() + return 'CompilerArgs({!r}, {!r})'.format(self.compiler, self._container) diff -Nru meson-0.53.2/mesonbuild/ast/__init__.py meson-0.57.0+really0.56.2/mesonbuild/ast/__init__.py --- meson-0.53.2/mesonbuild/ast/__init__.py 2019-03-06 20:48:10.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/ast/__init__.py 2020-08-15 16:27:05.000000000 +0000 @@ -20,6 +20,7 @@ 'AstInterpreter', 'AstIDGenerator', 'AstIndentationGenerator', + 'AstJSONPrinter', 'AstVisitor', 'AstPrinter', 'IntrospectionInterpreter', @@ -30,4 +31,4 @@ from .introspection import IntrospectionInterpreter, build_target_functions from .visitor import AstVisitor from .postprocess import AstConditionLevel, AstIDGenerator, AstIndentationGenerator -from .printer import AstPrinter +from .printer import AstPrinter, AstJSONPrinter diff -Nru meson-0.53.2/mesonbuild/ast/interpreter.py meson-0.57.0+really0.56.2/mesonbuild/ast/interpreter.py --- meson-0.53.2/mesonbuild/ast/interpreter.py 2020-01-23 12:51:19.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/ast/interpreter.py 2020-09-17 22:00:44.000000000 +0000 @@ -19,19 +19,27 @@ from .. import interpreterbase, mparser, mesonlib from .. 
import environment -from ..interpreterbase import InvalidArguments, BreakRequest, ContinueRequest +from ..interpreterbase import InvalidArguments, BreakRequest, ContinueRequest, TYPE_nvar, TYPE_nkwargs from ..mparser import ( + AndNode, ArgumentNode, ArithmeticNode, ArrayNode, AssignmentNode, BaseNode, + ComparisonNode, ElementaryNode, EmptyNode, + ForeachClauseNode, IdNode, + IfClauseNode, + IndexNode, MethodNode, + NotNode, + OrNode, PlusAssignmentNode, TernaryNode, + UMinusNode, ) import os, sys @@ -59,13 +67,13 @@ REMOVE_SOURCE = 1 class AstInterpreter(interpreterbase.InterpreterBase): - def __init__(self, source_root: str, subdir: str, visitors: T.Optional[T.List[AstVisitor]] = None): - super().__init__(source_root, subdir) + def __init__(self, source_root: str, subdir: str, subproject: str, visitors: T.Optional[T.List[AstVisitor]] = None): + super().__init__(source_root, subdir, subproject) self.visitors = visitors if visitors is not None else [] - self.visited_subdirs = {} - self.assignments = {} - self.assign_vals = {} - self.reverse_assignment = {} + self.visited_subdirs = {} # type: T.Dict[str, bool] + self.assignments = {} # type: T.Dict[str, BaseNode] + self.assign_vals = {} # type: T.Dict[str, T.Any] + self.reverse_assignment = {} # type: T.Dict[str, BaseNode] self.funcs.update({'project': self.func_do_nothing, 'test': self.func_do_nothing, 'benchmark': self.func_do_nothing, @@ -122,15 +130,15 @@ 'summary': self.func_do_nothing, }) - def func_do_nothing(self, node, args, kwargs): + def func_do_nothing(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> bool: return True - def load_root_meson_file(self): + def load_root_meson_file(self) -> None: super().load_root_meson_file() for i in self.visitors: self.ast.accept(i) - def func_subdir(self, node, args, kwargs): + def func_subdir(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> None: args = self.flatten_args(args) if len(args) != 1 or not isinstance(args[0], str): sys.stderr.write('Unable to evaluate subdir({}) in AstInterpreter --> Skipping\n'.format(args)) @@ -154,9 +162,9 @@ code = f.read() assert(isinstance(code, str)) try: - codeblock = mparser.Parser(code, subdir).parse() + codeblock = mparser.Parser(code, absname).parse() except mesonlib.MesonException as me: - me.file = buildfilename + me.file = absname raise me self.subdir = subdir @@ -165,64 +173,88 @@ self.evaluate_codeblock(codeblock) self.subdir = prev_subdir - def method_call(self, node): + def method_call(self, node: BaseNode) -> bool: return True - def evaluate_arithmeticstatement(self, cur): + def evaluate_arithmeticstatement(self, cur: ArithmeticNode) -> int: self.evaluate_statement(cur.left) self.evaluate_statement(cur.right) return 0 - def evaluate_uminusstatement(self, cur): + def evaluate_uminusstatement(self, cur: UMinusNode) -> int: self.evaluate_statement(cur.value) return 0 - def evaluate_ternary(self, node): + def evaluate_ternary(self, node: TernaryNode) -> None: assert(isinstance(node, TernaryNode)) self.evaluate_statement(node.condition) self.evaluate_statement(node.trueblock) self.evaluate_statement(node.falseblock) - def evaluate_plusassign(self, node): + def evaluate_dictstatement(self, node: mparser.DictNode) -> TYPE_nkwargs: + def resolve_key(node: mparser.BaseNode) -> str: + if isinstance(node, mparser.StringNode): + return node.value + return '__AST_UNKNOWN__' + arguments, kwargs = self.reduce_arguments(node.args, key_resolver=resolve_key) + assert (not arguments) + 
self.argument_depth += 1 + for key, value in kwargs.items(): + if isinstance(key, BaseNode): + self.evaluate_statement(key) + self.argument_depth -= 1 + return {} + + def evaluate_plusassign(self, node: PlusAssignmentNode) -> None: assert(isinstance(node, PlusAssignmentNode)) - if node.var_name not in self.assignments: - self.assignments[node.var_name] = [] - self.assign_vals[node.var_name] = [] - self.assignments[node.var_name] += [node.value] # Save a reference to the value node - if hasattr(node.value, 'ast_id'): + # Cheat by doing a reassignment + self.assignments[node.var_name] = node.value # Save a reference to the value node + if node.value.ast_id: self.reverse_assignment[node.value.ast_id] = node - self.assign_vals[node.var_name] += [self.evaluate_statement(node.value)] + self.assign_vals[node.var_name] = self.evaluate_statement(node.value) - def evaluate_indexing(self, node): + def evaluate_indexing(self, node: IndexNode) -> int: return 0 - def unknown_function_called(self, func_name): + def unknown_function_called(self, func_name: str) -> None: pass - def reduce_arguments(self, args): + def reduce_arguments( + self, + args: mparser.ArgumentNode, + key_resolver: T.Callable[[mparser.BaseNode], str] = interpreterbase.default_resolve_key, + duplicate_key_error: T.Optional[str] = None, + ) -> T.Tuple[T.List[TYPE_nvar], TYPE_nkwargs]: if isinstance(args, ArgumentNode): + kwargs = {} # type: T.Dict[str, TYPE_nvar] + for key, val in args.kwargs.items(): + kwargs[key_resolver(key)] = val if args.incorrect_order(): raise InvalidArguments('All keyword arguments must be after positional arguments.') - return self.flatten_args(args.arguments), args.kwargs + return self.flatten_args(args.arguments), kwargs else: return self.flatten_args(args), {} - def evaluate_comparison(self, node): + def evaluate_comparison(self, node: ComparisonNode) -> bool: self.evaluate_statement(node.left) self.evaluate_statement(node.right) return False - def evaluate_andstatement(self, cur): + def evaluate_andstatement(self, cur: AndNode) -> bool: self.evaluate_statement(cur.left) self.evaluate_statement(cur.right) return False - def evaluate_orstatement(self, cur): + def evaluate_orstatement(self, cur: OrNode) -> bool: self.evaluate_statement(cur.left) self.evaluate_statement(cur.right) return False - def evaluate_foreach(self, node): + def evaluate_notstatement(self, cur: NotNode) -> bool: + self.evaluate_statement(cur.value) + return False + + def evaluate_foreach(self, node: ForeachClauseNode) -> None: try: self.evaluate_codeblock(node.block) except ContinueRequest: @@ -230,30 +262,31 @@ except BreakRequest: pass - def evaluate_if(self, node): + def evaluate_if(self, node: IfClauseNode) -> None: for i in node.ifs: self.evaluate_codeblock(i.block) if not isinstance(node.elseblock, EmptyNode): self.evaluate_codeblock(node.elseblock) - def get_variable(self, varname): + def get_variable(self, varname: str) -> int: return 0 - def assignment(self, node): + def assignment(self, node: AssignmentNode) -> None: assert(isinstance(node, AssignmentNode)) - self.assignments[node.var_name] = [node.value] # Save a reference to the value node - if hasattr(node.value, 'ast_id'): + self.assignments[node.var_name] = node.value # Save a reference to the value node + if node.value.ast_id: self.reverse_assignment[node.value.ast_id] = node - self.assign_vals[node.var_name] = [self.evaluate_statement(node.value)] # Evaluate the value just in case + self.assign_vals[node.var_name] = self.evaluate_statement(node.value) # Evaluate the 
value just in case def resolve_node(self, node: BaseNode, include_unknown_args: bool = False, id_loop_detect: T.Optional[T.List[str]] = None) -> T.Optional[T.Any]: def quick_resolve(n: BaseNode, loop_detect: T.Optional[T.List[str]] = None) -> T.Any: if loop_detect is None: loop_detect = [] if isinstance(n, IdNode): + assert isinstance(n.value, str) if n.value in loop_detect or n.value not in self.assignments: return [] - return quick_resolve(self.assignments[n.value][0], loop_detect = loop_detect + [n.value]) + return quick_resolve(self.assignments[n.value], loop_detect = loop_detect + [n.value]) elif isinstance(n, ElementaryNode): return n.value else: @@ -266,7 +299,7 @@ if not isinstance(node, BaseNode): return None - assert(hasattr(node, 'ast_id')) + assert node.ast_id if node.ast_id in id_loop_detect: return None # Loop detected id_loop_detect += [node.ast_id] @@ -278,6 +311,11 @@ elif isinstance(node, ElementaryNode): result = node.value + elif isinstance(node, NotNode): + result = self.resolve_node(node.value, include_unknown_args, id_loop_detect) + if isinstance(result, bool): + result = not result + elif isinstance(node, ArrayNode): result = [x for x in node.args.arguments] @@ -296,18 +334,19 @@ elif isinstance(node, MethodNode): src = quick_resolve(node.source_object) - margs = self.flatten_args(node.args, include_unknown_args, id_loop_detect) + margs = self.flatten_args(node.args.arguments, include_unknown_args, id_loop_detect) + mkwargs = {} # type: T.Dict[str, TYPE_nvar] try: if isinstance(src, str): - result = self.string_method_call(src, node.name, margs) + result = self.string_method_call(src, node.name, margs, mkwargs) elif isinstance(src, bool): - result = self.bool_method_call(src, node.name, margs) + result = self.bool_method_call(src, node.name, margs, mkwargs) elif isinstance(src, int): - result = self.int_method_call(src, node.name, margs) + result = self.int_method_call(src, node.name, margs, mkwargs) elif isinstance(src, list): - result = self.array_method_call(src, node.name, margs) + result = self.array_method_call(src, node.name, margs, mkwargs) elif isinstance(src, dict): - result = self.dict_method_call(src, node.name, margs) + result = self.dict_method_call(src, node.name, margs, mkwargs) except mesonlib.MesonException: return None @@ -315,7 +354,7 @@ if isinstance(result, BaseNode): result = self.resolve_node(result, include_unknown_args, id_loop_detect) elif isinstance(result, list): - new_res = [] + new_res = [] # type: T.List[TYPE_nvar] for i in result: if isinstance(i, BaseNode): resolved = self.resolve_node(i, include_unknown_args, id_loop_detect) @@ -327,12 +366,14 @@ return result - def flatten_args(self, args: T.Any, include_unknown_args: bool = False, id_loop_detect: T.Optional[T.List[str]] = None) -> T.List[T.Any]: + def flatten_args(self, args_raw: T.Union[TYPE_nvar, T.Sequence[TYPE_nvar]], include_unknown_args: bool = False, id_loop_detect: T.Optional[T.List[str]] = None) -> T.List[TYPE_nvar]: # Make sure we are always dealing with lists - if not isinstance(args, list): - args = [args] + if isinstance(args_raw, list): + args = args_raw + else: + args = [args_raw] - flattend_args = [] + flattend_args = [] # type: T.List[TYPE_nvar] # Resolve the contents of args for i in args: @@ -346,7 +387,7 @@ flattend_args += [i] return flattend_args - def flatten_kwargs(self, kwargs: object, include_unknown_args: bool = False): + def flatten_kwargs(self, kwargs: T.Dict[str, TYPE_nvar], include_unknown_args: bool = False) -> T.Dict[str, TYPE_nvar]: 
flattend_kwargs = {} for key, val in kwargs.items(): if isinstance(val, BaseNode): diff -Nru meson-0.53.2/mesonbuild/ast/introspection.py meson-0.57.0+really0.56.2/mesonbuild/ast/introspection.py --- meson-0.53.2/mesonbuild/ast/introspection.py 2019-12-29 22:47:27.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/ast/introspection.py 2021-01-06 10:39:48.000000000 +0000 @@ -15,30 +15,45 @@ # This class contains the basic functionality needed to run any interpreter # or an interpreter-based tool -from . import AstInterpreter +from .interpreter import AstInterpreter +from .visitor import AstVisitor from .. import compilers, environment, mesonlib, optinterpreter from .. import coredata as cdata from ..mesonlib import MachineChoice -from ..interpreterbase import InvalidArguments -from ..build import Executable, Jar, SharedLibrary, SharedModule, StaticLibrary +from ..interpreterbase import InvalidArguments, TYPE_nvar +from ..build import BuildTarget, Executable, Jar, SharedLibrary, SharedModule, StaticLibrary from ..mparser import BaseNode, ArithmeticNode, ArrayNode, ElementaryNode, IdNode, FunctionNode, StringNode +import typing as T import os +import argparse build_target_functions = ['executable', 'jar', 'library', 'shared_library', 'shared_module', 'static_library', 'both_libraries'] -class IntrospectionHelper: +class IntrospectionHelper(argparse.Namespace): # mimic an argparse namespace - def __init__(self, cross_file): - self.cross_file = cross_file - self.native_file = None - self.cmd_line_options = {} + def __init__(self, cross_file: str): + super().__init__() + self.cross_file = cross_file # type: str + self.native_file = None # type: str + self.cmd_line_options = {} # type: T.Dict[str, str] + + def __eq__(self, other: object) -> bool: + return NotImplemented class IntrospectionInterpreter(AstInterpreter): # Interpreter to detect the options without a build directory # Most of the code is stolen from interpreter.Interpreter - def __init__(self, source_root, subdir, backend, visitors=None, cross_file=None, subproject='', subproject_dir='subprojects', env=None): + def __init__(self, + source_root: str, + subdir: str, + backend: str, + visitors: T.Optional[T.List[AstVisitor]] = None, + cross_file: T.Optional[str] = None, + subproject: str = '', + subproject_dir: str = 'subprojects', + env: T.Optional[environment.Environment] = None): visitors = visitors if visitors is not None else [] - super().__init__(source_root, subdir, visitors=visitors) + super().__init__(source_root, subdir, subproject, visitors=visitors) options = IntrospectionHelper(cross_file) self.cross_file = cross_file @@ -46,16 +61,15 @@ self.environment = environment.Environment(source_root, None, options) else: self.environment = env - self.subproject = subproject self.subproject_dir = subproject_dir self.coredata = self.environment.get_coredata() self.option_file = os.path.join(self.source_root, self.subdir, 'meson_options.txt') self.backend = backend self.default_options = {'backend': self.backend} - self.project_data = {} - self.targets = [] - self.dependencies = [] - self.project_node = None + self.project_data = {} # type: T.Dict[str, T.Any] + self.targets = [] # type: T.List[T.Dict[str, T.Any]] + self.dependencies = [] # type: T.List[T.Dict[str, T.Any]] + self.project_node = None # type: BaseNode self.funcs.update({ 'add_languages': self.func_add_languages, @@ -70,7 +84,7 @@ 'both_libraries': self.func_both_lib, }) - def func_project(self, node, args, kwargs): + def func_project(self, node: BaseNode, args: 
T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> None: if self.project_node: raise InvalidArguments('Second call to project()') self.project_node = node @@ -92,14 +106,15 @@ self.coredata.merge_user_options(oi.options) def_opts = self.flatten_args(kwargs.get('default_options', [])) - self.project_default_options = mesonlib.stringlistify(def_opts) - self.project_default_options = cdata.create_options_dict(self.project_default_options) + _project_default_options = mesonlib.stringlistify(def_opts) + self.project_default_options = cdata.create_options_dict(_project_default_options) self.default_options.update(self.project_default_options) self.coredata.set_default_options(self.default_options, self.subproject, self.environment) if not self.is_subproject() and 'subproject_dir' in kwargs: spdirname = kwargs['subproject_dir'] - if isinstance(spdirname, ElementaryNode): + if isinstance(spdirname, StringNode): + assert isinstance(spdirname.value, str) self.subproject_dir = spdirname.value if not self.is_subproject(): self.project_data['subprojects'] = [] @@ -110,12 +125,13 @@ self.do_subproject(i) self.coredata.init_backend_options(self.backend) - options = {k: v for k, v in self.environment.cmd_line_options.items() if k.startswith('backend_')} + options = {k: v for k, v in self.environment.raw_options.items() if k.startswith('backend_')} self.coredata.set_options(options) - self.func_add_languages(None, proj_langs, None) + self._add_languages(proj_langs, MachineChoice.HOST) + self._add_languages(proj_langs, MachineChoice.BUILD) - def do_subproject(self, dirname): + def do_subproject(self, dirname: str) -> None: subproject_dir_abs = os.path.join(self.environment.get_source_dir(), self.subproject_dir) subpr = os.path.join(subproject_dir_abs, dirname) try: @@ -126,15 +142,29 @@ except (mesonlib.MesonException, RuntimeError): return - def func_add_languages(self, node, args, kwargs): - args = self.flatten_args(args) - for for_machine in [MachineChoice.BUILD, MachineChoice.HOST]: - for lang in sorted(args, key=compilers.sort_clink): - lang = lang.lower() - if lang not in self.coredata.compilers[for_machine]: - self.environment.detect_compiler_for(lang, for_machine) + def func_add_languages(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> None: + kwargs = self.flatten_kwargs(kwargs) + if 'native' in kwargs: + native = kwargs.get('native', False) + self._add_languages(args, MachineChoice.BUILD if native else MachineChoice.HOST) + else: + for for_machine in [MachineChoice.BUILD, MachineChoice.HOST]: + self._add_languages(args, for_machine) + + def _add_languages(self, raw_langs: T.List[TYPE_nvar], for_machine: MachineChoice) -> None: + langs = [] # type: T.List[str] + for l in self.flatten_args(raw_langs): + if isinstance(l, str): + langs.append(l) + elif isinstance(l, StringNode): + langs.append(l.value) + + for lang in sorted(langs, key=compilers.sort_clink): + lang = lang.lower() + if lang not in self.coredata.compilers[for_machine]: + self.environment.detect_compiler_for(lang, for_machine) - def func_dependency(self, node, args, kwargs): + def func_dependency(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> None: args = self.flatten_args(args) kwargs = self.flatten_kwargs(kwargs) if not args: @@ -145,7 +175,6 @@ version = kwargs.get('version', []) if not isinstance(version, list): version = [version] - condition_level = node.condition_level if hasattr(node, 'condition_level') else 0 if isinstance(required, ElementaryNode): required 
= required.value if not isinstance(required, bool): @@ -155,59 +184,70 @@ 'required': required, 'version': version, 'has_fallback': has_fallback, - 'conditional': condition_level > 0, - 'node': node, + 'conditional': node.condition_level > 0, + 'node': node }] - def build_target(self, node, args, kwargs, targetclass): + def build_target(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs_raw: T.Dict[str, TYPE_nvar], targetclass: T.Type[BuildTarget]) -> T.Optional[T.Dict[str, T.Any]]: args = self.flatten_args(args) if not args or not isinstance(args[0], str): - return + return None name = args[0] srcqueue = [node] + extra_queue = [] # Process the sources BEFORE flattening the kwargs, to preserve the original nodes - if 'sources' in kwargs: - srcqueue += mesonlib.listify(kwargs['sources']) + if 'sources' in kwargs_raw: + srcqueue += mesonlib.listify(kwargs_raw['sources']) + + if 'extra_files' in kwargs_raw: + extra_queue += mesonlib.listify(kwargs_raw['extra_files']) - kwargs = self.flatten_kwargs(kwargs, True) + kwargs = self.flatten_kwargs(kwargs_raw, True) - source_nodes = [] - while srcqueue: - curr = srcqueue.pop(0) - arg_node = None - assert(isinstance(curr, BaseNode)) - if isinstance(curr, FunctionNode): - arg_node = curr.args - elif isinstance(curr, ArrayNode): - arg_node = curr.args - elif isinstance(curr, IdNode): - # Try to resolve the ID and append the node to the queue - var_name = curr.value - if var_name in self.assignments and self.assignments[var_name]: - tmp_node = self.assignments[var_name][0] - if isinstance(tmp_node, (ArrayNode, IdNode, FunctionNode)): - srcqueue += [tmp_node] - elif isinstance(curr, ArithmeticNode): - srcqueue += [curr.left, curr.right] - if arg_node is None: - continue - arg_nodes = arg_node.arguments.copy() - # Pop the first element if the function is a build target function - if isinstance(curr, FunctionNode) and curr.func_name in build_target_functions: - arg_nodes.pop(0) - elemetary_nodes = [x for x in arg_nodes if isinstance(x, (str, StringNode))] - srcqueue += [x for x in arg_nodes if isinstance(x, (FunctionNode, ArrayNode, IdNode, ArithmeticNode))] - if elemetary_nodes: - source_nodes += [curr] + def traverse_nodes(inqueue: T.List[BaseNode]) -> T.List[BaseNode]: + res = [] # type: T.List[BaseNode] + while inqueue: + curr = inqueue.pop(0) + arg_node = None + assert(isinstance(curr, BaseNode)) + if isinstance(curr, FunctionNode): + arg_node = curr.args + elif isinstance(curr, ArrayNode): + arg_node = curr.args + elif isinstance(curr, IdNode): + # Try to resolve the ID and append the node to the queue + assert isinstance(curr.value, str) + var_name = curr.value + if var_name in self.assignments: + tmp_node = self.assignments[var_name] + if isinstance(tmp_node, (ArrayNode, IdNode, FunctionNode)): + inqueue += [tmp_node] + elif isinstance(curr, ArithmeticNode): + inqueue += [curr.left, curr.right] + if arg_node is None: + continue + arg_nodes = arg_node.arguments.copy() + # Pop the first element if the function is a build target function + if isinstance(curr, FunctionNode) and curr.func_name in build_target_functions: + arg_nodes.pop(0) + elemetary_nodes = [x for x in arg_nodes if isinstance(x, (str, StringNode))] + inqueue += [x for x in arg_nodes if isinstance(x, (FunctionNode, ArrayNode, IdNode, ArithmeticNode))] + if elemetary_nodes: + res += [curr] + return res + + source_nodes = traverse_nodes(srcqueue) + extraf_nodes = traverse_nodes(extra_queue) # Make sure nothing can crash when creating the build class kwargs_reduced = {k: v for k, v in 
kwargs.items() if k in targetclass.known_kwargs and k in ['install', 'build_by_default', 'build_always']} kwargs_reduced = {k: v.value if isinstance(v, ElementaryNode) else v for k, v in kwargs_reduced.items()} kwargs_reduced = {k: v for k, v in kwargs_reduced.items() if not isinstance(v, BaseNode)} for_machine = MachineChoice.HOST - objects = [] - empty_sources = [] # Passing the unresolved sources list causes errors + objects = [] # type: T.List[T.Any] + empty_sources = [] # type: T.List[T.Any] + # Passing the unresolved sources list causes errors target = targetclass(name, self.subdir, self.subproject, for_machine, empty_sources, objects, self.environment, kwargs_reduced) new_target = { @@ -220,6 +260,7 @@ 'installed': target.should_install(), 'outputs': target.get_outputs(), 'sources': source_nodes, + 'extra_files': extraf_nodes, 'kwargs': kwargs, 'node': node, } @@ -227,7 +268,7 @@ self.targets += [new_target] return new_target - def build_library(self, node, args, kwargs): + def build_library(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]: default_library = self.coredata.get_builtin_option('default_library') if default_library == 'shared': return self.build_target(node, args, kwargs, SharedLibrary) @@ -235,31 +276,32 @@ return self.build_target(node, args, kwargs, StaticLibrary) elif default_library == 'both': return self.build_target(node, args, kwargs, SharedLibrary) + return None - def func_executable(self, node, args, kwargs): + def func_executable(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]: return self.build_target(node, args, kwargs, Executable) - def func_static_lib(self, node, args, kwargs): + def func_static_lib(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]: return self.build_target(node, args, kwargs, StaticLibrary) - def func_shared_lib(self, node, args, kwargs): + def func_shared_lib(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]: return self.build_target(node, args, kwargs, SharedLibrary) - def func_both_lib(self, node, args, kwargs): + def func_both_lib(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]: return self.build_target(node, args, kwargs, SharedLibrary) - def func_shared_module(self, node, args, kwargs): + def func_shared_module(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]: return self.build_target(node, args, kwargs, SharedModule) - def func_library(self, node, args, kwargs): + def func_library(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]: return self.build_library(node, args, kwargs) - def func_jar(self, node, args, kwargs): + def func_jar(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]: return self.build_target(node, args, kwargs, Jar) - def func_build_target(self, node, args, kwargs): + def func_build_target(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]: if 'target_type' not in kwargs: - return + return None target_type = kwargs.pop('target_type') if isinstance(target_type, ElementaryNode): target_type = target_type.value @@ -275,11 +317,12 @@ return 
self.build_library(node, args, kwargs) elif target_type == 'jar': return self.build_target(node, args, kwargs, Jar) + return None - def is_subproject(self): + def is_subproject(self) -> bool: return self.subproject != '' - def analyze(self): + def analyze(self) -> None: self.load_root_meson_file() self.sanity_check_ast() self.parse_project() diff -Nru meson-0.53.2/mesonbuild/ast/postprocess.py meson-0.57.0+really0.56.2/mesonbuild/ast/postprocess.py --- meson-0.53.2/mesonbuild/ast/postprocess.py 2019-03-06 20:48:10.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/ast/postprocess.py 2020-09-17 22:00:44.000000000 +0000 @@ -17,48 +17,49 @@ from . import AstVisitor from .. import mparser +import typing as T class AstIndentationGenerator(AstVisitor): - def __init__(self): + def __init__(self) -> None: self.level = 0 - def visit_default_func(self, node: mparser.BaseNode): + def visit_default_func(self, node: mparser.BaseNode) -> None: # Store the current level in the node node.level = self.level - def visit_ArrayNode(self, node: mparser.ArrayNode): + def visit_ArrayNode(self, node: mparser.ArrayNode) -> None: self.visit_default_func(node) self.level += 1 node.args.accept(self) self.level -= 1 - def visit_DictNode(self, node: mparser.DictNode): + def visit_DictNode(self, node: mparser.DictNode) -> None: self.visit_default_func(node) self.level += 1 node.args.accept(self) self.level -= 1 - def visit_MethodNode(self, node: mparser.MethodNode): + def visit_MethodNode(self, node: mparser.MethodNode) -> None: self.visit_default_func(node) node.source_object.accept(self) self.level += 1 node.args.accept(self) self.level -= 1 - def visit_FunctionNode(self, node: mparser.FunctionNode): + def visit_FunctionNode(self, node: mparser.FunctionNode) -> None: self.visit_default_func(node) self.level += 1 node.args.accept(self) self.level -= 1 - def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode): + def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode) -> None: self.visit_default_func(node) self.level += 1 node.items.accept(self) node.block.accept(self) self.level -= 1 - def visit_IfClauseNode(self, node: mparser.IfClauseNode): + def visit_IfClauseNode(self, node: mparser.IfClauseNode) -> None: self.visit_default_func(node) for i in node.ifs: i.accept(self) @@ -67,7 +68,7 @@ node.elseblock.accept(self) self.level -= 1 - def visit_IfNode(self, node: mparser.IfNode): + def visit_IfNode(self, node: mparser.IfNode) -> None: self.visit_default_func(node) self.level += 1 node.condition.accept(self) @@ -75,10 +76,10 @@ self.level -= 1 class AstIDGenerator(AstVisitor): - def __init__(self): - self.counter = {} + def __init__(self) -> None: + self.counter = {} # type: T.Dict[str, int] - def visit_default_func(self, node: mparser.BaseNode): + def visit_default_func(self, node: mparser.BaseNode) -> None: name = type(node).__name__ if name not in self.counter: self.counter[name] = 0 @@ -86,20 +87,20 @@ self.counter[name] += 1 class AstConditionLevel(AstVisitor): - def __init__(self): + def __init__(self) -> None: self.condition_level = 0 - def visit_default_func(self, node: mparser.BaseNode): + def visit_default_func(self, node: mparser.BaseNode) -> None: node.condition_level = self.condition_level - def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode): + def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode) -> None: self.visit_default_func(node) self.condition_level += 1 node.items.accept(self) node.block.accept(self) self.condition_level -= 1 - def 
visit_IfClauseNode(self, node: mparser.IfClauseNode): + def visit_IfClauseNode(self, node: mparser.IfClauseNode) -> None: self.visit_default_func(node) for i in node.ifs: i.accept(self) @@ -108,7 +109,7 @@ node.elseblock.accept(self) self.condition_level -= 1 - def visit_IfNode(self, node: mparser.IfNode): + def visit_IfNode(self, node: mparser.IfNode) -> None: self.visit_default_func(node) self.condition_level += 1 node.condition.accept(self) diff -Nru meson-0.53.2/mesonbuild/ast/printer.py meson-0.57.0+really0.56.2/mesonbuild/ast/printer.py --- meson-0.53.2/mesonbuild/ast/printer.py 2019-12-29 22:47:27.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/ast/printer.py 2020-08-15 16:27:05.000000000 +0000 @@ -18,6 +18,7 @@ from .. import mparser from . import AstVisitor import re +import typing as T arithmic_map = { 'add': '+', @@ -36,114 +37,111 @@ self.is_newline = True self.last_level = 0 - def post_process(self): + def post_process(self) -> None: self.result = re.sub(r'\s+\n', '\n', self.result) - def append(self, data: str, node: mparser.BaseNode): - level = 0 - if node and hasattr(node, 'level'): - level = node.level - else: - level = self.last_level - self.last_level = level + def append(self, data: str, node: mparser.BaseNode) -> None: + self.last_level = node.level if self.is_newline: - self.result += ' ' * (level * self.indent) + self.result += ' ' * (node.level * self.indent) self.result += data self.is_newline = False - def append_padded(self, data: str, node: mparser.BaseNode): + def append_padded(self, data: str, node: mparser.BaseNode) -> None: if self.result[-1] not in [' ', '\n']: data = ' ' + data self.append(data + ' ', node) - def newline(self): + def newline(self) -> None: self.result += '\n' self.is_newline = True - def visit_BooleanNode(self, node: mparser.BooleanNode): + def visit_BooleanNode(self, node: mparser.BooleanNode) -> None: self.append('true' if node.value else 'false', node) - def visit_IdNode(self, node: mparser.IdNode): + def visit_IdNode(self, node: mparser.IdNode) -> None: + assert isinstance(node.value, str) self.append(node.value, node) - def visit_NumberNode(self, node: mparser.NumberNode): + def visit_NumberNode(self, node: mparser.NumberNode) -> None: self.append(str(node.value), node) - def visit_StringNode(self, node: mparser.StringNode): + def visit_StringNode(self, node: mparser.StringNode) -> None: + assert isinstance(node.value, str) self.append("'" + node.value + "'", node) - def visit_ContinueNode(self, node: mparser.ContinueNode): + def visit_ContinueNode(self, node: mparser.ContinueNode) -> None: self.append('continue', node) - def visit_BreakNode(self, node: mparser.BreakNode): + def visit_BreakNode(self, node: mparser.BreakNode) -> None: self.append('break', node) - def visit_ArrayNode(self, node: mparser.ArrayNode): + def visit_ArrayNode(self, node: mparser.ArrayNode) -> None: self.append('[', node) node.args.accept(self) self.append(']', node) - def visit_DictNode(self, node: mparser.DictNode): + def visit_DictNode(self, node: mparser.DictNode) -> None: self.append('{', node) node.args.accept(self) self.append('}', node) - def visit_OrNode(self, node: mparser.OrNode): + def visit_OrNode(self, node: mparser.OrNode) -> None: node.left.accept(self) self.append_padded('or', node) node.right.accept(self) - def visit_AndNode(self, node: mparser.AndNode): + def visit_AndNode(self, node: mparser.AndNode) -> None: node.left.accept(self) self.append_padded('and', node) node.right.accept(self) - def visit_ComparisonNode(self, node: 
mparser.ComparisonNode): + def visit_ComparisonNode(self, node: mparser.ComparisonNode) -> None: node.left.accept(self) self.append_padded(node.ctype, node) node.right.accept(self) - def visit_ArithmeticNode(self, node: mparser.ArithmeticNode): + def visit_ArithmeticNode(self, node: mparser.ArithmeticNode) -> None: node.left.accept(self) self.append_padded(arithmic_map[node.operation], node) node.right.accept(self) - def visit_NotNode(self, node: mparser.NotNode): + def visit_NotNode(self, node: mparser.NotNode) -> None: self.append_padded('not', node) node.value.accept(self) - def visit_CodeBlockNode(self, node: mparser.CodeBlockNode): + def visit_CodeBlockNode(self, node: mparser.CodeBlockNode) -> None: for i in node.lines: i.accept(self) self.newline() - def visit_IndexNode(self, node: mparser.IndexNode): + def visit_IndexNode(self, node: mparser.IndexNode) -> None: node.iobject.accept(self) self.append('[', node) node.index.accept(self) self.append(']', node) - def visit_MethodNode(self, node: mparser.MethodNode): + def visit_MethodNode(self, node: mparser.MethodNode) -> None: node.source_object.accept(self) self.append('.' + node.name + '(', node) node.args.accept(self) self.append(')', node) - def visit_FunctionNode(self, node: mparser.FunctionNode): + def visit_FunctionNode(self, node: mparser.FunctionNode) -> None: self.append(node.func_name + '(', node) node.args.accept(self) self.append(')', node) - def visit_AssignmentNode(self, node: mparser.AssignmentNode): + def visit_AssignmentNode(self, node: mparser.AssignmentNode) -> None: self.append(node.var_name + ' = ', node) node.value.accept(self) - def visit_PlusAssignmentNode(self, node: mparser.PlusAssignmentNode): + def visit_PlusAssignmentNode(self, node: mparser.PlusAssignmentNode) -> None: self.append(node.var_name + ' += ', node) node.value.accept(self) - def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode): - varnames = [x.value for x in node.varnames] + def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode) -> None: + varnames = [x for x in node.varnames] self.append_padded('foreach', node) self.append_padded(', '.join(varnames), node) self.append_padded(':', node) @@ -152,34 +150,34 @@ node.block.accept(self) self.append('endforeach', node) - def visit_IfClauseNode(self, node: mparser.IfClauseNode): + def visit_IfClauseNode(self, node: mparser.IfClauseNode) -> None: prefix = '' for i in node.ifs: self.append_padded(prefix + 'if', node) prefix = 'el' i.accept(self) - if node.elseblock: + if not isinstance(node.elseblock, mparser.EmptyNode): self.append('else', node) node.elseblock.accept(self) self.append('endif', node) - def visit_UMinusNode(self, node: mparser.UMinusNode): + def visit_UMinusNode(self, node: mparser.UMinusNode) -> None: self.append_padded('-', node) node.value.accept(self) - def visit_IfNode(self, node: mparser.IfNode): + def visit_IfNode(self, node: mparser.IfNode) -> None: node.condition.accept(self) self.newline() node.block.accept(self) - def visit_TernaryNode(self, node: mparser.TernaryNode): + def visit_TernaryNode(self, node: mparser.TernaryNode) -> None: node.condition.accept(self) self.append_padded('?', node) node.trueblock.accept(self) self.append_padded(':', node) node.falseblock.accept(self) - def visit_ArgumentNode(self, node: mparser.ArgumentNode): + def visit_ArgumentNode(self, node: mparser.ArgumentNode) -> None: break_args = (len(node.arguments) + len(node.kwargs)) > self.arg_newline_cutoff for i in node.arguments + list(node.kwargs.values()): if not 
isinstance(i, (mparser.ElementaryNode, mparser.IndexNode)): @@ -192,10 +190,7 @@ if break_args: self.newline() for key, val in node.kwargs.items(): - if isinstance(key, str): - self.append(key, node) - else: - key.accept(self) + key.accept(self) self.append_padded(':', node) val.accept(self) self.append(', ', node) @@ -205,3 +200,160 @@ self.result = re.sub(r', \n$', '\n', self.result) else: self.result = re.sub(r', $', '', self.result) + +class AstJSONPrinter(AstVisitor): + def __init__(self) -> None: + self.result = {} # type: T.Dict[str, T.Any] + self.current = self.result + + def _accept(self, key: str, node: mparser.BaseNode) -> None: + old = self.current + data = {} # type: T.Dict[str, T.Any] + self.current = data + node.accept(self) + self.current = old + self.current[key] = data + + def _accept_list(self, key: str, nodes: T.Sequence[mparser.BaseNode]) -> None: + old = self.current + datalist = [] # type: T.List[T.Dict[str, T.Any]] + for i in nodes: + self.current = {} + i.accept(self) + datalist += [self.current] + self.current = old + self.current[key] = datalist + + def _raw_accept(self, node: mparser.BaseNode, data: T.Dict[str, T.Any]) -> None: + old = self.current + self.current = data + node.accept(self) + self.current = old + + def setbase(self, node: mparser.BaseNode) -> None: + self.current['node'] = type(node).__name__ + self.current['lineno'] = node.lineno + self.current['colno'] = node.colno + self.current['end_lineno'] = node.end_lineno + self.current['end_colno'] = node.end_colno + + def visit_default_func(self, node: mparser.BaseNode) -> None: + self.setbase(node) + + def gen_ElementaryNode(self, node: mparser.ElementaryNode) -> None: + self.current['value'] = node.value + self.setbase(node) + + def visit_BooleanNode(self, node: mparser.BooleanNode) -> None: + self.gen_ElementaryNode(node) + + def visit_IdNode(self, node: mparser.IdNode) -> None: + self.gen_ElementaryNode(node) + + def visit_NumberNode(self, node: mparser.NumberNode) -> None: + self.gen_ElementaryNode(node) + + def visit_StringNode(self, node: mparser.StringNode) -> None: + self.gen_ElementaryNode(node) + + def visit_ArrayNode(self, node: mparser.ArrayNode) -> None: + self._accept('args', node.args) + self.setbase(node) + + def visit_DictNode(self, node: mparser.DictNode) -> None: + self._accept('args', node.args) + self.setbase(node) + + def visit_OrNode(self, node: mparser.OrNode) -> None: + self._accept('left', node.left) + self._accept('right', node.right) + self.setbase(node) + + def visit_AndNode(self, node: mparser.AndNode) -> None: + self._accept('left', node.left) + self._accept('right', node.right) + self.setbase(node) + + def visit_ComparisonNode(self, node: mparser.ComparisonNode) -> None: + self._accept('left', node.left) + self._accept('right', node.right) + self.current['ctype'] = node.ctype + self.setbase(node) + + def visit_ArithmeticNode(self, node: mparser.ArithmeticNode) -> None: + self._accept('left', node.left) + self._accept('right', node.right) + self.current['op'] = arithmic_map[node.operation] + self.setbase(node) + + def visit_NotNode(self, node: mparser.NotNode) -> None: + self._accept('right', node.value) + self.setbase(node) + + def visit_CodeBlockNode(self, node: mparser.CodeBlockNode) -> None: + self._accept_list('lines', node.lines) + self.setbase(node) + + def visit_IndexNode(self, node: mparser.IndexNode) -> None: + self._accept('object', node.iobject) + self._accept('index', node.index) + self.setbase(node) + + def visit_MethodNode(self, node: mparser.MethodNode) -> 
None: + self._accept('object', node.source_object) + self._accept('args', node.args) + self.current['name'] = node.name + self.setbase(node) + + def visit_FunctionNode(self, node: mparser.FunctionNode) -> None: + self._accept('args', node.args) + self.current['name'] = node.func_name + self.setbase(node) + + def visit_AssignmentNode(self, node: mparser.AssignmentNode) -> None: + self._accept('value', node.value) + self.current['var_name'] = node.var_name + self.setbase(node) + + def visit_PlusAssignmentNode(self, node: mparser.PlusAssignmentNode) -> None: + self._accept('value', node.value) + self.current['var_name'] = node.var_name + self.setbase(node) + + def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode) -> None: + self._accept('items', node.items) + self._accept('block', node.block) + self.current['varnames'] = node.varnames + self.setbase(node) + + def visit_IfClauseNode(self, node: mparser.IfClauseNode) -> None: + self._accept_list('ifs', node.ifs) + self._accept('else', node.elseblock) + self.setbase(node) + + def visit_UMinusNode(self, node: mparser.UMinusNode) -> None: + self._accept('right', node.value) + self.setbase(node) + + def visit_IfNode(self, node: mparser.IfNode) -> None: + self._accept('condition', node.condition) + self._accept('block', node.block) + self.setbase(node) + + def visit_TernaryNode(self, node: mparser.TernaryNode) -> None: + self._accept('condition', node.condition) + self._accept('true', node.trueblock) + self._accept('false', node.falseblock) + self.setbase(node) + + def visit_ArgumentNode(self, node: mparser.ArgumentNode) -> None: + self._accept_list('positional', node.arguments) + kwargs_list = [] # type: T.List[T.Dict[str, T.Dict[str, T.Any]]] + for key, val in node.kwargs.items(): + key_res = {} # type: T.Dict[str, T.Any] + val_res = {} # type: T.Dict[str, T.Any] + self._raw_accept(key, key_res) + self._raw_accept(val, val_res) + kwargs_list += [{'key': key_res, 'val': val_res}] + self.current['kwargs'] = kwargs_list + self.setbase(node) diff -Nru meson-0.53.2/mesonbuild/ast/visitor.py meson-0.57.0+really0.56.2/mesonbuild/ast/visitor.py --- meson-0.53.2/mesonbuild/ast/visitor.py 2019-08-28 17:15:39.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/ast/visitor.py 2020-09-17 22:00:44.000000000 +0000 @@ -18,122 +18,122 @@ from .. 
import mparser class AstVisitor: - def __init__(self): + def __init__(self) -> None: pass - def visit_default_func(self, node: mparser.BaseNode): + def visit_default_func(self, node: mparser.BaseNode) -> None: pass - def visit_BooleanNode(self, node: mparser.BooleanNode): + def visit_BooleanNode(self, node: mparser.BooleanNode) -> None: self.visit_default_func(node) - def visit_IdNode(self, node: mparser.IdNode): + def visit_IdNode(self, node: mparser.IdNode) -> None: self.visit_default_func(node) - def visit_NumberNode(self, node: mparser.NumberNode): + def visit_NumberNode(self, node: mparser.NumberNode) -> None: self.visit_default_func(node) - def visit_StringNode(self, node: mparser.StringNode): + def visit_StringNode(self, node: mparser.StringNode) -> None: self.visit_default_func(node) - def visit_ContinueNode(self, node: mparser.ContinueNode): + def visit_ContinueNode(self, node: mparser.ContinueNode) -> None: self.visit_default_func(node) - def visit_BreakNode(self, node: mparser.BreakNode): + def visit_BreakNode(self, node: mparser.BreakNode) -> None: self.visit_default_func(node) - def visit_ArrayNode(self, node: mparser.ArrayNode): + def visit_ArrayNode(self, node: mparser.ArrayNode) -> None: self.visit_default_func(node) node.args.accept(self) - def visit_DictNode(self, node: mparser.DictNode): + def visit_DictNode(self, node: mparser.DictNode) -> None: self.visit_default_func(node) node.args.accept(self) - def visit_EmptyNode(self, node: mparser.EmptyNode): + def visit_EmptyNode(self, node: mparser.EmptyNode) -> None: self.visit_default_func(node) - def visit_OrNode(self, node: mparser.OrNode): + def visit_OrNode(self, node: mparser.OrNode) -> None: self.visit_default_func(node) node.left.accept(self) node.right.accept(self) - def visit_AndNode(self, node: mparser.AndNode): + def visit_AndNode(self, node: mparser.AndNode) -> None: self.visit_default_func(node) node.left.accept(self) node.right.accept(self) - def visit_ComparisonNode(self, node: mparser.ComparisonNode): + def visit_ComparisonNode(self, node: mparser.ComparisonNode) -> None: self.visit_default_func(node) node.left.accept(self) node.right.accept(self) - def visit_ArithmeticNode(self, node: mparser.ArithmeticNode): + def visit_ArithmeticNode(self, node: mparser.ArithmeticNode) -> None: self.visit_default_func(node) node.left.accept(self) node.right.accept(self) - def visit_NotNode(self, node: mparser.NotNode): + def visit_NotNode(self, node: mparser.NotNode) -> None: self.visit_default_func(node) node.value.accept(self) - def visit_CodeBlockNode(self, node: mparser.CodeBlockNode): + def visit_CodeBlockNode(self, node: mparser.CodeBlockNode) -> None: self.visit_default_func(node) for i in node.lines: i.accept(self) - def visit_IndexNode(self, node: mparser.IndexNode): + def visit_IndexNode(self, node: mparser.IndexNode) -> None: self.visit_default_func(node) node.iobject.accept(self) node.index.accept(self) - def visit_MethodNode(self, node: mparser.MethodNode): + def visit_MethodNode(self, node: mparser.MethodNode) -> None: self.visit_default_func(node) node.source_object.accept(self) node.args.accept(self) - def visit_FunctionNode(self, node: mparser.FunctionNode): + def visit_FunctionNode(self, node: mparser.FunctionNode) -> None: self.visit_default_func(node) node.args.accept(self) - def visit_AssignmentNode(self, node: mparser.AssignmentNode): + def visit_AssignmentNode(self, node: mparser.AssignmentNode) -> None: self.visit_default_func(node) node.value.accept(self) - def visit_PlusAssignmentNode(self, node: 
mparser.PlusAssignmentNode): + def visit_PlusAssignmentNode(self, node: mparser.PlusAssignmentNode) -> None: self.visit_default_func(node) node.value.accept(self) - def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode): + def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode) -> None: self.visit_default_func(node) node.items.accept(self) node.block.accept(self) - def visit_IfClauseNode(self, node: mparser.IfClauseNode): + def visit_IfClauseNode(self, node: mparser.IfClauseNode) -> None: self.visit_default_func(node) for i in node.ifs: i.accept(self) - if node.elseblock: - node.elseblock.accept(self) + node.elseblock.accept(self) - def visit_UMinusNode(self, node: mparser.UMinusNode): + def visit_UMinusNode(self, node: mparser.UMinusNode) -> None: self.visit_default_func(node) node.value.accept(self) - def visit_IfNode(self, node: mparser.IfNode): + def visit_IfNode(self, node: mparser.IfNode) -> None: self.visit_default_func(node) node.condition.accept(self) node.block.accept(self) - def visit_TernaryNode(self, node: mparser.TernaryNode): + def visit_TernaryNode(self, node: mparser.TernaryNode) -> None: self.visit_default_func(node) node.condition.accept(self) node.trueblock.accept(self) node.falseblock.accept(self) - def visit_ArgumentNode(self, node: mparser.ArgumentNode): + def visit_ArgumentNode(self, node: mparser.ArgumentNode) -> None: self.visit_default_func(node) for i in node.arguments: i.accept(self) - for val in node.kwargs.values(): + for key, val in node.kwargs.items(): + key.accept(self) val.accept(self) diff -Nru meson-0.53.2/mesonbuild/backend/backends.py meson-0.57.0+really0.56.2/mesonbuild/backend/backends.py --- meson-0.53.2/mesonbuild/backend/backends.py 2020-01-23 12:51:19.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/backend/backends.py 2021-01-06 10:39:48.000000000 +0000 @@ -12,22 +12,55 @@ # See the License for the specific language governing permissions and # limitations under the License. -import os, pickle, re +from collections import OrderedDict +from functools import lru_cache +from .._pathlib import Path +import enum +import json +import os +import pickle +import re +import shlex +import subprocess import textwrap +import typing as T + from .. import build from .. import dependencies from .. import mesonlib from .. 
import mlog -import json -import subprocess -from ..mesonlib import MachineChoice, MesonException, OrderedSet, OptionOverrideProxy -from ..mesonlib import classify_unity_sources -from ..mesonlib import File -from ..compilers import CompilerArgs, VisualStudioLikeCompiler -from collections import OrderedDict -import shlex -from functools import lru_cache -import typing as T +from ..compilers import languages_using_ldflags +from ..mesonlib import ( + File, MachineChoice, MesonException, OrderedSet, OptionOverrideProxy, + classify_unity_sources, unholder +) + +if T.TYPE_CHECKING: + from ..interpreter import Interpreter, Test + + +class TestProtocol(enum.Enum): + + EXITCODE = 0 + TAP = 1 + GTEST = 2 + + @classmethod + def from_str(cls, string: str) -> 'TestProtocol': + if string == 'exitcode': + return cls.EXITCODE + elif string == 'tap': + return cls.TAP + elif string == 'gtest': + return cls.GTEST + raise MesonException('unknown test format {}'.format(string)) + + def __str__(self) -> str: + if self is self.EXITCODE: + return 'exitcode' + elif self is self.GTEST: + return 'gtest' + return 'tap' class CleanTrees: @@ -41,7 +74,7 @@ class InstallData: def __init__(self, source_dir, build_dir, prefix, strip_bin, - install_umask, mesonintrospect): + install_umask, mesonintrospect, version): self.source_dir = source_dir self.build_dir = build_dir self.prefix = prefix @@ -56,21 +89,23 @@ self.install_scripts = [] self.install_subdirs = [] self.mesonintrospect = mesonintrospect + self.version = version class TargetInstallData: - def __init__(self, fname, outdir, aliases, strip, install_name_mappings, install_rpath, install_mode, optional=False): + def __init__(self, fname, outdir, aliases, strip, install_name_mappings, rpath_dirs_to_remove, install_rpath, install_mode, optional=False): self.fname = fname self.outdir = outdir self.aliases = aliases self.strip = strip self.install_name_mappings = install_name_mappings + self.rpath_dirs_to_remove = rpath_dirs_to_remove self.install_rpath = install_rpath self.install_mode = install_mode self.optional = optional class ExecutableSerialisation: def __init__(self, cmd_args, env=None, exe_wrapper=None, - workdir=None, extra_paths=None, capture=None): + workdir=None, extra_paths=None, capture=None) -> None: self.cmd_args = cmd_args self.env = env or {} if exe_wrapper is not None: @@ -79,14 +114,16 @@ self.workdir = workdir self.extra_paths = extra_paths self.capture = capture + self.pickled = False class TestSerialisation: def __init__(self, name: str, project: str, suite: str, fname: T.List[str], - is_cross_built: bool, exe_wrapper: T.Optional[build.Executable], + is_cross_built: bool, exe_wrapper: T.Optional[dependencies.ExternalProgram], needs_exe_wrapper: bool, is_parallel: bool, cmd_args: T.List[str], env: build.EnvironmentVariables, should_fail: bool, timeout: T.Optional[int], workdir: T.Optional[str], - extra_paths: T.List[str], protocol: str, priority: int): + extra_paths: T.List[str], protocol: TestProtocol, priority: int, + cmd_is_built: bool, depends: T.List[str], version: str): self.name = name self.project_name = project self.suite = suite @@ -105,51 +142,60 @@ self.protocol = protocol self.priority = priority self.needs_exe_wrapper = needs_exe_wrapper + self.cmd_is_built = cmd_is_built + self.depends = depends + self.version = version + -def get_backend_from_name(backend, build): +def get_backend_from_name(backend: str, build: T.Optional[build.Build] = None, interpreter: T.Optional['Interpreter'] = None) -> T.Optional['Backend']: if backend == 
'ninja': from . import ninjabackend - return ninjabackend.NinjaBackend(build) + return ninjabackend.NinjaBackend(build, interpreter) elif backend == 'vs': from . import vs2010backend - return vs2010backend.autodetect_vs_version(build) + return vs2010backend.autodetect_vs_version(build, interpreter) elif backend == 'vs2010': from . import vs2010backend - return vs2010backend.Vs2010Backend(build) + return vs2010backend.Vs2010Backend(build, interpreter) elif backend == 'vs2015': from . import vs2015backend - return vs2015backend.Vs2015Backend(build) + return vs2015backend.Vs2015Backend(build, interpreter) elif backend == 'vs2017': from . import vs2017backend - return vs2017backend.Vs2017Backend(build) + return vs2017backend.Vs2017Backend(build, interpreter) elif backend == 'vs2019': from . import vs2019backend - return vs2019backend.Vs2019Backend(build) + return vs2019backend.Vs2019Backend(build, interpreter) elif backend == 'xcode': from . import xcodebackend - return xcodebackend.XCodeBackend(build) + return xcodebackend.XCodeBackend(build, interpreter) return None # This class contains the basic functionality that is needed by all backends. # Feel free to move stuff in and out of it as you see fit. class Backend: - def __init__(self, build): + def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional['Interpreter']): # Make it possible to construct a dummy backend # This is used for introspection without a build directory if build is None: self.environment = None return self.build = build + self.interpreter = interpreter self.environment = build.environment self.processed_targets = {} + self.name = '' self.build_dir = self.environment.get_build_dir() self.source_dir = self.environment.get_source_dir() self.build_to_src = mesonlib.relpath(self.environment.get_source_dir(), self.environment.get_build_dir()) - def get_target_filename(self, t): + def generate(self) -> None: + raise RuntimeError('generate is not implemented in {}'.format(type(self).__name__)) + + def get_target_filename(self, t, *, warn_multi_output: bool = True): if isinstance(t, build.CustomTarget): - if len(t.get_outputs()) != 1: + if warn_multi_output and len(t.get_outputs()) != 1: mlog.warning('custom_target {!r} has more than one output! 
' 'Using the first one.'.format(t.name)) filename = t.get_outputs()[0] @@ -163,25 +209,24 @@ def get_target_filename_abs(self, target): return os.path.join(self.environment.get_build_dir(), self.get_target_filename(target)) - def get_builtin_options_for_target(self, target): - return OptionOverrideProxy(target.option_overrides, - self.environment.coredata.builtins) - def get_base_options_for_target(self, target): - return OptionOverrideProxy(target.option_overrides, + return OptionOverrideProxy(target.option_overrides_base, self.environment.coredata.builtins, self.environment.coredata.base_options) def get_compiler_options_for_target(self, target): - return OptionOverrideProxy( - target.option_overrides, - self.environment.coredata.compiler_options[target.for_machine]) + comp_reg = self.environment.coredata.compiler_options[target.for_machine] + comp_override = target.option_overrides_compiler + return { + lang: OptionOverrideProxy(comp_override[lang], comp_reg[lang]) + for lang in set(comp_reg.keys()) | set(comp_override.keys()) + } def get_option_for_target(self, option_name, target): - if option_name in target.option_overrides: - override = target.option_overrides[option_name] + if option_name in target.option_overrides_base: + override = target.option_overrides_base[option_name] return self.environment.coredata.validate_option_value(option_name, override) - return self.environment.coredata.get_builtin_option(option_name) + return self.environment.coredata.get_builtin_option(option_name, target.subproject) def get_target_filename_for_linking(self, target): # On some platforms (msvc for instance), the file that is used for @@ -195,7 +240,7 @@ return os.path.join(self.get_target_dir(target), target.get_filename()) elif isinstance(target, (build.CustomTarget, build.CustomTargetIndex)): if not target.is_linkable_target(): - raise MesonException('Tried to link against custom target "%s", which is not linkable.' % target.name) + raise MesonException('Tried to link against custom target "{}", which is not linkable.'.format(target.name)) return os.path.join(self.get_target_dir(target), target.get_filename()) elif isinstance(target, build.Executable): if target.import_filename: @@ -226,7 +271,7 @@ return self.build_to_src def get_target_private_dir(self, target): - return os.path.join(self.get_target_dir(target), target.get_id()) + return os.path.join(self.get_target_filename(target, warn_multi_output=False) + '.p') def get_target_private_dir_abs(self, target): return os.path.join(self.environment.get_build_dir(), self.get_target_private_dir(target)) @@ -245,19 +290,20 @@ # target that the GeneratedList is used in return os.path.join(self.get_target_private_dir(target), src) - def get_unity_source_file(self, target, suffix): + def get_unity_source_file(self, target, suffix, number): # There is a potential conflict here, but it is unlikely that # anyone both enables unity builds and has a file called foo-unity.cpp. - osrc = target.name + '-unity.' 
+ suffix + osrc = '{}-unity{}.{}'.format(target.name, number, suffix) return mesonlib.File.from_built_file(self.get_target_private_dir(target), osrc) def generate_unity_files(self, target, unity_src): abs_files = [] result = [] compsrcs = classify_unity_sources(target.compilers.values(), unity_src) + unity_size = self.get_option_for_target('unity_size', target) - def init_language_file(suffix): - unity_src = self.get_unity_source_file(target, suffix) + def init_language_file(suffix, unity_file_number): + unity_src = self.get_unity_source_file(target, suffix, unity_file_number) outfileabs = unity_src.absolute_path(self.environment.get_source_dir(), self.environment.get_build_dir()) outfileabs_tmp = outfileabs + '.tmp' @@ -268,11 +314,23 @@ result.append(unity_src) return open(outfileabs_tmp, 'w') - # For each language, generate a unity source file and return the list + # For each language, generate unity source files and return the list for comp, srcs in compsrcs.items(): - with init_language_file(comp.get_default_suffix()) as ofile: - for src in srcs: - ofile.write('#include<%s>\n' % src) + files_in_current = unity_size + 1 + unity_file_number = 0 + ofile = None + for src in srcs: + if files_in_current >= unity_size: + if ofile: + ofile.close() + ofile = init_language_file(comp.get_default_suffix(), unity_file_number) + unity_file_number += 1 + files_in_current = 0 + ofile.write('#include<{}>\n'.format(src)) + files_in_current += 1 + if ofile: + ofile.close() + [mesonlib.replace_if_different(x, x + '.tmp') for x in abs_files] return result @@ -302,18 +360,11 @@ return obj_list def as_meson_exe_cmdline(self, tname, exe, cmd_args, workdir=None, - for_machine=MachineChoice.BUILD, extra_bdeps=None, capture=None, force_serialize=False): ''' Serialize an executable for running with a generator or a custom target ''' import hashlib - machine = self.environment.machines[for_machine] - if machine.is_windows() or machine.is_cygwin(): - extra_paths = self.determine_windows_extra_paths(exe, extra_bdeps or []) - else: - extra_paths = [] - if isinstance(exe, dependencies.ExternalProgram): exe_cmd = exe.get_command() exe_for_machine = exe.for_machine @@ -324,6 +375,12 @@ exe_cmd = [exe] exe_for_machine = MachineChoice.BUILD + machine = self.environment.machines[exe_for_machine] + if machine.is_windows() or machine.is_cygwin(): + extra_paths = self.determine_windows_extra_paths(exe, extra_bdeps or []) + else: + extra_paths = [] + is_cross_built = not self.environment.machines.matches_build_machine(exe_for_machine) if is_cross_built and self.environment.need_exe_wrapper(): exe_wrapper = self.environment.get_exe_wrapper() @@ -339,13 +396,30 @@ exe_cmd = ['mono'] + exe_cmd exe_wrapper = None - force_serialize = force_serialize or extra_paths or workdir or \ - exe_wrapper or any('\n' in c for c in cmd_args) + reasons = [] + if extra_paths: + reasons.append('to set PATH') + + if exe_wrapper: + reasons.append('to use exe_wrapper') + + if workdir: + reasons.append('to set workdir') + + if any('\n' in c for c in cmd_args): + reasons.append('because command contains newlines') + + force_serialize = force_serialize or bool(reasons) + + if capture: + reasons.append('to capture output') + if not force_serialize: if not capture: - return None - return (self.environment.get_build_command() + - ['--internal', 'exe', '--capture', capture, '--'] + exe_cmd + cmd_args) + return None, '' + return ((self.environment.get_build_command() + + ['--internal', 'exe', '--capture', capture, '--'] + exe_cmd + cmd_args), + ', 
'.join(reasons)) workdir = workdir or self.environment.get_build_dir() env = {} @@ -369,7 +443,8 @@ exe_wrapper, workdir, extra_paths, capture) pickle.dump(es, f) - return self.environment.get_build_command() + ['--internal', 'exe', '--unpickle', exe_data] + return (self.environment.get_build_command() + ['--internal', 'exe', '--unpickle', exe_data], + ', '.join(reasons)) def serialize_tests(self): test_data = os.path.join(self.environment.get_scratch_dir(), 'meson_test_setup.dat') @@ -398,6 +473,45 @@ return True return False + def get_external_rpath_dirs(self, target): + dirs = set() + args = [] + for lang in languages_using_ldflags: + try: + args.extend(self.environment.coredata.get_external_link_args(target.for_machine, lang)) + except Exception: + pass + # Match rpath formats: + # -Wl,-rpath= + # -Wl,-rpath, + rpath_regex = re.compile(r'-Wl,-rpath[=,]([^,]+)') + # Match solaris style compat runpath formats: + # -Wl,-R + # -Wl,-R, + runpath_regex = re.compile(r'-Wl,-R[,]?([^,]+)') + # Match symbols formats: + # -Wl,--just-symbols= + # -Wl,--just-symbols, + symbols_regex = re.compile(r'-Wl,--just-symbols[=,]([^,]+)') + for arg in args: + rpath_match = rpath_regex.match(arg) + if rpath_match: + for dir in rpath_match.group(1).split(':'): + dirs.add(dir) + runpath_match = runpath_regex.match(arg) + if runpath_match: + for dir in runpath_match.group(1).split(':'): + # The symbols arg is an rpath if the path is a directory + if Path(dir).is_dir(): + dirs.add(dir) + symbols_match = symbols_regex.match(arg) + if symbols_match: + for dir in symbols_match.group(1).split(':'): + # Prevent usage of --just-symbols to specify rpath + if Path(dir).is_dir(): + raise MesonException('Invalid arg for --just-symbols, {} is a directory.'.format(dir)) + return dirs + def rpaths_for_bundled_shared_libraries(self, target, exclude_system=True): paths = [] for dep in target.external_deps: @@ -412,6 +526,9 @@ if exclude_system and self._libdir_is_system(libdir, target.compilers, self.environment): # No point in adding system paths. continue + # Don't remove rpaths specified in LDFLAGS. + if libdir in self.get_external_rpath_dirs(target): + continue # Windows doesn't support rpaths, but we use this function to # emulate rpaths by setting PATH, so also accept DLLs here if os.path.splitext(libpath)[1] not in ['.dll', '.lib', '.so', '.dylib']: @@ -431,8 +548,15 @@ result = OrderedSet() result.add('meson-out') result.update(self.rpaths_for_bundled_shared_libraries(target)) + target.rpath_dirs_to_remove.update([d.encode('utf8') for d in result]) return tuple(result) + @staticmethod + def canonicalize_filename(fname): + for ch in ('/', '\\', ':'): + fname = fname.replace(ch, '_') + return fname + def object_filename_from_source(self, target, source): assert isinstance(source, mesonlib.File) build_dir = self.environment.get_build_dir() @@ -457,13 +581,13 @@ source = 'meson-generated_' + os.path.relpath(rel_src, targetdir) else: if os.path.isabs(rel_src): - # Not from the source directory; hopefully this doesn't conflict with user's source files. - source = os.path.basename(rel_src) + # Use the absolute path directly to avoid file name conflicts + source = rel_src else: source = os.path.relpath(os.path.join(build_dir, rel_src), os.path.join(self.environment.get_source_dir(), target.get_subdir())) machine = self.environment.machines[target.for_machine] - return source.replace('/', '_').replace('\\', '_') + '.' + machine.get_object_suffix() + return self.canonicalize_filename(source) + '.' 
+ machine.get_object_suffix() def determine_ext_objs(self, extobj, proj_dir_to_build_root): result = [] @@ -491,16 +615,18 @@ targetdir = self.get_target_private_dir(extobj.target) - # With unity builds, there's just one object that contains all the - # sources, and we only support extracting all the objects in this mode, - # so just return that. + # With unity builds, sources don't map directly to objects, + # we only support extracting all the objects in this mode, + # so just return all object files. if self.is_unity(extobj.target): compsrcs = classify_unity_sources(extobj.target.compilers.values(), sources) sources = [] - for comp in compsrcs.keys(): - osrc = self.get_unity_source_file(extobj.target, - comp.get_default_suffix()) - sources.append(osrc) + unity_size = self.get_option_for_target('unity_size', extobj.target) + for comp, srcs in compsrcs.items(): + for i in range(len(srcs) // unity_size + 1): + osrc = self.get_unity_source_file(extobj.target, + comp.get_default_suffix(), i) + sources.append(osrc) for osrc in sources: objname = self.object_filename_from_source(extobj.target, osrc) @@ -521,14 +647,14 @@ def create_msvc_pch_implementation(self, target, lang, pch_header): # We have to include the language in the file name, otherwise # pch.c and pch.cpp will both end up as pch.obj in VS backends. - impl_name = 'meson_pch-%s.%s' % (lang, lang) + impl_name = 'meson_pch-{}.{}'.format(lang, lang) pch_rel_to_build = os.path.join(self.get_target_private_dir(target), impl_name) # Make sure to prepend the build dir, since the working directory is # not defined. Otherwise, we might create the file in the wrong path. pch_file = os.path.join(self.build_dir, pch_rel_to_build) os.makedirs(os.path.dirname(pch_file), exist_ok=True) - content = '#include "%s"' % os.path.basename(pch_header) + content = '#include "{}"'.format(os.path.basename(pch_header)) pch_file_tmp = pch_file + '.tmp' with open(pch_file_tmp, 'w') as f: f.write(content) @@ -537,49 +663,33 @@ @staticmethod def escape_extra_args(compiler, args): - # No extra escaping/quoting needed when not running on Windows - if not mesonlib.is_windows(): - return args + # all backslashes in defines are doubly-escaped extra_args = [] - # Compiler-specific escaping is needed for -D args but not for any others - if isinstance(compiler, VisualStudioLikeCompiler): - # MSVC needs escaping when a -D argument ends in \ or \" - for arg in args: - if arg.startswith('-D') or arg.startswith('/D'): - # Without extra escaping for these two, the next character - # gets eaten - if arg.endswith('\\'): - arg += '\\' - elif arg.endswith('\\"'): - arg = arg[:-2] + '\\\\"' - extra_args.append(arg) - else: - # MinGW GCC needs all backslashes in defines to be doubly-escaped - # FIXME: Not sure about Cygwin or Clang - for arg in args: - if arg.startswith('-D') or arg.startswith('/D'): - arg = arg.replace('\\', '\\\\') - extra_args.append(arg) + for arg in args: + if arg.startswith('-D') or arg.startswith('/D'): + arg = arg.replace('\\', '\\\\') + extra_args.append(arg) + return extra_args def generate_basic_compiler_args(self, target, compiler, no_warn_args=False): # Create an empty commands list, and start adding arguments from # various sources in the order in which they must override each other # starting from hard-coded defaults followed by build options and so on. 
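# Illustrative sketch, assuming nothing beyond plain Python lists: the comment
# above describes how generate_basic_compiler_args() layers argument sources
# in override order.  Meson's real compiler_args() object also deduplicates
# and reorders flags; this hypothetical stand-in only concatenates, and every
# name in it is made up for illustration.
def layered_compiler_args(always_args, warn_args, buildtype_args,
                          project_args, global_args):
    commands = []
    commands += always_args      # e.g. ['-pipe']; effectively not overridable
    commands += warn_args        # derived from warning_level
    commands += buildtype_args   # optimization / debug flags
    commands += project_args     # add_project_arguments()
    commands += global_args      # add_global_arguments()
    return commands

# Later layers come last, so with compilers where the last flag wins (-O on
# GCC), a project-level '-O2' overrides the buildtype default '-O0':
print(layered_compiler_args(['-pipe'], ['-Wall'], ['-O0', '-g'], ['-O2'], []))
# -> ['-pipe', '-Wall', '-O0', '-g', '-O2']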
- commands = CompilerArgs(compiler) + commands = compiler.compiler_args() - copt_proxy = self.get_compiler_options_for_target(target) + copt_proxy = self.get_compiler_options_for_target(target)[compiler.language] # First, the trivial ones that are impossible to override. # # Add -nostdinc/-nostdinc++ if needed; can't be overridden - commands += self.get_cross_stdlib_args(target, compiler) + commands += self.get_no_stdlib_args(target, compiler) # Add things like /NOLOGO or -pipe; usually can't be overridden commands += compiler.get_always_args() # Only add warning-flags by default if the buildtype enables it, and if # we weren't explicitly asked to not emit warnings (for Vala, f.ex) if no_warn_args: commands += compiler.get_no_warn_args() - elif self.get_option_for_target('buildtype', target) != 'plain': + else: commands += compiler.get_warn_args(self.get_option_for_target('warning_level', target)) # Add -Werror if werror=true is set in the build options set on the # command-line or default_options inside project(). This only sets the @@ -594,6 +704,10 @@ commands += compiler.get_buildtype_args(self.get_option_for_target('buildtype', target)) commands += compiler.get_optimization_args(self.get_option_for_target('optimization', target)) commands += compiler.get_debug_args(self.get_option_for_target('debug', target)) + # MSVC debug builds have /ZI argument by default and /Zi is added with debug flag + # /ZI needs to be removed in that case to avoid cl's warning to that effect (D9025 : overriding '/ZI' with '/Zi') + if ('/ZI' in commands) and ('/Zi' in commands): + commands.remove('/Zi') # Add compile args added using add_project_arguments() commands += self.build.get_project_args(compiler, target.subproject, target.for_machine) # Add compile args added using add_global_arguments() @@ -609,7 +723,7 @@ # Set -fPIC for static libraries by default unless explicitly disabled if isinstance(target, build.StaticLibrary) and target.pic: commands += compiler.get_pic_args() - if isinstance(target, build.Executable) and target.pie: + elif isinstance(target, (build.StaticLibrary, build.Executable)) and target.pie: commands += compiler.get_pie_args() # Add compile args needed to find external dependencies. Link args are # added while generating the link command. @@ -648,7 +762,7 @@ args = [] for d in deps: if not (d.is_linkable_target()): - raise RuntimeError('Tried to link with a non-library target "%s".' % d.get_basename()) + raise RuntimeError('Tried to link with a non-library target "{}".'.format(d.get_basename())) arg = self.get_target_filename_for_linking(d) if not arg: continue @@ -689,6 +803,7 @@ for deppath in self.rpaths_for_bundled_shared_libraries(target, exclude_system=False): result.add(os.path.normpath(os.path.join(self.environment.get_build_dir(), deppath))) for bdep in extra_bdeps: + prospectives.add(bdep) prospectives.update(bdep.get_transitive_link_deps()) # Internal deps for ld in prospectives: @@ -707,7 +822,7 @@ def write_test_file(self, datafile): self.write_test_serialisation(self.build.get_tests(), datafile) - def create_test_serialisation(self, tests): + def create_test_serialisation(self, tests: T.List['Test']) -> T.List[TestSerialisation]: arr = [] for t in sorted(tests, key=lambda tst: -1 * tst.priority): exe = t.get_exe() @@ -721,7 +836,16 @@ # E.g. an external verifier or simulator program run on a generated executable. # Can always be run without a wrapper. 
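# Hedged sketch with hypothetical names (Machine and test_machine are not
# Meson API): summarises the decision the serialisation code that follows
# makes.  A test program built by this project runs on the machine it was
# compiled for; an external verifier defaults to the build machine, but any
# host-compiled executable passed as a command-line argument promotes the
# whole test to a host-machine test, which in a cross build may then need an
# exe wrapper.
from enum import Enum

class Machine(Enum):
    BUILD = 0
    HOST = 1

def test_machine(exe_machine, arg_machines):
    # exe_machine is None for external programs such as simulators.
    machine = exe_machine if exe_machine is not None else Machine.BUILD
    # Any host-compiled build target used as an argument promotes the test.
    if Machine.HOST in arg_machines:
        machine = Machine.HOST
    return machine

# An external simulator driving a host-compiled binary counts as a host test:
print(test_machine(None, [Machine.HOST]))   # Machine.HOST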
test_for_machine = MachineChoice.BUILD - is_cross = not self.environment.machines.matches_build_machine(test_for_machine) + + # we allow passing compiled executables to tests, which may be cross built. + # We need to consider these as well when considering whether the target is cross or not. + for a in t.cmd_args: + if isinstance(a, build.BuildTarget): + if a.for_machine is MachineChoice.HOST: + test_for_machine = MachineChoice.HOST + break + + is_cross = self.environment.is_cross_build(test_for_machine) if is_cross and self.environment.need_exe_wrapper(): exe_wrapper = self.environment.get_exe_wrapper() else: @@ -734,10 +858,14 @@ extra_paths = self.determine_windows_extra_paths(exe, extra_bdeps) else: extra_paths = [] + cmd_args = [] - for a in t.cmd_args: - if hasattr(a, 'held_object'): - a = a.held_object + depends = set(t.depends) + if isinstance(exe, build.Target): + depends.add(exe) + for a in unholder(t.cmd_args): + if isinstance(a, build.Target): + depends.add(a) if isinstance(a, build.BuildTarget): extra_paths += self.determine_windows_extra_paths(a, []) if isinstance(a, mesonlib.File): @@ -745,6 +873,11 @@ cmd_args.append(a) elif isinstance(a, str): cmd_args.append(a) + elif isinstance(a, build.Executable): + p = self.construct_target_rel_path(a, t.workdir) + if p == a.get_filename(): + p = './' + p + cmd_args.append(p) elif isinstance(a, build.Target): cmd_args.append(self.construct_target_rel_path(a, t.workdir)) else: @@ -753,11 +886,14 @@ exe_wrapper, self.environment.need_exe_wrapper(), t.is_parallel, cmd_args, t.env, t.should_fail, t.timeout, t.workdir, - extra_paths, t.protocol, t.priority) + extra_paths, t.protocol, t.priority, + isinstance(exe, build.Executable), + [x.get_id() for x in depends], + self.environment.coredata.version) arr.append(ts) return arr - def write_test_serialisation(self, tests, datafile): + def write_test_serialisation(self, tests: T.List['Test'], datafile: str): pickle.dump(self.create_test_serialisation(tests), datafile) def construct_target_rel_path(self, a, workdir): @@ -781,23 +917,34 @@ def get_regen_filelist(self): '''List of all files whose alteration means that the build definition needs to be regenerated.''' - deps = [os.path.join(self.build_to_src, df) + deps = [str(Path(self.build_to_src) / df) for df in self.interpreter.get_build_def_files()] if self.environment.is_cross_build(): deps.extend(self.environment.coredata.cross_files) deps.extend(self.environment.coredata.config_files) deps.append('meson-private/coredata.dat') - if os.path.exists(os.path.join(self.environment.get_source_dir(), 'meson_options.txt')): - deps.append(os.path.join(self.build_to_src, 'meson_options.txt')) - for sp in self.build.subprojects.keys(): - fname = os.path.join(self.environment.get_source_dir(), sp, 'meson_options.txt') - if os.path.isfile(fname): - deps.append(os.path.join(self.build_to_src, sp, 'meson_options.txt')) + self.check_clock_skew(deps) return deps - def exe_object_to_cmd_array(self, exe): - if isinstance(exe, build.BuildTarget): - if exe.for_machine is not MachineChoice.BUILD: + def check_clock_skew(self, file_list): + # If a file that leads to reconfiguration has a time + # stamp in the future, it will trigger an eternal reconfigure + # loop. + import time + now = time.time() + for f in file_list: + absf = os.path.join(self.environment.get_build_dir(), f) + ftime = os.path.getmtime(absf) + delta = ftime - now + # On Windows disk time stamps sometimes point + # to the future by a minuscule amount, less than + # 0.001 seconds. 
I don't know why. + if delta > 0.001: + raise MesonException('Clock skew detected. File {} has a time stamp {:.4f}s in the future.'.format(absf, delta)) + + def build_target_to_cmd_array(self, bt, check_cross): + if isinstance(bt, build.BuildTarget): + if check_cross and isinstance(bt, build.Executable) and bt.for_machine is not MachineChoice.BUILD: if (self.environment.is_cross_build() and self.environment.exe_wrapper is None and self.environment.need_exe_wrapper()): @@ -805,12 +952,12 @@ Cannot use target {} as a generator because it is built for the host machine and no exe wrapper is defined or needs_exe_wrapper is true. You might want to set `native: true` instead to build it for - the build machine.'''.format(exe.name)) + the build machine.'''.format(bt.name)) raise MesonException(s) - exe_arr = [os.path.join(self.environment.get_build_dir(), self.get_target_filename(exe))] + arr = [os.path.join(self.environment.get_build_dir(), self.get_target_filename(bt))] else: - exe_arr = exe.get_command() - return exe_arr + arr = bt.get_command() + return arr def replace_extra_args(self, args, genlist): final_args = [] @@ -828,7 +975,7 @@ m = regex.search(arg) while m is not None: index = int(m.group(1)) - src = '@OUTPUT%d@' % index + src = '@OUTPUT{}@'.format(index) arg = arg.replace(src, os.path.join(private_dir, output_list[index])) m = regex.search(arg) newargs.append(arg) @@ -844,14 +991,10 @@ # also be built by default. XXX: Sometime in the future these should be # built only before running tests. for t in self.build.get_tests(): - exe = t.exe - if hasattr(exe, 'held_object'): - exe = exe.held_object + exe = unholder(t.exe) if isinstance(exe, (build.CustomTarget, build.BuildTarget)): result[exe.get_id()] = exe - for arg in t.cmd_args: - if hasattr(arg, 'held_object'): - arg = arg.held_object + for arg in unholder(t.cmd_args): if not isinstance(arg, (build.CustomTarget, build.BuildTarget)): continue result[arg.get_id()] = arg @@ -891,9 +1034,7 @@ Returns the path to them relative to the build root directory. 
''' srcs = [] - for i in target.get_sources(): - if hasattr(i, 'held_object'): - i = i.held_object + for i in unholder(target.get_sources()): if isinstance(i, str): fname = [os.path.join(self.build_to_src, target.subdir, i)] elif isinstance(i, build.BuildTarget): @@ -943,9 +1084,11 @@ inputs = self.get_custom_target_sources(target) # Evaluate the command list cmd = [] + index = -1 for i in target.command: - if isinstance(i, build.Executable): - cmd += self.exe_object_to_cmd_array(i) + index += 1 + if isinstance(i, build.BuildTarget): + cmd += self.build_target_to_cmd_array(i, (index == 0)) continue elif isinstance(i, build.CustomTarget): # GIR scanner will attempt to execute this binary but @@ -961,35 +1104,36 @@ elif not isinstance(i, str): err_msg = 'Argument {0} is of unknown type {1}' raise RuntimeError(err_msg.format(str(i), str(type(i)))) - elif '@SOURCE_ROOT@' in i: - i = i.replace('@SOURCE_ROOT@', source_root) - elif '@BUILD_ROOT@' in i: - i = i.replace('@BUILD_ROOT@', build_root) - elif '@DEPFILE@' in i: - if target.depfile is None: - msg = 'Custom target {!r} has @DEPFILE@ but no depfile ' \ - 'keyword argument.'.format(target.name) - raise MesonException(msg) - dfilename = os.path.join(outdir, target.depfile) - i = i.replace('@DEPFILE@', dfilename) - elif '@PRIVATE_DIR@' in i: - if target.absolute_paths: - pdir = self.get_target_private_dir_abs(target) - else: - pdir = self.get_target_private_dir(target) - i = i.replace('@PRIVATE_DIR@', pdir) - elif '@PRIVATE_OUTDIR_' in i: - match = re.search(r'@PRIVATE_OUTDIR_(ABS_)?([^/\s*]*)@', i) - if not match: - msg = 'Custom target {!r} has an invalid argument {!r}' \ - ''.format(target.name, i) - raise MesonException(msg) - source = match.group(0) - if match.group(1) is None and not target.absolute_paths: - lead_dir = '' - else: - lead_dir = self.environment.get_build_dir() - i = i.replace(source, os.path.join(lead_dir, outdir)) + else: + if '@SOURCE_ROOT@' in i: + i = i.replace('@SOURCE_ROOT@', source_root) + if '@BUILD_ROOT@' in i: + i = i.replace('@BUILD_ROOT@', build_root) + if '@DEPFILE@' in i: + if target.depfile is None: + msg = 'Custom target {!r} has @DEPFILE@ but no depfile ' \ + 'keyword argument.'.format(target.name) + raise MesonException(msg) + dfilename = os.path.join(outdir, target.depfile) + i = i.replace('@DEPFILE@', dfilename) + if '@PRIVATE_DIR@' in i: + if target.absolute_paths: + pdir = self.get_target_private_dir_abs(target) + else: + pdir = self.get_target_private_dir(target) + i = i.replace('@PRIVATE_DIR@', pdir) + if '@PRIVATE_OUTDIR_' in i: + match = re.search(r'@PRIVATE_OUTDIR_(ABS_)?([^/\s*]*)@', i) + if not match: + msg = 'Custom target {!r} has an invalid argument {!r}' \ + ''.format(target.name, i) + raise MesonException(msg) + source = match.group(0) + if match.group(1) is None and not target.absolute_paths: + lead_dir = '' + else: + lead_dir = self.environment.get_build_dir() + i = i.replace(source, os.path.join(lead_dir, outdir)) cmd.append(i) # Substitute the rest of the template strings values = mesonlib.get_filenames_templates_dict(inputs, outputs) @@ -1015,7 +1159,7 @@ cmd = [i.replace('\\', '/') for i in cmd] return inputs, outputs, cmd - def run_postconf_scripts(self): + def run_postconf_scripts(self) -> None: env = {'MESON_SOURCE_ROOT': self.environment.get_source_dir(), 'MESON_BUILD_ROOT': self.environment.get_build_dir(), 'MESONINTROSPECT': ' '.join([shlex.quote(x) for x in self.environment.get_build_command() + ['introspect']]), @@ -1027,8 +1171,8 @@ cmd = s['exe'] + s['args'] 
subprocess.check_call(cmd, env=child_env) - def create_install_data(self): - strip_bin = self.environment.binaries.host.lookup_entry('strip') + def create_install_data(self) -> InstallData: + strip_bin = self.environment.lookup_binary_entry(MachineChoice.HOST, 'strip') if strip_bin is None: if self.environment.is_cross_build(): mlog.warning('Cross file does not specify strip binary, result will not be stripped.') @@ -1040,7 +1184,8 @@ self.environment.get_prefix(), strip_bin, self.environment.coredata.get_builtin_option('install_umask'), - self.environment.get_build_command() + ['introspect']) + self.environment.get_build_command() + ['introspect'], + self.environment.coredata.version) self.generate_depmf_install(d) self.generate_target_install(d) self.generate_header_install(d) @@ -1090,6 +1235,7 @@ mappings = t.get_link_deps_mapping(d.prefix, self.environment) i = TargetInstallData(self.get_target_filename(t), outdirs[0], t.get_aliases(), should_strip, mappings, + t.rpath_dirs_to_remove, t.install_rpath, install_mode) d.targets.append(i) @@ -1107,14 +1253,14 @@ implib_install_dir = self.environment.get_import_lib_dir() # Install the import library; may not exist for shared modules i = TargetInstallData(self.get_target_filename_for_linking(t), - implib_install_dir, {}, False, {}, '', install_mode, + implib_install_dir, {}, False, {}, set(), '', install_mode, optional=isinstance(t, build.SharedModule)) d.targets.append(i) if not should_strip and t.get_debug_filename(): debug_file = os.path.join(self.get_target_dir(t), t.get_debug_filename()) i = TargetInstallData(debug_file, outdirs[0], - {}, False, {}, '', + {}, False, {}, set(), '', install_mode, optional=True) d.targets.append(i) # Install secondary outputs. Only used for Vala right now. @@ -1124,7 +1270,7 @@ if outdir is False: continue f = os.path.join(self.get_target_dir(t), output) - i = TargetInstallData(f, outdir, {}, False, {}, None, install_mode) + i = TargetInstallData(f, outdir, {}, False, {}, set(), None, install_mode) d.targets.append(i) elif isinstance(t, build.CustomTarget): # If only one install_dir is specified, assume that all @@ -1137,7 +1283,7 @@ if num_outdirs == 1 and num_out > 1: for output in t.get_outputs(): f = os.path.join(self.get_target_dir(t), output) - i = TargetInstallData(f, outdirs[0], {}, False, {}, None, install_mode, + i = TargetInstallData(f, outdirs[0], {}, False, {}, set(), None, install_mode, optional=not t.build_by_default) d.targets.append(i) else: @@ -1146,7 +1292,7 @@ if outdir is False: continue f = os.path.join(self.get_target_dir(t), output) - i = TargetInstallData(f, outdir, {}, False, {}, None, install_mode, + i = TargetInstallData(f, outdir, {}, False, {}, set(), None, install_mode, optional=not t.build_by_default) d.targets.append(i) @@ -1214,7 +1360,11 @@ def generate_subdir_install(self, d): for sd in self.build.get_install_subdirs(): - src_dir = os.path.join(self.environment.get_source_dir(), + if sd.from_source_dir: + from_dir = self.environment.get_source_dir() + else: + from_dir = self.environment.get_build_dir() + src_dir = os.path.join(from_dir, sd.source_subdir, sd.installable_subdir).rstrip('/') dst_dir = os.path.join(self.environment.get_prefix(), @@ -1224,7 +1374,7 @@ d.install_subdirs.append([src_dir, dst_dir, sd.install_mode, sd.exclude]) - def get_introspection_data(self, target_id, target): + def get_introspection_data(self, target_id: str, target: build.Target) -> T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], 
bool]]]]]]]: ''' Returns a list of source dicts with the following format for a given target: [ @@ -1240,7 +1390,7 @@ This is a limited fallback / reference implementation. The backend should override this method. ''' if isinstance(target, (build.CustomTarget, build.BuildTarget)): - source_list_raw = target.sources + target.extra_files + source_list_raw = target.sources source_list = [] for j in source_list_raw: if isinstance(j, mesonlib.File): diff -Nru meson-0.53.2/mesonbuild/backend/ninjabackend.py meson-0.57.0+really0.56.2/mesonbuild/backend/ninjabackend.py --- meson-0.53.2/mesonbuild/backend/ninjabackend.py 2020-01-23 12:51:19.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/backend/ninjabackend.py 2021-01-06 10:39:48.000000000 +0000 @@ -15,10 +15,12 @@ import os import re import pickle +import shlex import subprocess from collections import OrderedDict +from enum import Enum, unique import itertools -from pathlib import PurePath, Path +from .._pathlib import PurePath, Path from functools import lru_cache from . import backends @@ -28,48 +30,128 @@ from .. import mlog from .. import dependencies from .. import compilers -from ..compilers import (Compiler, CompilerArgs, CCompiler, FortranCompiler, - PGICCompiler, VisualStudioLikeCompiler) -from ..linkers import ArLinker +from ..arglist import CompilerArgs +from ..compilers import ( + Compiler, CCompiler, + DmdDCompiler, + FortranCompiler, PGICCompiler, + VisualStudioCsCompiler, + VisualStudioLikeCompiler, +) +from ..linkers import ArLinker, VisualStudioLinker from ..mesonlib import ( - File, LibType, MachineChoice, MesonException, OrderedSet, PerMachine, ProgressBar, quote_arg + File, LibType, MachineChoice, MesonException, OrderedSet, PerMachine, + ProgressBar, quote_arg, unholder, ) from ..mesonlib import get_compiler_for_source, has_path_sep from .backends import CleanTrees from ..build import InvalidArguments +from ..interpreter import Interpreter FORTRAN_INCLUDE_PAT = r"^\s*#?include\s*['\"](\w+\.\w+)['\"]" FORTRAN_MODULE_PAT = r"^\s*\bmodule\b\s+(\w+)\s*(?:!+.*)*$" FORTRAN_SUBMOD_PAT = r"^\s*\bsubmodule\b\s*\((\w+:?\w+)\)\s*(\w+)" FORTRAN_USE_PAT = r"^\s*use,?\s*(?:non_intrinsic)?\s*(?:::)?\s*(\w+)" +def cmd_quote(s): + # see: https://docs.microsoft.com/en-us/windows/desktop/api/shellapi/nf-shellapi-commandlinetoargvw#remarks + + # backslash escape any existing double quotes + # any existing backslashes preceding a quote are doubled + s = re.sub(r'(\\*)"', lambda m: '\\' * (len(m.group(1)) * 2 + 1) + '"', s) + # any terminal backslashes likewise need doubling + s = re.sub(r'(\\*)$', lambda m: '\\' * (len(m.group(1)) * 2), s) + # and double quote + s = '"{}"'.format(s) + + return s + +def gcc_rsp_quote(s): + # see: the function buildargv() in libiberty + # + # this differs from sh-quoting in that a backslash *always* escapes the + # following character, even inside single quotes. + + s = s.replace('\\', '\\\\') + + return shlex.quote(s) + +# How ninja executes command lines differs between Unix and Windows +# (see https://ninja-build.org/manual.html#ref_rule_command) if mesonlib.is_windows(): - # FIXME: can't use quote_arg on Windows just yet; there are a number of existing workarounds - # throughout the codebase that cumulatively make the current code work (see, e.g. 
Backend.escape_extra_args - # and NinjaBuildElement.write below) and need to be properly untangled before attempting this - quote_func = lambda s: '"{}"'.format(s) - execute_wrapper = ['cmd', '/c'] + quote_func = cmd_quote + execute_wrapper = ['cmd', '/c'] # unused rmfile_prefix = ['del', '/f', '/s', '/q', '{}', '&&'] else: quote_func = quote_arg execute_wrapper = [] rmfile_prefix = ['rm', '-f', '{}', '&&'] +def get_rsp_threshold(): + '''Return a conservative estimate of the commandline size in bytes + above which a response file should be used. May be overridden for + debugging by setting environment variable MESON_RSP_THRESHOLD.''' + + if mesonlib.is_windows(): + # Usually 32k, but some projects might use cmd.exe, + # and that has a limit of 8k. + limit = 8192 + else: + # On Linux, ninja always passes the commandline as a single + # big string to /bin/sh, and the kernel limits the size of a + # single argument; see MAX_ARG_STRLEN + limit = 131072 + # Be conservative + limit = limit / 2 + return int(os.environ.get('MESON_RSP_THRESHOLD', limit)) + +# a conservative estimate of the command-line length limit +rsp_threshold = get_rsp_threshold() + +# ninja variables whose value should remain unquoted. The value of these ninja +# variables (or variables we use them in) is interpreted directly by ninja +# (e.g. the value of the depfile variable is a pathname that ninja will read +# from, etc.), so it must not be shell quoted. +raw_names = {'DEPFILE_UNQUOTED', 'DESC', 'pool', 'description', 'targetdep'} + +NINJA_QUOTE_BUILD_PAT = re.compile(r"[$ :\n]") +NINJA_QUOTE_VAR_PAT = re.compile(r"[$ \n]") + def ninja_quote(text, is_build_line=False): if is_build_line: - qcs = ('$', ' ', ':') + quote_re = NINJA_QUOTE_BUILD_PAT else: - qcs = ('$', ' ') - for char in qcs: - text = text.replace(char, '$' + char) + quote_re = NINJA_QUOTE_VAR_PAT + # Fast path for when no quoting is necessary + if not quote_re.search(text): + return text if '\n' in text: errmsg = '''Ninja does not support newlines in rules. 
The content was: -%s +{} -Please report this error with a test case to the Meson bug tracker.''' % text +Please report this error with a test case to the Meson bug tracker.'''.format(text) raise MesonException(errmsg) - return text + return quote_re.sub(r'$\g<0>', text) + +@unique +class Quoting(Enum): + both = 0 + notShell = 1 + notNinja = 2 + none = 3 + +class NinjaCommandArg: + def __init__(self, s, quoting = Quoting.both): + self.s = s + self.quoting = quoting + + def __str__(self): + return self.s + + @staticmethod + def list(l, q): + return [NinjaCommandArg(i, q) for i in l] class NinjaComment: def __init__(self, comment): @@ -84,48 +166,129 @@ class NinjaRule: def __init__(self, rule, command, args, description, - rspable = False, deps = None, depfile = None, extra = None): + rspable = False, deps = None, depfile = None, extra = None, + rspfile_quote_style = 'gcc'): + + def strToCommandArg(c): + if isinstance(c, NinjaCommandArg): + return c + + # deal with common cases here, so we don't have to explicitly + # annotate the required quoting everywhere + if c == '&&': + # shell constructs shouldn't be shell quoted + return NinjaCommandArg(c, Quoting.notShell) + if c.startswith('$'): + var = re.search(r'\$\{?(\w*)\}?', c).group(1) + if var not in raw_names: + # ninja variables shouldn't be ninja quoted, and their value + # is already shell quoted + return NinjaCommandArg(c, Quoting.none) + else: + # shell quote the use of ninja variables whose value must + # not be shell quoted (as it also used by ninja) + return NinjaCommandArg(c, Quoting.notNinja) + + return NinjaCommandArg(c) + self.name = rule - self.command = command # includes args which never go into a rspfile - self.args = args # args which will go into a rspfile, if used + self.command = list(map(strToCommandArg, command)) # includes args which never go into a rspfile + self.args = list(map(strToCommandArg, args)) # args which will go into a rspfile, if used self.description = description self.deps = deps # depstyle 'gcc' or 'msvc' self.depfile = depfile self.extra = extra self.rspable = rspable # if a rspfile can be used self.refcount = 0 + self.rsprefcount = 0 + self.rspfile_quote_style = rspfile_quote_style # rspfile quoting style is 'gcc' or 'cl' + + if self.depfile == '$DEPFILE': + self.depfile += '_UNQUOTED' + + @staticmethod + def _quoter(x, qf = quote_func): + if isinstance(x, NinjaCommandArg): + if x.quoting == Quoting.none: + return x.s + elif x.quoting == Quoting.notNinja: + return qf(x.s) + elif x.quoting == Quoting.notShell: + return ninja_quote(x.s) + # fallthrough + return ninja_quote(qf(str(x))) def write(self, outfile): - if not self.refcount: - return + if self.rspfile_quote_style == 'cl': + rspfile_quote_func = cmd_quote + else: + rspfile_quote_func = gcc_rsp_quote - outfile.write('rule %s\n' % self.name) - if self.rspable: - outfile.write(' command = %s @$out.rsp\n' % ' '.join(self.command)) - outfile.write(' rspfile = $out.rsp\n') - outfile.write(' rspfile_content = %s\n' % ' '.join(self.args)) - else: - outfile.write(' command = %s\n' % ' '.join(self.command + self.args)) - if self.deps: - outfile.write(' deps = %s\n' % self.deps) - if self.depfile: - outfile.write(' depfile = %s\n' % self.depfile) - outfile.write(' description = %s\n' % self.description) - if self.extra: - for l in self.extra.split('\n'): - outfile.write(' ') - outfile.write(l) - outfile.write('\n') - outfile.write('\n') + def rule_iter(): + if self.refcount: + yield '' + if self.rsprefcount: + yield '_RSP' + + for rsp in rule_iter(): + 
outfile.write('rule {}{}\n'.format(self.name, rsp)) + if rsp == '_RSP': + outfile.write(' command = {} @$out.rsp\n'.format(' '.join([self._quoter(x) for x in self.command]))) + outfile.write(' rspfile = $out.rsp\n') + outfile.write(' rspfile_content = {}\n'.format(' '.join([self._quoter(x, rspfile_quote_func) for x in self.args]))) + else: + outfile.write(' command = {}\n'.format(' '.join([self._quoter(x) for x in (self.command + self.args)]))) + if self.deps: + outfile.write(' deps = {}\n'.format(self.deps)) + if self.depfile: + outfile.write(' depfile = {}\n'.format(self.depfile)) + outfile.write(' description = {}\n'.format(self.description)) + if self.extra: + for l in self.extra.split('\n'): + outfile.write(' ') + outfile.write(l) + outfile.write('\n') + outfile.write('\n') + + def length_estimate(self, infiles, outfiles, elems): + # determine variables + # this order of actions only approximates ninja's scoping rules, as + # documented at: https://ninja-build.org/manual.html#ref_scope + ninja_vars = {} + for e in elems: + (name, value) = e + ninja_vars[name] = value + ninja_vars['deps'] = self.deps + ninja_vars['depfile'] = self.depfile + ninja_vars['in'] = infiles + ninja_vars['out'] = outfiles + + # expand variables in command + command = ' '.join([self._quoter(x) for x in self.command + self.args]) + estimate = len(command) + for m in re.finditer(r'(\${\w*}|\$\w*)?[^$]*', command): + if m.start(1) != -1: + estimate -= m.end(1) - m.start(1) + 1 + chunk = m.group(1) + if chunk[1] == '{': + chunk = chunk[2:-1] + else: + chunk = chunk[1:] + chunk = ninja_vars.get(chunk, []) # undefined ninja variables are empty + estimate += len(' '.join(chunk)) + + # determine command length + return estimate class NinjaBuildElement: - def __init__(self, all_outputs, outfilenames, rule, infilenames): + def __init__(self, all_outputs, outfilenames, rulename, infilenames, implicit_outs=None): + self.implicit_outfilenames = implicit_outs or [] if isinstance(outfilenames, str): self.outfilenames = [outfilenames] else: self.outfilenames = outfilenames - assert(isinstance(rule, str)) - self.rule = rule + assert(isinstance(rulename, str)) + self.rulename = rulename if isinstance(infilenames, str): self.infilenames = [infilenames] else: @@ -148,15 +311,53 @@ self.orderdeps.add(dep) def add_item(self, name, elems): + # Always convert from GCC-style argument naming to the naming used by the + # current compiler. Also filter system include paths, deduplicate, etc. 
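# Illustrative sketch only, not Meson's CompilerArgs.to_native(): the comment
# above refers to converting GCC-style flags into the spelling the current
# compiler expects and deduplicating the result.  The mapping table here is a
# tiny hypothetical subset chosen purely for illustration.
_GCC_TO_MSVC = {'-O2': '/O2', '-g': '/Zi', '-Wall': '/W3'}

def to_native_msvc(args):
    out = []
    seen = set()
    for arg in args:
        arg = _GCC_TO_MSVC.get(arg, arg)
        if arg not in seen:        # naive order-preserving dedup
            seen.add(arg)
            out.append(arg)
    return out

print(to_native_msvc(['-O2', '-g', '-O2', '-Ifoo']))
# -> ['/O2', '/Zi', '-Ifoo']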
+ if isinstance(elems, CompilerArgs): + elems = elems.to_native() if isinstance(elems, str): elems = [elems] self.elems.append((name, elems)) + if name == 'DEPFILE': + self.elems.append((name + '_UNQUOTED', elems)) + + def _should_use_rspfile(self): + # 'phony' is a rule built-in to ninja + if self.rulename == 'phony': + return False + + if not self.rule.rspable: + return False + + infilenames = ' '.join([ninja_quote(i, True) for i in self.infilenames]) + outfilenames = ' '.join([ninja_quote(i, True) for i in self.outfilenames]) + + return self.rule.length_estimate(infilenames, + outfilenames, + self.elems) >= rsp_threshold + + def count_rule_references(self): + if self.rulename != 'phony': + if self._should_use_rspfile(): + self.rule.rsprefcount += 1 + else: + self.rule.refcount += 1 + def write(self, outfile): self.check_outputs() - line = 'build %s: %s %s' % (' '.join([ninja_quote(i, True) for i in self.outfilenames]), - self.rule, - ' '.join([ninja_quote(i, True) for i in self.infilenames])) + ins = ' '.join([ninja_quote(i, True) for i in self.infilenames]) + outs = ' '.join([ninja_quote(i, True) for i in self.outfilenames]) + implicit_outs = ' '.join([ninja_quote(i, True) for i in self.implicit_outfilenames]) + if implicit_outs: + implicit_outs = ' | ' + implicit_outs + use_rspfile = self._should_use_rspfile() + if use_rspfile: + rulename = self.rulename + '_RSP' + mlog.debug("Command line for building %s is long, using a response file" % self.outfilenames) + else: + rulename = self.rulename + line = 'build {}{}: {} {}'.format(outs, implicit_outs, rulename, ins) if len(self.deps) > 0: line += ' | ' + ' '.join([ninja_quote(x, True) for x in self.deps]) if len(self.orderdeps) > 0: @@ -170,26 +371,24 @@ line = line.replace('\\', '/') outfile.write(line) - # ninja variables whose value should remain unquoted. The value of these - # ninja variables (or variables we use them in) is interpreted directly - # by ninja (e.g. the value of the depfile variable is a pathname that - # ninja will read from, etc.), so it must not be shell quoted. - raw_names = {'DEPFILE', 'DESC', 'pool', 'description', 'targetdep'} + if use_rspfile: + if self.rule.rspfile_quote_style == 'cl': + qf = cmd_quote + else: + qf = gcc_rsp_quote + else: + qf = quote_func for e in self.elems: (name, elems) = e should_quote = name not in raw_names - line = ' %s = ' % name + line = ' {} = '.format(name) newelems = [] for i in elems: if not should_quote or i == '&&': # Hackety hack hack - quoter = ninja_quote + newelems.append(ninja_quote(i)) else: - quoter = lambda x: ninja_quote(quote_func(x)) - i = i.replace('\\', '\\\\') - if quote_func('') == '""': - i = i.replace('"', '\\"') - newelems.append(quoter(i)) + newelems.append(ninja_quote(qf(i))) line += ' '.join(newelems) line += '\n' outfile.write(line) @@ -198,13 +397,13 @@ def check_outputs(self): for n in self.outfilenames: if n in self.all_outputs: - raise MesonException('Multiple producers for Ninja target "%s". Please rename your targets.' % n) + raise MesonException('Multiple producers for Ninja target "{}". 
Please rename your targets.'.format(n)) self.all_outputs[n] = True class NinjaBackend(backends.Backend): - def __init__(self, build): - super().__init__(build) + def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional[Interpreter]): + super().__init__(build, interpreter) self.name = 'ninja' self.ninja_filename = 'build.ninja' self.fortran_deps = {} @@ -264,7 +463,14 @@ # 'Note: including file: d:\MyDir\include\stdio.h', however # different locales have different messages with a different # number of colons. Match up to the the drive name 'd:\'. - matchre = re.compile(rb"^(.*\s)[a-zA-Z]:\\.*stdio.h$") + # When used in cross compilation, the path separator is a + # forward slash rather than a backslash so handle both; i.e. + # the path is /MyDir/include/stdio.h. + # With certain cross compilation wrappings of MSVC, the paths + # use backslashes, but without the leading drive name, so + # allow the path to start with any path separator, i.e. + # \MyDir\include\stdio.h. + matchre = re.compile(rb"^(.*\s)([a-zA-Z]:\\|[\\\/]).*stdio.h$") def detect_prefix(out): for line in re.split(rb'\r?\n', out): @@ -283,27 +489,25 @@ raise MesonException('Could not determine vs dep dependency prefix string.') - def generate(self, interp): - self.interpreter = interp + def generate(self): ninja = environment.detect_ninja_command_and_version(log=True) if ninja is None: - raise MesonException('Could not detect Ninja v1.5 or newer') + raise MesonException('Could not detect Ninja v1.7 or newer') (self.ninja_command, self.ninja_version) = ninja outfilename = os.path.join(self.environment.get_build_dir(), self.ninja_filename) tempfilename = outfilename + '~' with open(tempfilename, 'w', encoding='utf-8') as outfile: - outfile.write('# This is the build file for project "%s"\n' % - self.build.get_project()) + outfile.write('# This is the build file for project "{}"\n'.format(self.build.get_project())) outfile.write('# It is autogenerated by the Meson build system.\n') outfile.write('# Do not edit by hand.\n\n') - outfile.write('ninja_required_version = 1.5.1\n\n') + outfile.write('ninja_required_version = 1.7.1\n\n') num_pools = self.environment.coredata.backend_options['backend_max_links'].value if num_pools > 0: outfile.write('''pool link_pool - depth = %d + depth = {} -''' % num_pools) +'''.format(num_pools)) with self.detect_vs_dep_prefix(tempfilename) as outfile: self.generate_rules() @@ -340,12 +544,16 @@ # http://clang.llvm.org/docs/JSONCompilationDatabase.html def generate_compdb(self): rules = [] + # TODO: Rather than an explicit list here, rules could be marked in the + # rule store as being wanted in compdb for for_machine in MachineChoice: for lang in self.environment.coredata.compilers[for_machine]: - rules += [self.get_compiler_rule_name(lang, for_machine)] - rules += [self.get_pch_rule_name(lang, for_machine)] + rules += ["%s%s" % (rule, ext) for rule in [self.get_compiler_rule_name(lang, for_machine)] + for ext in ['', '_RSP']] + rules += ["%s%s" % (rule, ext) for rule in [self.get_pch_rule_name(lang, for_machine)] + for ext in ['', '_RSP']] compdb_options = ['-x'] if mesonlib.version_compare(self.ninja_version, '>=1.9') else [] - ninja_compdb = [self.ninja_command, '-t', 'compdb'] + compdb_options + rules + ninja_compdb = self.ninja_command + ['-t', 'compdb'] + compdb_options + rules builddir = self.environment.get_build_dir() try: jsondb = subprocess.check_output(ninja_compdb, cwd=builddir) @@ -564,7 +772,7 @@ generated_source_files.append(raw_src) elif 
self.environment.is_object(rel_src): obj_list.append(rel_src) - elif self.environment.is_library(rel_src): + elif self.environment.is_library(rel_src) or modules.is_module_library(rel_src): pass else: # Assume anything not specifically a source file is a header. This is because @@ -579,7 +787,7 @@ o = self.generate_llvm_ir_compile(target, src) else: o = self.generate_single_compile(target, src, True, - header_deps=header_deps) + order_deps=header_deps) obj_list.append(o) use_pch = self.environment.coredata.base_options.get('b_pch', False) @@ -642,9 +850,7 @@ self.generate_target(t) def custom_target_generator_inputs(self, target): - for s in target.sources: - if hasattr(s, 'held_object'): - s = s.held_object + for s in unholder(target.sources): if isinstance(s, build.GeneratedList): self.generate_genlist_for_target(s, target) @@ -664,7 +870,7 @@ (srcs, ofilenames, cmd) = self.eval_custom_target_command(target) deps = self.unwrap_dep_list(target) deps += self.get_custom_target_depend_files(target) - desc = 'Generating {0} with a {1} command.' + desc = 'Generating {0} with a custom command{1}' if target.build_always_stale: deps.append('PHONY') if target.depfile is None: @@ -678,15 +884,14 @@ for output in d.get_outputs(): elem.add_dep(os.path.join(self.get_target_dir(d), output)) - meson_exe_cmd = self.as_meson_exe_cmdline(target.name, target.command[0], cmd[1:], - for_machine=target.for_machine, - extra_bdeps=target.get_transitive_build_target_deps(), - capture=ofilenames[0] if target.capture else None) + meson_exe_cmd, reason = self.as_meson_exe_cmdline(target.name, target.command[0], cmd[1:], + extra_bdeps=target.get_transitive_build_target_deps(), + capture=ofilenames[0] if target.capture else None) if meson_exe_cmd: cmd = meson_exe_cmd - cmd_type = 'meson_exe.py custom' + cmd_type = ' (wrapped by meson {})'.format(reason) else: - cmd_type = 'custom' + cmd_type = '' if target.depfile is not None: depfile = target.get_dep_outname(elem.infilenames) rel_dfile = os.path.join(self.get_target_dir(target), depfile) @@ -760,7 +965,7 @@ target_name = 'meson-{}'.format(self.build_run_target_name(target)) elem = NinjaBuildElement(self.all_outputs, target_name, 'CUSTOM_COMMAND', []) elem.add_item('COMMAND', cmd) - elem.add_item('description', 'Running external command %s.' 
% target.name) + elem.add_item('description', 'Running external command {}'.format(target.name)) elem.add_item('pool', 'console') # Alias that runs the target defined above with the name the user specified self.create_target_alias(target_name) @@ -773,6 +978,15 @@ self.processed_targets[target.get_id()] = True def generate_coverage_command(self, elem, outputs): + targets = self.build.get_targets().values() + use_llvm_cov = False + for target in targets: + if not hasattr(target, 'compilers'): + continue + for compiler in target.compilers.values(): + if compiler.get_id() == 'clang' and not compiler.info.is_darwin(): + use_llvm_cov = True + break elem.add_item('COMMAND', self.environment.get_build_command() + ['--internal', 'coverage'] + outputs + @@ -780,12 +994,13 @@ os.path.join(self.environment.get_source_dir(), self.build.get_subproject_dir()), self.environment.get_build_dir(), - self.environment.get_log_dir()]) + self.environment.get_log_dir()] + + (['--use_llvm_cov'] if use_llvm_cov else [])) def generate_coverage_rules(self): e = NinjaBuildElement(self.all_outputs, 'meson-coverage', 'CUSTOM_COMMAND', 'PHONY') self.generate_coverage_command(e, []) - e.add_item('description', 'Generates coverage reports.') + e.add_item('description', 'Generates coverage reports') self.add_build(e) # Alias that runs the target defined above self.create_target_alias('meson-coverage') @@ -794,21 +1009,21 @@ def generate_coverage_legacy_rules(self): e = NinjaBuildElement(self.all_outputs, 'meson-coverage-xml', 'CUSTOM_COMMAND', 'PHONY') self.generate_coverage_command(e, ['--xml']) - e.add_item('description', 'Generates XML coverage report.') + e.add_item('description', 'Generates XML coverage report') self.add_build(e) # Alias that runs the target defined above self.create_target_alias('meson-coverage-xml') e = NinjaBuildElement(self.all_outputs, 'meson-coverage-text', 'CUSTOM_COMMAND', 'PHONY') self.generate_coverage_command(e, ['--text']) - e.add_item('description', 'Generates text coverage report.') + e.add_item('description', 'Generates text coverage report') self.add_build(e) # Alias that runs the target defined above self.create_target_alias('meson-coverage-text') e = NinjaBuildElement(self.all_outputs, 'meson-coverage-html', 'CUSTOM_COMMAND', 'PHONY') self.generate_coverage_command(e, ['--html']) - e.add_item('description', 'Generates HTML coverage report.') + e.add_item('description', 'Generates HTML coverage report') self.add_build(e) # Alias that runs the target defined above self.create_target_alias('meson-coverage-html') @@ -869,13 +1084,15 @@ deps='gcc', depfile='$DEPFILE', extra='restat = 1')) - c = [ninja_quote(quote_func(x)) for x in self.environment.get_build_command()] + \ + c = self.environment.get_build_command() + \ ['--internal', 'regenerate', - ninja_quote(quote_func(self.environment.get_source_dir())), - ninja_quote(quote_func(self.environment.get_build_dir()))] + self.environment.get_source_dir(), + self.environment.get_build_dir(), + '--backend', + 'ninja'] self.add_rule(NinjaRule('REGENERATE_BUILD', - c + ['--backend', 'ninja'], [], + c, [], 'Regenerating build files.', extra='generator = 1')) @@ -892,11 +1109,18 @@ def add_build(self, build): self.build_elements.append(build) - # increment rule refcount - if build.rule != 'phony': - self.ruledict[build.rule].refcount += 1 + if build.rulename != 'phony': + # reference rule + if build.rulename in self.ruledict: + build.rule = self.ruledict[build.rulename] + else: + mlog.warning("build statement for {} references non-existent rule 
{}".format(build.outfilenames, build.rulename)) def write_rules(self, outfile): + for b in self.build_elements: + if isinstance(b, NinjaBuildElement): + b.count_rule_references() + for r in self.rules: r.write(outfile) @@ -975,12 +1199,12 @@ ofilename = os.path.join(self.get_target_private_dir(target), ofilebase) elem = NinjaBuildElement(self.all_outputs, ofilename, "CUSTOM_COMMAND", rel_sourcefile) elem.add_item('COMMAND', ['resgen', rel_sourcefile, ofilename]) - elem.add_item('DESC', 'Compiling resource %s.' % rel_sourcefile) + elem.add_item('DESC', 'Compiling resource {}'.format(rel_sourcefile)) self.add_build(elem) deps.append(ofilename) a = '-resource:' + ofilename else: - raise InvalidArguments('Unknown resource file %s.' % r) + raise InvalidArguments('Unknown resource file {}.'.format(r)) args.append(a) return args, deps @@ -992,7 +1216,7 @@ compiler = target.compilers['cs'] rel_srcs = [os.path.normpath(s.rel_to_builddir(self.build_to_src)) for s in src_list] deps = [] - commands = CompilerArgs(compiler, target.extra_args.get('cs', [])) + commands = compiler.compiler_args(target.extra_args.get('cs', [])) commands += compiler.get_buildtype_args(buildtype) commands += compiler.get_optimization_args(self.get_option_for_target('optimization', target)) commands += compiler.get_debug_args(self.get_option_for_target('debug', target)) @@ -1069,7 +1293,7 @@ def generate_java_link(self): rule = 'java_LINKER' command = ['jar', '$ARGS'] - description = 'Creating JAR $out.' + description = 'Creating JAR $out' self.add_rule(NinjaRule(rule, command, [], description)) def determine_dep_vapis(self, target): @@ -1265,7 +1489,7 @@ self.create_target_source_introspection(target, valac, args, all_files, []) return other_src[0], other_src[1], vala_c_src - def generate_rust_target(self, target): + def generate_rust_target(self, target: build.BuildTarget) -> None: rustc = target.compilers['rust'] # Rust compiler takes only the main file as input and # figures out what other files are needed via import @@ -1273,7 +1497,7 @@ main_rust_file = None for i in target.get_sources(): if not rustc.can_compile(i): - raise InvalidArguments('Rust target %s contains a non-rust source file.' 
% target.get_basename()) + raise InvalidArguments('Rust target {} contains a non-rust source file.'.format(target.get_basename())) if main_rust_file is None: main_rust_file = i.rel_to_builddir(self.build_to_src) if main_rust_file is None: @@ -1309,7 +1533,8 @@ depfile = os.path.join(target.subdir, target.name + '.d') args += ['--emit', 'dep-info={}'.format(depfile), '--emit', 'link'] args += target.get_extra_args('rust') - args += ['-o', os.path.join(target.subdir, target.get_filename())] + args += rustc.get_output_args(os.path.join(target.subdir, target.get_filename())) + args += self.environment.coredata.get_external_args(target.for_machine, rustc.language) orderdeps = [os.path.join(t.subdir, t.get_filename()) for t in target.link_targets] linkdirs = OrderedDict() for d in target.link_targets: @@ -1344,12 +1569,13 @@ self.get_target_dir(target)) else: target_slashname_workaround_dir = self.get_target_dir(target) - rpath_args = rustc.build_rpath_args(self.environment, - self.environment.get_build_dir(), - target_slashname_workaround_dir, - self.determine_rpath_dirs(target), - target.build_rpath, - target.install_rpath) + rpath_args, target.rpath_dirs_to_remove = ( + rustc.build_rpath_args(self.environment, + self.environment.get_build_dir(), + target_slashname_workaround_dir, + self.determine_rpath_dirs(target), + target.build_rpath, + target.install_rpath)) # ... but then add rustc's sysroot to account for rustup # installations for rpath_arg in rpath_args: @@ -1372,11 +1598,11 @@ @classmethod def get_compiler_rule_name(cls, lang: str, for_machine: MachineChoice) -> str: - return '%s_COMPILER%s' % (lang, cls.get_rule_suffix(for_machine)) + return '{}_COMPILER{}'.format(lang, cls.get_rule_suffix(for_machine)) @classmethod def get_pch_rule_name(cls, lang: str, for_machine: MachineChoice) -> str: - return '%s_PCH%s' % (lang, cls.get_rule_suffix(for_machine)) + return '{}_PCH{}'.format(lang, cls.get_rule_suffix(for_machine)) @classmethod def compiler_to_rule_name(cls, compiler: Compiler) -> str: @@ -1448,7 +1674,7 @@ abs_headers.append(absh) header_imports += swiftc.get_header_import_args(absh) else: - raise InvalidArguments('Swift target %s contains a non-swift source file.' 
% target.get_basename()) + raise InvalidArguments('Swift target {} contains a non-swift source file.'.format(target.get_basename())) os.makedirs(self.get_target_private_dir_abs(target), exist_ok=True) compile_args = swiftc.get_compile_only_args() compile_args += swiftc.get_optimization_args(self.get_option_for_target('optimization', target)) @@ -1464,7 +1690,7 @@ else: expdir = basedir srctreedir = os.path.normpath(os.path.join(self.environment.get_build_dir(), self.build_to_src, expdir)) - sargs = swiftc.get_include_args(srctreedir) + sargs = swiftc.get_include_args(srctreedir, False) compile_args += sargs link_args = swiftc.get_output_args(os.path.join(self.environment.get_build_dir(), self.get_target_filename(target))) link_args += self.build.get_project_link_args(swiftc, target.subproject, target.for_machine) @@ -1475,7 +1701,7 @@ abs_module_dirs = self.determine_swift_dep_dirs(target) module_includes = [] for x in abs_module_dirs: - module_includes += swiftc.get_include_args(x) + module_includes += swiftc.get_include_args(x, False) link_deps = self.get_swift_link_deps(target) abs_link_deps = [os.path.join(self.environment.get_build_dir(), x) for x in link_deps] for d in target.link_targets: @@ -1534,8 +1760,8 @@ for for_machine in MachineChoice: static_linker = self.build.static_linker[for_machine] if static_linker is None: - return - rule = 'STATIC_LINKER%s' % self.get_rule_suffix(for_machine) + continue + rule = 'STATIC_LINKER{}'.format(self.get_rule_suffix(for_machine)) cmdlist = [] args = ['$in'] # FIXME: Must normalize file names with pathlib.Path before writing @@ -1549,14 +1775,15 @@ cmdlist = execute_wrapper + [c.format('$out') for c in rmfile_prefix] cmdlist += static_linker.get_exelist() cmdlist += ['$LINK_ARGS'] - cmdlist += static_linker.get_output_args('$out') - description = 'Linking static target $out.' + cmdlist += NinjaCommandArg.list(static_linker.get_output_args('$out'), Quoting.none) + description = 'Linking static target $out' if num_pools > 0: pool = 'pool = link_pool' else: pool = None self.add_rule(NinjaRule(rule, cmdlist, args, description, rspable=static_linker.can_linker_accept_rsp(), + rspfile_quote_style='cl' if isinstance(static_linker, VisualStudioLinker) else 'gcc', extra=pool)) def generate_dynamic_link_rules(self): @@ -1569,57 +1796,58 @@ or langname == 'rust' \ or langname == 'cs': continue - rule = '%s_LINKER%s' % (langname, self.get_rule_suffix(for_machine)) + rule = '{}_LINKER{}'.format(langname, self.get_rule_suffix(for_machine)) command = compiler.get_linker_exelist() - args = ['$ARGS'] + compiler.get_linker_output_args('$out') + ['$in', '$LINK_ARGS'] - description = 'Linking target $out.' + args = ['$ARGS'] + NinjaCommandArg.list(compiler.get_linker_output_args('$out'), Quoting.none) + ['$in', '$LINK_ARGS'] + description = 'Linking target $out' if num_pools > 0: pool = 'pool = link_pool' else: pool = None self.add_rule(NinjaRule(rule, command, args, description, rspable=compiler.can_linker_accept_rsp(), + rspfile_quote_style='cl' if (compiler.get_argument_syntax() == 'msvc' or + isinstance(compiler, DmdDCompiler)) else 'gcc', extra=pool)) - args = [ninja_quote(quote_func(x)) for x in self.environment.get_build_command()] + \ + args = self.environment.get_build_command() + \ ['--internal', 'symbolextractor', + self.environment.get_build_dir(), '$in', + '$IMPLIB', '$out'] symrule = 'SHSYM' symcmd = args + ['$CROSS'] - syndesc = 'Generating symbol file $out.' 
+ syndesc = 'Generating symbol file $out' synstat = 'restat = 1' self.add_rule(NinjaRule(symrule, symcmd, [], syndesc, extra=synstat)) def generate_java_compile_rule(self, compiler): rule = self.compiler_to_rule_name(compiler) - invoc = [ninja_quote(i) for i in compiler.get_exelist()] - command = invoc + ['$ARGS', '$in'] - description = 'Compiling Java object $in.' + command = compiler.get_exelist() + ['$ARGS', '$in'] + description = 'Compiling Java object $in' self.add_rule(NinjaRule(rule, command, [], description)) def generate_cs_compile_rule(self, compiler): rule = self.compiler_to_rule_name(compiler) - invoc = [ninja_quote(i) for i in compiler.get_exelist()] - command = invoc + command = compiler.get_exelist() args = ['$ARGS', '$in'] - description = 'Compiling C Sharp target $out.' + description = 'Compiling C Sharp target $out' self.add_rule(NinjaRule(rule, command, args, description, - rspable=mesonlib.is_windows())) + rspable=mesonlib.is_windows(), + rspfile_quote_style='cl' if isinstance(compiler, VisualStudioCsCompiler) else 'gcc')) def generate_vala_compile_rules(self, compiler): rule = self.compiler_to_rule_name(compiler) - invoc = [ninja_quote(i) for i in compiler.get_exelist()] - command = invoc + ['$ARGS', '$in'] - description = 'Compiling Vala source $in.' + command = compiler.get_exelist() + ['$ARGS', '$in'] + description = 'Compiling Vala source $in' self.add_rule(NinjaRule(rule, command, [], description, extra='restat = 1')) def generate_rust_compile_rules(self, compiler): rule = self.compiler_to_rule_name(compiler) - invoc = [ninja_quote(i) for i in compiler.get_exelist()] - command = invoc + ['$ARGS', '$in'] - description = 'Compiling Rust source $in.' + command = compiler.get_exelist() + ['$ARGS', '$in'] + description = 'Compiling Rust source $in' depfile = '$targetdep' depstyle = 'gcc' self.add_rule(NinjaRule(rule, command, [], description, deps=depstyle, @@ -1627,18 +1855,18 @@ def generate_swift_compile_rules(self, compiler): rule = self.compiler_to_rule_name(compiler) - full_exe = [ninja_quote(x) for x in self.environment.get_build_command()] + [ + full_exe = self.environment.get_build_command() + [ '--internal', 'dirchanger', '$RUNDIR', ] - invoc = full_exe + [ninja_quote(i) for i in compiler.get_exelist()] + invoc = full_exe + compiler.get_exelist() command = invoc + ['$ARGS', '$in'] - description = 'Compiling Swift source $in.' + description = 'Compiling Swift source $in' self.add_rule(NinjaRule(rule, command, [], description)) def generate_fortran_dep_hack(self, crstr): - rule = 'FORTRAN_DEP_HACK%s' % (crstr) + rule = 'FORTRAN_DEP_HACK{}'.format(crstr) if mesonlib.is_windows(): cmd = ['cmd', '/C'] else: @@ -1652,9 +1880,9 @@ if self.created_llvm_ir_rule[compiler.for_machine]: return rule = self.get_compiler_rule_name('llvm_ir', compiler.for_machine) - command = [ninja_quote(i) for i in compiler.get_exelist()] - args = ['$ARGS'] + compiler.get_output_args('$out') + compiler.get_compile_only_args() + ['$in'] - description = 'Compiling LLVM IR object $in.' 
+ command = compiler.get_exelist() + args = ['$ARGS'] + NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none) + compiler.get_compile_only_args() + ['$in'] + description = 'Compiling LLVM IR object $in' self.add_rule(NinjaRule(rule, command, args, description, rspable=compiler.can_linker_accept_rsp())) self.created_llvm_ir_rule[compiler.for_machine] = True @@ -1682,16 +1910,10 @@ if langname == 'fortran': self.generate_fortran_dep_hack(crstr) rule = self.get_compiler_rule_name(langname, compiler.for_machine) - depargs = compiler.get_dependency_gen_args('$out', '$DEPFILE') - quoted_depargs = [] - for d in depargs: - if d != '$out' and d != '$in': - d = quote_func(d) - quoted_depargs.append(d) - - command = [ninja_quote(i) for i in compiler.get_exelist()] - args = ['$ARGS'] + quoted_depargs + compiler.get_output_args('$out') + compiler.get_compile_only_args() + ['$in'] - description = 'Compiling %s object $out.' % compiler.get_display_language() + depargs = NinjaCommandArg.list(compiler.get_dependency_gen_args('$out', '$DEPFILE'), Quoting.none) + command = compiler.get_exelist() + args = ['$ARGS'] + depargs + NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none) + compiler.get_compile_only_args() + ['$in'] + description = 'Compiling {} object $out'.format(compiler.get_display_language()) if isinstance(compiler, VisualStudioLikeCompiler): deps = 'msvc' depfile = None @@ -1700,6 +1922,8 @@ depfile = '$DEPFILE' self.add_rule(NinjaRule(rule, command, args, description, rspable=compiler.can_linker_accept_rsp(), + rspfile_quote_style='cl' if (compiler.get_argument_syntax() == 'msvc' or + isinstance(compiler, DmdDCompiler)) else 'gcc', deps=deps, depfile=depfile)) def generate_pch_rule_for(self, langname, compiler): @@ -1708,17 +1932,12 @@ rule = self.compiler_to_pch_rule_name(compiler) depargs = compiler.get_dependency_gen_args('$out', '$DEPFILE') - quoted_depargs = [] - for d in depargs: - if d != '$out' and d != '$in': - d = quote_func(d) - quoted_depargs.append(d) if isinstance(compiler, VisualStudioLikeCompiler): output = [] else: - output = compiler.get_output_args('$out') - command = compiler.get_exelist() + ['$ARGS'] + quoted_depargs + output + compiler.get_compile_only_args() + ['$in'] - description = 'Precompiling header $in.' 
+ output = NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none) + command = compiler.get_exelist() + ['$ARGS'] + depargs + output + compiler.get_compile_only_args() + ['$in'] + description = 'Precompiling header $in' if isinstance(compiler, VisualStudioLikeCompiler): deps = 'msvc' depfile = None @@ -1764,7 +1983,7 @@ generator = genlist.get_generator() subdir = genlist.subdir exe = generator.get_exe() - exe_arr = self.exe_object_to_cmd_array(exe) + exe_arr = self.build_target_to_cmd_array(exe, True) infilelist = genlist.get_inputs() outfilelist = genlist.get_outputs() extra_dependencies = self.get_custom_target_depend_files(genlist) @@ -1794,9 +2013,9 @@ outfilelist = outfilelist[len(generator.outputs):] args = self.replace_paths(target, args, override_subdir=subdir) cmdlist = exe_arr + self.replace_extra_args(args, genlist) - meson_exe_cmd = self.as_meson_exe_cmdline('generator ' + cmdlist[0], - cmdlist[0], cmdlist[1:], - capture=outfiles[0] if generator.capture else None) + meson_exe_cmd, reason = self.as_meson_exe_cmdline('generator ' + cmdlist[0], + cmdlist[0], cmdlist[1:], + capture=outfiles[0] if generator.capture else None) if meson_exe_cmd: cmdlist = meson_exe_cmd abs_pdir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target)) @@ -1808,11 +2027,16 @@ elem.add_item('DEPFILE', depfile) if len(extra_dependencies) > 0: elem.add_dep(extra_dependencies) + if len(generator.outputs) == 1: - elem.add_item('DESC', 'Generating {!r}.'.format(sole_output)) + what = '{!r}'.format(sole_output) else: # since there are multiple outputs, we log the source that caused the rebuild - elem.add_item('DESC', 'Generating source from {!r}.'.format(sole_output)) + what = 'from {!r}.'.format(sole_output) + if reason: + reason = ' (wrapped by meson {})'.format(reason) + elem.add_item('DESC', 'Generating {}{}.'.format(what, reason)) + if isinstance(exe, build.BuildTarget): elem.add_dep(self.get_target_filename(exe)) elem.add_item('COMMAND', cmdlist) @@ -1852,9 +2076,8 @@ modname = modmatch.group(1).lower() if modname in module_files: raise InvalidArguments( - 'Namespace collision: module %s defined in ' - 'two files %s and %s.' % - (modname, module_files[modname], s)) + 'Namespace collision: module {} defined in ' + 'two files {} and {}.'.format(modname, module_files[modname], s)) module_files[modname] = s else: submodmatch = submodre.match(line) @@ -1865,9 +2088,8 @@ if submodname in submodule_files: raise InvalidArguments( - 'Namespace collision: submodule %s defined in ' - 'two files %s and %s.' 
% - (submodname, submodule_files[submodname], s)) + 'Namespace collision: submodule {} defined in ' + 'two files {} and {}.'.format(submodname, submodule_files[submodname], s)) submodule_files[submodname] = s self.fortran_deps[target.get_basename()] = {**module_files, **submodule_files} @@ -1884,12 +2106,15 @@ mod_files = _scan_fortran_file_deps(src, srcdir, dirname, tdeps, compiler) return mod_files - def get_cross_stdlib_args(self, target, compiler): - if self.environment.machines.matches_build_machine(target.for_machine): - return [] - if not self.environment.properties.host.has_stdlib(compiler.language): - return [] - return compiler.get_no_stdinc_args() + def get_no_stdlib_args(self, target, compiler): + if compiler.language in self.build.stdlibs[target.for_machine]: + return compiler.get_no_stdinc_args() + return [] + + def get_no_stdlib_link_args(self, target, linker): + if hasattr(linker, 'language') and linker.language in self.build.stdlibs[target.for_machine]: + return linker.get_no_stdlib_link_args() + return [] def get_compile_debugfile_args(self, compiler, target, objfile): # The way MSVC uses PDB files is documented exactly nowhere so @@ -1946,15 +2171,18 @@ else: return compiler.get_compile_debugfile_args(objfile, pch=False) + def get_link_debugfile_name(self, linker, target, outname): + return linker.get_link_debugfile_name(outname) + def get_link_debugfile_args(self, linker, target, outname): return linker.get_link_debugfile_args(outname) def generate_llvm_ir_compile(self, target, src): + base_proxy = self.get_base_options_for_target(target) compiler = get_compiler_for_source(target.compilers.values(), src) - commands = CompilerArgs(compiler) + commands = compiler.compiler_args() # Compiler args for compiling this target - commands += compilers.get_base_compile_args(self.environment.coredata.base_options, - compiler) + commands += compilers.get_base_compile_args(base_proxy, compiler) if isinstance(src, File): if src.is_built: src_filename = os.path.join(src.subdir, src.fname) @@ -1964,7 +2192,7 @@ src_filename = os.path.basename(src) else: src_filename = src - obj_basename = src_filename.replace('/', '_').replace('\\', '_') + obj_basename = self.canonicalize_filename(src_filename) rel_obj = os.path.join(self.get_target_private_dir(target), obj_basename) rel_obj += '.' + self.environment.machines[target.for_machine].get_object_suffix() commands += self.get_compile_debugfile_args(compiler, target, rel_obj) @@ -1977,9 +2205,6 @@ # Write the Ninja build command compiler_name = self.get_compiler_rule_name('llvm_ir', compiler.for_machine) element = NinjaBuildElement(self.all_outputs, rel_obj, compiler_name, rel_src) - # Convert from GCC-style link argument naming to the naming used by the - # current compiler. - commands = commands.to_native() element.add_item('ARGS', commands) self.add_build(element) return rel_obj @@ -1995,6 +2220,10 @@ curdir = '.' return compiler.get_include_args(curdir, False) + @lru_cache(maxsize=None) + def get_normpath_target(self, source) -> str: + return os.path.normpath(source) + def get_custom_target_dir_include_args(self, target, compiler): custom_target_include_dirs = [] for i in target.get_generated_sources(): @@ -2003,7 +2232,7 @@ # own target build dir. if not isinstance(i, (build.CustomTarget, build.CustomTargetIndex)): continue - idir = self.get_target_dir(i) + idir = self.get_normpath_target(self.get_target_dir(i)) if not idir: idir = '.' 
if idir not in custom_target_include_dirs: @@ -2017,10 +2246,10 @@ def generate_inc_dir(self, compiler, d, basedir, is_system): # Avoid superfluous '/.' at the end of paths when d is '.' if d not in ('', '.'): - expdir = os.path.join(basedir, d) + expdir = os.path.normpath(os.path.join(basedir, d)) else: expdir = basedir - srctreedir = os.path.join(self.build_to_src, expdir) + srctreedir = os.path.normpath(os.path.join(self.build_to_src, expdir)) sargs = compiler.get_include_args(srctreedir, is_system) # There may be include dirs where a build directory has not been # created for some source dir. For example if someone does this: @@ -2039,7 +2268,7 @@ base_proxy = self.get_base_options_for_target(target) # Create an empty commands list, and start adding arguments from # various sources in the order in which they must override each other - commands = CompilerArgs(compiler) + commands = compiler.compiler_args() # Start with symbol visibility. commands += compiler.gnu_symbol_visibility_args(target.gnu_symbol_visibility) # Add compiler args for compiling this target derived from 'base' build @@ -2119,7 +2348,7 @@ compiler = get_compiler_for_source(target.compilers.values(), src) commands = self._generate_single_compile(target, compiler, is_generated) - commands = CompilerArgs(commands.compiler, commands) + commands = commands.compiler.compiler_args(commands) # Create introspection information if is_generated is False: @@ -2196,9 +2425,6 @@ d = os.path.join(self.get_target_private_dir(target), d) element.add_orderdep(d) element.add_dep(pch_dep) - # Convert from GCC-style link argument naming to the naming used by the - # current compiler. - commands = commands.to_native() for i in self.get_fortran_orderdeps(target, compiler): element.add_orderdep(i) element.add_item('DEPFILE', dep_file) @@ -2300,23 +2526,23 @@ self.add_build(elem) return pch_objects + def get_target_shsym_filename(self, target): + # Always name the .symbols file after the primary build output because it always exists + targetdir = self.get_target_private_dir(target) + return os.path.join(targetdir, target.get_filename() + '.symbols') + def generate_shsym(self, target): - target_name = target.get_filename() target_file = self.get_target_filename(target) - targetdir = self.get_target_private_dir(target) - symname = os.path.join(targetdir, target_name + '.symbols') + symname = self.get_target_shsym_filename(target) elem = NinjaBuildElement(self.all_outputs, symname, 'SHSYM', target_file) + # The library we will actually link to, which is an import library on Windows (not the DLL) + elem.add_item('IMPLIB', self.get_target_filename_for_linking(target)) if self.environment.is_cross_build(): elem.add_item('CROSS', '--cross-host=' + self.environment.machines[target.for_machine].system) self.add_build(elem) - def get_cross_stdlib_link_args(self, target, linker): - if isinstance(target, build.StaticLibrary) or \ - self.environment.machines.matches_build_machine(target.for_machine): - return [] - if not self.environment.properties.host.has_stdlib(linker.language): - return [] - return linker.get_no_stdlib_link_args() + def get_import_filename(self, target): + return os.path.join(self.get_target_dir(target), target.import_filename) def get_target_type_link_args(self, target, linker): commands = [] @@ -2328,7 +2554,7 @@ commands += linker.gen_export_dynamic_link_args(self.environment) # If implib, and that's significant on this platform (i.e. 
Windows using either GCC or Visual Studio) if target.import_filename: - commands += linker.gen_import_library_args(os.path.join(self.get_target_dir(target), target.import_filename)) + commands += linker.gen_import_library_args(self.get_import_filename(target)) if target.pie: commands += linker.get_pie_link_args() elif isinstance(target, build.SharedLibrary): @@ -2349,7 +2575,7 @@ commands += linker.gen_vs_module_defs_args(target.vs_module_defs.rel_to_builddir(self.build_to_src)) # This is only visited when building for Windows using either GCC or Visual Studio if target.import_filename: - commands += linker.gen_import_library_args(os.path.join(self.get_target_dir(target), target.import_filename)) + commands += linker.gen_import_library_args(self.get_import_filename(target)) elif isinstance(target, build.StaticLibrary): commands += linker.get_std_link_args() else: @@ -2362,12 +2588,15 @@ # If gui_app is significant on this platform, add the appropriate linker arguments. # Unfortunately this can't be done in get_target_type_link_args, because some misguided # libraries (such as SDL2) add -mwindows to their link flags. - commands += linker.get_gui_app_args(target.gui_app) + if target.gui_app is not None: + commands += linker.get_gui_app_args(target.gui_app) + else: + commands += linker.get_win_subsystem_args(target.win_subsystem) return commands def get_link_whole_args(self, linker, target): target_args = self.build_target_link_arguments(linker, target.link_whole_targets) - return linker.get_link_whole_for(target_args) if len(target_args) else [] + return linker.get_link_whole_for(target_args) if target_args else [] @lru_cache(maxsize=None) def guess_library_absolute_path(self, linker, libname, search_dirs, patterns): @@ -2427,10 +2656,10 @@ guessed_dependencies = [] # TODO The get_library_naming requirement currently excludes link targets that use d or fortran as their main linker - if hasattr(linker, 'get_library_naming'): - search_dirs = tuple(search_dirs) + tuple(linker.get_library_dirs(self.environment)) + try: static_patterns = linker.get_library_naming(self.environment, LibType.STATIC, strict=True) shared_patterns = linker.get_library_naming(self.environment, LibType.SHARED, strict=True) + search_dirs = tuple(search_dirs) + tuple(linker.get_library_dirs(self.environment)) for libname in libs: # be conservative and record most likely shared and static resolution, because we don't know exactly # which one the linker will prefer @@ -2442,12 +2671,16 @@ guessed_dependencies.append(staticlibs.resolve().as_posix()) if sharedlibs: guessed_dependencies.append(sharedlibs.resolve().as_posix()) + except (mesonlib.MesonException, AttributeError) as e: + if 'get_library_naming' not in str(e): + raise return guessed_dependencies + absolute_libs def generate_link(self, target, outname, obj_list, linker, extra_args=None, stdlib_args=None): extra_args = extra_args if extra_args is not None else [] stdlib_args = stdlib_args if stdlib_args is not None else [] + implicit_outs = [] if isinstance(target, build.StaticLibrary): linker_base = 'STATIC' else: @@ -2462,7 +2695,7 @@ # # Once all the linker options have been passed, we will start passing # libraries and library paths from internal and external sources. - commands = CompilerArgs(linker) + commands = linker.compiler_args() # First, the trivial ones that are impossible to override. 
# # Add linker args for linking this target derived from 'base' build @@ -2475,7 +2708,7 @@ linker, isinstance(target, build.SharedModule)) # Add -nostdlib if needed; can't be overridden - commands += self.get_cross_stdlib_link_args(target, linker) + commands += self.get_no_stdlib_link_args(target, linker) # Add things like /NOLOGO; usually can't be overridden commands += linker.get_linker_always_args() # Add buildtype linker args: optimization level, etc. @@ -2483,6 +2716,9 @@ # Add /DEBUG and the pdb filename when using MSVC if self.get_option_for_target('debug', target): commands += self.get_link_debugfile_args(linker, target, outname) + debugfile = self.get_link_debugfile_name(linker, target, outname) + if debugfile is not None: + implicit_outs += [debugfile] # Add link args specific to this BuildTarget type, such as soname args, # PIC, import library generation, etc. commands += self.get_target_type_link_args(target, linker) @@ -2538,7 +2774,14 @@ # to be after all internal and external libraries so that unresolved # symbols from those can be found here. This is needed when the # *_winlibs that we want to link to are static mingw64 libraries. - commands += linker.get_option_link_args(self.environment.coredata.compiler_options[target.for_machine]) + if hasattr(linker, 'get_language'): + # The static linker doesn't know what language it is building, so we + # don't know what option. Fortunately, it doesn't care to see the + # language-specific options either. + # + # We shouldn't check whether we are making a static library, because + # in the LTO case we do use a real compiler here. + commands += linker.get_option_link_args(self.environment.coredata.compiler_options[target.for_machine][linker.get_language()]) dep_targets = [] dep_targets.extend(self.guess_external_link_dependencies(linker, target, commands, internal)) @@ -2554,31 +2797,30 @@ self.get_target_dir(target)) else: target_slashname_workaround_dir = self.get_target_dir(target) - commands += linker.build_rpath_args(self.environment, - self.environment.get_build_dir(), - target_slashname_workaround_dir, - self.determine_rpath_dirs(target), - target.build_rpath, - target.install_rpath) + (rpath_args, target.rpath_dirs_to_remove) = ( + linker.build_rpath_args(self.environment, + self.environment.get_build_dir(), + target_slashname_workaround_dir, + self.determine_rpath_dirs(target), + target.build_rpath, + target.install_rpath)) + commands += rpath_args # Add libraries generated by custom targets custom_target_libraries = self.get_custom_target_provided_libraries(target) commands += extra_args commands += custom_target_libraries commands += stdlib_args # Standard library arguments go last, because they never depend on anything. - # Convert from GCC-style link argument naming to the naming used by the - # current compiler. 
- commands = commands.to_native() dep_targets.extend([self.get_dependency_filename(t) for t in dependencies]) dep_targets.extend([self.get_dependency_filename(t) for t in target.link_depends]) - elem = NinjaBuildElement(self.all_outputs, outname, linker_rule, obj_list) + elem = NinjaBuildElement(self.all_outputs, outname, linker_rule, obj_list, implicit_outs=implicit_outs) elem.add_dep(dep_targets + custom_target_libraries) elem.add_item('LINK_ARGS', commands) return elem def get_dependency_filename(self, t): if isinstance(t, build.SharedLibrary): - return os.path.join(self.get_target_private_dir(t), t.get_filename() + '.symbols') + return self.get_target_shsym_filename(t) elif isinstance(t, mesonlib.File): if t.is_built: return t.relative_name() @@ -2607,7 +2849,7 @@ d = CleanTrees(self.environment.get_build_dir(), trees) d_file = os.path.join(self.environment.get_scratch_dir(), 'cleantrees.dat') e.add_item('COMMAND', self.environment.get_build_command() + ['--internal', 'cleantrees', d_file]) - e.add_item('description', 'Cleaning custom target directories.') + e.add_item('description', 'Cleaning custom target directories') self.add_build(e) # Alias that runs the target defined above self.create_target_alias('meson-clean-ctlist') @@ -2618,19 +2860,15 @@ def generate_gcov_clean(self): gcno_elem = NinjaBuildElement(self.all_outputs, 'meson-clean-gcno', 'CUSTOM_COMMAND', 'PHONY') - script_root = self.environment.get_script_dir() - clean_script = os.path.join(script_root, 'delwithsuffix.py') - gcno_elem.add_item('COMMAND', mesonlib.python_command + [clean_script, '.', 'gcno']) - gcno_elem.add_item('description', 'Deleting gcno files.') + gcno_elem.add_item('COMMAND', mesonlib.meson_command + ['--internal', 'delwithsuffix', '.', 'gcno']) + gcno_elem.add_item('description', 'Deleting gcno files') self.add_build(gcno_elem) # Alias that runs the target defined above self.create_target_alias('meson-clean-gcno') gcda_elem = NinjaBuildElement(self.all_outputs, 'meson-clean-gcda', 'CUSTOM_COMMAND', 'PHONY') - script_root = self.environment.get_script_dir() - clean_script = os.path.join(script_root, 'delwithsuffix.py') - gcda_elem.add_item('COMMAND', mesonlib.python_command + [clean_script, '.', 'gcda']) - gcda_elem.add_item('description', 'Deleting gcda files.') + gcda_elem.add_item('COMMAND', mesonlib.meson_command + ['--internal', 'delwithsuffix', '.', 'gcda']) + gcda_elem.add_item('description', 'Deleting gcda files') self.add_build(gcda_elem) # Alias that runs the target defined above self.create_target_alias('meson-clean-gcda') @@ -2740,8 +2978,8 @@ self.add_build(elem) elem = NinjaBuildElement(self.all_outputs, 'meson-clean', 'CUSTOM_COMMAND', 'PHONY') - elem.add_item('COMMAND', [self.ninja_command, '-t', 'clean']) - elem.add_item('description', 'Cleaning.') + elem.add_item('COMMAND', self.ninja_command + ['-t', 'clean']) + elem.add_item('description', 'Cleaning') # Alias that runs the above-defined meson-clean target self.create_target_alias('meson-clean') @@ -2779,7 +3017,7 @@ elem = NinjaBuildElement(self.all_outputs, deps, 'phony', '') self.add_build(elem) - def get_introspection_data(self, target_id, target): + def get_introspection_data(self, target_id: str, target: build.Target) -> T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]: if target_id not in self.introspection_data or len(self.introspection_data[target_id]) == 0: return super().get_introspection_data(target_id, target) @@ -2788,12 +3026,6 @@ result += [i] return result 
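For readers tracking the NinjaBuildElement changes in this hunk: the new implicit_outs parameter lets generate_link declare the link debug file (e.g. a PDB) as an implicit output of the link edge, so the emitted build statement gains a "| <debugfile>" clause before the rule name. A minimal sketch of the resulting syntax follows; it is illustrative only, with hypothetical file names (prog.exe, prog.pdb, main.o), and simply mirrors the string formatting done in NinjaBuildElement.write() above.

# Illustrative sketch, not part of the patch: how an implicit output ends up
# in the generated ninja build statement (outputs, then implicit outputs
# after '|', then the rule name and the inputs).
outs = 'prog.exe'                   # hypothetical primary output
implicit_outs = ' | ' + 'prog.pdb'  # hypothetical link debug file
rulename = 'c_LINKER'
ins = 'main.o'                      # hypothetical object file input
line = 'build {}{}: {} {}'.format(outs, implicit_outs, rulename, ins)
print(line)  # -> build prog.exe | prog.pdb: c_LINKER main.o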
-def load(build_dir): - filename = os.path.join(build_dir, 'meson-private', 'install.dat') - with open(filename, 'rb') as f: - obj = pickle.load(f) - return obj - def _scan_fortran_file_deps(src: Path, srcdir: Path, dirname: Path, tdeps, compiler) -> T.List[str]: """ @@ -2823,7 +3055,9 @@ # included files incmatch = incre.match(line) if incmatch is not None: - incfile = srcdir / incmatch.group(1) + incfile = src.parent / incmatch.group(1) + # NOTE: src.parent is most general, in particular for CMake subproject with Fortran file + # having an `include 'foo.f'` statement. if incfile.suffix.lower()[1:] in compiler.file_suffixes: mod_files.extend(_scan_fortran_file_deps(incfile, srcdir, dirname, tdeps, compiler)) # modules diff -Nru meson-0.53.2/mesonbuild/backend/vs2010backend.py meson-0.57.0+really0.56.2/mesonbuild/backend/vs2010backend.py --- meson-0.53.2/mesonbuild/backend/vs2010backend.py 2020-01-23 12:51:19.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/backend/vs2010backend.py 2021-01-06 10:39:48.000000000 +0000 @@ -18,20 +18,21 @@ import xml.dom.minidom import xml.etree.ElementTree as ET import uuid -from pathlib import Path, PurePath +import typing as T +from .._pathlib import Path, PurePath from . import backends from .. import build from .. import dependencies from .. import mlog from .. import compilers -from ..compilers import CompilerArgs +from ..interpreter import Interpreter from ..mesonlib import ( MesonException, File, python_command, replace_if_different ) from ..environment import Environment, build_filename -def autodetect_vs_version(build): +def autodetect_vs_version(build: T.Optional[build.Build], interpreter: T.Optional[Interpreter]): vs_version = os.getenv('VisualStudioVersion', None) vs_install_dir = os.getenv('VSINSTALLDIR', None) if not vs_install_dir: @@ -41,17 +42,17 @@ # vcvarsall.bat doesn't set it, so also use VSINSTALLDIR if vs_version == '14.0' or 'Visual Studio 14' in vs_install_dir: from mesonbuild.backend.vs2015backend import Vs2015Backend - return Vs2015Backend(build) + return Vs2015Backend(build, interpreter) if vs_version == '15.0' or 'Visual Studio 17' in vs_install_dir or \ 'Visual Studio\\2017' in vs_install_dir: from mesonbuild.backend.vs2017backend import Vs2017Backend - return Vs2017Backend(build) + return Vs2017Backend(build, interpreter) if vs_version == '16.0' or 'Visual Studio 19' in vs_install_dir or \ 'Visual Studio\\2019' in vs_install_dir: from mesonbuild.backend.vs2019backend import Vs2019Backend - return Vs2019Backend(build) + return Vs2019Backend(build, interpreter) if 'Visual Studio 10.0' in vs_install_dir: - return Vs2010Backend(build) + return Vs2010Backend(build, interpreter) raise MesonException('Could not detect Visual Studio using VisualStudioVersion: {!r} or VSINSTALLDIR: {!r}!\n' 'Please specify the exact backend to use.'.format(vs_version, vs_install_dir)) @@ -86,8 +87,8 @@ self.depfiles = depfiles class Vs2010Backend(backends.Backend): - def __init__(self, build): - super().__init__(build) + def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional[Interpreter]): + super().__init__(build, interpreter) self.name = 'vs2010' self.project_file_version = '10.0.30319.1' self.platform_toolset = None @@ -96,6 +97,9 @@ self.subdirs = {} self.handled_target_deps = {} + def get_target_private_dir(self, target): + return os.path.join(self.get_target_dir(target), target.get_id()) + def generate_custom_generator_commands(self, target, parent_node): generator_output_files = [] custom_target_include_dirs = [] @@ 
-117,7 +121,7 @@ infilelist = genlist.get_inputs() outfilelist = genlist.get_outputs() source_dir = os.path.join(down, self.build_to_src, genlist.subdir) - exe_arr = self.exe_object_to_cmd_array(exe) + exe_arr = self.build_target_to_cmd_array(exe, True) idgroup = ET.SubElement(parent_node, 'ItemGroup') for i in range(len(infilelist)): if len(infilelist) == len(outfilelist): @@ -146,7 +150,7 @@ # Always use a wrapper because MSBuild eats random characters when # there are many arguments. tdir_abs = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target)) - cmd = self.as_meson_exe_cmdline( + cmd, _ = self.as_meson_exe_cmdline( 'generator ' + cmd[0], cmd[0], cmd[1:], @@ -163,20 +167,23 @@ ET.SubElement(cbs, 'AdditionalInputs').text = ';'.join(deps) return generator_output_files, custom_target_output_files, custom_target_include_dirs - def generate(self, interp): - self.interpreter = interp + def generate(self): target_machine = self.interpreter.builtin['target_machine'].cpu_family_method(None, None) - if target_machine.endswith('64'): + if target_machine == '64' or target_machine == 'x86_64': # amd64 or x86_64 self.platform = 'x64' elif target_machine == 'x86': # x86 self.platform = 'Win32' + elif target_machine == 'aarch64' or target_machine == 'arm64': + self.platform = 'arm64' elif 'arm' in target_machine.lower(): self.platform = 'ARM' else: raise MesonException('Unsupported Visual Studio platform: ' + target_machine) self.buildtype = self.environment.coredata.get_builtin_option('buildtype') + self.optimization = self.environment.coredata.get_builtin_option('optimization') + self.debug = self.environment.coredata.get_builtin_option('debug') sln_filename = os.path.join(self.environment.get_build_dir(), self.build.project_name + '.sln') projlist = self.generate_projects() self.gen_testproj('RUN_TESTS', os.path.join(self.environment.get_build_dir(), 'RUN_TESTS.vcxproj')) @@ -187,11 +194,11 @@ Vs2010Backend.touch_regen_timestamp(self.environment.get_build_dir()) @staticmethod - def get_regen_stampfile(build_dir): + def get_regen_stampfile(build_dir: str) -> None: return os.path.join(os.path.join(build_dir, Environment.private_dir), 'regen.stamp') @staticmethod - def touch_regen_timestamp(build_dir): + def touch_regen_timestamp(build_dir: str) -> None: with open(Vs2010Backend.get_regen_stampfile(build_dir), 'w'): pass @@ -560,12 +567,12 @@ # there are many arguments. tdir_abs = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target)) extra_bdeps = target.get_transitive_build_target_deps() - wrapper_cmd = self.as_meson_exe_cmdline(target.name, target.command[0], cmd[1:], - # All targets run from the target dir - workdir=tdir_abs, - extra_bdeps=extra_bdeps, - capture=ofilenames[0] if target.capture else None, - force_serialize=True) + wrapper_cmd, _ = self.as_meson_exe_cmdline(target.name, target.command[0], cmd[1:], + # All targets run from the target dir + workdir=tdir_abs, + extra_bdeps=extra_bdeps, + capture=ofilenames[0] if target.capture else None, + force_serialize=True) if target.build_always_stale: # Use a nonexistent file to always consider the target out-of-date. ofilenames += [self.nonexistent_file(os.path.join(self.environment.get_scratch_dir(), @@ -588,10 +595,8 @@ raise MesonException('Could not guess language from source file %s.' % src) def add_pch(self, pch_sources, lang, inc_cl): - if len(pch_sources) <= 1: - # We only need per file precompiled headers if we have more than 1 language. 
- return - self.use_pch(pch_sources, lang, inc_cl) + if lang in pch_sources: + self.use_pch(pch_sources, lang, inc_cl) def create_pch(self, pch_sources, lang, inc_cl): pch = ET.SubElement(inc_cl, 'PrecompiledHeader') @@ -599,6 +604,8 @@ self.add_pch_files(pch_sources, lang, inc_cl) def use_pch(self, pch_sources, lang, inc_cl): + pch = ET.SubElement(inc_cl, 'PrecompiledHeader') + pch.text = 'Use' header = self.add_pch_files(pch_sources, lang, inc_cl) pch_include = ET.SubElement(inc_cl, 'ForcedIncludeFiles') pch_include.text = header + ';%(ForcedIncludeFiles)' @@ -743,19 +750,21 @@ def gen_vcxproj(self, target, ofname, guid): mlog.debug('Generating vcxproj %s.' % target.name) - entrypoint = 'WinMainCRTStartup' subsystem = 'Windows' self.handled_target_deps[target.get_id()] = [] if isinstance(target, build.Executable): conftype = 'Application' - if not target.gui_app: - subsystem = 'Console' - entrypoint = 'mainCRTStartup' + if target.gui_app is not None: + if not target.gui_app: + subsystem = 'Console' + else: + # If someone knows how to set the version properly, + # please send a patch. + subsystem = target.win_subsystem.split(',')[0] elif isinstance(target, build.StaticLibrary): conftype = 'StaticLibrary' elif isinstance(target, build.SharedLibrary): conftype = 'DynamicLibrary' - entrypoint = '_DllMainCrtStartup' elif isinstance(target, build.CustomTarget): return self.gen_custom_target_vcxproj(target, ofname, guid) elif isinstance(target, build.RunTarget): @@ -772,7 +781,9 @@ if self.is_unity(target): sources = self.generate_unity_files(target, sources) compiler = self._get_cl_compiler(target) - buildtype_args = compiler.get_buildtype_args(self.buildtype) + build_args = compiler.get_buildtype_args(self.buildtype) + build_args += compiler.get_optimization_args(self.optimization) + build_args += compiler.get_debug_args(self.debug) buildtype_link_args = compiler.get_buildtype_linker_args(self.buildtype) vscrt_type = self.environment.coredata.base_options['b_vscrt'] project_name = target.name @@ -821,11 +832,18 @@ clconf = ET.SubElement(compiles, 'ClCompile') # CRT type; debug or release if vscrt_type.value == 'from_buildtype': - if self.buildtype == 'debug' or self.buildtype == 'debugoptimized': + if self.buildtype == 'debug': ET.SubElement(type_config, 'UseDebugLibraries').text = 'true' ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebugDLL' else: ET.SubElement(type_config, 'UseDebugLibraries').text = 'false' + ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDLL' + elif vscrt_type.value == 'static_from_buildtype': + if self.buildtype == 'debug': + ET.SubElement(type_config, 'UseDebugLibraries').text = 'true' + ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebug' + else: + ET.SubElement(type_config, 'UseDebugLibraries').text = 'false' ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreaded' elif vscrt_type.value == 'mdd': ET.SubElement(type_config, 'UseDebugLibraries').text = 'true' @@ -842,19 +860,33 @@ ET.SubElement(type_config, 'UseDebugLibraries').text = 'false' ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDLL' # Debug format - if '/ZI' in buildtype_args: - ET.SubElement(type_config, 'DebugInformationFormat').text = 'EditAndContinue' - elif '/Zi' in buildtype_args: - ET.SubElement(type_config, 'DebugInformationFormat').text = 'ProgramDatabase' - elif '/Z7' in buildtype_args: - ET.SubElement(type_config, 'DebugInformationFormat').text = 'OldStyle' + if '/ZI' in build_args: + ET.SubElement(clconf, 
'DebugInformationFormat').text = 'EditAndContinue' + elif '/Zi' in build_args: + ET.SubElement(clconf, 'DebugInformationFormat').text = 'ProgramDatabase' + elif '/Z7' in build_args: + ET.SubElement(clconf, 'DebugInformationFormat').text = 'OldStyle' + else: + ET.SubElement(clconf, 'DebugInformationFormat').text = 'None' # Runtime checks - if '/RTC1' in buildtype_args: - ET.SubElement(type_config, 'BasicRuntimeChecks').text = 'EnableFastChecks' - elif '/RTCu' in buildtype_args: - ET.SubElement(type_config, 'BasicRuntimeChecks').text = 'UninitializedLocalUsageCheck' - elif '/RTCs' in buildtype_args: - ET.SubElement(type_config, 'BasicRuntimeChecks').text = 'StackFrameRuntimeCheck' + if '/RTC1' in build_args: + ET.SubElement(clconf, 'BasicRuntimeChecks').text = 'EnableFastChecks' + elif '/RTCu' in build_args: + ET.SubElement(clconf, 'BasicRuntimeChecks').text = 'UninitializedLocalUsageCheck' + elif '/RTCs' in build_args: + ET.SubElement(clconf, 'BasicRuntimeChecks').text = 'StackFrameRuntimeCheck' + # Exception handling has to be set in the xml in addition to the "AdditionalOptions" because otherwise + # cl will give warning D9025: overriding '/Ehs' with cpp_eh value + if 'cpp' in target.compilers: + eh = self.environment.coredata.compiler_options[target.for_machine]['cpp']['eh'] + if eh.value == 'a': + ET.SubElement(clconf, 'ExceptionHandling').text = 'Async' + elif eh.value == 's': + ET.SubElement(clconf, 'ExceptionHandling').text = 'SyncCThrow' + elif eh.value == 'none': + ET.SubElement(clconf, 'ExceptionHandling').text = 'false' + else: # 'sc' or 'default' + ET.SubElement(clconf, 'ExceptionHandling').text = 'Sync' # End configuration ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props') generated_files, custom_target_output_files, generated_files_include_dirs = self.generate_custom_generator_commands(target, root) @@ -884,15 +916,17 @@ # # file_args is also later split out into defines and include_dirs in # case someone passed those in there - file_args = dict((lang, CompilerArgs(comp)) for lang, comp in target.compilers.items()) - file_defines = dict((lang, []) for lang in target.compilers) - file_inc_dirs = dict((lang, []) for lang in target.compilers) + file_args = {l: c.compiler_args() for l, c in target.compilers.items()} + file_defines = {l: [] for l in target.compilers} + file_inc_dirs = {l: [] for l in target.compilers} # The order in which these compile args are added must match # generate_single_compile() and generate_basic_compiler_args() for l, comp in target.compilers.items(): if l in file_args: - file_args[l] += compilers.get_base_compile_args(self.get_base_options_for_target(target), comp) - file_args[l] += comp.get_option_compile_args(self.environment.coredata.compiler_options[target.for_machine]) + file_args[l] += compilers.get_base_compile_args( + self.get_base_options_for_target(target), comp) + file_args[l] += comp.get_option_compile_args( + self.environment.coredata.compiler_options[target.for_machine][comp.language]) # Add compile args added using add_project_arguments() for l, args in self.build.projects_args[target.for_machine].get(target.subproject, {}).items(): @@ -905,10 +939,11 @@ file_args[l] += args # Compile args added from the env or cross file: CFLAGS/CXXFLAGS, etc. We want these # to override all the defaults, but not the per-target compile args. 
- for key, opt in self.environment.coredata.compiler_options[target.for_machine].items(): - l, suffix = key.split('_', 1) - if suffix == 'args' and l in file_args: - file_args[l] += opt.value + for l in file_args.keys(): + opts = self.environment.coredata.compiler_options[target.for_machine][l] + k = 'args' + if k in opts: + file_args[l] += opts[k].value for args in file_args.values(): # This is where Visual Studio will insert target_args, target_defines, # etc, which are added later from external deps (see below). @@ -949,9 +984,7 @@ t_inc_dirs = [self.relpath(self.get_target_private_dir(target), self.get_target_dir(target))] if target.implicit_include_directories: - t_inc_dirs += ['.'] - if target.implicit_include_directories: - t_inc_dirs += [proj_to_src_dir] + t_inc_dirs += ['.', proj_to_src_dir] args += ['-I' + arg for arg in t_inc_dirs] # Split preprocessor defines and include directories out of the list of @@ -966,9 +999,8 @@ else: define = arg[2:] # De-dup - if define in file_defines[l]: - file_defines[l].remove(define) - file_defines[l].append(define) + if define not in file_defines[l]: + file_defines[l].append(define) elif arg.startswith(('-I', '/I')) or arg == '%(AdditionalIncludeDirectories)': file_args[l].remove(arg) # Don't escape the marker @@ -979,6 +1011,9 @@ # De-dup if inc_dir not in file_inc_dirs[l]: file_inc_dirs[l].append(inc_dir) + # Add include dirs to target as well so that "Go to Document" works in headers + if inc_dir not in target_inc_dirs: + target_inc_dirs.append(inc_dir) # Split compile args needed to find external dependencies # Link args are added while generating the link command @@ -986,30 +1021,30 @@ # Cflags required by external deps might have UNIX-specific flags, # so filter them out if needed if isinstance(d, dependencies.OpenMPDependency): - d_compile_args = compiler.openmp_flags() + ET.SubElement(clconf, 'OpenMPSupport').text = 'true' else: d_compile_args = compiler.unix_args_to_native(d.get_compile_args()) - for arg in d_compile_args: - if arg.startswith(('-D', '/D')): - define = arg[2:] - # De-dup - if define in target_defines: - target_defines.remove(define) - target_defines.append(define) - elif arg.startswith(('-I', '/I')): - inc_dir = arg[2:] - # De-dup - if inc_dir not in target_inc_dirs: - target_inc_dirs.append(inc_dir) - else: - target_args.append(arg) + for arg in d_compile_args: + if arg.startswith(('-D', '/D')): + define = arg[2:] + # De-dup + if define in target_defines: + target_defines.remove(define) + target_defines.append(define) + elif arg.startswith(('-I', '/I')): + inc_dir = arg[2:] + # De-dup + if inc_dir not in target_inc_dirs: + target_inc_dirs.append(inc_dir) + else: + target_args.append(arg) languages += gen_langs + if '/Gw' in build_args: + target_args.append('/Gw') if len(target_args) > 0: target_args.append('%(AdditionalOptions)') ET.SubElement(clconf, "AdditionalOptions").text = ' '.join(target_args) - - target_inc_dirs.append('%(AdditionalIncludeDirectories)') ET.SubElement(clconf, 'AdditionalIncludeDirectories').text = ';'.join(target_inc_dirs) target_defines.append('%(PreprocessorDefinitions)') ET.SubElement(clconf, 'PreprocessorDefinitions').text = ';'.join(target_defines) @@ -1020,7 +1055,7 @@ if self.get_option_for_target('werror', target): ET.SubElement(clconf, 'TreatWarningAsError').text = 'true' # Optimization flags - o_flags = split_o_flags_args(buildtype_args) + o_flags = split_o_flags_args(build_args) if '/Ox' in o_flags: ET.SubElement(clconf, 'Optimization').text = 'Full' elif '/O2' in o_flags: @@ -1043,12 
+1078,10 @@ # Note: SuppressStartupBanner is /NOLOGO and is 'true' by default pch_sources = {} if self.environment.coredata.base_options.get('b_pch', False): - pch_node = ET.SubElement(clconf, 'PrecompiledHeader') for lang in ['c', 'cpp']: pch = target.get_pch(lang) if not pch: continue - pch_node.text = 'Use' if compiler.id == 'msvc': if len(pch) == 1: # Auto generate PCH. @@ -1062,24 +1095,22 @@ # I don't know whether its relevant but let's handle other compilers # used with a vs backend pch_sources[lang] = [pch[0], None, lang, None] - if len(pch_sources) == 1: - # If there is only 1 language with precompiled headers, we can use it for the entire project, which - # is cleaner than specifying it for each source file. - self.use_pch(pch_sources, list(pch_sources)[0], clconf) resourcecompile = ET.SubElement(compiles, 'ResourceCompile') ET.SubElement(resourcecompile, 'PreprocessorDefinitions') # Linker options link = ET.SubElement(compiles, 'Link') - extra_link_args = CompilerArgs(compiler) + extra_link_args = compiler.compiler_args() # FIXME: Can these buildtype linker args be added as tags in the # vcxproj file (similar to buildtype compiler args) instead of in # AdditionalOptions? extra_link_args += compiler.get_buildtype_linker_args(self.buildtype) # Generate Debug info - if self.buildtype.startswith('debug'): + if self.debug: self.generate_debug_information(link) + else: + ET.SubElement(link, 'GenerateDebugInformation').text = 'false' if not isinstance(target, build.StaticLibrary): if isinstance(target, build.SharedModule): options = self.environment.coredata.base_options @@ -1100,14 +1131,14 @@ # Extend without reordering or de-dup to preserve `-L -l` sets # https://github.com/mesonbuild/meson/issues/1718 if isinstance(dep, dependencies.OpenMPDependency): - extra_link_args.extend_direct(compiler.openmp_flags()) + ET.SubElement(clconf, 'OpenMPSuppport').text = 'true' else: extra_link_args.extend_direct(dep.get_link_args()) for d in target.get_dependencies(): if isinstance(d, build.StaticLibrary): for dep in d.get_external_deps(): if isinstance(dep, dependencies.OpenMPDependency): - extra_link_args.extend_direct(compiler.openmp_flags()) + ET.SubElement(clconf, 'OpenMPSuppport').text = 'true' else: extra_link_args.extend_direct(dep.get_link_args()) # Add link args for c_* or cpp_* build options. Currently this only @@ -1115,7 +1146,8 @@ # to be after all internal and external libraries so that unresolved # symbols from those can be found here. This is needed when the # *_winlibs that we want to link to are static mingw64 libraries. 
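
In the hunks above, OpenMP dependencies are no longer forwarded as raw compile or link flags; the generator instead flips the MSBuild OpenMPSupport switch on the ClCompile node. A rough, self-contained sketch of that special-casing is below; the stand-in dependency classes and helper name are invented for illustration and are not part of the patch.

import xml.etree.ElementTree as ET

class OpenMPDep:                      # stand-in for dependencies.OpenMPDependency
    def get_link_args(self): return []

class ExternalLibDep:                 # stand-in for any other external dependency
    def get_link_args(self): return ['zlib.lib']

def apply_dep_link_args(clconf, link, deps):
    extra_link_args = []
    for dep in deps:
        if isinstance(dep, OpenMPDep):
            # MSBuild has a first-class OpenMP switch, so no /openmp flag is needed.
            ET.SubElement(clconf, 'OpenMPSupport').text = 'true'
        else:
            extra_link_args.extend(dep.get_link_args())
    if extra_link_args:
        ET.SubElement(link, 'AdditionalDependencies').text = ';'.join(extra_link_args)

clconf, link = ET.Element('ClCompile'), ET.Element('Link')
apply_dep_link_args(clconf, link, [OpenMPDep(), ExternalLibDep()])
print(ET.tostring(clconf).decode(), ET.tostring(link).decode())
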
- extra_link_args += compiler.get_option_link_args(self.environment.coredata.compiler_options[compiler.for_machine]) + extra_link_args += compiler.get_option_link_args( + self.environment.coredata.compiler_options[compiler.for_machine][comp.language]) (additional_libpaths, additional_links, extra_link_args) = self.split_link_args(extra_link_args.to_native()) # Add more libraries to be linked if needed @@ -1176,11 +1208,9 @@ if target.vs_module_defs: relpath = os.path.join(down, target.vs_module_defs.rel_to_builddir(self.build_to_src)) ET.SubElement(link, 'ModuleDefinitionFile').text = relpath - if '/ZI' in buildtype_args or '/Zi' in buildtype_args: + if self.debug: pdb = ET.SubElement(link, 'ProgramDataBaseFileName') pdb.text = '$(OutDir}%s.pdb' % target_name - if isinstance(target, build.Executable): - ET.SubElement(link, 'EntryPointSymbol').text = entrypoint targetmachine = ET.SubElement(link, 'TargetMachine') targetplatform = self.platform.lower() if targetplatform == 'win32': @@ -1189,12 +1219,15 @@ targetmachine.text = 'MachineX64' elif targetplatform == 'arm': targetmachine.text = 'MachineARM' + elif targetplatform == 'arm64': + targetmachine.text = 'MachineARM64' else: raise MesonException('Unsupported Visual Studio target machine: ' + targetplatform) # /nologo ET.SubElement(link, 'SuppressStartupBanner').text = 'true' # /release - ET.SubElement(link, 'SetChecksum').text = 'true' + if not self.environment.coredata.get_builtin_option('debug'): + ET.SubElement(link, 'SetChecksum').text = 'true' meson_file_group = ET.SubElement(root, 'ItemGroup') ET.SubElement(meson_file_group, 'None', Include=os.path.join(proj_to_src_dir, build_filename)) diff -Nru meson-0.53.2/mesonbuild/backend/vs2015backend.py meson-0.57.0+really0.56.2/mesonbuild/backend/vs2015backend.py --- meson-0.53.2/mesonbuild/backend/vs2015backend.py 2019-08-28 17:15:39.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/backend/vs2015backend.py 2020-08-15 16:27:05.000000000 +0000 @@ -14,11 +14,14 @@ from .vs2010backend import Vs2010Backend from ..mesonlib import MesonException +from ..interpreter import Interpreter +from ..build import Build +import typing as T class Vs2015Backend(Vs2010Backend): - def __init__(self, build): - super().__init__(build) + def __init__(self, build: T.Optional[Build], interpreter: T.Optional[Interpreter]): + super().__init__(build, interpreter) self.name = 'vs2015' self.vs_version = '2015' if self.environment is not None: diff -Nru meson-0.53.2/mesonbuild/backend/vs2017backend.py meson-0.57.0+really0.56.2/mesonbuild/backend/vs2017backend.py --- meson-0.53.2/mesonbuild/backend/vs2017backend.py 2019-08-28 17:15:39.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/backend/vs2017backend.py 2020-08-15 16:27:05.000000000 +0000 @@ -13,15 +13,18 @@ # limitations under the License. 
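
The TargetMachine hunk above gains an arm64 branch. As a compact illustration of that platform-to-machine lookup, here is a sketch only; the helper name and the exact win32/x64 spellings are assumptions rather than text lifted from the patch.

def vs_target_machine(platform: str) -> str:
    # Assumed mapping; the arm64 entry is the case newly handled above.
    mapping = {
        'win32': 'MachineX86',
        'x64': 'MachineX64',
        'arm': 'MachineARM',
        'arm64': 'MachineARM64',
    }
    try:
        return mapping[platform.lower()]
    except KeyError:
        raise ValueError('Unsupported Visual Studio target machine: ' + platform)

assert vs_target_machine('ARM64') == 'MachineARM64'
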
import os +import typing as T import xml.etree.ElementTree as ET from .vs2010backend import Vs2010Backend from ..mesonlib import MesonException +from ..interpreter import Interpreter +from ..build import Build class Vs2017Backend(Vs2010Backend): - def __init__(self, build): - super().__init__(build) + def __init__(self, build: T.Optional[Build], interpreter: T.Optional[Interpreter]): + super().__init__(build, interpreter) self.name = 'vs2017' self.vs_version = '2017' # We assume that host == build diff -Nru meson-0.53.2/mesonbuild/backend/vs2019backend.py meson-0.57.0+really0.56.2/mesonbuild/backend/vs2019backend.py --- meson-0.53.2/mesonbuild/backend/vs2019backend.py 2019-08-28 17:15:39.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/backend/vs2019backend.py 2021-01-06 10:39:48.000000000 +0000 @@ -13,14 +13,17 @@ # limitations under the License. import os +import typing as T import xml.etree.ElementTree as ET from .vs2010backend import Vs2010Backend +from ..interpreter import Interpreter +from ..build import Build class Vs2019Backend(Vs2010Backend): - def __init__(self, build): - super().__init__(build) + def __init__(self, build: T.Optional[Build], interpreter: T.Optional[Interpreter]): + super().__init__(build, interpreter) self.name = 'vs2019' if self.environment is not None: comps = self.environment.coredata.compilers.host diff -Nru meson-0.53.2/mesonbuild/backend/xcodebackend.py meson-0.57.0+really0.56.2/mesonbuild/backend/xcodebackend.py --- meson-0.53.2/mesonbuild/backend/xcodebackend.py 2019-12-04 18:45:50.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/backend/xcodebackend.py 2021-01-06 10:39:48.000000000 +0000 @@ -18,12 +18,14 @@ from .. import mesonlib from .. import mlog import uuid, os, operator +import typing as T from ..mesonlib import MesonException +from ..interpreter import Interpreter class XCodeBackend(backends.Backend): - def __init__(self, build): - super().__init__(build) + def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional[Interpreter]): + super().__init__(build, interpreter) self.name = 'xcode' self.project_uid = self.environment.coredata.lang_guids['default'].replace('-', '')[:24] self.project_conflist = self.gen_id() @@ -74,8 +76,7 @@ if not text.endswith('\n'): self.ofile.write('\n') - def generate(self, interp): - self.interpreter = interp + def generate(self): test_data = self.serialize_tests()[0] self.generate_filemap() self.generate_buildmap() @@ -263,7 +264,7 @@ for s in t.sources: if isinstance(s, mesonlib.File): - s = s.fname + s = os.path.join(s.subdir, s.fname) if isinstance(s, str): s = os.path.join(t.subdir, s) diff -Nru meson-0.53.2/mesonbuild/build.py meson-0.57.0+really0.56.2/mesonbuild/build.py --- meson-0.53.2/mesonbuild/build.py 2020-01-23 21:41:11.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/build.py 2021-01-06 10:39:48.000000000 +0000 @@ -12,12 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -import copy, os, re -from collections import OrderedDict -import itertools, pathlib +from collections import OrderedDict, defaultdict +from functools import lru_cache +import copy import hashlib +import itertools, pathlib +import os import pickle -from functools import lru_cache +import re import typing as T from . 
import environment @@ -26,12 +28,18 @@ from .mesonlib import ( File, MesonException, MachineChoice, PerMachine, OrderedSet, listify, extract_as_list, typeslistify, stringlistify, classify_unity_sources, - get_filenames_templates_dict, substitute_values, has_path_sep, + get_filenames_templates_dict, substitute_values, has_path_sep, unholder +) +from .compilers import ( + Compiler, all_languages, is_object, clink_langs, sort_clink, lang_suffixes, + is_known_suffix ) -from .compilers import Compiler, is_object, clink_langs, sort_clink, lang_suffixes from .linkers import StaticLinker from .interpreterbase import FeatureNew +if T.TYPE_CHECKING: + from .interpreter import Test + pch_kwargs = set(['c_pch', 'cpp_pch']) lang_arg_kwargs = set([ @@ -79,6 +87,8 @@ 'override_options', 'sources', 'gnu_symbol_visibility', + 'link_language', + 'win_subsystem', ]) known_build_target_kwargs = ( @@ -89,7 +99,7 @@ rust_kwargs | cs_kwargs) -known_exe_kwargs = known_build_target_kwargs | {'implib', 'export_dynamic', 'link_language', 'pie'} +known_exe_kwargs = known_build_target_kwargs | {'implib', 'export_dynamic', 'pie'} known_shlib_kwargs = known_build_target_kwargs | {'version', 'soversion', 'vs_module_defs', 'darwin_versions'} known_shmod_kwargs = known_build_target_kwargs | {'vs_module_defs'} known_stlib_kwargs = known_build_target_kwargs | {'pic'} @@ -106,6 +116,12 @@ class InvalidArguments(MesonException): pass +class DependencyOverride: + def __init__(self, dep, node, explicit=True): + self.dep = dep + self.node = node + self.explicit = explicit + class Build: """A class that holds the status of one build including all dependencies and so on. @@ -116,14 +132,14 @@ self.project_version = None self.environment = environment self.projects = {} - self.targets = OrderedDict() + self.targets = OrderedDict() # type: T.Dict[str, 'Target'] self.run_target_names = set() # type: T.Set[T.Tuple[str, str]] self.global_args = PerMachine({}, {}) # type: PerMachine[T.Dict[str, T.List[str]]] self.projects_args = PerMachine({}, {}) # type: PerMachine[T.Dict[str, T.List[str]]] self.global_link_args = PerMachine({}, {}) # type: PerMachine[T.Dict[str, T.List[str]]] self.projects_link_args = PerMachine({}, {}) # type: PerMachine[T.Dict[str, T.List[str]]] - self.tests = [] - self.benchmarks = [] + self.tests = [] # type: T.List['Test'] + self.benchmarks = [] # type: T.List['Test'] self.headers = [] self.man = [] self.data = [] @@ -141,6 +157,7 @@ self.test_setup_default_name = None self.find_overrides = {} self.searched_programs = set() # The list of all programs that have been searched for. + self.dependency_overrides = PerMachine({}, {}) def copy(self): other = Build(self.environment) @@ -165,13 +182,13 @@ def get_subproject_dir(self): return self.subproject_dir - def get_targets(self): + def get_targets(self) -> T.Dict[str, 'Target']: return self.targets - def get_tests(self): + def get_tests(self) -> T.List['Test']: return self.tests - def get_benchmarks(self): + def get_benchmarks(self) -> T.List['Test']: return self.benchmarks def get_headers(self): @@ -341,9 +358,9 @@ def __init__(self, name, subdir, subproject, build_by_default, for_machine: MachineChoice): if has_path_sep(name): # Fix failing test 53 when this becomes an error. - mlog.warning('''Target "%s" has a path separator in its name. + mlog.warning('''Target "{}" has a path separator in its name. 
This is not supported, it can cause unexpected failures and will become -a hard error in the future.''' % name) +a hard error in the future.'''.format(name)) self.name = name self.subdir = subdir self.subproject = subproject @@ -351,26 +368,28 @@ self.for_machine = for_machine self.install = False self.build_always_stale = False - self.option_overrides = {} + self.option_overrides_base = {} + self.option_overrides_compiler = defaultdict(dict) + self.extra_files = [] # type: T.List[File] if not hasattr(self, 'typename'): raise RuntimeError('Target type is not set for target class "{}". This is a bug'.format(type(self).__name__)) - def __lt__(self, other: T.Any) -> T.Union[bool, type(NotImplemented)]: + def __lt__(self, other: object) -> bool: if not hasattr(other, 'get_id') and not callable(other.get_id): return NotImplemented return self.get_id() < other.get_id() - def __le__(self, other: T.Any) -> T.Union[bool, type(NotImplemented)]: + def __le__(self, other: object) -> bool: if not hasattr(other, 'get_id') and not callable(other.get_id): return NotImplemented return self.get_id() <= other.get_id() - def __gt__(self, other: T.Any) -> T.Union[bool, type(NotImplemented)]: + def __gt__(self, other: object) -> bool: if not hasattr(other, 'get_id') and not callable(other.get_id): return NotImplemented return self.get_id() > other.get_id() - def __ge__(self, other: T.Any) -> T.Union[bool, type(NotImplemented)]: + def __ge__(self, other: object) -> bool: if not hasattr(other, 'get_id') and not callable(other.get_id): return NotImplemented return self.get_id() >= other.get_id() @@ -389,13 +408,13 @@ outdirs[0] = default_install_dir return outdirs, custom_install_dir - def get_basename(self): + def get_basename(self) -> str: return self.name - def get_subdir(self): + def get_subdir(self) -> str: return self.subdir - def get_typename(self): + def get_typename(self) -> str: return self.typename @staticmethod @@ -409,7 +428,7 @@ return h.hexdigest()[:7] @staticmethod - def construct_id_from_path(subdir, name, type_suffix): + def construct_id_from_path(subdir: str, name: str, type_suffix: str) -> str: """Construct target ID from subdir, name and type suffix. This helper function is made public mostly for tests.""" @@ -427,7 +446,7 @@ return subdir_part + '@@' + my_id return my_id - def get_id(self): + def get_id(self) -> str: return self.construct_id_from_path( self.subdir, self.name, self.type_suffix()) @@ -441,7 +460,15 @@ # set, use the value of 'install' if it's enabled. 
self.build_by_default = True - self.option_overrides = self.parse_overrides(kwargs) + option_overrides = self.parse_overrides(kwargs) + + for k, v in option_overrides.items(): + if '_' in k: + lang, k2 = k.split('_', 1) + if lang in all_languages: + self.option_overrides_compiler[lang][k2] = v + continue + self.option_overrides_base[k] = v def parse_overrides(self, kwargs) -> dict: result = {} @@ -458,6 +485,12 @@ def is_linkable_target(self) -> bool: return False + def get_outputs(self) -> T.List[str]: + return [] + + def should_install(self) -> bool: + return False + class BuildTarget(Target): known_kwargs = known_build_target_kwargs @@ -475,6 +508,7 @@ self.link_targets = [] self.link_whole_targets = [] self.link_depends = [] + self.added_deps = set() self.name_prefix_set = False self.name_suffix_set = False self.filename = 'no_name' @@ -485,10 +519,11 @@ self.pch = {} self.extra_args = {} self.generated = [] - self.extra_files = [] self.d_features = {} self.pic = False self.pie = False + # Track build_rpath entries so we can remove them at install time + self.rpath_dirs_to_remove = set() # Sources can be: # 1. Pre-existing source files in the source tree # 2. Pre-existing sources generated by configure_file in the build tree @@ -502,7 +537,7 @@ self.check_unknown_kwargs(kwargs) self.process_compilers() if not any([self.sources, self.generated, self.objects, self.link_whole]): - raise InvalidArguments('Build target %s has no sources.' % name) + raise InvalidArguments('Build target {} has no sources.'.format(name)) self.process_compilers_late() self.validate_sources() self.validate_install(environment) @@ -512,6 +547,9 @@ repr_str = "<{0} {1}: {2}>" return repr_str.format(self.__class__.__name__, self.get_id(), self.filename) + def __str__(self): + return "{}".format(self.name) + def validate_install(self, environment): if self.for_machine is MachineChoice.BUILD and self.need_install: if environment.is_cross_build(): @@ -530,14 +568,11 @@ if k not in known_kwargs: unknowns.append(k) if len(unknowns) > 0: - mlog.warning('Unknown keyword argument(s) in target %s: %s.' % - (self.name, ', '.join(unknowns))) + mlog.warning('Unknown keyword argument(s) in target {}: {}.'.format(self.name, ', '.join(unknowns))) def process_objectlist(self, objects): assert(isinstance(objects, list)) - for s in objects: - if hasattr(s, 'held_object'): - s = s.held_object + for s in unholder(objects): if isinstance(s, (str, File, ExtractedObjects)): self.objects.append(s) elif isinstance(s, (GeneratedList, CustomTarget)): @@ -553,10 +588,7 @@ def process_sourcelist(self, sources): sources = listify(sources) added_sources = {} # If the same source is defined multiple times, use it only once. - for s in sources: - # Holder unpacking. Ugly. - if hasattr(s, 'held_object'): - s = s.held_object + for s in unholder(sources): if isinstance(s, File): if s not in added_sources: self.sources.append(s) @@ -633,9 +665,7 @@ # which is what we need. if not is_object(s): sources.append(s) - for d in self.external_deps: - if hasattr(d, 'held_object'): - d = d.held_object + for d in unholder(self.external_deps): for s in d.sources: if isinstance(s, (str, File)): sources.append(s) @@ -652,14 +682,24 @@ sources.append(s) if sources: # For each source, try to add one compiler that can compile it. - # It's ok if no compilers can do so, because users are expected to - # be able to add arbitrary non-source files to the sources list. 
+ # + # If it has a suffix that belongs to a known language, we must have + # a compiler for that language. + # + # Otherwise, it's ok if no compilers can compile it, because users + # are expected to be able to add arbitrary non-source files to the + # sources list for s in sources: for lang, compiler in compilers.items(): if compiler.can_compile(s): if lang not in self.compilers: self.compilers[lang] = compiler break + else: + if is_known_suffix(s): + raise MesonException('No {} machine compiler for "{}"'. + format(self.for_machine.get_lower_case_name(), s)) + # Re-sort according to clink_langs self.compilers = OrderedDict(sorted(self.compilers.items(), key=lambda t: sort_clink(t[0]))) @@ -697,10 +737,7 @@ link_depends. """ sources = listify(sources) - for s in sources: - if hasattr(s, 'held_object'): - s = s.held_object - + for s in unholder(sources): if isinstance(s, File): self.link_depends.append(s) elif isinstance(s, str): @@ -708,7 +745,7 @@ File.from_source_file(environment.source_dir, self.subdir, s)) elif hasattr(s, 'get_outputs'): self.link_depends.extend( - [File.from_built_file(s.subdir, p) for p in s.get_outputs()]) + [File.from_built_file(s.get_subdir(), p) for p in s.get_outputs()]) else: raise InvalidArguments( 'Link_depends arguments must be strings, Files, ' @@ -747,16 +784,17 @@ def extract_objects(self, srclist): obj_src = [] + sources_set = set(self.sources) for src in srclist: if isinstance(src, str): src = File(False, self.subdir, src) elif isinstance(src, File): - FeatureNew('File argument for extract_objects', '0.50.0').use(self.subproject) + FeatureNew.single_use('File argument for extract_objects', '0.50.0', self.subproject) else: raise MesonException('Object extraction arguments must be strings or Files.') # FIXME: It could be a generated source - if src not in self.sources: - raise MesonException('Tried to extract unknown source %s.' % src) + if src not in sources_set: + raise MesonException('Tried to extract unknown source {}.'.format(src)) obj_src.append(src) return ExtractedObjects(self, obj_src) @@ -792,7 +830,8 @@ def get_link_dep_subdirs(self): result = OrderedSet() for i in self.link_targets: - result.add(i.get_subdir()) + if not isinstance(i, StaticLibrary): + result.add(i.get_subdir()) result.update(i.get_link_dep_subdirs()) return result @@ -811,11 +850,7 @@ kwargs.get('modules', []) self.need_install = kwargs.get('install', self.need_install) llist = extract_as_list(kwargs, 'link_with') - for linktarget in llist: - # Sorry for this hack. Keyword targets are kept in holders - # in kwargs. Unpack here without looking at the exact type. - if hasattr(linktarget, "held_object"): - linktarget = linktarget.held_object + for linktarget in unholder(llist): if isinstance(linktarget, dependencies.ExternalLibrary): raise MesonException('''An external library was used in link_with keyword argument, which is reserved for libraries built as part of this project. 
External @@ -828,8 +863,7 @@ self.link_whole(linktarget) c_pchlist, cpp_pchlist, clist, cpplist, cudalist, cslist, valalist, objclist, objcpplist, fortranlist, rustlist \ - = extract_as_list(kwargs, 'c_pch', 'cpp_pch', 'c_args', 'cpp_args', 'cuda_args', 'cs_args', 'vala_args', 'objc_args', - 'objcpp_args', 'fortran_args', 'rust_args') + = [extract_as_list(kwargs, c) for c in ['c_pch', 'cpp_pch', 'c_args', 'cpp_args', 'cuda_args', 'cs_args', 'vala_args', 'objc_args', 'objcpp_args', 'fortran_args', 'rust_args']] self.add_pch('c', c_pchlist) self.add_pch('cpp', cpp_pchlist) @@ -857,7 +891,7 @@ if dfeature_debug: dfeatures['debug'] = dfeature_debug if 'd_import_dirs' in kwargs: - dfeature_import_dirs = extract_as_list(kwargs, 'd_import_dirs', unholder=True) + dfeature_import_dirs = unholder(extract_as_list(kwargs, 'd_import_dirs')) for d in dfeature_import_dirs: if not isinstance(d, IncludeDirs): raise InvalidArguments('Arguments to d_import_dirs must be include_directories.') @@ -891,17 +925,26 @@ raise InvalidArguments('Main class must be a string') self.main_class = main_class if isinstance(self, Executable): - self.gui_app = kwargs.get('gui_app', False) - if not isinstance(self.gui_app, bool): - raise InvalidArguments('Argument gui_app must be boolean.') + # This kwarg is deprecated. The value of "none" means that the kwarg + # was not specified and win_subsystem should be used instead. + self.gui_app = None + if 'gui_app' in kwargs: + if 'win_subsystem' in kwargs: + raise InvalidArguments('Can specify only gui_app or win_subsystem for a target, not both.') + self.gui_app = kwargs['gui_app'] + if not isinstance(self.gui_app, bool): + raise InvalidArguments('Argument gui_app must be boolean.') + self.win_subsystem = self.validate_win_subsystem(kwargs.get('win_subsystem', 'console')) elif 'gui_app' in kwargs: raise InvalidArguments('Argument gui_app can only be used on executables.') + elif 'win_subsystem' in kwargs: + raise InvalidArguments('Argument win_subsystem can only be used on executables.') extra_files = extract_as_list(kwargs, 'extra_files') for i in extra_files: assert(isinstance(i, File)) trial = os.path.join(environment.get_source_dir(), i.subdir, i.fname) if not(os.path.isfile(trial)): - raise InvalidArguments('Tried to add non-existing extra file %s.' % i) + raise InvalidArguments('Tried to add non-existing extra file {}.'.format(i)) self.extra_files = extra_files self.install_rpath = kwargs.get('install_rpath', '') if not isinstance(self.install_rpath, str): @@ -915,22 +958,23 @@ raise InvalidArguments('Resource argument is not a string.') trial = os.path.join(environment.get_source_dir(), self.subdir, r) if not os.path.isfile(trial): - raise InvalidArguments('Tried to add non-existing resource %s.' 
% r) + raise InvalidArguments('Tried to add non-existing resource {}.'.format(r)) self.resources = resources if 'name_prefix' in kwargs: name_prefix = kwargs['name_prefix'] if isinstance(name_prefix, list): if name_prefix: - raise InvalidArguments('name_prefix array must be empty to signify null.') - elif not isinstance(name_prefix, str): - raise InvalidArguments('name_prefix must be a string.') - self.prefix = name_prefix - self.name_prefix_set = True + raise InvalidArguments('name_prefix array must be empty to signify default.') + else: + if not isinstance(name_prefix, str): + raise InvalidArguments('name_prefix must be a string.') + self.prefix = name_prefix + self.name_prefix_set = True if 'name_suffix' in kwargs: name_suffix = kwargs['name_suffix'] if isinstance(name_suffix, list): if name_suffix: - raise InvalidArguments('name_suffix array must be empty to signify null.') + raise InvalidArguments('name_suffix array must be empty to signify default.') else: if not isinstance(name_suffix, str): raise InvalidArguments('name_suffix must be a string.') @@ -948,13 +992,13 @@ if m.is_darwin() or m.is_windows(): self.pic = True else: - self.pic = self._extract_pic_pie(kwargs, 'pic') - if isinstance(self, Executable): + self.pic = self._extract_pic_pie(kwargs, 'pic', environment, 'b_staticpic') + if isinstance(self, Executable) or (isinstance(self, StaticLibrary) and not self.pic): # Executables must be PIE on Android if self.environment.machines[self.for_machine].is_android(): self.pie = True else: - self.pie = self._extract_pic_pie(kwargs, 'pie') + self.pie = self._extract_pic_pie(kwargs, 'pie', environment, 'b_pie') self.implicit_include_directories = kwargs.get('implicit_include_directories', True) if not isinstance(self.implicit_include_directories, bool): raise InvalidArguments('Implicit_include_directories must be a boolean.') @@ -964,17 +1008,28 @@ if self.gnu_symbol_visibility != '': permitted = ['default', 'internal', 'hidden', 'protected', 'inlineshidden'] if self.gnu_symbol_visibility not in permitted: - raise InvalidArguments('GNU symbol visibility arg %s not one of: %s', - self.symbol_visibility, ', '.join(permitted)) + raise InvalidArguments('GNU symbol visibility arg {} not one of: {}'.format(self.symbol_visibility, ', '.join(permitted))) - def _extract_pic_pie(self, kwargs, arg): + def validate_win_subsystem(self, value: str) -> str: + value = value.lower() + if re.fullmatch(r'(boot_application|console|efi_application|efi_boot_service_driver|efi_rom|efi_runtime_driver|native|posix|windows)(,\d+(\.\d+)?)?', value) is None: + raise InvalidArguments('Invalid value for win_subsystem: {}.'.format(value)) + return value + + def _extract_pic_pie(self, kwargs, arg, environment, option): # Check if we have -fPIC, -fpic, -fPIE, or -fpie in cflags all_flags = self.extra_args['c'] + self.extra_args['cpp'] if '-f' + arg.lower() in all_flags or '-f' + arg.upper() in all_flags: mlog.warning("Use the '{}' kwarg instead of passing '{}' manually to {!r}".format(arg, '-f' + arg, self.name)) return True - val = kwargs.get(arg, False) + if arg in kwargs: + val = kwargs[arg] + elif option in environment.coredata.base_options: + val = environment.coredata.base_options[option].value + else: + val = False + if not isinstance(val, bool): raise InvalidArguments('Argument {} to {!r} must be boolean'.format(arg, self.name)) return val @@ -982,29 +1037,22 @@ def get_filename(self): return self.filename - def get_outputs(self): + def get_outputs(self) -> T.List[str]: return self.outputs def 
get_extra_args(self, language): return self.extra_args.get(language, []) - def get_dependencies(self, exclude=None, for_pkgconfig=False): + def get_dependencies(self, exclude=None): transitive_deps = [] if exclude is None: exclude = [] for t in itertools.chain(self.link_targets, self.link_whole_targets): if t in transitive_deps or t in exclude: continue - # When generating `Libs:` and `Libs.private:` lists in pkg-config - # files we don't want to include static libraries that we link_whole - # or are uninstalled (they're implicitly promoted to link_whole). - # But we still need to include their transitive dependencies, - # a static library we link_whole would itself link to a shared - # library or an installed static library. - if not for_pkgconfig or (not t.is_internal() and t not in self.link_whole_targets): - transitive_deps.append(t) + transitive_deps.append(t) if isinstance(t, StaticLibrary): - transitive_deps += t.get_dependencies(transitive_deps + exclude, for_pkgconfig) + transitive_deps += t.get_dependencies(transitive_deps + exclude) return transitive_deps def get_source_subdir(self): @@ -1019,7 +1067,7 @@ def get_generated_sources(self): return self.generated - def should_install(self): + def should_install(self) -> bool: return self.need_install def has_pch(self): @@ -1036,9 +1084,9 @@ def add_deps(self, deps): deps = listify(deps) - for dep in deps: - if hasattr(dep, 'held_object'): - dep = dep.held_object + for dep in unholder(deps): + if dep in self.added_deps: + continue if isinstance(dep, dependencies.InternalDependency): # Those parts that are internal. self.process_sourcelist(dep.sources) @@ -1053,7 +1101,7 @@ [], dep.get_compile_args(), dep.get_link_args(), - [], [], [], []) + [], [], [], [], {}) self.external_deps.append(extpart) # Deps of deps. self.add_deps(dep.ext_deps) @@ -1077,6 +1125,7 @@ 'either an external dependency (returned by find_library() or ' 'dependency()) or an internal dependency (returned by ' 'declare_dependency()).'.format(type(dep).__name__)) + self.added_deps.add(dep) def get_external_deps(self): return self.external_deps @@ -1085,15 +1134,21 @@ return isinstance(self, StaticLibrary) and not self.need_install def link(self, target): - for t in listify(target, unholder=True): - if isinstance(self, StaticLibrary) and self.need_install and t.is_internal(): - # When we're a static library and we link_with to an - # internal/convenience library, promote to link_whole. - return self.link_whole(t) + for t in unholder(listify(target)): + if isinstance(self, StaticLibrary) and self.need_install: + if isinstance(t, (CustomTarget, CustomTargetIndex)): + if not t.should_install(): + mlog.warning('Try to link an installed static library target {} with a custom target ' + 'that is not installed, this might cause problems when you try to use ' + 'this static library'.format(self.name)) + elif t.is_internal(): + # When we're a static library and we link_with to an + # internal/convenience library, promote to link_whole. + return self.link_whole(t) if not isinstance(t, (Target, CustomTargetIndex)): raise InvalidArguments('{!r} is not a target.'.format(t)) if not t.is_linkable_target(): - raise InvalidArguments('Link target {!r} is not linkable.'.format(t)) + raise InvalidArguments("Link target '{!s}' is not linkable.".format(t)) if isinstance(self, SharedLibrary) and isinstance(t, StaticLibrary) and not t.pic: msg = "Can't link non-PIC static library {!r} into shared library {!r}. 
".format(t.name, self.name) msg += "Use the 'pic' option to static_library to build with PIC." @@ -1107,7 +1162,7 @@ self.link_targets.append(t) def link_whole(self, target): - for t in listify(target, unholder=True): + for t in unholder(listify(target)): if isinstance(t, (CustomTarget, CustomTargetIndex)): if not t.is_linkable_target(): raise InvalidArguments('Custom target {!r} is not linkable.'.format(t)) @@ -1146,7 +1201,7 @@ return elif len(pchlist) == 1: if not environment.is_header(pchlist[0]): - raise InvalidArguments('PCH argument %s is not a header.' % pchlist[0]) + raise InvalidArguments('PCH argument {} is not a header.'.format(pchlist[0])) elif len(pchlist) == 2: if environment.is_header(pchlist[0]): if not environment.is_source(pchlist[1]): @@ -1156,7 +1211,7 @@ raise InvalidArguments('PCH definition must contain one header and at most one source.') pchlist = [pchlist[1], pchlist[0]] else: - raise InvalidArguments('PCH argument %s is of unknown type.' % pchlist[0]) + raise InvalidArguments('PCH argument {} is of unknown type.'.format(pchlist[0])) if (os.path.dirname(pchlist[0]) != os.path.dirname(pchlist[1])): raise InvalidArguments('PCH files must be stored in the same folder.') @@ -1168,15 +1223,12 @@ if not isinstance(f, str): raise MesonException('PCH arguments must be strings.') if not os.path.isfile(os.path.join(self.environment.source_dir, self.subdir, f)): - raise MesonException('File %s does not exist.' % f) + raise MesonException('File {} does not exist.'.format(f)) self.pch[language] = pchlist def add_include_dirs(self, args, set_is_system: T.Optional[str] = None): ids = [] - for a in args: - # FIXME same hack, forcibly unpack from holder. - if hasattr(a, 'held_object'): - a = a.held_object + for a in unholder(args): if not isinstance(a, IncludeDirs): raise InvalidArguments('Include directory to be added is not an include directory object.') ids.append(a) @@ -1209,11 +1261,7 @@ See: https://github.com/mesonbuild/meson/issues/1653 ''' - langs = [] - - # User specified link_language of target (for multi-language targets) - if self.link_language: - return [self.link_language] + langs = [] # type: T.List[str] # Check if any of the external libraries were written in this language for dep in self.external_deps: @@ -1245,6 +1293,12 @@ # Populate list of all compilers, not just those being used to compile # sources in this target all_compilers = self.environment.coredata.compilers[self.for_machine] + + # If the user set the link_language, just return that. + if self.link_language: + comp = all_compilers[self.link_language] + return comp, comp.language_stdlib_only_link_flags() + # Languages used by dependencies dep_langs = self.get_langs_used_by_deps() # Pick a compiler based on the language priority-order @@ -1263,6 +1317,8 @@ if dl != linker.language: stdlib_args += all_compilers[dl].language_stdlib_only_link_flags() added_languages.add(dl) + # Type of var 'linker' is Compiler. + # Pretty hard to fix because the return value is passed everywhere return linker, stdlib_args m = 'Could not get a dynamic linker for build target {!r}' @@ -1290,9 +1346,9 @@ 2. If the target contains only objects, process_compilers guesses and picks the first compiler that smells right. ''' - linker, _ = self.get_clink_dynamic_linker_and_stdlibs() + compiler, _ = self.get_clink_dynamic_linker_and_stdlibs() # Mixing many languages with MSVC is not supported yet so ignore stdlibs. 
- if linker and linker.get_id() in {'msvc', 'clang-cl', 'intel-cl', 'llvm', 'dmd', 'nvcc'}: + if compiler and compiler.get_linker_id() in {'link', 'lld-link', 'xilink', 'optlink'}: return True return False @@ -1303,20 +1359,19 @@ for link_target in self.link_targets: if isinstance(link_target, SharedModule): if self.environment.machines[self.for_machine].is_darwin(): - raise MesonException('''target links against shared modules. -This is not permitted on OSX''') + raise MesonException( + 'target links against shared modules. This is not permitted on OSX') else: - mlog.warning('''target links against shared modules. This is not -recommended as it is not supported on some platforms''') + mlog.warning('target links against shared modules. This ' + 'is not recommended as it is not supported on some ' + 'platforms') return class Generator: def __init__(self, args, kwargs): if len(args) != 1: raise InvalidArguments('Generator requires exactly one positional argument: the executable') - exe = args[0] - if hasattr(exe, 'held_object'): - exe = exe.held_object + exe = unholder(args[0]) if not isinstance(exe, (Executable, dependencies.ExternalProgram)): raise InvalidArguments('First generator argument must be an executable.') self.exe = exe @@ -1372,9 +1427,9 @@ raise InvalidArguments('Capture must be boolean.') self.capture = capture if 'depends' in kwargs: - depends = listify(kwargs['depends'], unholder=True) + depends = unholder(listify(kwargs['depends'])) for d in depends: - if not isinstance(d, BuildTarget): + if not (isinstance(d, (BuildTarget, CustomTarget))): raise InvalidArguments('Depends entries must be build targets.') self.depends.append(d) @@ -1417,9 +1472,7 @@ class GeneratedList: def __init__(self, generator, subdir, preserve_path_from=None, extra_args=None): - if hasattr(generator, 'held_object'): - generator = generator.held_object - self.generator = generator + self.generator = unholder(generator) self.name = self.generator.exe self.subdir = subdir self.infilelist = [] @@ -1429,10 +1482,10 @@ self.depend_files = [] self.preserve_path_from = preserve_path_from self.extra_args = extra_args if extra_args is not None else [] - if isinstance(generator.exe, dependencies.ExternalProgram): - if not generator.exe.found(): + if isinstance(self.generator.exe, dependencies.ExternalProgram): + if not self.generator.exe.found(): raise InvalidArguments('Tried to use not-found external program as generator') - path = generator.exe.get_path() + path = self.generator.exe.get_path() if os.path.isabs(path): # Can only add a dependency on an external program which we # know the absolute path of @@ -1459,7 +1512,7 @@ def get_inputs(self): return self.infilelist - def get_outputs(self): + def get_outputs(self) -> T.List[str]: return self.outfilelist def get_outputs_for(self, filename): @@ -1496,6 +1549,11 @@ elif ('c' in self.compilers and self.compilers['c'].get_id().startswith('ccrx') or 'cpp' in self.compilers and self.compilers['cpp'].get_id().startswith('ccrx')): self.suffix = 'abs' + elif ('c' in self.compilers and self.compilers['c'].get_id().startswith('xc16')): + self.suffix = 'elf' + elif ('c' in self.compilers and self.compilers['c'].get_id().startswith('c2000') or + 'cpp' in self.compilers and self.compilers['cpp'].get_id().startswith('c2000')): + self.suffix = 'out' else: self.suffix = environment.machines[for_machine].get_exe_suffix() self.filename = self.name @@ -1585,8 +1643,6 @@ def __init__(self, name, subdir, subproject, for_machine: MachineChoice, sources, objects, environment, 
kwargs): self.typename = 'static library' - if 'pic' not in kwargs and 'b_staticpic' in environment.coredata.base_options: - kwargs['pic'] = environment.coredata.base_options['b_staticpic'].value super().__init__(name, subdir, subproject, for_machine, sources, objects, environment, kwargs) if 'cs' in self.compilers: raise InvalidArguments('Static libraries not supported for C#.') @@ -1874,9 +1930,7 @@ # Visual Studio module-definitions file if 'vs_module_defs' in kwargs: - path = kwargs['vs_module_defs'] - if hasattr(path, 'held_object'): - path = path.held_object + path = unholder(kwargs['vs_module_defs']) if isinstance(path, str): if os.path.isabs(path): self.vs_module_defs = File.from_absolute_file(path) @@ -2008,7 +2062,6 @@ self.depend_files = [] # Files that this target depends on but are not on the command line. self.depfile = None self.process_kwargs(kwargs, backend) - self.extra_files = [] # Whether to use absolute paths for all files on the commandline self.absolute_paths = absolute_paths unknowns = [] @@ -2016,8 +2069,7 @@ if k not in CustomTarget.known_kwargs: unknowns.append(k) if len(unknowns) > 0: - mlog.warning('Unknown keyword arguments in target %s: %s' % - (self.name, ', '.join(unknowns))) + mlog.warning('Unknown keyword arguments in target {}: {}'.format(self.name, ', '.join(unknowns))) def get_default_install_dir(self, environment): return None @@ -2029,9 +2081,7 @@ def get_target_dependencies(self): deps = self.dependencies[:] deps += self.extra_depends - for c in self.sources: - if hasattr(c, 'held_object'): - c = c.held_object + for c in unholder(self.sources): if isinstance(c, (BuildTarget, CustomTarget)): deps.append(c) return deps @@ -2055,7 +2105,7 @@ return bdeps def flatten_command(self, cmd): - cmd = listify(cmd, unholder=True) + cmd = unholder(listify(cmd)) final_cmd = [] for c in cmd: if isinstance(c, str): @@ -2083,7 +2133,7 @@ def process_kwargs(self, kwargs, backend): self.process_kwargs_base(kwargs) - self.sources = extract_as_list(kwargs, 'input', unholder=True) + self.sources = unholder(extract_as_list(kwargs, 'input')) if 'output' not in kwargs: raise InvalidArguments('Missing keyword argument "output".') self.outputs = listify(kwargs['output']) @@ -2142,7 +2192,7 @@ 'when installing a target') if isinstance(kwargs['install_dir'], list): - FeatureNew('multiple install_dir for custom_target', '0.40.0').use(self.subproject) + FeatureNew.single_use('multiple install_dir for custom_target', '0.40.0', self.subproject) # If an item in this list is False, the output corresponding to # the list index of that item will not be installed self.install_dir = typeslistify(kwargs['install_dir'], (str, bool)) @@ -2154,7 +2204,6 @@ if 'build_always' in kwargs and 'build_always_stale' in kwargs: raise InvalidArguments('build_always and build_always_stale are mutually exclusive. Combine build_by_default and build_always_stale.') elif 'build_always' in kwargs: - mlog.deprecation('build_always is deprecated. 
Combine build_by_default and build_always_stale instead.') if 'build_by_default' not in kwargs: self.build_by_default = kwargs['build_always'] self.build_always_stale = kwargs['build_always'] @@ -2162,13 +2211,11 @@ self.build_always_stale = kwargs['build_always_stale'] if not isinstance(self.build_always_stale, bool): raise InvalidArguments('Argument build_always_stale must be a boolean.') - extra_deps, depend_files = extract_as_list(kwargs, 'depends', 'depend_files', pop = False) - for ed in extra_deps: - while hasattr(ed, 'held_object'): - ed = ed.held_object + extra_deps, depend_files = [extract_as_list(kwargs, c, pop=False) for c in ['depends', 'depend_files']] + for ed in unholder(extra_deps): if not isinstance(ed, (CustomTarget, BuildTarget)): - raise InvalidArguments('Can only depend on toplevel targets: custom_target or build_target (executable or a library) got: %s(%s)' - % (type(ed), ed)) + raise InvalidArguments('Can only depend on toplevel targets: custom_target or build_target (executable or a library) got: {}({})' + .format(type(ed), ed)) self.extra_depends.append(ed) for i in depend_files: if isinstance(i, (File, str)): @@ -2180,7 +2227,7 @@ def get_dependencies(self): return self.dependencies - def should_install(self): + def should_install(self) -> bool: return self.install def get_custom_install_dir(self): @@ -2189,7 +2236,7 @@ def get_custom_install_mode(self): return self.install_mode - def get_outputs(self): + def get_outputs(self) -> T.List[str]: return self.outputs def get_filename(self): @@ -2200,9 +2247,7 @@ def get_generated_lists(self): genlists = [] - for c in self.sources: - if hasattr(c, 'held_object'): - c = c.held_object + for c in unholder(self.sources): if isinstance(c, GeneratedList): genlists.append(c) return genlists @@ -2213,7 +2258,7 @@ def get_dep_outname(self, infilenames): if self.depfile is None: raise InvalidArguments('Tried to get depfile name for custom_target that does not have depfile defined.') - if len(infilenames): + if infilenames: plainname = os.path.basename(infilenames[0]) basename = os.path.splitext(plainname)[0] return self.depfile.replace('@BASENAME@', basename).replace('@PLAINNAME@', plainname) @@ -2226,7 +2271,7 @@ if len(self.outputs) != 1: return False suf = os.path.splitext(self.outputs[0])[-1] - if suf == '.a' or suf == '.dll' or suf == '.lib' or suf == '.so': + if suf == '.a' or suf == '.dll' or suf == '.lib' or suf == '.so' or suf == '.dylib': return True def get_link_deps_mapping(self, prefix, environment): @@ -2238,6 +2283,18 @@ def get_all_link_deps(self): return [] + def is_internal(self) -> bool: + if not self.should_install(): + return True + for out in self.get_outputs(): + # Can't check if this is a static library, so try to guess + if not out.endswith(('.a', '.lib')): + return False + return True + + def extract_all_objects_recurse(self): + return self.get_outputs() + def type_suffix(self): return "@cus" @@ -2250,6 +2307,10 @@ def __delitem__(self, index): raise NotImplementedError + def __iter__(self): + for i in self.outputs: + yield CustomTargetIndex(self, i) + class RunTarget(Target): def __init__(self, name, command, args, dependencies, subdir, subproject): self.typename = 'run' @@ -2275,13 +2336,13 @@ def get_sources(self): return [] - def should_install(self): + def should_install(self) -> bool: return False - def get_filename(self): + def get_filename(self) -> str: return self.name - def get_outputs(self): + def get_outputs(self) -> T.List[str]: if isinstance(self.name, str): return [self.name] elif 
isinstance(self.name, list): @@ -2304,10 +2365,10 @@ super().__init__(name, subdir, subproject, for_machine, sources, objects, environment, kwargs) for s in self.sources: if not s.endswith('.java'): - raise InvalidArguments('Jar source %s is not a java file.' % s) + raise InvalidArguments('Jar source {} is not a java file.'.format(s)) for t in self.link_targets: if not isinstance(t, Jar): - raise InvalidArguments('Link target %s is not a jar target.' % t) + raise InvalidArguments('Link target {} is not a jar target.'.format(t)) self.filename = self.name + '.jar' self.outputs = [self.filename] self.java_args = kwargs.get('java_args', []) @@ -2353,7 +2414,7 @@ return ''.format( self.target, self.target.get_outputs().index(self.output)) - def get_outputs(self): + def get_outputs(self) -> T.List[str]: return [self.output] def get_subdir(self): @@ -2379,6 +2440,15 @@ if suf == '.a' or suf == '.dll' or suf == '.lib' or suf == '.so': return True + def should_install(self) -> bool: + return self.target.should_install() + + def is_internal(self) -> bool: + return self.target.is_internal() + + def extract_all_objects_recurse(self): + return self.target.extract_all_objects_recurse() + class ConfigureFile: def __init__(self, subdir, sourcename, targetname, configuration_data): @@ -2406,20 +2476,20 @@ return self.targetname class ConfigurationData: - def __init__(self): + def __init__(self) -> None: super().__init__() - self.values = {} + self.values = {} # T.Dict[str, T.Union[str, int, bool]] def __repr__(self): return repr(self.values) - def __contains__(self, value): + def __contains__(self, value: str) -> bool: return value in self.values - def get(self, name): + def get(self, name: str) -> T.Tuple[T.Union[str, int, bool], T.Optional[str]]: return self.values[name] # (val, desc) - def keys(self): + def keys(self) -> T.Iterator[str]: return self.values.keys() # A bit poorly named, but this represents plain data files to copy @@ -2461,9 +2531,7 @@ get all the output basenames. ''' names = [] - for s in sources: - if hasattr(s, 'held_object'): - s = s.held_object + for s in unholder(sources): if isinstance(s, str): names.append(s) elif isinstance(s, (BuildTarget, CustomTarget, CustomTargetIndex, GeneratedList)): @@ -2497,6 +2565,6 @@ raise MesonException(load_fail_msg) return obj -def save(obj, filename): +def save(obj: Build, filename: str) -> None: with open(filename, 'wb') as f: pickle.dump(obj, f) diff -Nru meson-0.53.2/mesonbuild/cmake/client.py meson-0.57.0+really0.56.2/mesonbuild/cmake/client.py --- meson-0.53.2/mesonbuild/cmake/client.py 2020-01-23 12:51:19.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/cmake/client.py 2021-01-06 10:39:48.000000000 +0000 @@ -16,15 +16,16 @@ # or an interpreter-based tool. from .common import CMakeException, CMakeConfiguration, CMakeBuildFile -from .executor import CMakeExecutor -from ..environment import Environment -from ..mesonlib import MachineChoice from .. 
import mlog from contextlib import contextmanager from subprocess import Popen, PIPE, TimeoutExpired +from .._pathlib import Path import typing as T import json -import os + +if T.TYPE_CHECKING: + from ..environment import Environment + from .executor import CMakeExecutor CMAKE_SERVER_BEGIN_STR = '[== "CMake Server" ==[' CMAKE_SERVER_END_STR = ']== "CMake Server" ==]' @@ -36,7 +37,7 @@ 'progress': ['cookie'], 'reply': ['cookie', 'inReplyTo'], 'signal': ['cookie', 'name'], -} +} # type: T.Dict[str, T.List[str]] CMAKE_REPLY_TYPES = { 'handshake': [], @@ -44,16 +45,16 @@ 'compute': [], 'cmakeInputs': ['buildFiles', 'cmakeRootDirectory', 'sourceDirectory'], 'codemodel': ['configurations'] -} +} # type: T.Dict[str, T.List[str]] # Base CMake server message classes class MessageBase: - def __init__(self, msg_type: str, cookie: str): + def __init__(self, msg_type: str, cookie: str) -> None: self.type = msg_type self.cookie = cookie - def to_dict(self) -> dict: + def to_dict(self) -> T.Dict[str, T.Union[str, T.List[str], T.Dict[str, int]]]: return {'type': self.type, 'cookie': self.cookie} def log(self) -> None: @@ -62,21 +63,21 @@ class RequestBase(MessageBase): cookie_counter = 0 - def __init__(self, msg_type: str): + def __init__(self, msg_type: str) -> None: super().__init__(msg_type, self.gen_cookie()) @staticmethod - def gen_cookie(): + def gen_cookie() -> str: RequestBase.cookie_counter += 1 return 'meson_{}'.format(RequestBase.cookie_counter) class ReplyBase(MessageBase): - def __init__(self, cookie: str, in_reply_to: str): + def __init__(self, cookie: str, in_reply_to: str) -> None: super().__init__('reply', cookie) self.in_reply_to = in_reply_to class SignalBase(MessageBase): - def __init__(self, cookie: str, signal_name: str): + def __init__(self, cookie: str, signal_name: str) -> None: super().__init__('signal', cookie) self.signal_name = signal_name @@ -86,7 +87,7 @@ # Special Message classes class Error(MessageBase): - def __init__(self, cookie: str, message: str): + def __init__(self, cookie: str, message: str) -> None: super().__init__('error', cookie) self.message = message @@ -94,7 +95,7 @@ mlog.error(mlog.bold('CMake server error:'), mlog.red(self.message)) class Message(MessageBase): - def __init__(self, cookie: str, message: str): + def __init__(self, cookie: str, message: str) -> None: super().__init__('message', cookie) self.message = message @@ -103,19 +104,21 @@ pass class Progress(MessageBase): - def __init__(self, cookie: str): + def __init__(self, cookie: str) -> None: super().__init__('progress', cookie) def log(self) -> None: pass class MessageHello(MessageBase): - def __init__(self, supported_protocol_versions: T.List[dict]): + def __init__(self, supported_protocol_versions: T.List[T.Dict[str, int]]) -> None: super().__init__('hello', '') self.supported_protocol_versions = supported_protocol_versions def supports(self, major: int, minor: T.Optional[int] = None) -> bool: for i in self.supported_protocol_versions: + assert 'major' in i + assert 'minor' in i if major == i['major']: if minor is None or minor == i['minor']: return True @@ -124,7 +127,7 @@ # Request classes class RequestHandShake(RequestBase): - def __init__(self, src_dir: str, build_dir: str, generator: str, vers_major: int, vers_minor: T.Optional[int] = None): + def __init__(self, src_dir: Path, build_dir: Path, generator: str, vers_major: int, vers_minor: T.Optional[int] = None) -> None: super().__init__('handshake') self.src_dir = src_dir self.build_dir = build_dir @@ -132,19 +135,19 @@ self.vers_major 
= vers_major self.vers_minor = vers_minor - def to_dict(self) -> dict: + def to_dict(self) -> T.Dict[str, T.Union[str, T.List[str], T.Dict[str, int]]]: vers = {'major': self.vers_major} if self.vers_minor is not None: vers['minor'] = self.vers_minor # Old CMake versions (3.7) want '/' even on Windows - src_list = os.path.normpath(self.src_dir).split(os.sep) - bld_list = os.path.normpath(self.build_dir).split(os.sep) + self.src_dir = self.src_dir.resolve() + self.build_dir = self.build_dir.resolve() return { **super().to_dict(), - 'sourceDirectory': '/'.join(src_list), - 'buildDirectory': '/'.join(bld_list), + 'sourceDirectory': self.src_dir.as_posix(), + 'buildDirectory': self.build_dir.as_posix(), 'generator': self.generator, 'protocolVersion': vers } @@ -154,55 +157,55 @@ super().__init__('configure') self.args = args - def to_dict(self) -> dict: + def to_dict(self) -> T.Dict[str, T.Union[str, T.List[str], T.Dict[str, int]]]: res = super().to_dict() if self.args: res['cacheArguments'] = self.args return res class RequestCompute(RequestBase): - def __init__(self): + def __init__(self) -> None: super().__init__('compute') class RequestCMakeInputs(RequestBase): - def __init__(self): + def __init__(self) -> None: super().__init__('cmakeInputs') class RequestCodeModel(RequestBase): - def __init__(self): + def __init__(self) -> None: super().__init__('codemodel') # Reply classes class ReplyHandShake(ReplyBase): - def __init__(self, cookie: str): + def __init__(self, cookie: str) -> None: super().__init__(cookie, 'handshake') class ReplyConfigure(ReplyBase): - def __init__(self, cookie: str): + def __init__(self, cookie: str) -> None: super().__init__(cookie, 'configure') class ReplyCompute(ReplyBase): - def __init__(self, cookie: str): + def __init__(self, cookie: str) -> None: super().__init__(cookie, 'compute') class ReplyCMakeInputs(ReplyBase): - def __init__(self, cookie: str, cmake_root: str, src_dir: str, build_files: T.List[CMakeBuildFile]): + def __init__(self, cookie: str, cmake_root: Path, src_dir: Path, build_files: T.List[CMakeBuildFile]) -> None: super().__init__(cookie, 'cmakeInputs') self.cmake_root = cmake_root self.src_dir = src_dir self.build_files = build_files def log(self) -> None: - mlog.log('CMake root: ', mlog.bold(self.cmake_root)) - mlog.log('Source dir: ', mlog.bold(self.src_dir)) + mlog.log('CMake root: ', mlog.bold(self.cmake_root.as_posix())) + mlog.log('Source dir: ', mlog.bold(self.src_dir.as_posix())) mlog.log('Build files:', mlog.bold(str(len(self.build_files)))) with mlog.nested(): for i in self.build_files: mlog.log(str(i)) class ReplyCodeModel(ReplyBase): - def __init__(self, data: dict): + def __init__(self, data: T.Dict[str, T.Any]) -> None: super().__init__(data['cookie'], 'codemodel') self.configs = [] for i in data['configurations']: @@ -218,9 +221,9 @@ # Main client class class CMakeClient: - def __init__(self, env: Environment): + def __init__(self, env: 'Environment') -> None: self.env = env - self.proc = None + self.proc = None # type: T.Optional[Popen] self.type_map = { 'error': lambda data: Error(data['cookie'], data['errorMessage']), 'hello': lambda data: MessageHello(data['supportedProtocolVersions']), @@ -228,7 +231,7 @@ 'progress': lambda data: Progress(data['cookie']), 'reply': self.resolve_type_reply, 'signal': lambda data: SignalBase(data['cookie'], data['name']) - } + } # type: T.Dict[str, T.Callable[[T.Dict[str, T.Any]], MessageBase]] self.reply_map = { 'handshake': lambda data: ReplyHandShake(data['cookie']), @@ -236,10 +239,10 @@ 
'compute': lambda data: ReplyCompute(data['cookie']), 'cmakeInputs': self.resolve_reply_cmakeInputs, 'codemodel': lambda data: ReplyCodeModel(data), - } + } # type: T.Dict[str, T.Callable[[T.Dict[str, T.Any]], ReplyBase]] - def readMessageRaw(self) -> dict: - assert(self.proc is not None) + def readMessageRaw(self) -> T.Dict[str, T.Any]: + assert self.proc is not None rawData = [] begin = False while self.proc.poll() is None: @@ -257,7 +260,11 @@ begin = True # Begin of the message if rawData: - return json.loads('\n'.join(rawData)) + res = json.loads('\n'.join(rawData)) + assert isinstance(res, dict) + for i in res.keys(): + assert isinstance(i, str) + return res raise CMakeException('Failed to read data from the CMake server') def readMessage(self) -> MessageBase: @@ -287,7 +294,7 @@ reply.log() - def query_checked(self, request: RequestBase, message: str) -> ReplyBase: + def query_checked(self, request: RequestBase, message: str) -> MessageBase: reply = self.query(request) h = mlog.green('SUCCEEDED') if reply.type == 'reply' else mlog.red('FAILED') mlog.log(message + ':', h) @@ -296,7 +303,7 @@ raise CMakeException('CMake server query failed') return reply - def do_handshake(self, src_dir: str, build_dir: str, generator: str, vers_major: int, vers_minor: T.Optional[int] = None) -> None: + def do_handshake(self, src_dir: Path, build_dir: Path, generator: str, vers_major: int, vers_minor: T.Optional[int] = None) -> None: # CMake prints the hello message on startup msg = self.readMessage() if not isinstance(msg, MessageHello): @@ -305,7 +312,7 @@ request = RequestHandShake(src_dir, build_dir, generator, vers_major, vers_minor) self.query_checked(request, 'CMake server handshake') - def resolve_type_reply(self, data: dict) -> ReplyBase: + def resolve_type_reply(self, data: T.Dict[str, T.Any]) -> ReplyBase: reply_type = data['inReplyTo'] func = self.reply_map.get(reply_type, None) if not func: @@ -315,28 +322,25 @@ raise CMakeException('Key "{}" is missing from CMake server message type {}'.format(i, type)) return func(data) - def resolve_reply_cmakeInputs(self, data: dict) -> ReplyCMakeInputs: + def resolve_reply_cmakeInputs(self, data: T.Dict[str, T.Any]) -> ReplyCMakeInputs: files = [] for i in data['buildFiles']: for j in i['sources']: - files += [CMakeBuildFile(j, i['isCMake'], i['isTemporary'])] - return ReplyCMakeInputs(data['cookie'], data['cmakeRootDirectory'], data['sourceDirectory'], files) + files += [CMakeBuildFile(Path(j), i['isCMake'], i['isTemporary'])] + return ReplyCMakeInputs(data['cookie'], Path(data['cmakeRootDirectory']), Path(data['sourceDirectory']), files) @contextmanager - def connect(self): - self.startup() + def connect(self, cmake_exe: 'CMakeExecutor') -> T.Generator[None, None, None]: + self.startup(cmake_exe) try: yield finally: self.shutdown() - def startup(self) -> None: + def startup(self, cmake_exe: 'CMakeExecutor') -> None: if self.proc is not None: raise CMakeException('The CMake server was already started') - for_machine = MachineChoice.HOST # TODO make parameter - cmake_exe = CMakeExecutor(self.env, '>=3.7', for_machine) - if not cmake_exe.found(): - raise CMakeException('Unable to find CMake') + assert cmake_exe.found() mlog.debug('Starting CMake server with CMake', mlog.bold(' '.join(cmake_exe.get_command())), 'version', mlog.cyan(cmake_exe.version())) self.proc = Popen(cmake_exe.get_command() + ['-E', 'server', '--experimental', '--debug'], stdin=PIPE, stdout=PIPE) diff -Nru meson-0.53.2/mesonbuild/cmake/common.py 
meson-0.57.0+really0.56.2/mesonbuild/cmake/common.py --- meson-0.53.2/mesonbuild/cmake/common.py 2020-01-23 12:51:19.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/cmake/common.py 2021-01-06 10:39:48.000000000 +0000 @@ -17,18 +17,45 @@ from ..mesonlib import MesonException from .. import mlog +from .._pathlib import Path import typing as T +language_map = { + 'c': 'C', + 'cpp': 'CXX', + 'cuda': 'CUDA', + 'objc': 'OBJC', + 'objcpp': 'OBJCXX', + 'cs': 'CSharp', + 'java': 'Java', + 'fortran': 'Fortran', + 'swift': 'Swift', +} + +blacklist_cmake_defs = [ + 'CMAKE_TOOLCHAIN_FILE', + 'CMAKE_PROJECT_INCLUDE', + 'MESON_PRELOAD_FILE', + 'MESON_PS_CMAKE_CURRENT_BINARY_DIR', + 'MESON_PS_CMAKE_CURRENT_SOURCE_DIR', + 'MESON_PS_DELAYED_CALLS', + 'MESON_PS_LOADED', + 'MESON_FIND_ROOT_PATH', + 'MESON_CMAKE_SYSROOT', + 'MESON_PATHS_LIST', + 'MESON_CMAKE_ROOT', +] + class CMakeException(MesonException): pass class CMakeBuildFile: - def __init__(self, file: str, is_cmake: bool, is_temp: bool): + def __init__(self, file: Path, is_cmake: bool, is_temp: bool) -> None: self.file = file self.is_cmake = is_cmake self.is_temp = is_temp - def __repr__(self): + def __repr__(self) -> str: return '<{}: {}; cmake={}; temp={}>'.format(self.__class__.__name__, self.file, self.is_cmake, self.is_temp) def _flags_to_list(raw: str) -> T.List[str]: @@ -60,65 +87,112 @@ res = list(filter(lambda x: len(x) > 0, res)) return res +def cmake_defines_to_args(raw: T.Any, permissive: bool = False) -> T.List[str]: + res = [] # type: T.List[str] + if not isinstance(raw, list): + raw = [raw] + + for i in raw: + if not isinstance(i, dict): + raise MesonException('Invalid CMake defines. Expected a dict, but got a {}'.format(type(i).__name__)) + for key, val in i.items(): + assert isinstance(key, str) + if key in blacklist_cmake_defs: + mlog.warning('Setting', mlog.bold(key), 'is not supported. See the meson docs for cross compilation support:') + mlog.warning(' - URL: https://mesonbuild.com/CMake-module.html#cross-compilation') + mlog.warning(' --> Ignoring this option') + continue + if isinstance(val, (str, int, float)): + res += ['-D{}={}'.format(key, val)] + elif isinstance(val, bool): + val_str = 'ON' if val else 'OFF' + res += ['-D{}={}'.format(key, val_str)] + else: + raise MesonException('Type "{}" of "{}" is not supported as for a CMake define value'.format(type(val).__name__, key)) + + return res + +# TODO: this functuin will become obsolete once the `cmake_args` kwarg is dropped +def check_cmake_args(args: T.List[str]) -> T.List[str]: + res = [] # type: T.List[str] + dis = ['-D' + x for x in blacklist_cmake_defs] + assert dis # Ensure that dis is not empty. + for i in args: + if any([i.startswith(x) for x in dis]): + mlog.warning('Setting', mlog.bold(i), 'is not supported. 
See the meson docs for cross compilation support:') + mlog.warning(' - URL: https://mesonbuild.com/CMake-module.html#cross-compilation') + mlog.warning(' --> Ignoring this option') + continue + res += [i] + return res + +class CMakeInclude: + def __init__(self, path: Path, isSystem: bool = False): + self.path = path + self.isSystem = isSystem + + def __repr__(self) -> str: + return ''.format(self.path, self.isSystem) + class CMakeFileGroup: - def __init__(self, data: dict): - self.defines = data.get('defines', '') - self.flags = _flags_to_list(data.get('compileFlags', '')) - self.includes = data.get('includePath', []) - self.is_generated = data.get('isGenerated', False) - self.language = data.get('language', 'C') - self.sources = data.get('sources', []) + def __init__(self, data: T.Dict[str, T.Any]) -> None: + self.defines = data.get('defines', '') # type: str + self.flags = _flags_to_list(data.get('compileFlags', '')) # type: T.List[str] + self.is_generated = data.get('isGenerated', False) # type: bool + self.language = data.get('language', 'C') # type: str + self.sources = [Path(x) for x in data.get('sources', [])] # type: T.List[Path] # Fix the include directories - tmp = [] - for i in self.includes: + self.includes = [] # type: T.List[CMakeInclude] + for i in data.get('includePath', []): if isinstance(i, dict) and 'path' in i: - i['isSystem'] = i.get('isSystem', False) - tmp += [i] + isSystem = i.get('isSystem', False) + assert isinstance(isSystem, bool) + assert isinstance(i['path'], str) + self.includes += [CMakeInclude(Path(i['path']), isSystem)] elif isinstance(i, str): - tmp += [{'path': i, 'isSystem': False}] - self.includes = tmp + self.includes += [CMakeInclude(Path(i))] def log(self) -> None: mlog.log('flags =', mlog.bold(', '.join(self.flags))) mlog.log('defines =', mlog.bold(', '.join(self.defines))) - mlog.log('includes =', mlog.bold(', '.join(self.includes))) + mlog.log('includes =', mlog.bold(', '.join([str(x) for x in self.includes]))) mlog.log('is_generated =', mlog.bold('true' if self.is_generated else 'false')) mlog.log('language =', mlog.bold(self.language)) mlog.log('sources:') for i in self.sources: with mlog.nested(): - mlog.log(i) + mlog.log(i.as_posix()) class CMakeTarget: - def __init__(self, data: dict): - self.artifacts = data.get('artifacts', []) - self.src_dir = data.get('sourceDirectory', '') - self.build_dir = data.get('buildDirectory', '') - self.name = data.get('name', '') - self.full_name = data.get('fullName', '') - self.install = data.get('hasInstallRule', False) - self.install_paths = list(set(data.get('installPaths', []))) - self.link_lang = data.get('linkerLanguage', '') - self.link_libraries = _flags_to_list(data.get('linkLibraries', '')) - self.link_flags = _flags_to_list(data.get('linkFlags', '')) - self.link_lang_flags = _flags_to_list(data.get('linkLanguageFlags', '')) - # self.link_path = data.get('linkPath', '') - self.type = data.get('type', 'EXECUTABLE') - # self.is_generator_provided = data.get('isGeneratorProvided', False) - self.files = [] + def __init__(self, data: T.Dict[str, T.Any]) -> None: + self.artifacts = [Path(x) for x in data.get('artifacts', [])] # type: T.List[Path] + self.src_dir = Path(data.get('sourceDirectory', '')) # type: Path + self.build_dir = Path(data.get('buildDirectory', '')) # type: Path + self.name = data.get('name', '') # type: str + self.full_name = data.get('fullName', '') # type: str + self.install = data.get('hasInstallRule', False) # type: bool + self.install_paths = [Path(x) for x in 
set(data.get('installPaths', []))] # type: T.List[Path] + self.link_lang = data.get('linkerLanguage', '') # type: str + self.link_libraries = _flags_to_list(data.get('linkLibraries', '')) # type: T.List[str] + self.link_flags = _flags_to_list(data.get('linkFlags', '')) # type: T.List[str] + self.link_lang_flags = _flags_to_list(data.get('linkLanguageFlags', '')) # type: T.List[str] + # self.link_path = Path(data.get('linkPath', '')) # type: Path + self.type = data.get('type', 'EXECUTABLE') # type: str + # self.is_generator_provided = data.get('isGeneratorProvided', False) # type: bool + self.files = [] # type: T.List[CMakeFileGroup] for i in data.get('fileGroups', []): self.files += [CMakeFileGroup(i)] def log(self) -> None: - mlog.log('artifacts =', mlog.bold(', '.join(self.artifacts))) - mlog.log('src_dir =', mlog.bold(self.src_dir)) - mlog.log('build_dir =', mlog.bold(self.build_dir)) + mlog.log('artifacts =', mlog.bold(', '.join([x.as_posix() for x in self.artifacts]))) + mlog.log('src_dir =', mlog.bold(self.src_dir.as_posix())) + mlog.log('build_dir =', mlog.bold(self.build_dir.as_posix())) mlog.log('name =', mlog.bold(self.name)) mlog.log('full_name =', mlog.bold(self.full_name)) mlog.log('install =', mlog.bold('true' if self.install else 'false')) - mlog.log('install_paths =', mlog.bold(', '.join(self.install_paths))) + mlog.log('install_paths =', mlog.bold(', '.join([x.as_posix() for x in self.install_paths]))) mlog.log('link_lang =', mlog.bold(self.link_lang)) mlog.log('link_libraries =', mlog.bold(', '.join(self.link_libraries))) mlog.log('link_flags =', mlog.bold(', '.join(self.link_flags))) @@ -132,18 +206,18 @@ i.log() class CMakeProject: - def __init__(self, data: dict): - self.src_dir = data.get('sourceDirectory', '') - self.build_dir = data.get('buildDirectory', '') - self.name = data.get('name', '') - self.targets = [] + def __init__(self, data: T.Dict[str, T.Any]) -> None: + self.src_dir = Path(data.get('sourceDirectory', '')) # type: Path + self.build_dir = Path(data.get('buildDirectory', '')) # type: Path + self.name = data.get('name', '') # type: str + self.targets = [] # type: T.List[CMakeTarget] for i in data.get('targets', []): self.targets += [CMakeTarget(i)] def log(self) -> None: - mlog.log('src_dir =', mlog.bold(self.src_dir)) - mlog.log('build_dir =', mlog.bold(self.build_dir)) + mlog.log('src_dir =', mlog.bold(self.src_dir.as_posix())) + mlog.log('build_dir =', mlog.bold(self.build_dir.as_posix())) mlog.log('name =', mlog.bold(self.name)) for idx, i in enumerate(self.targets): mlog.log('Target {}:'.format(idx)) @@ -151,9 +225,9 @@ i.log() class CMakeConfiguration: - def __init__(self, data: dict): - self.name = data.get('name', '') - self.projects = [] + def __init__(self, data: T.Dict[str, T.Any]) -> None: + self.name = data.get('name', '') # type: str + self.projects = [] # type: T.List[CMakeProject] for i in data.get('projects', []): self.projects += [CMakeProject(i)] @@ -163,3 +237,78 @@ mlog.log('Project {}:'.format(idx)) with mlog.nested(): i.log() + +class SingleTargetOptions: + def __init__(self) -> None: + self.opts = {} # type: T.Dict[str, str] + self.lang_args = {} # type: T.Dict[str, T.List[str]] + self.link_args = [] # type: T.List[str] + self.install = 'preserve' + + def set_opt(self, opt: str, val: str) -> None: + self.opts[opt] = val + + def append_args(self, lang: str, args: T.List[str]) -> None: + if lang not in self.lang_args: + self.lang_args[lang] = [] + self.lang_args[lang] += args + + def append_link_args(self, args: T.List[str]) -> 
None: + self.link_args += args + + def set_install(self, install: bool) -> None: + self.install = 'true' if install else 'false' + + def get_override_options(self, initial: T.List[str]) -> T.List[str]: + res = [] # type: T.List[str] + for i in initial: + opt = i[:i.find('=')] + if opt not in self.opts: + res += [i] + res += ['{}={}'.format(k, v) for k, v in self.opts.items()] + return res + + def get_compile_args(self, lang: str, initial: T.List[str]) -> T.List[str]: + if lang in self.lang_args: + return initial + self.lang_args[lang] + return initial + + def get_link_args(self, initial: T.List[str]) -> T.List[str]: + return initial + self.link_args + + def get_install(self, initial: bool) -> bool: + return {'preserve': initial, 'true': True, 'false': False}[self.install] + +class TargetOptions: + def __init__(self) -> None: + self.global_options = SingleTargetOptions() + self.target_options = {} # type: T.Dict[str, SingleTargetOptions] + + def __getitem__(self, tgt: str) -> SingleTargetOptions: + if tgt not in self.target_options: + self.target_options[tgt] = SingleTargetOptions() + return self.target_options[tgt] + + def get_override_options(self, tgt: str, initial: T.List[str]) -> T.List[str]: + initial = self.global_options.get_override_options(initial) + if tgt in self.target_options: + initial = self.target_options[tgt].get_override_options(initial) + return initial + + def get_compile_args(self, tgt: str, lang: str, initial: T.List[str]) -> T.List[str]: + initial = self.global_options.get_compile_args(lang, initial) + if tgt in self.target_options: + initial = self.target_options[tgt].get_compile_args(lang, initial) + return initial + + def get_link_args(self, tgt: str, initial: T.List[str]) -> T.List[str]: + initial = self.global_options.get_link_args(initial) + if tgt in self.target_options: + initial = self.target_options[tgt].get_link_args(initial) + return initial + + def get_install(self, tgt: str, initial: bool) -> bool: + initial = self.global_options.get_install(initial) + if tgt in self.target_options: + initial = self.target_options[tgt].get_install(initial) + return initial diff -Nru meson-0.53.2/mesonbuild/cmake/data/preload.cmake meson-0.57.0+really0.56.2/mesonbuild/cmake/data/preload.cmake --- meson-0.53.2/mesonbuild/cmake/data/preload.cmake 2020-02-25 18:00:46.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/cmake/data/preload.cmake 2020-10-26 11:18:42.000000000 +0000 @@ -4,6 +4,9 @@ set(MESON_PS_LOADED ON) +cmake_policy(PUSH) +cmake_policy(SET CMP0054 NEW) # https://cmake.org/cmake/help/latest/policy/CMP0054.html + # Dummy macros that have a special meaning in the meson code macro(meson_ps_execute_delayed_calls) endmacro() @@ -11,6 +14,11 @@ macro(meson_ps_reload_vars) endmacro() +macro(meson_ps_disabled_function) + message(WARNING "The function '${ARGV0}' is disabled in the context of CMake subporjects.\n" + "This should not be an issue but may lead to compilaton errors.") +endmacro() + # Helper macro to inspect the current CMake state macro(meson_ps_inspect_vars) set(MESON_PS_CMAKE_CURRENT_BINARY_DIR "${CMAKE_CURRENT_BINARY_DIR}") @@ -31,5 +39,44 @@ _add_custom_target(${ARGV}) endmacro() -set(MESON_PS_DELAYED_CALLS add_custom_command;add_custom_target) +macro(set_property) + meson_ps_inspect_vars() + _set_property(${ARGV}) +endmacro() + +function(set_source_files_properties) + set(FILES) + set(I 0) + set(PROPERTIES OFF) + + while(I LESS ARGC) + if(NOT PROPERTIES) + if("${ARGV${I}}" STREQUAL "PROPERTIES") + set(PROPERTIES ON) + else() + list(APPEND FILES 
"${ARGV${I}}") + endif() + + math(EXPR I "${I} + 1") + else() + set(ID_IDX ${I}) + math(EXPR PROP_IDX "${ID_IDX} + 1") + + set(ID "${ARGV${ID_IDX}}") + set(PROP "${ARGV${PROP_IDX}}") + + set_property(SOURCE ${FILES} PROPERTY "${ID}" "${PROP}") + math(EXPR I "${I} + 2") + endif() + endwhile() +endfunction() + +# Disable some functions that would mess up the CMake meson integration +macro(target_precompile_headers) + meson_ps_disabled_function(target_precompile_headers) +endmacro() + +set(MESON_PS_DELAYED_CALLS add_custom_command;add_custom_target;set_property) meson_ps_reload_vars() + +cmake_policy(POP) diff -Nru meson-0.53.2/mesonbuild/cmake/data/run_ctgt.py meson-0.57.0+really0.56.2/mesonbuild/cmake/data/run_ctgt.py --- meson-0.53.2/mesonbuild/cmake/data/run_ctgt.py 2019-12-04 18:45:59.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/cmake/data/run_ctgt.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,72 +0,0 @@ -#!/usr/bin/env python3 - -import argparse -import subprocess -import shutil -import os -import sys - -commands = [[]] -SEPARATOR = ';;;' - -# Generate CMD parameters -parser = argparse.ArgumentParser(description='Wrapper for add_custom_command') -parser.add_argument('-d', '--directory', type=str, metavar='D', required=True, help='Working directory to cwd to') -parser.add_argument('-o', '--outputs', nargs='+', metavar='O', required=True, help='Expected output files') -parser.add_argument('-O', '--original-outputs', nargs='*', metavar='O', default=[], help='Output files expected by CMake') -parser.add_argument('commands', nargs=argparse.REMAINDER, help='A "{}" seperated list of commands'.format(SEPARATOR)) - -# Parse -args = parser.parse_args() - -dummy_target = None -if len(args.outputs) == 1 and len(args.original_outputs) == 0: - dummy_target = args.outputs[0] -elif len(args.outputs) != len(args.original_outputs): - print('Length of output list and original output list differ') - sys.exit(1) - -for i in args.commands: - if i == SEPARATOR: - commands += [[]] - continue - - i = i.replace('"', '') # Remove lefover quotes - commands[-1] += [i] - -# Execute -for i in commands: - # Skip empty lists - if not i: - continue - - try: - os.makedirs(args.directory, exist_ok=True) - subprocess.run(i, cwd=args.directory, check=True) - except subprocess.CalledProcessError: - exit(1) - -if dummy_target: - with open(dummy_target, 'a'): - os.utime(dummy_target, None) - exit(0) - -# Copy outputs -zipped_outputs = zip(args.outputs, args.original_outputs) -for expected, generated in zipped_outputs: - do_copy = False - if not os.path.exists(expected): - if not os.path.exists(generated): - print('Unable to find generated file. This can cause the build to fail:') - print(generated) - do_copy = False - else: - do_copy = True - elif os.path.exists(generated): - if os.path.getmtime(generated) > os.path.getmtime(expected): - do_copy = True - - if do_copy: - if os.path.exists(expected): - os.remove(expected) - shutil.copyfile(generated, expected) diff -Nru meson-0.53.2/mesonbuild/cmake/executor.py meson-0.57.0+really0.56.2/mesonbuild/cmake/executor.py --- meson-0.53.2/mesonbuild/cmake/executor.py 2020-02-25 18:00:46.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/cmake/executor.py 2021-01-06 10:39:48.000000000 +0000 @@ -15,37 +15,43 @@ # This class contains the basic functionality needed to run any interpreter # or an interpreter-based tool. 
-import subprocess -from pathlib import Path +import subprocess as S +from .._pathlib import Path +from threading import Thread import typing as T import re import os -import shutil -import ctypes -import textwrap - -from .. import mlog, mesonlib -from ..mesonlib import PerMachine, Popen_safe, version_compare, MachineChoice -from ..environment import Environment + +from .. import mlog +from ..mesonlib import PerMachine, Popen_safe, version_compare, MachineChoice, is_windows +from ..envconfig import get_env_var if T.TYPE_CHECKING: + from ..environment import Environment from ..dependencies.base import ExternalProgram + from ..compilers import Compiler +TYPE_result = T.Tuple[int, T.Optional[str], T.Optional[str]] +TYPE_cache_key = T.Tuple[str, T.Tuple[str, ...], str, T.FrozenSet[T.Tuple[str, str]]] class CMakeExecutor: # The class's copy of the CMake path. Avoids having to search for it # multiple times in the same Meson invocation. - class_cmakebin = PerMachine(None, None) - class_cmakevers = PerMachine(None, None) - class_cmake_cache = {} + class_cmakebin = PerMachine(None, None) # type: PerMachine[T.Optional[ExternalProgram]] + class_cmakevers = PerMachine(None, None) # type: PerMachine[T.Optional[str]] + class_cmake_cache = {} # type: T.Dict[T.Any, TYPE_result] - def __init__(self, environment: Environment, version: str, for_machine: MachineChoice, silent: bool = False): + def __init__(self, environment: 'Environment', version: str, for_machine: MachineChoice, silent: bool = False): self.min_version = version self.environment = environment self.for_machine = for_machine self.cmakebin, self.cmakevers = self.find_cmake_binary(self.environment, silent=silent) - if self.cmakebin is False: - self.cmakebin = None + self.always_capture_stderr = True + self.print_cmout = False + self.prefix_paths = [] # type: T.List[str] + self.extra_cmake_args = [] # type: T.List[str] + + if self.cmakebin is None: return if not version_compare(self.cmakevers, self.min_version): @@ -56,45 +62,50 @@ self.cmakebin = None return - def find_cmake_binary(self, environment: Environment, silent: bool = False) -> T.Tuple['ExternalProgram', str]: - from ..dependencies.base import ExternalProgram + self.prefix_paths = self.environment.coredata.builtins_per_machine[self.for_machine]['cmake_prefix_path'].value + env_pref_path_raw = get_env_var( + self.for_machine, + self.environment.is_cross_build(), + 'CMAKE_PREFIX_PATH') + if env_pref_path_raw is not None: + env_pref_path = [] # type: T.List[str] + if is_windows(): + # Cannot split on ':' on Windows because its in the drive letter + env_pref_path = env_pref_path_raw.split(os.pathsep) + else: + # https://github.com/mesonbuild/meson/issues/7294 + env_pref_path = re.split(r':|;', env_pref_path_raw) + env_pref_path = [x for x in env_pref_path if x] # Filter out empty strings + if not self.prefix_paths: + self.prefix_paths = [] + self.prefix_paths += env_pref_path + + if self.prefix_paths: + self.extra_cmake_args += ['-DCMAKE_PREFIX_PATH={}'.format(';'.join(self.prefix_paths))] - # Create an iterator of options - def search(): - # Lookup in cross or machine file. - potential_cmakepath = environment.binaries[self.for_machine].lookup_entry('cmake') - if potential_cmakepath is not None: - mlog.debug('CMake binary for %s specified from cross file, native file, or env var as %s.', self.for_machine, potential_cmakepath) - yield ExternalProgram.from_entry('cmake', potential_cmakepath) - # We never fallback if the user-specified option is no good, so - # stop returning options. 
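
A note on the prefix-path handling introduced above: cmake_prefix_path from the per-machine built-in options is extended with entries from the CMAKE_PREFIX_PATH environment variable. On Windows only ';' can separate entries, because ':' occurs in drive letters; elsewhere both ':' and ';' are accepted (see mesonbuild/meson#7294). Whatever remains is forwarded to CMake as a single -DCMAKE_PREFIX_PATH define. A small sketch of just that splitting and forwarding step; the helper name is illustrative, not Meson API:

    import re

    def prefix_path_args(env_value, is_windows):
        if is_windows:
            # ':' is part of drive letters, so only ';' can separate entries here.
            parts = env_value.split(';')
        else:
            # Accept both ':' and ';' as separators.
            parts = re.split(r':|;', env_value)
        parts = [p for p in parts if p]  # drop empty entries
        if not parts:
            return []
        # CMake expects ';' between the prefix paths it receives.
        return ['-DCMAKE_PREFIX_PATH={}'.format(';'.join(parts))]

    # prefix_path_args('/opt/a:/opt/b', is_windows=False)
    #   -> ['-DCMAKE_PREFIX_PATH=/opt/a;/opt/b']
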
- return - mlog.debug('CMake binary missing from cross or native file, or env var undefined.') - # Fallback on hard-coded defaults. - # TODO prefix this for the cross case instead of ignoring thing. - if environment.machines.matches_build_machine(self.for_machine): - for potential_cmakepath in environment.default_cmake: - mlog.debug('Trying a default CMake fallback at', potential_cmakepath) - yield ExternalProgram(potential_cmakepath, silent=True) + def find_cmake_binary(self, environment: 'Environment', silent: bool = False) -> T.Tuple[T.Optional['ExternalProgram'], T.Optional[str]]: + from ..dependencies.base import find_external_program, NonExistingExternalProgram # Only search for CMake the first time and store the result in the class # definition - if CMakeExecutor.class_cmakebin[self.for_machine] is False: + if isinstance(CMakeExecutor.class_cmakebin[self.for_machine], NonExistingExternalProgram): mlog.debug('CMake binary for %s is cached as not found' % self.for_machine) + return None, None elif CMakeExecutor.class_cmakebin[self.for_machine] is not None: mlog.debug('CMake binary for %s is cached.' % self.for_machine) else: assert CMakeExecutor.class_cmakebin[self.for_machine] is None + mlog.debug('CMake binary for %s is not cached' % self.for_machine) - for potential_cmakebin in search(): - mlog.debug('Trying CMake binary {} for machine {} at {}' - .format(potential_cmakebin.name, self.for_machine, potential_cmakebin.command)) + for potential_cmakebin in find_external_program( + environment, self.for_machine, 'cmake', 'CMake', + environment.default_cmake, allow_default_for_cross=False): version_if_ok = self.check_cmake(potential_cmakebin) if not version_if_ok: continue if not silent: mlog.log('Found CMake:', mlog.bold(potential_cmakebin.get_path()), - '(%s)' % version_if_ok) + '({})'.format(version_if_ok)) CMakeExecutor.class_cmakebin[self.for_machine] = potential_cmakebin CMakeExecutor.class_cmakevers[self.for_machine] = version_if_ok break @@ -103,8 +114,9 @@ mlog.log('Found CMake:', mlog.red('NO')) # Set to False instead of None to signify that we've already # searched for it and not found it - CMakeExecutor.class_cmakebin[self.for_machine] = False + CMakeExecutor.class_cmakebin[self.for_machine] = NonExistingExternalProgram() CMakeExecutor.class_cmakevers[self.for_machine] = None + return None, None return CMakeExecutor.class_cmakebin[self.for_machine], CMakeExecutor.class_cmakevers[self.for_machine] @@ -124,159 +136,116 @@ return None except PermissionError: msg = 'Found CMake {!r} but didn\'t have permissions to run it.'.format(' '.join(cmakebin.get_command())) - if not mesonlib.is_windows(): + if not is_windows(): msg += '\n\nOn Unix-like systems this is often caused by scripts that are not executable.' 
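
The binary lookup above also changes how negative results are cached: a NonExistingExternalProgram instance, rather than False, now marks "already searched, nothing found" in the per-machine class cache, so later CMakeExecutor instances neither repeat the search nor confuse it with "not searched yet" (None). A toy three-state cache in the same spirit; the names are illustrative, not Meson API:

    class _NotFound:
        """Stands in for NonExistingExternalProgram in this sketch."""

    _cache = {}  # machine -> None (not searched) | _NotFound() | found program

    def find_cached(machine, search):
        hit = _cache.get(machine)
        if isinstance(hit, _NotFound):
            return None             # the negative result is cached too
        if hit is not None:
            return hit              # reuse the positive result
        prog = search(machine)      # the expensive lookup runs only once
        _cache[machine] = prog if prog is not None else _NotFound()
        return prog
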
mlog.warning(msg) return None - cmvers = re.sub(r'\s*cmake version\s*', '', out.split('\n')[0]).strip() + cmvers = re.search(r'(cmake|cmake3)\s*version\s*([\d.]+)', out).group(2) return cmvers - def _cache_key(self, args: T.List[str], build_dir: str, env): - fenv = frozenset(env.items()) if env is not None else None + def set_exec_mode(self, print_cmout: T.Optional[bool] = None, always_capture_stderr: T.Optional[bool] = None) -> None: + if print_cmout is not None: + self.print_cmout = print_cmout + if always_capture_stderr is not None: + self.always_capture_stderr = always_capture_stderr + + def _cache_key(self, args: T.List[str], build_dir: Path, env: T.Optional[T.Dict[str, str]]) -> TYPE_cache_key: + fenv = frozenset(env.items()) if env is not None else frozenset() targs = tuple(args) - return (self.cmakebin, targs, build_dir, fenv) + return (self.cmakebin.get_path(), targs, build_dir.as_posix(), fenv) + + def _call_cmout_stderr(self, args: T.List[str], build_dir: Path, env: T.Optional[T.Dict[str, str]]) -> TYPE_result: + cmd = self.cmakebin.get_command() + args + proc = S.Popen(cmd, stdout=S.PIPE, stderr=S.PIPE, cwd=str(build_dir), env=env) # TODO [PYTHON_37]: drop Path conversion + + # stdout and stderr MUST be read at the same time to avoid pipe + # blocking issues. The easiest way to do this is with a separate + # thread for one of the pipes. + def print_stdout() -> None: + while True: + line = proc.stdout.readline() + if not line: + break + mlog.log(line.decode(errors='ignore').strip('\n')) + proc.stdout.close() + + t = Thread(target=print_stdout) + t.start() + + try: + # Read stderr line by line and log non trace lines + raw_trace = '' + tline_start_reg = re.compile(r'^\s*(.*\.(cmake|txt))\(([0-9]+)\):\s*(\w+)\(.*$') + inside_multiline_trace = False + while True: + line_raw = proc.stderr.readline() + if not line_raw: + break + line = line_raw.decode(errors='ignore') + if tline_start_reg.match(line): + raw_trace += line + inside_multiline_trace = not line.endswith(' )\n') + elif inside_multiline_trace: + raw_trace += line + else: + mlog.warning(line.strip('\n')) + + finally: + proc.stderr.close() + t.join() + proc.wait() + + return proc.returncode, None, raw_trace + + def _call_cmout(self, args: T.List[str], build_dir: Path, env: T.Optional[T.Dict[str, str]]) -> TYPE_result: + cmd = self.cmakebin.get_command() + args + proc = S.Popen(cmd, stdout=S.PIPE, stderr=S.STDOUT, cwd=str(build_dir), env=env) # TODO [PYTHON_37]: drop Path conversion + while True: + line = proc.stdout.readline() + if not line: + break + mlog.log(line.decode(errors='ignore').strip('\n')) + proc.stdout.close() + proc.wait() + return proc.returncode, None, None - def _call_real(self, args: T.List[str], build_dir: str, env) -> T.Tuple[int, str, str]: - os.makedirs(build_dir, exist_ok=True) + def _call_quiet(self, args: T.List[str], build_dir: Path, env: T.Optional[T.Dict[str, str]]) -> TYPE_result: + build_dir.mkdir(parents=True, exist_ok=True) cmd = self.cmakebin.get_command() + args - ret = subprocess.run(cmd, env=env, cwd=build_dir, close_fds=False, - stdout=subprocess.PIPE, stderr=subprocess.PIPE, - universal_newlines=False) + ret = S.run(cmd, env=env, cwd=str(build_dir), close_fds=False, + stdout=S.PIPE, stderr=S.PIPE, universal_newlines=False) # TODO [PYTHON_37]: drop Path conversion rc = ret.returncode out = ret.stdout.decode(errors='ignore') err = ret.stderr.decode(errors='ignore') - call = ' '.join(cmd) - mlog.debug("Called `{}` in {} -> {}".format(call, build_dir, rc)) return rc, out, err - def call(self, 
args: T.List[str], build_dir: str, env=None, disable_cache: bool = False): + def _call_impl(self, args: T.List[str], build_dir: Path, env: T.Optional[T.Dict[str, str]]) -> TYPE_result: + mlog.debug('Calling CMake ({}) in {} with:'.format(self.cmakebin.get_command(), build_dir)) + for i in args: + mlog.debug(' - "{}"'.format(i)) + if not self.print_cmout: + return self._call_quiet(args, build_dir, env) + else: + if self.always_capture_stderr: + return self._call_cmout_stderr(args, build_dir, env) + else: + return self._call_cmout(args, build_dir, env) + + def call(self, args: T.List[str], build_dir: Path, env: T.Optional[T.Dict[str, str]] = None, disable_cache: bool = False) -> TYPE_result: if env is None: - env = os.environ + env = os.environ.copy() + args = args + self.extra_cmake_args if disable_cache: - return self._call_real(args, build_dir, env) + return self._call_impl(args, build_dir, env) # First check if cached, if not call the real cmake function cache = CMakeExecutor.class_cmake_cache key = self._cache_key(args, build_dir, env) if key not in cache: - cache[key] = self._call_real(args, build_dir, env) + cache[key] = self._call_impl(args, build_dir, env) return cache[key] - def call_with_fake_build(self, args: T.List[str], build_dir: str, env=None): - # First check the cache - cache = CMakeExecutor.class_cmake_cache - key = self._cache_key(args, build_dir, env) - if key in cache: - return cache[key] - - os.makedirs(build_dir, exist_ok=True) - - # Try to set the correct compiler for C and C++ - # This step is required to make try_compile work inside CMake - fallback = os.path.realpath(__file__) # A file used as a fallback wehen everything else fails - compilers = self.environment.coredata.compilers[MachineChoice.BUILD] - - def make_abs(exe: str, lang: str) -> str: - if os.path.isabs(exe): - return exe - - p = shutil.which(exe) - if p is None: - mlog.debug('Failed to find a {} compiler for CMake. This might cause CMake to fail.'.format(lang)) - p = fallback - return p - - def choose_compiler(lang: str) -> T.Tuple[str, str]: - exe_list = [] - if lang in compilers: - exe_list = compilers[lang].get_exelist() - else: - try: - comp_obj = self.environment.compiler_from_language(lang, MachineChoice.BUILD) - if comp_obj is not None: - exe_list = comp_obj.get_exelist() - except Exception: - pass - - if len(exe_list) == 1: - return make_abs(exe_list[0], lang), '' - elif len(exe_list) == 2: - return make_abs(exe_list[1], lang), make_abs(exe_list[0], lang) - else: - mlog.debug('Failed to find a {} compiler for CMake. 
This might cause CMake to fail.'.format(lang)) - return fallback, '' - - c_comp, c_launcher = choose_compiler('c') - cxx_comp, cxx_launcher = choose_compiler('cpp') - fortran_comp, fortran_launcher = choose_compiler('fortran') - - # on Windows, choose_compiler returns path with \ as separator - replace by / before writing to CMAKE file - c_comp = c_comp.replace('\\', '/') - c_launcher = c_launcher.replace('\\', '/') - cxx_comp = cxx_comp.replace('\\', '/') - cxx_launcher = cxx_launcher.replace('\\', '/') - fortran_comp = fortran_comp.replace('\\', '/') - fortran_launcher = fortran_launcher.replace('\\', '/') - - # Reset the CMake cache - (Path(build_dir) / 'CMakeCache.txt').write_text('CMAKE_PLATFORM_INFO_INITIALIZED:INTERNAL=1\n') - - # Fake the compiler files - comp_dir = Path(build_dir) / 'CMakeFiles' / self.cmakevers - comp_dir.mkdir(parents=True, exist_ok=True) - - c_comp_file = comp_dir / 'CMakeCCompiler.cmake' - cxx_comp_file = comp_dir / 'CMakeCXXCompiler.cmake' - fortran_comp_file = comp_dir / 'CMakeFortranCompiler.cmake' - - if c_comp and not c_comp_file.is_file(): - c_comp_file.write_text(textwrap.dedent('''\ - # Fake CMake file to skip the boring and slow stuff - set(CMAKE_C_COMPILER "{}") # Should be a valid compiler for try_compile, etc. - set(CMAKE_C_COMPILER_LAUNCHER "{}") # The compiler launcher (if presentt) - set(CMAKE_C_COMPILER_ID "GNU") # Pretend we have found GCC - set(CMAKE_COMPILER_IS_GNUCC 1) - set(CMAKE_C_COMPILER_LOADED 1) - set(CMAKE_C_COMPILER_WORKS TRUE) - set(CMAKE_C_ABI_COMPILED TRUE) - set(CMAKE_C_SOURCE_FILE_EXTENSIONS c;m) - set(CMAKE_C_IGNORE_EXTENSIONS h;H;o;O;obj;OBJ;def;DEF;rc;RC) - set(CMAKE_SIZEOF_VOID_P "{}") - '''.format(c_comp, c_launcher, ctypes.sizeof(ctypes.c_voidp)))) - - if cxx_comp and not cxx_comp_file.is_file(): - cxx_comp_file.write_text(textwrap.dedent('''\ - # Fake CMake file to skip the boring and slow stuff - set(CMAKE_CXX_COMPILER "{}") # Should be a valid compiler for try_compile, etc. - set(CMAKE_CXX_COMPILER_LAUNCHER "{}") # The compiler launcher (if presentt) - set(CMAKE_CXX_COMPILER_ID "GNU") # Pretend we have found GCC - set(CMAKE_COMPILER_IS_GNUCXX 1) - set(CMAKE_CXX_COMPILER_LOADED 1) - set(CMAKE_CXX_COMPILER_WORKS TRUE) - set(CMAKE_CXX_ABI_COMPILED TRUE) - set(CMAKE_CXX_IGNORE_EXTENSIONS inl;h;hpp;HPP;H;o;O;obj;OBJ;def;DEF;rc;RC) - set(CMAKE_CXX_SOURCE_FILE_EXTENSIONS C;M;c++;cc;cpp;cxx;mm;CPP) - set(CMAKE_SIZEOF_VOID_P "{}") - '''.format(cxx_comp, cxx_launcher, ctypes.sizeof(ctypes.c_voidp)))) - - if fortran_comp and not fortran_comp_file.is_file(): - fortran_comp_file.write_text(textwrap.dedent('''\ - # Fake CMake file to skip the boring and slow stuff - set(CMAKE_Fortran_COMPILER "{}") # Should be a valid compiler for try_compile, etc. 
- set(CMAKE_Fortran_COMPILER_LAUNCHER "{}") # The compiler launcher (if presentt) - set(CMAKE_Fortran_COMPILER_ID "GNU") # Pretend we have found GCC - set(CMAKE_COMPILER_IS_GNUG77 1) - set(CMAKE_Fortran_COMPILER_LOADED 1) - set(CMAKE_Fortran_COMPILER_WORKS TRUE) - set(CMAKE_Fortran_ABI_COMPILED TRUE) - set(CMAKE_Fortran_IGNORE_EXTENSIONS h;H;o;O;obj;OBJ;def;DEF;rc;RC) - set(CMAKE_Fortran_SOURCE_FILE_EXTENSIONS f;F;fpp;FPP;f77;F77;f90;F90;for;For;FOR;f95;F95) - set(CMAKE_SIZEOF_VOID_P "{}") - '''.format(fortran_comp, fortran_launcher, ctypes.sizeof(ctypes.c_voidp)))) - - return self.call(args, build_dir, env) - def found(self) -> bool: return self.cmakebin is not None @@ -286,8 +255,11 @@ def executable_path(self) -> str: return self.cmakebin.get_path() - def get_command(self): + def get_command(self) -> T.List[str]: return self.cmakebin.get_command() + def get_cmake_prefix_paths(self) -> T.List[str]: + return self.prefix_paths + def machine_choice(self) -> MachineChoice: return self.for_machine diff -Nru meson-0.53.2/mesonbuild/cmake/fileapi.py meson-0.57.0+really0.56.2/mesonbuild/cmake/fileapi.py --- meson-0.53.2/mesonbuild/cmake/fileapi.py 2020-01-23 12:51:19.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/cmake/fileapi.py 2021-01-06 10:39:48.000000000 +0000 @@ -15,21 +15,21 @@ from .common import CMakeException, CMakeBuildFile, CMakeConfiguration import typing as T from .. import mlog -import os +from .._pathlib import Path import json import re STRIP_KEYS = ['cmake', 'reply', 'backtrace', 'backtraceGraph', 'version'] class CMakeFileAPI: - def __init__(self, build_dir: str): - self.build_dir = build_dir - self.api_base_dir = os.path.join(self.build_dir, '.cmake', 'api', 'v1') - self.request_dir = os.path.join(self.api_base_dir, 'query', 'client-meson') - self.reply_dir = os.path.join(self.api_base_dir, 'reply') - self.cmake_sources = [] - self.cmake_configurations = [] - self.kind_resolver_map = { + def __init__(self, build_dir: Path): + self.build_dir = build_dir + self.api_base_dir = self.build_dir / '.cmake' / 'api' / 'v1' + self.request_dir = self.api_base_dir / 'query' / 'client-meson' + self.reply_dir = self.api_base_dir / 'reply' + self.cmake_sources = [] # type: T.List[CMakeBuildFile] + self.cmake_configurations = [] # type: T.List[CMakeConfiguration] + self.kind_resolver_map = { 'codemodel': self._parse_codemodel, 'cmakeFiles': self._parse_cmakeFiles, } @@ -41,7 +41,7 @@ return self.cmake_configurations def setup_request(self) -> None: - os.makedirs(self.request_dir, exist_ok=True) + self.request_dir.mkdir(parents=True, exist_ok=True) query = { 'requests': [ @@ -50,18 +50,17 @@ ] } - with open(os.path.join(self.request_dir, 'query.json'), 'w') as fp: - json.dump(query, fp, indent=2) + query_file = self.request_dir / 'query.json' + query_file.write_text(json.dumps(query, indent=2)) def load_reply(self) -> None: - if not os.path.isdir(self.reply_dir): + if not self.reply_dir.is_dir(): raise CMakeException('No response from the CMake file API') - files = os.listdir(self.reply_dir) root = None reg_index = re.compile(r'^index-.*\.json$') - for i in files: - if reg_index.match(i): + for i in self.reply_dir.iterdir(): + if reg_index.match(i.name): root = i break @@ -74,10 +73,10 @@ index = self._strip_data(index) # Strip unused data (again for loaded files) # Debug output - debug_json = os.path.normpath(os.path.join(self.build_dir, '..', 'fileAPI.json')) - with open(debug_json, 'w') as fp: - json.dump(index, fp, indent=2) - mlog.cmd_ci_include(debug_json) + debug_json = 
self.build_dir / '..' / 'fileAPI.json' + debug_json = debug_json.resolve() + debug_json.write_text(json.dumps(index, indent=2)) + mlog.cmd_ci_include(debug_json.as_posix()) # parse the JSON for i in index['objects']: @@ -87,7 +86,7 @@ self.kind_resolver_map[i['kind']](i) - def _parse_codemodel(self, data: dict) -> None: + def _parse_codemodel(self, data: T.Dict[str, T.Any]) -> None: assert('configurations' in data) assert('paths' in data) @@ -100,17 +99,17 @@ # resolved and the resulting data structure is identical # to the CMake serve output. - def helper_parse_dir(dir_entry: dict) -> T.Tuple[str, str]: - src_dir = dir_entry.get('source', '.') - bld_dir = dir_entry.get('build', '.') - src_dir = src_dir if os.path.isabs(src_dir) else os.path.join(source_dir, src_dir) - bld_dir = bld_dir if os.path.isabs(bld_dir) else os.path.join(source_dir, bld_dir) - src_dir = os.path.normpath(src_dir) - bld_dir = os.path.normpath(bld_dir) + def helper_parse_dir(dir_entry: T.Dict[str, T.Any]) -> T.Tuple[Path, Path]: + src_dir = Path(dir_entry.get('source', '.')) + bld_dir = Path(dir_entry.get('build', '.')) + src_dir = src_dir if src_dir.is_absolute() else source_dir / src_dir + bld_dir = bld_dir if bld_dir.is_absolute() else build_dir / bld_dir + src_dir = src_dir.resolve() + bld_dir = bld_dir.resolve() return src_dir, bld_dir - def parse_sources(comp_group: dict, tgt: dict) -> T.Tuple[T.List[str], T.List[str], T.List[int]]: + def parse_sources(comp_group: T.Dict[str, T.Any], tgt: T.Dict[str, T.Any]) -> T.Tuple[T.List[Path], T.List[Path], T.List[int]]: gen = [] src = [] idx = [] @@ -120,21 +119,21 @@ if i >= len(src_list_raw) or 'path' not in src_list_raw[i]: continue if src_list_raw[i].get('isGenerated', False): - gen += [src_list_raw[i]['path']] + gen += [Path(src_list_raw[i]['path'])] else: - src += [src_list_raw[i]['path']] + src += [Path(src_list_raw[i]['path'])] idx += [i] return src, gen, idx - def parse_target(tgt: dict) -> dict: + def parse_target(tgt: T.Dict[str, T.Any]) -> T.Dict[str, T.Any]: src_dir, bld_dir = helper_parse_dir(cnf.get('paths', {})) # Parse install paths (if present) install_paths = [] if 'install' in tgt: - prefix = tgt['install']['prefix']['path'] - install_paths = [os.path.join(prefix, x['path']) for x in tgt['install']['destinations']] + prefix = Path(tgt['install']['prefix']['path']) + install_paths = [prefix / x['path'] for x in tgt['install']['destinations']] install_paths = list(set(install_paths)) # On the first look, it looks really nice that the CMake devs have @@ -160,7 +159,7 @@ # maybe we can make use of that in addition to the # implicit dependency detection tgt_data = { - 'artifacts': [x.get('path', '') for x in tgt.get('artifacts', [])], + 'artifacts': [Path(x.get('path', '')) for x in tgt.get('artifacts', [])], 'sourceDirectory': src_dir, 'buildDirectory': bld_dir, 'name': tgt.get('name', ''), @@ -230,7 +229,7 @@ }] return tgt_data - def parse_project(pro: dict) -> dict: + def parse_project(pro: T.Dict[str, T.Any]) -> T.Dict[str, T.Any]: # Only look at the first directory specified in directoryIndexes # TODO Figure out what the other indexes are there for p_src_dir = source_dir @@ -268,15 +267,15 @@ self.cmake_configurations += [CMakeConfiguration(cnf_data)] - def _parse_cmakeFiles(self, data: dict) -> None: - assert('inputs' in data) - assert('paths' in data) + def _parse_cmakeFiles(self, data: T.Dict[str, T.Any]) -> None: + assert 'inputs' in data + assert 'paths' in data - src_dir = data['paths']['source'] + src_dir = Path(data['paths']['source']) for i 
in data['inputs']: - path = i['path'] - path = path if os.path.isabs(path) else os.path.join(src_dir, path) + path = Path(i['path']) + path = path if path.is_absolute() else src_dir / path self.cmake_sources += [CMakeBuildFile(path, i.get('isCMake', False), i.get('isGenerated', False))] def _strip_data(self, data: T.Any) -> T.Any: @@ -309,10 +308,13 @@ return data - def _reply_file_content(self, filename: str) -> dict: - real_path = os.path.join(self.reply_dir, filename) - if not os.path.exists(real_path): + def _reply_file_content(self, filename: Path) -> T.Dict[str, T.Any]: + real_path = self.reply_dir / filename + if not real_path.exists(): raise CMakeException('File "{}" does not exist'.format(real_path)) - with open(real_path, 'r') as fp: - return json.load(fp) + data = json.loads(real_path.read_text()) + assert isinstance(data, dict) + for i in data.keys(): + assert isinstance(i, str) + return data diff -Nru meson-0.53.2/mesonbuild/cmake/generator.py meson-0.57.0+really0.56.2/mesonbuild/cmake/generator.py --- meson-0.53.2/mesonbuild/cmake/generator.py 2019-08-28 17:15:39.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/cmake/generator.py 2020-10-18 21:29:13.000000000 +0000 @@ -13,6 +13,7 @@ # limitations under the License. from .. import mesonlib +import typing as T def parse_generator_expressions(raw: str) -> str: '''Parse CMake generator expressions @@ -73,7 +74,7 @@ 'ANGLE-R': lambda x: '>', 'COMMA': lambda x: ',', 'SEMICOLON': lambda x: ';', - } + } # type: T.Dict[str, T.Callable[[str], str]] # Recursively evaluate generator expressions def eval_generator_expressions() -> str: diff -Nru meson-0.53.2/mesonbuild/cmake/__init__.py meson-0.57.0+really0.56.2/mesonbuild/cmake/__init__.py --- meson-0.53.2/mesonbuild/cmake/__init__.py 2020-01-23 21:41:11.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/cmake/__init__.py 2020-10-18 21:29:13.000000000 +0000 @@ -18,20 +18,27 @@ __all__ = [ 'CMakeClient', 'CMakeExecutor', + 'CMakeExecScope', 'CMakeException', 'CMakeFileAPI', 'CMakeInterpreter', 'CMakeTarget', + 'CMakeToolchain', 'CMakeTraceLine', 'CMakeTraceParser', + 'SingleTargetOptions', + 'TargetOptions', 'parse_generator_expressions', 'language_map', + 'cmake_defines_to_args', + 'check_cmake_args', ] -from .common import CMakeException +from .common import CMakeException, SingleTargetOptions, TargetOptions, cmake_defines_to_args, language_map, check_cmake_args from .client import CMakeClient from .executor import CMakeExecutor from .fileapi import CMakeFileAPI from .generator import parse_generator_expressions -from .interpreter import CMakeInterpreter, language_map +from .interpreter import CMakeInterpreter +from .toolchain import CMakeToolchain, CMakeExecScope from .traceparser import CMakeTarget, CMakeTraceLine, CMakeTraceParser diff -Nru meson-0.53.2/mesonbuild/cmake/interpreter.py meson-0.57.0+really0.56.2/mesonbuild/cmake/interpreter.py --- meson-0.53.2/mesonbuild/cmake/interpreter.py 2020-02-25 18:00:46.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/cmake/interpreter.py 2021-01-09 10:14:21.000000000 +0000 @@ -15,22 +15,22 @@ # This class contains the basic functionality needed to run any interpreter # or an interpreter-based tool. 
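
For orientation, the CMakeFileAPI changes above keep the file-API protocol as it was and only move the path handling to pathlib: a query file is written to <build>/.cmake/api/v1/query/client-meson/query.json, CMake is run, and the reply is found by matching index-*.json under <build>/.cmake/api/v1/reply/. A compact sketch of that round trip; the request kinds and versions below are assumptions for illustration (the hunk elides the exact list), while the directory layout and index pattern are the ones used above:

    import json
    import re
    from pathlib import Path

    def write_query(build_dir: Path) -> None:
        request_dir = build_dir / '.cmake' / 'api' / 'v1' / 'query' / 'client-meson'
        request_dir.mkdir(parents=True, exist_ok=True)
        query = {
            'requests': [
                # Assumed kinds/versions, matching the resolvers registered above.
                {'kind': 'codemodel', 'version': {'major': 2, 'minor': 0}},
                {'kind': 'cmakeFiles', 'version': {'major': 1, 'minor': 0}},
            ]
        }
        (request_dir / 'query.json').write_text(json.dumps(query, indent=2))

    def load_reply_index(build_dir: Path) -> dict:
        reply_dir = build_dir / '.cmake' / 'api' / 'v1' / 'reply'
        if not reply_dir.is_dir():
            raise RuntimeError('No response from the CMake file API')
        for f in sorted(reply_dir.iterdir()):
            if re.match(r'^index-.*\.json$', f.name):
                return json.loads(f.read_text())
        raise RuntimeError('Failed to find the file API index reply')
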
-from .common import CMakeException, CMakeTarget -from .client import CMakeClient, RequestCMakeInputs, RequestConfigure, RequestCompute, RequestCodeModel +from .common import CMakeException, CMakeTarget, TargetOptions, CMakeConfiguration, language_map, check_cmake_args +from .client import CMakeClient, RequestCMakeInputs, RequestConfigure, RequestCompute, RequestCodeModel, ReplyCMakeInputs, ReplyCodeModel from .fileapi import CMakeFileAPI from .executor import CMakeExecutor +from .toolchain import CMakeToolchain, CMakeExecScope from .traceparser import CMakeTraceParser, CMakeGeneratorTarget -from .. import mlog -from ..environment import Environment -from ..mesonlib import MachineChoice, version_compare +from .. import mlog, mesonlib +from ..mesonlib import MachineChoice, OrderedSet, version_compare, path_is_in_root, relative_to_if_possible +from ..mesondata import mesondata from ..compilers.compilers import lang_suffixes, header_suffixes, obj_suffixes, lib_suffixes, is_header -from subprocess import Popen, PIPE -from threading import Thread from enum import Enum from functools import lru_cache -from pathlib import Path +from .._pathlib import Path import typing as T -import os, re +import re +from os import environ from ..mparser import ( Token, @@ -52,6 +52,11 @@ if T.TYPE_CHECKING: from ..build import Build from ..backend.backends import Backend + from ..environment import Environment + +TYPE_mixed = T.Union[str, int, bool, Path, BaseNode] +TYPE_mixed_list = T.Union[TYPE_mixed, T.Sequence[TYPE_mixed]] +TYPE_mixed_kwargs = T.Dict[str, TYPE_mixed_list] # Disable all warnings automaticall enabled with --trace and friends # See https://cmake.org/cmake/help/latest/variable/CMAKE_POLICY_WARNING_CMPNNNN.html @@ -65,6 +70,7 @@ 'CMP0067', 'CMP0082', 'CMP0089', + 'CMP0102', ] backend_generator_map = { @@ -76,18 +82,6 @@ 'vs2019': 'Visual Studio 16 2019', } -language_map = { - 'c': 'C', - 'cpp': 'CXX', - 'cuda': 'CUDA', - 'objc': 'OBJC', - 'objcpp': 'OBJCXX', - 'cs': 'CSharp', - 'java': 'Java', - 'fortran': 'Fortran', - 'swift': 'Swift', -} - target_type_map = { 'STATIC_LIBRARY': 'static_library', 'MODULE_LIBRARY': 'shared_module', @@ -138,8 +132,8 @@ class OutputTargetMap: rm_so_version = re.compile(r'(\.[0-9]+)+$') - def __init__(self, build_dir: str): - self.tgt_map = {} + def __init__(self, build_dir: Path): + self.tgt_map = {} # type: T.Dict[str, T.Union['ConverterTarget', 'ConverterCustomTarget']] self.build_dir = build_dir def add(self, tgt: T.Union['ConverterTarget', 'ConverterCustomTarget']) -> None: @@ -165,6 +159,14 @@ def target(self, name: str) -> T.Optional[T.Union['ConverterTarget', 'ConverterCustomTarget']]: return self._return_first_valid_key([self._target_key(name)]) + def executable(self, name: str) -> T.Optional['ConverterTarget']: + tgt = self.target(name) + if tgt is None or not isinstance(tgt, ConverterTarget): + return None + if tgt.meson_func() != 'executable': + return None + return tgt + def artifact(self, name: str) -> T.Optional[T.Union['ConverterTarget', 'ConverterCustomTarget']]: keys = [] candidates = [name, OutputTargetMap.rm_so_version.sub('', name)] @@ -175,94 +177,121 @@ new_name = OutputTargetMap.rm_so_version.sub('', new_name) candidates += ['{}.{}'.format(new_name, i)] for i in candidates: - keys += [self._rel_artifact_key(i), os.path.basename(i), self._base_artifact_key(i)] + keys += [self._rel_artifact_key(Path(i)), Path(i).name, self._base_artifact_key(Path(i))] return self._return_first_valid_key(keys) - def generated(self, name: str) -> 
T.Optional[T.Union['ConverterTarget', 'ConverterCustomTarget']]: - return self._return_first_valid_key([self._rel_generated_file_key(name), self._base_generated_file_key(name)]) + def generated(self, name: Path) -> T.Optional['ConverterCustomTarget']: + res = self._return_first_valid_key([self._rel_generated_file_key(name), self._base_generated_file_key(name)]) + assert res is None or isinstance(res, ConverterCustomTarget) + return res # Utility functions to generate local keys - def _rel_path(self, fname: str) -> T.Optional[str]: - fname = os.path.normpath(os.path.join(self.build_dir, fname)) - if os.path.commonpath([self.build_dir, fname]) != self.build_dir: - return None - return os.path.relpath(fname, self.build_dir) + def _rel_path(self, fname: Path) -> T.Optional[Path]: + try: + return fname.resolve().relative_to(self.build_dir) + except ValueError: + pass + return None def _target_key(self, tgt_name: str) -> str: return '__tgt_{}__'.format(tgt_name) - def _rel_generated_file_key(self, fname: str) -> T.Optional[str]: + def _rel_generated_file_key(self, fname: Path) -> T.Optional[str]: path = self._rel_path(fname) - return '__relgen_{}__'.format(path) if path else None + return '__relgen_{}__'.format(path.as_posix()) if path else None - def _base_generated_file_key(self, fname: str) -> str: - return '__gen_{}__'.format(os.path.basename(fname)) + def _base_generated_file_key(self, fname: Path) -> str: + return '__gen_{}__'.format(fname.name) - def _rel_artifact_key(self, fname: str) -> T.Optional[str]: + def _rel_artifact_key(self, fname: Path) -> T.Optional[str]: path = self._rel_path(fname) - return '__relart_{}__'.format(path) if path else None + return '__relart_{}__'.format(path.as_posix()) if path else None - def _base_artifact_key(self, fname: str) -> str: - return '__art_{}__'.format(os.path.basename(fname)) + def _base_artifact_key(self, fname: Path) -> str: + return '__art_{}__'.format(fname.name) class ConverterTarget: - def __init__(self, target: CMakeTarget, env: Environment): - self.env = env - self.artifacts = target.artifacts - self.src_dir = target.src_dir - self.build_dir = target.build_dir - self.name = target.name - self.cmake_name = target.name - self.full_name = target.full_name - self.type = target.type - self.install = target.install - self.install_dir = '' + def __init__(self, target: CMakeTarget, env: 'Environment', for_machine: MachineChoice) -> None: + self.env = env + self.for_machine = for_machine + self.artifacts = target.artifacts + self.src_dir = target.src_dir + self.build_dir = target.build_dir + self.name = target.name + self.cmake_name = target.name + self.full_name = target.full_name + self.type = target.type + self.install = target.install + self.install_dir = None # type: T.Optional[Path] self.link_libraries = target.link_libraries - self.link_flags = target.link_flags + target.link_lang_flags - self.depends_raw = [] - self.depends = [] + self.link_flags = target.link_flags + target.link_lang_flags + self.depends_raw = [] # type: T.List[str] + self.depends = [] # type: T.List[T.Union[ConverterTarget, ConverterCustomTarget]] if target.install_paths: self.install_dir = target.install_paths[0] - self.languages = [] - self.sources = [] - self.generated = [] - self.includes = [] - self.sys_includes = [] - self.link_with = [] - self.object_libs = [] - self.compile_opts = {} - self.public_compile_opts = [] - self.pie = False + self.languages = set() # type: T.Set[str] + self.sources = [] # type: T.List[Path] + self.generated = [] # type: T.List[Path] + 
self.generated_ctgt = [] # type: T.List[CustomTargetReference] + self.includes = [] # type: T.List[Path] + self.sys_includes = [] # type: T.List[Path] + self.link_with = [] # type: T.List[T.Union[ConverterTarget, ConverterCustomTarget]] + self.object_libs = [] # type: T.List[ConverterTarget] + self.compile_opts = {} # type: T.Dict[str, T.List[str]] + self.public_compile_opts = [] # type: T.List[str] + self.pie = False # Project default override options (c_std, cpp_std, etc.) - self.override_options = [] + self.override_options = [] # type: T.List[str] # Convert the target name to a valid meson target name self.name = _sanitize_cmake_name(self.name) + self.generated_raw = [] # type: T.List[Path] + for i in target.files: - # Determine the meson language + languages = set() # type: T.Set[str] + src_suffixes = set() # type: T.Set[str] + + # Insert suffixes + for j in i.sources: + if not j.suffix: + continue + src_suffixes.add(j.suffix[1:]) + + # Determine the meson language(s) + # Extract the default language from the explicit CMake field lang_cmake_to_meson = {val.lower(): key for key, val in language_map.items()} - lang = lang_cmake_to_meson.get(i.language.lower(), 'c') - if lang not in self.languages: - self.languages += [lang] - if lang not in self.compile_opts: - self.compile_opts[lang] = [] + languages.add(lang_cmake_to_meson.get(i.language.lower(), 'c')) + + # Determine missing languages from the source suffixes + for sfx in src_suffixes: + for key, val in lang_suffixes.items(): + if sfx in val: + languages.add(key) + break + + # Register the new languages and initialize the compile opts array + for lang in languages: + self.languages.add(lang) + if lang not in self.compile_opts: + self.compile_opts[lang] = [] # Add arguments, but avoid duplicates args = i.flags args += ['-D{}'.format(x) for x in i.defines] - self.compile_opts[lang] += [x for x in args if x not in self.compile_opts[lang]] + for lang in languages: + self.compile_opts[lang] += [x for x in args if x not in self.compile_opts[lang]] # Handle include directories - self.includes += [x['path'] for x in i.includes if x not in self.includes and not x['isSystem']] - self.sys_includes += [x['path'] for x in i.includes if x not in self.sys_includes and x['isSystem']] + self.includes += [x.path for x in i.includes if x.path not in self.includes and not x.isSystem] + self.sys_includes += [x.path for x in i.includes if x.path not in self.sys_includes and x.isSystem] # Add sources to the right array if i.is_generated: - self.generated += i.sources + self.generated_raw += i.sources else: self.sources += i.sources @@ -271,8 +300,8 @@ std_regex = re.compile(r'([-]{1,2}std=|/std:v?|[-]{1,2}std:)(.*)') - def postprocess(self, output_target_map: OutputTargetMap, root_src_dir: str, subdir: str, install_prefix: str, trace: CMakeTraceParser) -> None: - # Detect setting the C and C++ standard + def postprocess(self, output_target_map: OutputTargetMap, root_src_dir: Path, subdir: Path, install_prefix: Path, trace: CMakeTraceParser) -> None: + # Detect setting the C and C++ standard and do additional compiler args manipulation for i in ['c', 'cpp']: if i not in self.compile_opts: continue @@ -280,10 +309,27 @@ temp = [] for j in self.compile_opts[i]: m = ConverterTarget.std_regex.match(j) + ctgt = output_target_map.generated(Path(j)) if m: - self.override_options += ['{}_std={}'.format(i, m.group(2))] + std = m.group(2) + supported = self._all_lang_stds(i) + if std not in supported: + mlog.warning( + 'Unknown {0}_std "{1}" -> Ignoring. 
Try setting the project-' + 'level {0}_std if build errors occur. Known ' + '{0}_stds are: {2}'.format(i, std, ' '.join(supported)), + once=True + ) + continue + self.override_options += ['{}_std={}'.format(i, std)] elif j in ['-fPIC', '-fpic', '-fPIE', '-fpie']: self.pie = True + elif isinstance(ctgt, ConverterCustomTarget): + # Sometimes projects pass generated source files as compiler + # flags. Add these as generated sources to ensure that the + # corresponding custom target is run.2 + self.generated_raw += [Path(j)] + temp += [j] elif j in blacklist_compiler_flags: pass else: @@ -299,13 +345,6 @@ tgt = trace.targets.get(self.cmake_name) if tgt: self.depends_raw = trace.targets[self.cmake_name].depends - if self.type.upper() == 'INTERFACE_LIBRARY': - props = tgt.properties - - self.includes += props.get('INTERFACE_INCLUDE_DIRECTORIES', []) - self.public_compile_opts += props.get('INTERFACE_COMPILE_DEFINITIONS', []) - self.public_compile_opts += props.get('INTERFACE_COMPILE_OPTIONS', []) - self.link_flags += props.get('INTERFACE_LINK_OPTIONS', []) # TODO refactor this copy paste from CMakeDependency for future releases reg_is_lib = re.compile(r'^(-l[a-zA-Z0-9_]+|-l?pthread)$') @@ -322,7 +361,13 @@ cfg = '' otherDeps = [] libraries = [] - mlog.debug(tgt) + mlog.debug(str(tgt)) + + if 'INTERFACE_INCLUDE_DIRECTORIES' in tgt.properties: + self.includes += [Path(x) for x in tgt.properties['INTERFACE_INCLUDE_DIRECTORIES'] if x] + + if 'INTERFACE_LINK_OPTIONS' in tgt.properties: + self.link_flags += [x for x in tgt.properties['INTERFACE_LINK_OPTIONS'] if x] if 'INTERFACE_COMPILE_DEFINITIONS' in tgt.properties: self.public_compile_opts += ['-D' + re.sub('^-D', '', x) for x in tgt.properties['INTERFACE_COMPILE_DEFINITIONS'] if x] @@ -338,8 +383,15 @@ cfgs += [x for x in tgt.properties['CONFIGURATIONS'] if x] cfg = cfgs[0] - if 'RELEASE' in cfgs: - cfg = 'RELEASE' + is_debug = self.env.coredata.get_builtin_option('debug'); + if is_debug: + if 'DEBUG' in cfgs: + cfg = 'DEBUG' + elif 'RELEASE' in cfgs: + cfg = 'RELEASE' + else: + if 'RELEASE' in cfgs: + cfg = 'RELEASE' if 'IMPORTED_IMPLIB_{}'.format(cfg) in tgt.properties: libraries += [x for x in tgt.properties['IMPORTED_IMPLIB_{}'.format(cfg)] if x] @@ -364,7 +416,7 @@ for j in otherDeps: if j in trace.targets: to_process += [j] - elif reg_is_lib.match(j) or os.path.exists(j): + elif reg_is_lib.match(j) or Path(j).exists(): libraries += [j] for j in libraries: @@ -380,7 +432,7 @@ # Let meson handle this arcane magic if ',-rpath,' in i: continue - if not os.path.isabs(i): + if not Path(i).is_absolute(): link_with = output_target_map.artifact(i) if link_with: self.link_with += [link_with] @@ -394,69 +446,71 @@ for i in self.languages: supported += list(lang_suffixes[i]) supported = ['.{}'.format(x) for x in supported] - self.sources = [x for x in self.sources if any([x.endswith(y) for y in supported])] - self.generated = [x for x in self.generated if any([x.endswith(y) for y in supported])] + self.sources = [x for x in self.sources if any([x.name.endswith(y) for y in supported])] + self.generated_raw = [x for x in self.generated_raw if any([x.name.endswith(y) for y in supported])] # Make paths relative - def rel_path(x: str, is_header: bool, is_generated: bool) -> T.Optional[str]: - if not os.path.isabs(x): - x = os.path.normpath(os.path.join(self.src_dir, x)) - if not os.path.exists(x) and not any([x.endswith(y) for y in obj_suffixes]) and not is_generated: - mlog.warning('CMake: path', mlog.bold(x), 'does not exist.') - mlog.warning(' --> 
Ignoring. This can lead to build errors.') + def rel_path(x: Path, is_header: bool, is_generated: bool) -> T.Optional[Path]: + if not x.is_absolute(): + x = self.src_dir / x + x = x.resolve() + assert x.is_absolute() + if not x.exists() and not any([x.name.endswith(y) for y in obj_suffixes]) and not is_generated: + if path_is_in_root(x, Path(self.env.get_build_dir()), resolve=True): + x.mkdir(parents=True, exist_ok=True) + return x.relative_to(Path(self.env.get_build_dir()) / subdir) + else: + mlog.warning('CMake: path', mlog.bold(x.as_posix()), 'does not exist.') + mlog.warning(' --> Ignoring. This can lead to build errors.') + return None + if x in trace.explicit_headers: return None if ( - os.path.isabs(x) - and os.path.commonpath([x, self.env.get_source_dir()]) == self.env.get_source_dir() + path_is_in_root(x, Path(self.env.get_source_dir())) and not ( - os.path.commonpath([x, root_src_dir]) == root_src_dir or - os.path.commonpath([x, self.env.get_build_dir()]) == self.env.get_build_dir() + path_is_in_root(x, root_src_dir) or + path_is_in_root(x, Path(self.env.get_build_dir())) ) ): - mlog.warning('CMake: path', mlog.bold(x), 'is inside the root project but', mlog.bold('not'), 'inside the subproject.') + mlog.warning('CMake: path', mlog.bold(x.as_posix()), 'is inside the root project but', mlog.bold('not'), 'inside the subproject.') mlog.warning(' --> Ignoring. This can lead to build errors.') return None - if os.path.isabs(x) and os.path.commonpath([x, self.env.get_build_dir()]) == self.env.get_build_dir(): - if is_header: - return os.path.relpath(x, os.path.join(self.env.get_build_dir(), subdir)) - else: - return os.path.relpath(x, root_src_dir) - if os.path.isabs(x) and os.path.commonpath([x, root_src_dir]) == root_src_dir: - return os.path.relpath(x, root_src_dir) - return x - - def custom_target(x: str): - ctgt = output_target_map.generated(x) - if ctgt: - assert(isinstance(ctgt, ConverterCustomTarget)) - ref = ctgt.get_ref(x) - assert(isinstance(ref, CustomTargetReference) and ref.valid()) - return ref + if path_is_in_root(x, Path(self.env.get_build_dir())) and is_header: + return x.relative_to(Path(self.env.get_build_dir()) / subdir) + if path_is_in_root(x, root_src_dir): + return x.relative_to(root_src_dir) return x - build_dir_rel = os.path.relpath(self.build_dir, os.path.join(self.env.get_build_dir(), subdir)) - self.includes = list(set([rel_path(x, True, False) for x in set(self.includes)] + [build_dir_rel])) - self.sys_includes = list(set([rel_path(x, True, False) for x in set(self.sys_includes)])) + build_dir_rel = self.build_dir.relative_to(Path(self.env.get_build_dir()) / subdir) + self.generated_raw = [rel_path(x, False, True) for x in self.generated_raw] + self.includes = list(OrderedSet([rel_path(x, True, False) for x in OrderedSet(self.includes)] + [build_dir_rel])) + self.sys_includes = list(OrderedSet([rel_path(x, True, False) for x in OrderedSet(self.sys_includes)])) self.sources = [rel_path(x, False, False) for x in self.sources] - self.generated = [rel_path(x, False, True) for x in self.generated] # Resolve custom targets - self.generated = [custom_target(x) for x in self.generated] + for gen_file in self.generated_raw: + ctgt = output_target_map.generated(gen_file) + if ctgt: + assert isinstance(ctgt, ConverterCustomTarget) + ref = ctgt.get_ref(gen_file) + assert isinstance(ref, CustomTargetReference) and ref.valid() + self.generated_ctgt += [ref] + elif gen_file is not None: + self.generated += [gen_file] # Remove delete entries - self.includes = [x for x in 
self.includes if x is not None] + self.includes = [x for x in self.includes if x is not None] self.sys_includes = [x for x in self.sys_includes if x is not None] - self.sources = [x for x in self.sources if x is not None] - self.generated = [x for x in self.generated if x is not None] + self.sources = [x for x in self.sources if x is not None] # Make sure '.' is always in the include directories - if '.' not in self.includes: - self.includes += ['.'] + if Path('.') not in self.includes: + self.includes += [Path('.')] # make install dir relative to the install prefix - if self.install_dir and os.path.isabs(self.install_dir): - if os.path.commonpath([self.install_dir, install_prefix]) == install_prefix: - self.install_dir = os.path.relpath(self.install_dir, install_prefix) + if self.install_dir and self.install_dir.is_absolute(): + if path_is_in_root(self.install_dir, install_prefix): + self.install_dir = self.install_dir.relative_to(install_prefix) # Remove blacklisted options and libs def check_flag(flag: str) -> bool: @@ -469,24 +523,35 @@ self.link_libraries = [x for x in self.link_libraries if x.lower() not in blacklist_link_libs] self.link_flags = [x for x in self.link_flags if check_flag(x)] + # Handle OSX frameworks + def handle_frameworks(flags: T.List[str]) -> T.List[str]: + res = [] # type: T.List[str] + for i in flags: + p = Path(i) + if not p.exists() or not p.name.endswith('.framework'): + res += [i] + continue + res += ['-framework', p.stem] + return res + + self.link_libraries = handle_frameworks(self.link_libraries) + self.link_flags = handle_frameworks(self.link_flags) + # Handle explicit CMake add_dependency() calls for i in self.depends_raw: - tgt = output_target_map.target(i) - if tgt: - self.depends.append(tgt) + dep_tgt = output_target_map.target(i) + if dep_tgt: + self.depends.append(dep_tgt) - def process_object_libs(self, obj_target_list: T.List['ConverterTarget'], linker_workaround: bool): + def process_object_libs(self, obj_target_list: T.List['ConverterTarget'], linker_workaround: bool) -> None: # Try to detect the object library(s) from the generated input sources - temp = [x for x in self.generated if isinstance(x, str)] - temp = [os.path.basename(x) for x in temp] - temp = [x for x in temp if any([x.endswith('.' + y) for y in obj_suffixes])] - temp = [os.path.splitext(x)[0] for x in temp] + temp = [x for x in self.generated if any([x.name.endswith('.' + y) for y in obj_suffixes])] + stem = [x.stem for x in temp] exts = self._all_source_suffixes() # Temp now stores the source filenames of the object files for i in obj_target_list: - source_files = [x for x in i.sources + i.generated if isinstance(x, str)] - source_files = [os.path.basename(x) for x in source_files] - for j in temp: + source_files = [x.name for x in i.sources + i.generated] + for j in stem: # On some platforms (specifically looking at you Windows with vs20xy backend) CMake does # not produce object files with the format `foo.cpp.obj`, instead it skipps the language # suffix and just produces object files like `foo.obj`. Thus we have to do our best to @@ -501,20 +566,22 @@ self._append_objlib_sources(i) else: self.includes += i.includes - self.includes = list(set(self.includes)) + self.includes = list(OrderedSet(self.includes)) self.object_libs += [i] break # Filter out object files from the sources - self.generated = [x for x in self.generated if not isinstance(x, str) or not any([x.endswith('.' 
+ y) for y in obj_suffixes])] + self.generated = [x for x in self.generated if not any([x.name.endswith('.' + y) for y in obj_suffixes])] def _append_objlib_sources(self, tgt: 'ConverterTarget') -> None: - self.includes += tgt.includes - self.sources += tgt.sources - self.generated += tgt.generated - self.sources = list(set(self.sources)) - self.generated = list(set(self.generated)) - self.includes = list(set(self.includes)) + self.includes += tgt.includes + self.sources += tgt.sources + self.generated += tgt.generated + self.generated_ctgt += tgt.generated_ctgt + self.includes = list(OrderedSet(self.includes)) + self.sources = list(OrderedSet(self.sources)) + self.generated = list(OrderedSet(self.generated)) + self.generated_ctgt = list(OrderedSet(self.generated_ctgt)) # Inherit compiler arguments since they may be required for building for lang, opts in tgt.compile_opts.items(): @@ -529,7 +596,21 @@ suffixes += [x for x in exts] return suffixes - def process_inter_target_dependencies(self): + @lru_cache(maxsize=None) + def _all_lang_stds(self, lang: str) -> T.List[str]: + lang_opts = self.env.coredata.compiler_options.build.get(lang, None) + if not lang_opts or 'std' not in lang_opts: + return [] + res = lang_opts['std'].choices + + # TODO: Get rid of this once we have propper typing for options + assert isinstance(res, list) + for i in res: + assert isinstance(i, str) + + return res + + def process_inter_target_dependencies(self) -> None: # Move the dependencies from all transfer_dependencies_from to the target to_process = list(self.depends) processed = [] @@ -540,9 +621,9 @@ to_process += [x for x in i.depends if x not in processed] else: new_deps += [i] - self.depends = list(set(new_deps)) + self.depends = list(OrderedSet(new_deps)) - def cleanup_dependencies(self): + def cleanup_dependencies(self) -> None: # Clear the dependencies from targets that where moved from if self.meson_func() in transfer_dependencies_from: self.depends = [] @@ -556,7 +637,7 @@ mlog.log(' -- full_name: ', mlog.bold(self.full_name)) mlog.log(' -- type: ', mlog.bold(self.type)) mlog.log(' -- install: ', mlog.bold('true' if self.install else 'false')) - mlog.log(' -- install_dir: ', mlog.bold(self.install_dir)) + mlog.log(' -- install_dir: ', mlog.bold(self.install_dir.as_posix() if self.install_dir else '')) mlog.log(' -- link_libraries: ', mlog.bold(str(self.link_libraries))) mlog.log(' -- link_with: ', mlog.bold(str(self.link_with))) mlog.log(' -- object_libs: ', mlog.bold(str(self.object_libs))) @@ -566,6 +647,7 @@ mlog.log(' -- sys_includes: ', mlog.bold(str(self.sys_includes))) mlog.log(' -- sources: ', mlog.bold(str(self.sources))) mlog.log(' -- generated: ', mlog.bold(str(self.generated))) + mlog.log(' -- generated_ctgt: ', mlog.bold(str(self.generated_ctgt))) mlog.log(' -- pie: ', mlog.bold('true' if self.pie else 'false')) mlog.log(' -- override_opts: ', mlog.bold(str(self.override_options))) mlog.log(' -- depends: ', mlog.bold(str(self.depends))) @@ -574,7 +656,7 @@ mlog.log(' -', key, '=', mlog.bold(str(val))) class CustomTargetReference: - def __init__(self, ctgt: 'ConverterCustomTarget', index: int): + def __init__(self, ctgt: 'ConverterCustomTarget', index: int) -> None: self.ctgt = ctgt # type: ConverterCustomTarget self.index = index # type: int @@ -594,24 +676,27 @@ tgt_counter = 0 # type: int out_counter = 0 # type: int - def __init__(self, target: CMakeGeneratorTarget): - assert(target.current_bin_dir is not None) - assert(target.current_src_dir is not None) + def __init__(self, target: 
CMakeGeneratorTarget, env: 'Environment', for_machine: MachineChoice) -> None: + assert target.current_bin_dir is not None + assert target.current_src_dir is not None self.name = target.name if not self.name: self.name = 'custom_tgt_{}'.format(ConverterCustomTarget.tgt_counter) ConverterCustomTarget.tgt_counter += 1 - self.cmake_name = str(self.name) + self.cmake_name = str(self.name) self.original_outputs = list(target.outputs) - self.outputs = [os.path.basename(x) for x in self.original_outputs] - self.conflict_map = {} - self.command = target.command - self.working_dir = target.working_dir - self.depends_raw = target.depends - self.inputs = [] - self.depends = [] - self.current_bin_dir = Path(target.current_bin_dir) - self.current_src_dir = Path(target.current_src_dir) + self.outputs = [x.name for x in self.original_outputs] + self.conflict_map = {} # type: T.Dict[str, str] + self.command = [] # type: T.List[T.List[T.Union[str, ConverterTarget]]] + self.working_dir = target.working_dir + self.depends_raw = target.depends + self.inputs = [] # type: T.List[T.Union[str, CustomTargetReference]] + self.depends = [] # type: T.List[T.Union[ConverterTarget, ConverterCustomTarget]] + self.current_bin_dir = target.current_bin_dir # type: Path + self.current_src_dir = target.current_src_dir # type: Path + self.env = env + self.for_machine = for_machine + self._raw_target = target # Convert the target name to a valid meson target name self.name = _sanitize_cmake_name(self.name) @@ -619,15 +704,15 @@ def __repr__(self) -> str: return '<{}: {} {}>'.format(self.__class__.__name__, self.name, self.outputs) - def postprocess(self, output_target_map: OutputTargetMap, root_src_dir: str, subdir: str, all_outputs: T.List[str]) -> None: + def postprocess(self, output_target_map: OutputTargetMap, root_src_dir: Path, all_outputs: T.List[str], trace: CMakeTraceParser) -> None: # Default the working directory to ${CMAKE_CURRENT_BINARY_DIR} - if not self.working_dir: - self.working_dir = self.current_bin_dir.as_posix() + if self.working_dir is None: + self.working_dir = self.current_bin_dir # relative paths in the working directory are always relative # to ${CMAKE_CURRENT_BINARY_DIR} - if not os.path.isabs(self.working_dir): - self.working_dir = (self.current_bin_dir / self.working_dir).as_posix() + if not self.working_dir.is_absolute(): + self.working_dir = self.current_bin_dir / self.working_dir # Modify the original outputs if they are relative. 
Again, # relative paths are relative to ${CMAKE_CURRENT_BINARY_DIR} @@ -636,7 +721,7 @@ return x else: return self.current_bin_dir / x - self.original_outputs = [ensure_absolute(Path(x)).as_posix() for x in self.original_outputs] + self.original_outputs = [ensure_absolute(x) for x in self.original_outputs] # Ensure that there is no duplicate output in the project so # that meson can handle cases where the same filename is @@ -653,16 +738,32 @@ self.outputs = temp_outputs # Check if the command is a build target - commands = [] - for i in self.command: - assert(isinstance(i, list)) - cmd = [] + commands = [] # type: T.List[T.List[T.Union[str, ConverterTarget]]] + for curr_cmd in self._raw_target.command: + assert(isinstance(curr_cmd, list)) + cmd = [] # type: T.List[T.Union[str, ConverterTarget]] - for j in i: + for j in curr_cmd: if not j: continue - target = output_target_map.target(j) - cmd += [target] if target else [j] + target = output_target_map.executable(j) + if target: + # When cross compiling, binaries have to be executed with an exe_wrapper (for instance wine for mingw-w64) + if self.env.exe_wrapper is not None and self.env.properties[self.for_machine].get_cmake_use_exe_wrapper(): + from ..dependencies import ExternalProgram + assert isinstance(self.env.exe_wrapper, ExternalProgram) + cmd += self.env.exe_wrapper.get_command() + cmd += [target] + continue + elif j in trace.targets: + trace_tgt = trace.targets[j] + if trace_tgt.type == 'EXECUTABLE' and 'IMPORTED_LOCATION' in trace_tgt.properties: + cmd += trace_tgt.properties['IMPORTED_LOCATION'] + continue + mlog.debug('CMake: Found invalid CMake target "{}" --> ignoring \n{}'.format(j, trace_tgt)) + + # Fallthrough on error + cmd += [j] commands += [cmd] self.command = commands @@ -673,18 +774,17 @@ self.outputs = [self.name + '.h'] # Check dependencies and input files - root = Path(root_src_dir) for i in self.depends_raw: if not i: continue raw = Path(i) art = output_target_map.artifact(i) tgt = output_target_map.target(i) - gen = output_target_map.generated(i) + gen = output_target_map.generated(raw) rel_to_root = None try: - rel_to_root = raw.relative_to(root) + rel_to_root = raw.relative_to(root_src_dir) except ValueError: rel_to_root = None @@ -693,7 +793,7 @@ # as outputs from other targets. 
# See https://github.com/mesonbuild/meson/issues/6632 if not raw.is_absolute() and (self.current_src_dir / raw).exists(): - self.inputs += [(self.current_src_dir / raw).relative_to(root).as_posix()] + self.inputs += [(self.current_src_dir / raw).relative_to(root_src_dir).as_posix()] elif raw.is_absolute() and raw.exists() and rel_to_root is not None: self.inputs += [rel_to_root.as_posix()] elif art: @@ -701,9 +801,11 @@ elif tgt: self.depends += [tgt] elif gen: - self.inputs += [gen.get_ref(i)] + ctgt_ref = gen.get_ref(raw) + assert ctgt_ref is not None + self.inputs += [ctgt_ref] - def process_inter_target_dependencies(self): + def process_inter_target_dependencies(self) -> None: # Move the dependencies from all transfer_dependencies_from to the target to_process = list(self.depends) processed = [] @@ -714,20 +816,20 @@ to_process += [x for x in i.depends if x not in processed] else: new_deps += [i] - self.depends = list(set(new_deps)) + self.depends = list(OrderedSet(new_deps)) - def get_ref(self, fname: str) -> T.Optional[CustomTargetReference]: - fname = os.path.basename(fname) + def get_ref(self, fname: Path) -> T.Optional[CustomTargetReference]: + name = fname.name try: - if fname in self.conflict_map: - fname = self.conflict_map[fname] - idx = self.outputs.index(fname) + if name in self.conflict_map: + name = self.conflict_map[name] + idx = self.outputs.index(name) return CustomTargetReference(self, idx) except ValueError: return None def log(self) -> None: - mlog.log('Custom Target', mlog.bold(self.name)) + mlog.log('Custom Target', mlog.bold(self.name), '({})'.format(self.cmake_name)) mlog.log(' -- command: ', mlog.bold(str(self.command))) mlog.log(' -- outputs: ', mlog.bold(str(self.outputs))) mlog.log(' -- conflict_map: ', mlog.bold(str(self.conflict_map))) @@ -741,142 +843,106 @@ FILE = 2 class CMakeInterpreter: - def __init__(self, build: 'Build', subdir: str, src_dir: str, install_prefix: str, env: Environment, backend: 'Backend'): - assert(hasattr(backend, 'name')) - self.build = build - self.subdir = subdir - self.src_dir = src_dir - self.build_dir_rel = os.path.join(subdir, '__CMake_build') - self.build_dir = os.path.join(env.get_build_dir(), self.build_dir_rel) + def __init__(self, build: 'Build', subdir: Path, src_dir: Path, install_prefix: Path, env: 'Environment', backend: 'Backend'): + self.build = build + self.subdir = subdir + self.src_dir = src_dir + self.build_dir_rel = subdir / '__CMake_build' + self.build_dir = Path(env.get_build_dir()) / self.build_dir_rel self.install_prefix = install_prefix - self.env = env - self.backend_name = backend.name - self.linkers = set() # type: T.Set[str] - self.cmake_api = CMakeAPI.SERVER - self.client = CMakeClient(self.env) - self.fileapi = CMakeFileAPI(self.build_dir) + self.env = env + self.for_machine = MachineChoice.HOST # TODO make parameter + self.backend_name = backend.name + self.linkers = set() # type: T.Set[str] + self.cmake_api = CMakeAPI.SERVER + self.client = CMakeClient(self.env) + self.fileapi = CMakeFileAPI(self.build_dir) # Raw CMake results - self.bs_files = [] - self.codemodel_configs = None - self.raw_trace = None + self.bs_files = [] # type: T.List[Path] + self.codemodel_configs = None # type: T.Optional[T.List[CMakeConfiguration]] + self.raw_trace = None # type: T.Optional[str] # Analysed data - self.project_name = '' - self.languages = [] - self.targets = [] - self.custom_targets = [] # type: T.List[ConverterCustomTarget] - self.trace = CMakeTraceParser() + self.project_name = '' + self.languages = [] # 
type: T.List[str] + self.targets = [] # type: T.List[ConverterTarget] + self.custom_targets = [] # type: T.List[ConverterCustomTarget] + self.trace = CMakeTraceParser('', Path('.')) # Will be replaced in analyse self.output_target_map = OutputTargetMap(self.build_dir) # Generated meson data - self.generated_targets = {} - self.internal_name_map = {} + self.generated_targets = {} # type: T.Dict[str, T.Dict[str, T.Optional[str]]] + self.internal_name_map = {} # type: T.Dict[str, str] + + # Do some special handling for object libraries for certain configurations + self._object_lib_workaround = False + if self.backend_name.startswith('vs'): + for comp in self.env.coredata.compilers[self.for_machine].values(): + if comp.get_linker_id() == 'link': + self._object_lib_workaround = True + break - def configure(self, extra_cmake_options: T.List[str]) -> None: - for_machine = MachineChoice.HOST # TODO make parameter + def configure(self, extra_cmake_options: T.List[str]) -> CMakeExecutor: # Find CMake - cmake_exe = CMakeExecutor(self.env, '>=3.7', for_machine) + # TODO: Using MachineChoice.BUILD should always be correct here, but also evaluate the use of self.for_machine + cmake_exe = CMakeExecutor(self.env, '>=3.7', MachineChoice.BUILD) if not cmake_exe.found(): raise CMakeException('Unable to find CMake') + self.trace = CMakeTraceParser(cmake_exe.version(), self.build_dir, permissive=True) - preload_file = Path(__file__).resolve().parent / 'data' / 'preload.cmake' + preload_file = mesondata['cmake/data/preload.cmake'].write_to_private(self.env) + toolchain = CMakeToolchain(self.env, self.for_machine, CMakeExecScope.SUBPROJECT, self.build_dir.parent, preload_file) + toolchain_file = toolchain.write() - # Prefere CMAKE_PROJECT_INCLUDE over CMAKE_TOOLCHAIN_FILE if possible, - # since CMAKE_PROJECT_INCLUDE was actually designed for code injection. 
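For context on the preload handling removed here and the CMAKE_TOOLCHAIN_FILE approach this patch switches to: the sketch below shows, in rough form, how the two generated files end up on the CMake command line as cache definitions. It is a minimal illustration only; the helper name and the file paths are hypothetical, and the real logic lives in CMakeToolchain.get_cmake_args() further down in this patch.

from pathlib import Path

def cmake_injection_args(toolchain_file: Path, preload_file: Path) -> list:
    # The toolchain file carries the compiler/system defaults; the preload
    # script is include()d from it via MESON_PRELOAD_FILE to override CMake
    # functions early. Both are passed as -D cache definitions.
    return [
        '-DCMAKE_TOOLCHAIN_FILE=' + toolchain_file.as_posix(),
        '-DMESON_PRELOAD_FILE=' + preload_file.as_posix(),
    ]

print(cmake_injection_args(Path('__CMake_build/CMakeMesonToolchainFile.cmake'),
                           Path('preload.cmake')))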
- preload_var = 'CMAKE_PROJECT_INCLUDE' - if version_compare(cmake_exe.version(), '<3.15'): - preload_var = 'CMAKE_TOOLCHAIN_FILE' + # TODO: drop this check once the deprecated `cmake_args` kwarg is removed + extra_cmake_options = check_cmake_args(extra_cmake_options) generator = backend_generator_map[self.backend_name] - cmake_args = cmake_exe.get_command() - trace_args = ['--trace', '--trace-expand', '--no-warn-unused-cli'] + cmake_args = [] + cmake_args += ['-G', generator] + cmake_args += ['-DCMAKE_INSTALL_PREFIX={}'.format(self.install_prefix)] + cmake_args += extra_cmake_options + trace_args = self.trace.trace_args() cmcmp_args = ['-DCMAKE_POLICY_WARNING_{}=OFF'.format(x) for x in disable_policy_warnings] - pload_args = ['-D{}={}'.format(preload_var, str(preload_file))] if version_compare(cmake_exe.version(), '>=3.14'): self.cmake_api = CMakeAPI.FILE self.fileapi.setup_request() - # Map meson compiler to CMake variables - for lang, comp in self.env.coredata.compilers[for_machine].items(): - if lang not in language_map: - continue - self.linkers.add(comp.get_linker_id()) - cmake_lang = language_map[lang] - exelist = comp.get_exelist() - if len(exelist) == 1: - cmake_args += ['-DCMAKE_{}_COMPILER={}'.format(cmake_lang, exelist[0])] - elif len(exelist) == 2: - cmake_args += ['-DCMAKE_{}_COMPILER_LAUNCHER={}'.format(cmake_lang, exelist[0]), - '-DCMAKE_{}_COMPILER={}'.format(cmake_lang, exelist[1])] - if hasattr(comp, 'get_linker_exelist') and comp.get_id() == 'clang-cl': - cmake_args += ['-DCMAKE_LINKER={}'.format(comp.get_linker_exelist()[0])] - cmake_args += ['-G', generator] - cmake_args += ['-DCMAKE_INSTALL_PREFIX={}'.format(self.install_prefix)] - cmake_args += extra_cmake_options - # Run CMake mlog.log() with mlog.nested(): mlog.log('Configuring the build directory with', mlog.bold('CMake'), 'version', mlog.cyan(cmake_exe.version())) - mlog.log(mlog.bold('Running:'), ' '.join(cmake_args)) - mlog.log(mlog.bold(' - build directory: '), self.build_dir) - mlog.log(mlog.bold(' - source directory: '), self.src_dir) + mlog.log(mlog.bold('Running CMake with:'), ' '.join(cmake_args)) + mlog.log(mlog.bold(' - build directory: '), self.build_dir.as_posix()) + mlog.log(mlog.bold(' - source directory: '), self.src_dir.as_posix()) + mlog.log(mlog.bold(' - toolchain file: '), toolchain_file.as_posix()) + mlog.log(mlog.bold(' - preload file: '), preload_file.as_posix()) mlog.log(mlog.bold(' - trace args: '), ' '.join(trace_args)) - mlog.log(mlog.bold(' - preload file: '), str(preload_file)) mlog.log(mlog.bold(' - disabled policy warnings:'), '[{}]'.format(', '.join(disable_policy_warnings))) mlog.log() - os.makedirs(self.build_dir, exist_ok=True) - os_env = os.environ.copy() + self.build_dir.mkdir(parents=True, exist_ok=True) + os_env = environ.copy() os_env['LC_ALL'] = 'C' - final_command = cmake_args + trace_args + cmcmp_args + pload_args + [self.src_dir] - proc = Popen(final_command, stdout=PIPE, stderr=PIPE, cwd=self.build_dir, env=os_env) - - def print_stdout(): - while True: - line = proc.stdout.readline() - if not line: - break - mlog.log(line.decode('utf-8').strip('\n')) - proc.stdout.close() - - t = Thread(target=print_stdout) - t.start() - - # Read stderr line by line and log non trace lines - self.raw_trace = '' - tline_start_reg = re.compile(r'^\s*(.*\.(cmake|txt))\(([0-9]+)\):\s*(\w+)\(.*$') - inside_multiline_trace = False - while True: - line = proc.stderr.readline() - if not line: - break - line = line.decode('utf-8') - if tline_start_reg.match(line): - self.raw_trace += line - 
inside_multiline_trace = not line.endswith(' )\n') - elif inside_multiline_trace: - self.raw_trace += line - else: - mlog.warning(line.strip('\n')) - - proc.stderr.close() - proc.wait() + final_args = cmake_args + trace_args + cmcmp_args + toolchain.get_cmake_args() + [self.src_dir.as_posix()] - t.join() + cmake_exe.set_exec_mode(print_cmout=True, always_capture_stderr=self.trace.requires_stderr()) + rc, _, self.raw_trace = cmake_exe.call(final_args, self.build_dir, env=os_env, disable_cache=True) mlog.log() - h = mlog.green('SUCCEEDED') if proc.returncode == 0 else mlog.red('FAILED') + h = mlog.green('SUCCEEDED') if rc == 0 else mlog.red('FAILED') mlog.log('CMake configuration:', h) - if proc.returncode != 0: + if rc != 0: raise CMakeException('Failed to configure the CMake subproject') + return cmake_exe + def initialise(self, extra_cmake_options: T.List[str]) -> None: # Run configure the old way because doing it # with the server doesn't work for some reason # Additionally, the File API requires a configure anyway - self.configure(extra_cmake_options) + cmake_exe = self.configure(extra_cmake_options) # Continue with the file API If supported if self.cmake_api is CMakeAPI.FILE: @@ -886,14 +952,14 @@ # Load the buildsystem file list cmake_files = self.fileapi.get_cmake_sources() self.bs_files = [x.file for x in cmake_files if not x.is_cmake and not x.is_temp] - self.bs_files = [os.path.relpath(x, self.env.get_source_dir()) for x in self.bs_files] - self.bs_files = list(set(self.bs_files)) + self.bs_files = [relative_to_if_possible(x, Path(self.env.get_source_dir())) for x in self.bs_files] + self.bs_files = list(OrderedSet(self.bs_files)) # Load the codemodel configurations self.codemodel_configs = self.fileapi.get_cmake_configurations() return - with self.client.connect(): + with self.client.connect(cmake_exe): generator = backend_generator_map[self.backend_name] self.client.do_handshake(self.src_dir, self.build_dir, generator, 1) @@ -905,14 +971,16 @@ # Get CMake build system files bs_reply = self.client.query_checked(RequestCMakeInputs(), 'Querying build system files') + assert isinstance(bs_reply, ReplyCMakeInputs) # Now get the CMake code model cm_reply = self.client.query_checked(RequestCodeModel(), 'Querying the CMake code model') + assert isinstance(cm_reply, ReplyCodeModel) src_dir = bs_reply.src_dir self.bs_files = [x.file for x in bs_reply.build_files if not x.is_cmake and not x.is_temp] - self.bs_files = [os.path.relpath(os.path.join(src_dir, x), self.env.get_source_dir()) for x in self.bs_files] - self.bs_files = list(set(self.bs_files)) + self.bs_files = [relative_to_if_possible(src_dir / x, Path(self.env.get_source_dir()), resolve=True) for x in self.bs_files] + self.bs_files = list(OrderedSet(self.bs_files)) self.codemodel_configs = cm_reply.configs def analyse(self) -> None: @@ -924,79 +992,79 @@ self.languages = [] self.targets = [] self.custom_targets = [] - self.trace = CMakeTraceParser(permissive=True) # Parse the trace self.trace.parse(self.raw_trace) # Find all targets added_target_names = [] # type: T.List[str] - for i in self.codemodel_configs: - for j in i.projects: + for i_0 in self.codemodel_configs: + for j_0 in i_0.projects: if not self.project_name: - self.project_name = j.name - for k in j.targets: + self.project_name = j_0.name + for k_0 in j_0.targets: # Avoid duplicate targets from different configurations and known # dummy CMake internal target types - if k.type not in skip_targets and k.name not in added_target_names: - added_target_names += [k.name] - 
self.targets += [ConverterTarget(k, self.env)] + if k_0.type not in skip_targets and k_0.name not in added_target_names: + added_target_names += [k_0.name] + self.targets += [ConverterTarget(k_0, self.env, self.for_machine)] # Add interface targets from trace, if not already present. # This step is required because interface targets were removed from # the CMake file API output. api_target_name_list = [x.name for x in self.targets] - for i in self.trace.targets.values(): - if i.type != 'INTERFACE' or i.name in api_target_name_list or i.imported: + for i_1 in self.trace.targets.values(): + if i_1.type != 'INTERFACE' or i_1.name in api_target_name_list or i_1.imported: continue dummy = CMakeTarget({ - 'name': i.name, + 'name': i_1.name, 'type': 'INTERFACE_LIBRARY', 'sourceDirectory': self.src_dir, 'buildDirectory': self.build_dir, }) - self.targets += [ConverterTarget(dummy, self.env)] + self.targets += [ConverterTarget(dummy, self.env, self.for_machine)] - for i in self.trace.custom_targets: - self.custom_targets += [ConverterCustomTarget(i)] + for i_2 in self.trace.custom_targets: + self.custom_targets += [ConverterCustomTarget(i_2, self.env, self.for_machine)] # generate the output_target_map - for i in [*self.targets, *self.custom_targets]: - self.output_target_map.add(i) + for i_3 in [*self.targets, *self.custom_targets]: + assert isinstance(i_3, (ConverterTarget, ConverterCustomTarget)) + self.output_target_map.add(i_3) # First pass: Basic target cleanup object_libs = [] custom_target_outputs = [] # type: T.List[str] - for i in self.custom_targets: - i.postprocess(self.output_target_map, self.src_dir, self.subdir, custom_target_outputs) - for i in self.targets: - i.postprocess(self.output_target_map, self.src_dir, self.subdir, self.install_prefix, self.trace) - if i.type == 'OBJECT_LIBRARY': - object_libs += [i] - self.languages += [x for x in i.languages if x not in self.languages] + for ctgt in self.custom_targets: + ctgt.postprocess(self.output_target_map, self.src_dir, custom_target_outputs, self.trace) + for tgt in self.targets: + tgt.postprocess(self.output_target_map, self.src_dir, self.subdir, self.install_prefix, self.trace) + if tgt.type == 'OBJECT_LIBRARY': + object_libs += [tgt] + self.languages += [x for x in tgt.languages if x not in self.languages] # Second pass: Detect object library dependencies - for i in self.targets: - i.process_object_libs(object_libs, self._object_lib_workaround()) + for tgt in self.targets: + tgt.process_object_libs(object_libs, self._object_lib_workaround) # Third pass: Reassign dependencies to avoid some loops - for i in self.targets: - i.process_inter_target_dependencies() - for i in self.custom_targets: - i.process_inter_target_dependencies() + for tgt in self.targets: + tgt.process_inter_target_dependencies() + for ctgt in self.custom_targets: + ctgt.process_inter_target_dependencies() # Fourth pass: Remove rassigned dependencies - for i in self.targets: - i.cleanup_dependencies() + for tgt in self.targets: + tgt.cleanup_dependencies() mlog.log('CMake project', mlog.bold(self.project_name), 'has', mlog.bold(str(len(self.targets) + len(self.custom_targets))), 'build targets.') - def pretend_to_be_meson(self) -> CodeBlockNode: + def pretend_to_be_meson(self, options: TargetOptions) -> CodeBlockNode: if not self.project_name: raise CMakeException('CMakeInterpreter was not analysed') - def token(tid: str = 'string', val='') -> Token: - return Token(tid, self.subdir, 0, 0, 0, None, val) + def token(tid: str = 'string', val: TYPE_mixed = '') -> 
Token: + return Token(tid, self.subdir.as_posix(), 0, 0, 0, None, val) def string(value: str) -> StringNode: return StringNode(token(val=value)) @@ -1007,64 +1075,67 @@ def number(value: int) -> NumberNode: return NumberNode(token(val=value)) - def nodeify(value): + def nodeify(value: TYPE_mixed_list) -> BaseNode: if isinstance(value, str): return string(value) + if isinstance(value, Path): + return string(value.as_posix()) elif isinstance(value, bool): - return BooleanNode(token(), value) + return BooleanNode(token(val=value)) elif isinstance(value, int): return number(value) elif isinstance(value, list): return array(value) - return value + elif isinstance(value, BaseNode): + return value + raise RuntimeError('invalid type of value: {} ({})'.format(type(value).__name__, str(value))) def indexed(node: BaseNode, index: int) -> IndexNode: return IndexNode(node, nodeify(index)) - def array(elements) -> ArrayNode: + def array(elements: TYPE_mixed_list) -> ArrayNode: args = ArgumentNode(token()) if not isinstance(elements, list): elements = [args] args.arguments += [nodeify(x) for x in elements if x is not None] return ArrayNode(args, 0, 0, 0, 0) - def function(name: str, args=None, kwargs=None) -> FunctionNode: + def function(name: str, args: T.Optional[TYPE_mixed_list] = None, kwargs: T.Optional[TYPE_mixed_kwargs] = None) -> FunctionNode: args = [] if args is None else args kwargs = {} if kwargs is None else kwargs args_n = ArgumentNode(token()) if not isinstance(args, list): + assert isinstance(args, (str, int, bool, Path, BaseNode)) args = [args] args_n.arguments = [nodeify(x) for x in args if x is not None] - args_n.kwargs = {k: nodeify(v) for k, v in kwargs.items() if v is not None} - func_n = FunctionNode(self.subdir, 0, 0, 0, 0, name, args_n) + args_n.kwargs = {id_node(k): nodeify(v) for k, v in kwargs.items() if v is not None} + func_n = FunctionNode(self.subdir.as_posix(), 0, 0, 0, 0, name, args_n) return func_n - def method(obj: BaseNode, name: str, args=None, kwargs=None) -> MethodNode: + def method(obj: BaseNode, name: str, args: T.Optional[TYPE_mixed_list] = None, kwargs: T.Optional[TYPE_mixed_kwargs] = None) -> MethodNode: args = [] if args is None else args kwargs = {} if kwargs is None else kwargs args_n = ArgumentNode(token()) if not isinstance(args, list): + assert isinstance(args, (str, int, bool, Path, BaseNode)) args = [args] args_n.arguments = [nodeify(x) for x in args if x is not None] - args_n.kwargs = {k: nodeify(v) for k, v in kwargs.items() if v is not None} - return MethodNode(self.subdir, 0, 0, obj, name, args_n) + args_n.kwargs = {id_node(k): nodeify(v) for k, v in kwargs.items() if v is not None} + return MethodNode(self.subdir.as_posix(), 0, 0, obj, name, args_n) def assign(var_name: str, value: BaseNode) -> AssignmentNode: - return AssignmentNode(self.subdir, 0, 0, var_name, value) + return AssignmentNode(self.subdir.as_posix(), 0, 0, var_name, value) # Generate the root code block and the project function call root_cb = CodeBlockNode(token()) root_cb.lines += [function('project', [self.project_name] + self.languages)] # Add the run script for custom commands - run_script = '{}/data/run_ctgt.py'.format(os.path.dirname(os.path.realpath(__file__))) - run_script_var = 'ctgt_run_script' - root_cb.lines += [assign(run_script_var, function('find_program', [[run_script]], {'required': True}))] # Add the targets - processing = [] - processed = {} - name_map = {} + processing = [] # type: T.List[str] + processed = {} # type: T.Dict[str, T.Dict[str, T.Optional[str]]] + 
name_map = {} # type: T.Dict[str, str] def extract_tgt(tgt: T.Union[ConverterTarget, ConverterCustomTarget, CustomTargetReference]) -> IdNode: tgt_name = None @@ -1081,24 +1152,24 @@ raise CMakeException('Cycle in CMake inputs/dependencies detected') processing.append(tgt.name) - def resolve_ctgt_ref(ref: CustomTargetReference) -> BaseNode: + def resolve_ctgt_ref(ref: CustomTargetReference) -> T.Union[IdNode, IndexNode]: tgt_var = extract_tgt(ref) if len(ref.ctgt.outputs) == 1: return tgt_var else: return indexed(tgt_var, ref.index) - def process_target(tgt: ConverterTarget): + def process_target(tgt: ConverterTarget) -> None: detect_cycle(tgt) # First handle inter target dependencies - link_with = [] - objec_libs = [] # type: T.List[IdNode] - sources = [] - generated = [] - generated_filenames = [] - custom_targets = [] - dependencies = [] + link_with = [] # type: T.List[IdNode] + objec_libs = [] # type: T.List[IdNode] + sources = [] # type: T.List[Path] + generated = [] # type: T.List[T.Union[IdNode, IndexNode]] + generated_filenames = [] # type: T.List[str] + custom_targets = [] # type: T.List[ConverterCustomTarget] + dependencies = [] # type: T.List[IdNode] for i in tgt.link_with: assert(isinstance(i, ConverterTarget)) if i.name not in processed: @@ -1117,16 +1188,17 @@ dependencies += [extract_tgt(i)] # Generate the source list and handle generated sources - for i in tgt.sources + tgt.generated: - if isinstance(i, CustomTargetReference): - if i.ctgt.name not in processed: - process_custom_target(i.ctgt) - generated += [resolve_ctgt_ref(i)] - generated_filenames += [i.filename()] - if i.ctgt not in custom_targets: - custom_targets += [i.ctgt] - else: - sources += [i] + sources += tgt.sources + sources += tgt.generated + + for ctgt_ref in tgt.generated_ctgt: + ctgt = ctgt_ref.ctgt + if ctgt.name not in processed: + process_custom_target(ctgt) + generated += [resolve_ctgt_ref(ctgt_ref)] + generated_filenames += [ctgt_ref.filename()] + if ctgt not in custom_targets: + custom_targets += [ctgt] # Add all header files from all used custom targets. This # ensures that all custom targets are built before any @@ -1134,12 +1206,12 @@ # header files are present. This step is necessary because # CMake always ensures that a custom target is executed # before another target if at least one output is used. 
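As a reading aid for the comment above, here is a condensed, hypothetical sketch of the header-collection rule: every header produced by a referenced custom target is added to the generated sources unless it is already present, which forces the custom target to run before anything that might include the header. The function and variable names are invented; the real code that follows additionally resolves each header back to a CustomTargetReference via get_ref().

def collect_custom_target_headers(custom_target_outputs, generated_filenames):
    # custom_target_outputs: one list of output filenames per custom target.
    extra = []
    for outputs in custom_target_outputs:
        for out in outputs:
            if out.endswith(('.h', '.hpp')) and out not in generated_filenames:
                extra.append(out)
                generated_filenames.append(out)
    return extra

print(collect_custom_target_headers([['gen.c', 'gen.h']], ['gen.c']))  # -> ['gen.h']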
- for i in custom_targets: - for j in i.outputs: + for ctgt in custom_targets: + for j in ctgt.outputs: if not is_header(j) or j in generated_filenames: continue - generated += [resolve_ctgt_ref(i.get_ref(j))] + generated += [resolve_ctgt_ref(ctgt.get_ref(Path(j)))] generated_filenames += [j] # Determine the meson function to use for the build target @@ -1155,21 +1227,26 @@ dep_var = '{}_dep'.format(tgt.name) tgt_var = tgt.name + install_tgt = options.get_install(tgt.cmake_name, tgt.install) + # Generate target kwargs tgt_kwargs = { - 'build_by_default': False, - 'link_args': tgt.link_flags + tgt.link_libraries, + 'build_by_default': install_tgt, + 'link_args': options.get_link_args(tgt.cmake_name, tgt.link_flags + tgt.link_libraries), 'link_with': link_with, 'include_directories': id_node(inc_var), - 'install': tgt.install, - 'install_dir': tgt.install_dir, - 'override_options': tgt.override_options, + 'install': install_tgt, + 'override_options': options.get_override_options(tgt.cmake_name, tgt.override_options), 'objects': [method(x, 'extract_all_objects') for x in objec_libs], - } + } # type: TYPE_mixed_kwargs + + # Only set if installed and only override if it is set + if install_tgt and tgt.install_dir: + tgt_kwargs['install_dir'] = tgt.install_dir # Handle compiler args for key, val in tgt.compile_opts.items(): - tgt_kwargs['{}_args'.format(key)] = val + tgt_kwargs['{}_args'.format(key)] = options.get_compile_args(tgt.cmake_name, key, val) # Handle -fPCI, etc if tgt_func == 'executable': @@ -1183,7 +1260,7 @@ 'link_with': id_node(tgt_var), 'compile_args': tgt.public_compile_opts, 'include_directories': id_node(inc_var), - } + } # type: TYPE_mixed_kwargs if dependencies: generated += dependencies @@ -1201,11 +1278,15 @@ tgt_var = None else: src_node = assign(src_var, function('files', sources)) - tgt_node = assign(tgt_var, function(tgt_func, [tgt_var, [id_node(src_var)] + generated], tgt_kwargs)) + tgt_node = assign(tgt_var, function(tgt_func, [tgt_var, id_node(src_var), *generated], tgt_kwargs)) node_list += [src_node, tgt_node] if tgt_func in ['static_library', 'shared_library']: dep_node = assign(dep_var, function('declare_dependency', kwargs=dep_kwargs)) node_list += [dep_node] + elif tgt_func in ['shared_module']: + del dep_kwargs['link_with'] + dep_node = assign(dep_var, function('declare_dependency', kwargs=dep_kwargs)) + node_list += [dep_node] else: dep_var = None @@ -1223,7 +1304,7 @@ detect_cycle(tgt) tgt_var = tgt.name # type: str - def resolve_source(x: T.Any) -> T.Any: + def resolve_source(x: T.Union[str, ConverterTarget, ConverterCustomTarget, CustomTargetReference]) -> T.Union[str, IdNode, IndexNode]: if isinstance(x, ConverterTarget): if x.name not in processed: process_target(x) @@ -1240,12 +1321,13 @@ return x # Generate the command list - command = [] - command += [id_node(run_script_var)] + command = [] # type: T.List[T.Union[str, IdNode, IndexNode]] + command += mesonlib.meson_command + command += ['--internal', 'cmake_run_ctgt'] command += ['-o', '@OUTPUT@'] if tgt.original_outputs: - command += ['-O'] + tgt.original_outputs - command += ['-d', tgt.working_dir] + command += ['-O'] + [x.as_posix() for x in tgt.original_outputs] + command += ['-d', tgt.working_dir.as_posix()] # Generate the commands. 
Subcommands are separated by ';;;' for cmd in tgt.command: @@ -1256,19 +1338,19 @@ 'output': tgt.outputs, 'command': command, 'depends': [resolve_source(x) for x in tgt.depends], - } + } # type: TYPE_mixed_kwargs root_cb.lines += [assign(tgt_var, function('custom_target', [tgt.name], tgt_kwargs))] processed[tgt.name] = {'inc': None, 'src': None, 'dep': None, 'tgt': tgt_var, 'func': 'custom_target'} name_map[tgt.cmake_name] = tgt.name # Now generate the target function calls - for i in self.custom_targets: - if i.name not in processed: - process_custom_target(i) - for i in self.targets: - if i.name not in processed: - process_target(i) + for ctgt in self.custom_targets: + if ctgt.name not in processed: + process_custom_target(ctgt) + for tgt in self.targets: + if tgt.name not in processed: + process_target(tgt) self.generated_targets = processed self.internal_name_map = name_map @@ -1289,6 +1371,3 @@ def target_list(self) -> T.List[str]: return list(self.internal_name_map.keys()) - - def _object_lib_workaround(self) -> bool: - return 'link' in self.linkers and self.backend_name.startswith('vs') diff -Nru meson-0.53.2/mesonbuild/cmake/toolchain.py meson-0.57.0+really0.56.2/mesonbuild/cmake/toolchain.py --- meson-0.53.2/mesonbuild/cmake/toolchain.py 1970-01-01 00:00:00.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/cmake/toolchain.py 2021-01-06 10:39:48.000000000 +0000 @@ -0,0 +1,217 @@ +# Copyright 2020 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .._pathlib import Path +from ..envconfig import CMakeSkipCompilerTest +from ..mesonlib import MachineChoice +from .common import language_map +from .. 
import mlog + +import shutil +import typing as T +from enum import Enum +from textwrap import dedent + +if T.TYPE_CHECKING: + from ..envconfig import MachineInfo, Properties, CMakeVariables + from ..environment import Environment + from ..compilers import Compiler + + +_MESON_TO_CMAKE_MAPPING = { + 'arm': 'ARMCC', + 'armclang': 'ARMClang', + 'clang': 'Clang', + 'clang-cl': 'MSVC', + 'flang': 'Flang', + 'g95': 'G95', + 'gcc': 'GNU', + 'intel': 'Intel', + 'intel-cl': 'MSVC', + 'msvc': 'MSVC', + 'pathscale': 'PathScale', + 'pgi': 'PGI', + 'sun': 'SunPro', +} + +class CMakeExecScope(Enum): + SUBPROJECT = 'subproject' + DEPENDENCY = 'dependency' + +class CMakeToolchain: + def __init__(self, env: 'Environment', for_machine: MachineChoice, exec_scope: CMakeExecScope, out_dir: Path, preload_file: T.Optional[Path] = None) -> None: + self.env = env + self.for_machine = for_machine + self.exec_scope = exec_scope + self.preload_file = preload_file + self.toolchain_file = out_dir / 'CMakeMesonToolchainFile.cmake' + self.toolchain_file = self.toolchain_file.resolve() + self.minfo = self.env.machines[self.for_machine] + self.properties = self.env.properties[self.for_machine] + self.compilers = self.env.coredata.compilers[self.for_machine] + self.cmakevars = self.env.cmakevars[self.for_machine] + + self.variables = self.get_defaults() + self.variables.update(self.cmakevars.get_variables()) + + assert self.toolchain_file.is_absolute() + + def write(self) -> Path: + if not self.toolchain_file.parent.exists(): + self.toolchain_file.parent.mkdir(parents=True) + self.toolchain_file.write_text(self.generate()) + mlog.cmd_ci_include(self.toolchain_file.as_posix()) + return self.toolchain_file + + def get_cmake_args(self) -> T.List[str]: + args = ['-DCMAKE_TOOLCHAIN_FILE=' + self.toolchain_file.as_posix()] + if self.preload_file is not None: + args += ['-DMESON_PRELOAD_FILE=' + self.preload_file.as_posix()] + return args + + def generate(self) -> str: + res = dedent('''\ + ###################################### + ### AUTOMATICALLY GENERATED FILE ### + ###################################### + + # This file was generated from the configuration in the + # relevant meson machine file. See the meson documentation + # https://mesonbuild.com/Machine-files.html for more information + + if(DEFINED MESON_PRELOAD_FILE) + include("${MESON_PRELOAD_FILE}") + endif() + + ''') + + # Escape all \ in the values + for key, value in self.variables.items(): + self.variables[key] = [x.replace('\\', '/') for x in value] + + # Set variables from the current machine config + res += '# Variables from meson\n' + for key, value in self.variables.items(): + res += 'set(' + key + for i in value: + res += ' "{}"'.format(i) + + res += ')\n' + res += '\n' + + # Add the user provided toolchain file + user_file = self.properties.get_cmake_toolchain_file() + if user_file is not None: + res += dedent(''' + # Load the CMake toolchain file specified by the user + include("{}") + + '''.format(user_file.as_posix())) + + return res + + def get_defaults(self) -> T.Dict[str, T.List[str]]: + defaults = {} # type: T.Dict[str, T.List[str]] + + # Do nothing if the user does not want automatic defaults + if not self.properties.get_cmake_defaults(): + return defaults + + # Best effort to map the meson system name to CMAKE_SYSTEM_NAME, which + # is not trivial since CMake lacks a list of all supported + # CMAKE_SYSTEM_NAME values. 
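The mapping described in this comment can be made concrete with a small, illustrative sketch of the cross-build defaults that get_defaults() writes into the generated toolchain file. The input values (an aarch64 Linux machine configuration) are invented for the example and are not taken from any real machine file.

def cross_defaults(system, cpu_family, is_64_bit):
    # Translate meson machine-file values into the CMake cache variables
    # that end up in the generated toolchain file for a cross build.
    system_map = {'android': 'Android', 'linux': 'Linux', 'windows': 'Windows',
                  'freebsd': 'FreeBSD', 'darwin': 'Darwin'}
    return {
        'CMAKE_SYSTEM_NAME': [system_map.get(system, system)],
        'CMAKE_SYSTEM_PROCESSOR': [cpu_family],
        'CMAKE_SIZEOF_VOID_P': ['8' if is_64_bit else '4'],
    }

print(cross_defaults('linux', 'aarch64', True))
# -> {'CMAKE_SYSTEM_NAME': ['Linux'], 'CMAKE_SYSTEM_PROCESSOR': ['aarch64'],
#     'CMAKE_SIZEOF_VOID_P': ['8']}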
+ SYSTEM_MAP = { + 'android': 'Android', + 'linux': 'Linux', + 'windows': 'Windows', + 'freebsd': 'FreeBSD', + 'darwin': 'Darwin', + } # type: T.Dict[str, str] + + # Only set these in a cross build. Otherwise CMake will trip up in native + # builds and thing they are cross (which causes TRY_RUN() to break) + if self.env.is_cross_build(when_building_for=self.for_machine): + defaults['CMAKE_SYSTEM_NAME'] = [SYSTEM_MAP.get(self.minfo.system, self.minfo.system)] + defaults['CMAKE_SYSTEM_PROCESSOR'] = [self.minfo.cpu_family] + + defaults['CMAKE_SIZEOF_VOID_P'] = ['8' if self.minfo.is_64_bit else '4'] + + sys_root = self.properties.get_sys_root() + if sys_root: + defaults['CMAKE_SYSROOT'] = [sys_root] + + # Determine whether CMake the compiler test should be skipped + skip_check = self.properties.get_cmake_skip_compiler_test() == CMakeSkipCompilerTest.ALWAYS + if self.properties.get_cmake_skip_compiler_test() == CMakeSkipCompilerTest.DEP_ONLY and self.exec_scope == CMakeExecScope.DEPENDENCY: + skip_check = True + + def make_abs(exe: str) -> str: + if Path(exe).is_absolute(): + return exe + + p = shutil.which(exe) + if p is None: + return exe + return p + + # Set the compiler variables + for lang, comp_obj in self.compilers.items(): + exe_list = [make_abs(x) for x in comp_obj.get_exelist()] + comp_id = CMakeToolchain.meson_compiler_to_cmake_id(comp_obj) + comp_version = comp_obj.version.upper() + + prefix = 'CMAKE_{}_'.format(language_map.get(lang, lang.upper())) + + if not exe_list: + continue + elif len(exe_list) == 2: + defaults[prefix + 'COMPILER'] = [exe_list[1]] + defaults[prefix + 'COMPILER_LAUNCHER'] = [exe_list[0]] + else: + defaults[prefix + 'COMPILER'] = exe_list + if comp_obj.get_id() == 'clang-cl': + defaults['CMAKE_LINKER'] = comp_obj.get_linker_exelist() + + # Setting the variables after this check cause CMake to skip + # validating the compiler + if not skip_check: + continue + + defaults[prefix + 'COMPILER_ID'] = [comp_id] + defaults[prefix + 'COMPILER_VERSION'] = [comp_version] + #defaults[prefix + 'COMPILER_LOADED'] = ['1'] + defaults[prefix + 'COMPILER_FORCED'] = ['1'] + defaults[prefix + 'COMPILER_WORKS'] = ['TRUE'] + #defaults[prefix + 'ABI_COMPILED'] = ['TRUE'] + + return defaults + + @staticmethod + def meson_compiler_to_cmake_id(cobj: 'Compiler') -> str: + """Translate meson compiler's into CMAKE compiler ID's. + + Most of these can be handled by a simple table lookup, with a few + exceptions. + + Clang and Apple's Clang are both identified as "clang" by meson. To make + things more complicated gcc and vanilla clang both use Apple's ld64 on + macOS. The only way to know for sure is to do an isinstance() check. + """ + from ..compilers import (AppleClangCCompiler, AppleClangCPPCompiler, + AppleClangObjCCompiler, AppleClangObjCPPCompiler) + if isinstance(cobj, (AppleClangCCompiler, AppleClangCPPCompiler, + AppleClangObjCCompiler, AppleClangObjCPPCompiler)): + return 'AppleClang' + # If no mapping, try GNU and hope that the build files don't care + return _MESON_TO_CMAKE_MAPPING.get(cobj.get_id(), 'GNU') diff -Nru meson-0.53.2/mesonbuild/cmake/traceparser.py meson-0.57.0+really0.56.2/mesonbuild/cmake/traceparser.py --- meson-0.53.2/mesonbuild/cmake/traceparser.py 2020-02-25 18:00:46.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/cmake/traceparser.py 2021-01-06 10:39:48.000000000 +0000 @@ -18,36 +18,46 @@ from .common import CMakeException from .generator import parse_generator_expressions from .. 
import mlog +from ..mesonlib import version_compare import typing as T +from .._pathlib import Path import re -import os +import json +import textwrap class CMakeTraceLine: - def __init__(self, file, line, func, args): + def __init__(self, file: Path, line: int, func: str, args: T.List[str]) -> None: self.file = file self.line = line self.func = func.lower() self.args = args - def __repr__(self): + def __repr__(self) -> str: s = 'CMake TRACE: {0}:{1} {2}({3})' return s.format(self.file, self.line, self.func, self.args) class CMakeTarget: - def __init__(self, name, target_type, properties=None, imported: bool = False, tline: T.Optional[CMakeTraceLine] = None): + def __init__( + self, + name: str, + target_type: str, + properties: T.Optional[T.Dict[str, T.List[str]]] = None, + imported: bool = False, + tline: T.Optional[CMakeTraceLine] = None + ): if properties is None: properties = {} - self.name = name - self.type = target_type - self.properties = properties - self.imported = imported - self.tline = tline - self.depends = [] - self.current_bin_dir = None - self.current_src_dir = None + self.name = name + self.type = target_type + self.properties = properties + self.imported = imported + self.tline = tline + self.depends = [] # type: T.List[str] + self.current_bin_dir = None # type: T.Optional[Path] + self.current_src_dir = None # type: T.Optional[Path] - def __repr__(self): + def __repr__(self) -> str: s = 'CMake TARGET:\n -- name: {}\n -- type: {}\n -- imported: {}\n -- properties: {{\n{} }}\n -- tline: {}' propSTR = '' for i in self.properties: @@ -60,23 +70,30 @@ return for key, val in self.properties.items(): self.properties[key] = [x.strip() for x in val] + assert all([';' not in x for x in self.properties[key]]) class CMakeGeneratorTarget(CMakeTarget): - def __init__(self, name): + def __init__(self, name: str) -> None: super().__init__(name, 'CUSTOM', {}) - self.outputs = [] # type: T.List[str] + self.outputs = [] # type: T.List[Path] self.command = [] # type: T.List[T.List[str]] - self.working_dir = None # type: T.Optional[str] + self.working_dir = None # type: T.Optional[Path] class CMakeTraceParser: - def __init__(self, permissive: bool = False): + def __init__(self, cmake_version: str, build_dir: Path, permissive: bool = True) -> None: self.vars = {} # type: T.Dict[str, T.List[str]] self.targets = {} # type: T.Dict[str, CMakeTarget] + self.explicit_headers = set() # type: T.Set[Path] + # T.List of targes that were added with add_custom_command to generate files self.custom_targets = [] # type: T.List[CMakeGeneratorTarget] self.permissive = permissive # type: bool + self.cmake_version = cmake_version # type: str + self.trace_file = 'cmake_trace.txt' + self.trace_file_path = build_dir / self.trace_file + self.trace_format = 'json-v1' if version_compare(cmake_version, '>=3.17') else 'human' # State for delayed command execution. Delayed command execution is realised # with a custom CMake file that overrides some functions and adds some @@ -106,12 +123,41 @@ # meaning here in the trace parser. 
'meson_ps_execute_delayed_calls': self._meson_ps_execute_delayed_calls, 'meson_ps_reload_vars': self._meson_ps_reload_vars, - } + 'meson_ps_disabled_function': self._meson_ps_disabled_function, + } # type: T.Dict[str, T.Callable[[CMakeTraceLine], None]] + def trace_args(self) -> T.List[str]: + arg_map = { + 'human': ['--trace', '--trace-expand'], + 'json-v1': ['--trace-expand', '--trace-format=json-v1'], + } - def parse(self, trace: str) -> None: - # First parse the trace - lexer1 = self._lex_trace(trace) + base_args = ['--no-warn-unused-cli'] + if not self.requires_stderr(): + base_args += ['--trace-redirect={}'.format(self.trace_file)] + + return arg_map[self.trace_format] + base_args + + def requires_stderr(self) -> bool: + return version_compare(self.cmake_version, '<3.16') + + def parse(self, trace: T.Optional[str] = None) -> None: + # First load the trace (if required) + if not self.requires_stderr(): + if not self.trace_file_path.exists and not self.trace_file_path.is_file(): + raise CMakeException('CMake: Trace file "{}" not found'.format(str(self.trace_file_path))) + trace = self.trace_file_path.read_text(errors='ignore') + if not trace: + raise CMakeException('CMake: The CMake trace was not provided or is empty') + + # Second parse the trace + lexer1 = None + if self.trace_format == 'human': + lexer1 = self._lex_trace_human(trace) + elif self.trace_format == 'json-v1': + lexer1 = self._lex_trace_json(trace) + else: + raise CMakeException('CMake: Internal error: Invalid trace format {}. Expected [human, json-v1]'.format(self.trace_format)) # Primary pass -- parse everything for l in lexer1: @@ -150,16 +196,18 @@ return None - def var_to_bool(self, var): - if var not in self.vars: - return False - - if len(self.vars[var]) < 1: + def _str_to_bool(self, expr: T.Union[str, T.List[str]]) -> bool: + if not expr: return False + if isinstance(expr, list): + expr_str = expr[0] + else: + expr_str = expr + expr_str = expr_str.upper() + return expr_str not in ['0', 'OFF', 'NO', 'FALSE', 'N', 'IGNORE'] and not expr_str.endswith('NOTFOUND') - if self.vars[var][0].upper() in ['1', 'ON', 'TRUE']: - return True - return False + def var_to_bool(self, var: str) -> bool: + return self._str_to_bool(self.vars.get(var, [])) def _gen_exception(self, function: str, error: str, tline: CMakeTraceLine) -> None: # Generate an exception if the parser is not in permissive mode @@ -215,7 +263,7 @@ else: self.vars[identifier] = value.split(';') - def _cmake_unset(self, tline: CMakeTraceLine): + def _cmake_unset(self, tline: CMakeTraceLine) -> None: # DOC: https://cmake.org/cmake/help/latest/command/unset.html if len(tline.args) < 1: return self._gen_exception('unset', 'requires at least one argument', tline) @@ -223,11 +271,12 @@ if tline.args[0] in self.vars: del self.vars[tline.args[0]] - def _cmake_add_executable(self, tline: CMakeTraceLine): + def _cmake_add_executable(self, tline: CMakeTraceLine) -> None: # DOC: https://cmake.org/cmake/help/latest/command/add_executable.html args = list(tline.args) # Make a working copy # Make sure the exe is imported + is_imported = True if 'IMPORTED' not in args: return self._gen_exception('add_executable', 'non imported executables are not supported', tline) @@ -236,9 +285,9 @@ if len(args) < 1: return self._gen_exception('add_executable', 'requires at least 1 argument', tline) - self.targets[args[0]] = CMakeTarget(args[0], 'EXECUTABLE', {}) + self.targets[args[0]] = CMakeTarget(args[0], 'EXECUTABLE', {}, tline=tline, imported=is_imported) - def _cmake_add_library(self, 
tline: CMakeTraceLine): + def _cmake_add_library(self, tline: CMakeTraceLine) -> None: # DOC: https://cmake.org/cmake/help/latest/command/add_library.html args = list(tline.args) # Make a working copy @@ -272,9 +321,9 @@ else: self.targets[args[0]] = CMakeTarget(args[0], 'NORMAL', {}, tline=tline) - def _cmake_add_custom_command(self, tline: CMakeTraceLine, name=None): + def _cmake_add_custom_command(self, tline: CMakeTraceLine, name: T.Optional[str] = None) -> None: # DOC: https://cmake.org/cmake/help/latest/command/add_custom_command.html - args = list(tline.args) # Make a working copy + args = self._flatten_args(list(tline.args)) # Commands can be passed as ';' seperated lists if not args: return self._gen_exception('add_custom_command', 'requires at least 1 argument', tline) @@ -290,22 +339,24 @@ target = CMakeGeneratorTarget(name) def handle_output(key: str, target: CMakeGeneratorTarget) -> None: - target.outputs += key.split(';') + target.outputs += [Path(key)] def handle_command(key: str, target: CMakeGeneratorTarget) -> None: if key == 'ARGS': return - target.command[-1] += key.split(';') + target.command[-1] += [key] def handle_depends(key: str, target: CMakeGeneratorTarget) -> None: - target.depends += key.split(';') + target.depends += [key] + working_dir = None def handle_working_dir(key: str, target: CMakeGeneratorTarget) -> None: - if target.working_dir is None: - target.working_dir = key + nonlocal working_dir + if working_dir is None: + working_dir = key else: - target.working_dir += ' ' - target.working_dir += key + working_dir += ' ' + working_dir += key fn = None @@ -327,9 +378,13 @@ if fn is not None: fn(i, target) - target.current_bin_dir = self.var_to_str('MESON_PS_CMAKE_CURRENT_BINARY_DIR') - target.current_src_dir = self.var_to_str('MESON_PS_CMAKE_CURRENT_SOURCE_DIR') - target.outputs = self._guess_files(target.outputs) + cbinary_dir = self.var_to_str('MESON_PS_CMAKE_CURRENT_BINARY_DIR') + csource_dir = self.var_to_str('MESON_PS_CMAKE_CURRENT_SOURCE_DIR') + + target.working_dir = Path(working_dir) if working_dir else None + target.current_bin_dir = Path(cbinary_dir) if cbinary_dir else None + target.current_src_dir = Path(csource_dir) if csource_dir else None + target.outputs = [Path(x) for x in self._guess_files([str(y) for y in target.outputs])] target.depends = self._guess_files(target.depends) target.command = [self._guess_files(x) for x in target.command] @@ -337,7 +392,7 @@ if name: self.targets[name] = target - def _cmake_add_custom_target(self, tline: CMakeTraceLine): + def _cmake_add_custom_target(self, tline: CMakeTraceLine) -> None: # DOC: https://cmake.org/cmake/help/latest/command/add_custom_target.html # We only the first parameter (the target name) is interesting if len(tline.args) < 1: @@ -350,9 +405,7 @@ # DOC: https://cmake.org/cmake/help/latest/command/set_property.html args = list(tline.args) - # We only care for TARGET properties - if args.pop(0) != 'TARGET': - return + scope = args.pop(0) append = False targets = [] @@ -367,7 +420,7 @@ if curr == 'PROPERTY': break - targets.append(curr) + targets += curr.split(';') if not args: return self._gen_exception('set_property', 'faild to parse argument list', tline) @@ -377,11 +430,14 @@ return identifier = args.pop(0) - value = ' '.join(args).split(';') + if self.trace_format == 'human': + value = ' '.join(args).split(';') + else: + value = [y for x in args for y in x.split(';')] if not value: return - for i in targets: + def do_target(tgt: str) -> None: if i not in self.targets: return 
self._gen_exception('set_property', 'TARGET {} not found'.format(i), tline) @@ -393,6 +449,33 @@ else: self.targets[i].properties[identifier] = value + def do_source(src: str) -> None: + if identifier != 'HEADER_FILE_ONLY' or not self._str_to_bool(value): + return + + current_src_dir = self.var_to_str('MESON_PS_CMAKE_CURRENT_SOURCE_DIR') + if not current_src_dir: + mlog.warning(textwrap.dedent('''\ + CMake trace: set_property(SOURCE) called before the preload script was loaded. + Unable to determine CMAKE_CURRENT_SOURCE_DIR. This can lead to build errors. + ''')) + current_src_dir = '.' + + cur_p = Path(current_src_dir) + src_p = Path(src) + + if not src_p.is_absolute(): + src_p = cur_p / src_p + self.explicit_headers.add(src_p) + + if scope == 'TARGET': + for i in targets: + do_target(i) + elif scope == 'SOURCE': + files = self._guess_files(targets) + for i in files: + do_source(i) + def _cmake_set_target_properties(self, tline: CMakeTraceLine) -> None: # DOC: https://cmake.org/cmake/help/latest/command/set_target_properties.html args = list(tline.args) @@ -417,21 +500,27 @@ # Neither of these is awesome for obvious reasons. I'm going to try # option 1 first and fall back to 2, as 1 requires less code and less # synchroniztion for cmake changes. + # + # With the JSON output format, introduced in CMake 3.17, spaces are + # handled properly and we don't have to do either options arglist = [] # type: T.List[T.Tuple[str, T.List[str]]] - name = args.pop(0) - values = [] - prop_regex = re.compile(r'^[A-Z_]+$') - for a in args: - if prop_regex.match(a): - if values: - arglist.append((name, ' '.join(values).split(';'))) - name = a - values = [] - else: - values.append(a) - if values: - arglist.append((name, ' '.join(values).split(';'))) + if self.trace_format == 'human': + name = args.pop(0) + values = [] # type: T.List[str] + prop_regex = re.compile(r'^[A-Z_]+$') + for a in args: + if prop_regex.match(a): + if values: + arglist.append((name, ' '.join(values).split(';'))) + name = a + values = [] + else: + values.append(a) + if values: + arglist.append((name, ' '.join(values).split(';'))) + else: + arglist = [(x[0], x[1].split(';')) for x in zip(args[::2], args[1::2])] for name, value in arglist: for i in targets: @@ -474,7 +563,7 @@ # DOC: https://cmake.org/cmake/help/latest/command/target_link_libraries.html self._parse_common_target_options('target_link_options', 'LINK_LIBRARIES', 'INTERFACE_LINK_LIBRARIES', tline) - def _parse_common_target_options(self, func: str, private_prop: str, interface_prop: str, tline: CMakeTraceLine, ignore: T.Optional[T.List[str]] = None, paths: bool = False): + def _parse_common_target_options(self, func: str, private_prop: str, interface_prop: str, tline: CMakeTraceLine, ignore: T.Optional[T.List[str]] = None, paths: bool = False) -> None: if ignore is None: ignore = ['BEFORE'] @@ -500,10 +589,10 @@ continue if mode in ['INTERFACE', 'LINK_INTERFACE_LIBRARIES', 'PUBLIC', 'LINK_PUBLIC']: - interface += [i] + interface += i.split(';') if mode in ['PUBLIC', 'PRIVATE', 'LINK_PRIVATE']: - private += [i] + private += i.split(';') if paths: interface = self._guess_files(interface) @@ -512,11 +601,11 @@ interface = [x for x in interface if x] private = [x for x in private if x] - for i in [(private_prop, private), (interface_prop, interface)]: - if not i[0] in self.targets[target].properties: - self.targets[target].properties[i[0]] = [] + for j in [(private_prop, private), (interface_prop, interface)]: + if not j[0] in self.targets[target].properties: + 
self.targets[target].properties[j[0]] = [] - self.targets[target].properties[i[0]] += i[1] + self.targets[target].properties[j[0]] += j[1] def _meson_ps_execute_delayed_calls(self, tline: CMakeTraceLine) -> None: for l in self.stored_commands: @@ -530,7 +619,14 @@ def _meson_ps_reload_vars(self, tline: CMakeTraceLine) -> None: self.delayed_commands = self.get_cmake_var('MESON_PS_DELAYED_CALLS') - def _lex_trace(self, trace): + def _meson_ps_disabled_function(self, tline: CMakeTraceLine) -> None: + args = list(tline.args) + if not args: + mlog.error('Invalid preload.cmake script! At least one argument to `meson_ps_disabled_function` is expected') + return + mlog.warning('The CMake function "{}" was disabled to avoid compatibility issues with Meson.'.format(args[0])) + + def _lex_trace_human(self, trace: str) -> T.Generator[CMakeTraceLine, None, None]: # The trace format is: '<file>(<line>): <func>(<args -- can be multiline>)\n' reg_tline = re.compile(r'\s*(.*\.(cmake|txt))\(([0-9]+)\):\s*(\w+)\(([\s\S]*?) ?\)\s*\n', re.MULTILINE) reg_other = re.compile(r'[^\n]*\n') @@ -553,38 +649,79 @@ func = mo_file_line.group(4) args = mo_file_line.group(5) args = parse_generator_expressions(args) - args = args.split(' ') - args = list(map(lambda x: x.strip(), args)) + argl = args.split(' ') + argl = list(map(lambda x: x.strip(), argl)) + + yield CMakeTraceLine(Path(file), int(line), func, argl) - yield CMakeTraceLine(file, line, func, args) + def _lex_trace_json(self, trace: str) -> T.Generator[CMakeTraceLine, None, None]: + lines = trace.splitlines(keepends=False) + lines.pop(0) # The first line is the version + for i in lines: + data = json.loads(i) + assert isinstance(data['file'], str) + assert isinstance(data['line'], int) + assert isinstance(data['cmd'], str) + assert isinstance(data['args'], list) + args = data['args'] + for j in args: + assert isinstance(j, str) + args = [parse_generator_expressions(x) for x in args] + yield CMakeTraceLine(Path(data['file']), data['line'], data['cmd'], args) + + def _flatten_args(self, args: T.List[str]) -> T.List[str]: + # Split lists in arguments + res = [] # type: T.List[str] + for i in args: + res += i.split(';') + return res def _guess_files(self, broken_list: T.List[str]) -> T.List[str]: - #Try joining file paths that contain spaces + # Nothing has to be done for newer formats + if self.trace_format != 'human': + return broken_list + + # Try joining file paths that contain spaces - reg_start = re.compile(r'^([A-Za-z]:)?/.*/[^./]+$') + reg_start = re.compile(r'^([A-Za-z]:)?/(.*/)*[^./]+$') reg_end = re.compile(r'^.*\.[a-zA-Z]+$') fixed_list = [] # type: T.List[str] curr_str = None # type: T.Optional[str] + path_found = False # type: bool for i in broken_list: if curr_str is None: curr_str = i - elif os.path.isfile(curr_str): + path_found = False + elif Path(curr_str).is_file(): # Abort concatenation if curr_str is an existing file fixed_list += [curr_str] curr_str = i + path_found = False elif not reg_start.match(curr_str): # Abort concatenation if curr_str no longer matches the regex fixed_list += [curr_str] curr_str = i - elif reg_end.match(i) or os.path.exists('{} {}'.format(curr_str, i)): + path_found = False + elif reg_end.match(i): # File detected curr_str = '{} {}'.format(curr_str, i) fixed_list += [curr_str] curr_str = None + path_found = False + elif Path('{} {}'.format(curr_str, i)).exists(): + # Path detected + curr_str = '{} {}'.format(curr_str, i) + path_found = True + elif path_found: + # Add path to fixed_list after ensuring the whole path is in curr_str + fixed_list += [curr_str] +
curr_str = i + path_found = False else: curr_str = '{} {}'.format(curr_str, i) + path_found = False if curr_str: fixed_list += [curr_str] diff -Nru meson-0.53.2/mesonbuild/compilers/c_function_attributes.py meson-0.57.0+really0.56.2/mesonbuild/compilers/c_function_attributes.py --- meson-0.53.2/mesonbuild/compilers/c_function_attributes.py 2019-08-28 17:15:39.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/compilers/c_function_attributes.py 2020-08-15 16:27:05.000000000 +0000 @@ -56,6 +56,8 @@ 'int foo(const char * p, ...) __attribute__((format(printf, 1, 2)));', 'format_arg': 'char * foo(const char * p) __attribute__((format_arg(1)));', + 'force_align_arg_pointer': + '__attribute__((force_align_arg_pointer)) int foo(void) { return 0; }', 'gnu_inline': 'inline __attribute__((gnu_inline)) int foo(void) { return 0; }', 'hot': diff -Nru meson-0.53.2/mesonbuild/compilers/compilers.py meson-0.57.0+really0.56.2/mesonbuild/compilers/compilers.py --- meson-0.53.2/mesonbuild/compilers/compilers.py 2020-02-25 18:00:46.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/compilers/compilers.py 2021-01-06 10:39:48.000000000 +0000 @@ -12,40 +12,50 @@ # See the License for the specific language governing permissions and # limitations under the License. +import abc import contextlib, os.path, re, tempfile -import collections.abc +import enum +import itertools import typing as T +from functools import lru_cache -from ..linkers import StaticLinker, GnuLikeDynamicLinkerMixin, SolarisDynamicLinker from .. import coredata from .. import mlog from .. import mesonlib +from ..linkers import LinkerEnvVarsMixin from ..mesonlib import ( EnvironmentException, MachineChoice, MesonException, - Popen_safe, split_args + Popen_safe, split_args, LibType ) from ..envconfig import ( - Properties, + get_env_var ) +from ..arglist import CompilerArgs + if T.TYPE_CHECKING: + from ..build import BuildTarget from ..coredata import OptionDictType from ..envconfig import MachineInfo from ..environment import Environment from ..linkers import DynamicLinker # noqa: F401 + from ..dependencies import Dependency + + CompilerType = T.TypeVar('CompilerType', bound=Compiler) + _T = T.TypeVar('_T') """This file contains the data files of all compilers Meson knows about. To support a new compiler, add its information below. Also add corresponding autodetection code in environment.py.""" -header_suffixes = ('h', 'hh', 'hpp', 'hxx', 'H', 'ipp', 'moc', 'vapi', 'di') -obj_suffixes = ('o', 'obj', 'res') -lib_suffixes = ('a', 'lib', 'dll', 'dll.a', 'dylib', 'so') +header_suffixes = ('h', 'hh', 'hpp', 'hxx', 'H', 'ipp', 'moc', 'vapi', 'di') # type: T.Tuple[str, ...] +obj_suffixes = ('o', 'obj', 'res') # type: T.Tuple[str, ...] +lib_suffixes = ('a', 'lib', 'dll', 'dll.a', 'dylib', 'so') # type: T.Tuple[str, ...] # Mapping of language to suffixes of files that should always be in that language # This means we can't include .h headers here since they could be C, C++, ObjC, etc. 
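The `_lex_trace_json` generator added in the traceparser.py hunks above consumes CMake's `json-v1` trace output: one JSON object per line, after a leading version record. A minimal, self-contained sketch of that decoding, using an invented two-line trace rather than real CMake output:

```python
import json
from pathlib import Path

# Hypothetical trace: a version header followed by one traced call, shaped
# like the json-v1 records handled by _lex_trace_json above.
sample_trace = '\n'.join([
    '{"version": {"major": 1, "minor": 1}}',
    '{"file": "/src/CMakeLists.txt", "line": 3, "cmd": "set", "args": ["FOO", "ON"]}',
])

def lex_json_trace(trace: str):
    lines = trace.splitlines()
    lines.pop(0)  # the first line only carries the format version
    for entry in lines:
        data = json.loads(entry)
        # Each record carries the file, line, command name and raw argument list.
        yield Path(data['file']), data['line'], data['cmd'], data['args']

for file, line, cmd, args in lex_json_trace(sample_trace):
    print(file, line, cmd, args)   # /src/CMakeLists.txt 3 set ['FOO', 'ON']
```

The `human` format, by contrast, still has to be recovered with the regex-based `_lex_trace_human` and the `_guess_files` heuristic above, because space-separated arguments are ambiguous there.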
lang_suffixes = { 'c': ('c',), - 'cpp': ('cpp', 'cc', 'cxx', 'c++', 'hh', 'hpp', 'ipp', 'hxx'), + 'cpp': ('cpp', 'cc', 'cxx', 'c++', 'hh', 'hpp', 'ipp', 'hxx', 'ino'), 'cuda': ('cu',), # f90, f95, f03, f08 are for free-form fortran ('f90' recommended) # f, for, ftn, fpp are for fixed-form fortran ('f' or 'for' recommended) @@ -58,23 +68,26 @@ 'cs': ('cs',), 'swift': ('swift',), 'java': ('java',), -} +} # type: T.Dict[str, T.Tuple[str, ...]] all_languages = lang_suffixes.keys() -cpp_suffixes = lang_suffixes['cpp'] + ('h',) -c_suffixes = lang_suffixes['c'] + ('h',) +cpp_suffixes = lang_suffixes['cpp'] + ('h',) # type: T.Tuple[str, ...] +c_suffixes = lang_suffixes['c'] + ('h',) # type: T.Tuple[str, ...] # List of languages that by default consume and output libraries following the # C ABI; these can generally be used interchangebly -clib_langs = ('objcpp', 'cpp', 'objc', 'c', 'fortran',) +clib_langs = ('objcpp', 'cpp', 'objc', 'c', 'fortran',) # type: T.Tuple[str, ...] # List of languages that can be linked with C code directly by the linker # used in build.py:process_compilers() and build.py:get_dynamic_linker() -clink_langs = ('d', 'cuda') + clib_langs -clink_suffixes = () +clink_langs = ('d', 'cuda') + clib_langs # type: T.Tuple[str, ...] +clink_suffixes = tuple() # type: T.Tuple[str, ...] for _l in clink_langs + ('vala',): clink_suffixes += lang_suffixes[_l] clink_suffixes += ('h', 'll', 's') +all_suffixes = set(itertools.chain(*lang_suffixes.values(), clink_suffixes)) # type: T.Set[str] # Languages that should use LDFLAGS arguments when linking. -languages_using_ldflags = ('objcpp', 'cpp', 'objc', 'c', 'fortran', 'd', 'cuda') +languages_using_ldflags = {'objcpp', 'cpp', 'objc', 'c', 'fortran', 'd', 'cuda'} # type: T.Set[str] +# Languages that should use CPPFLAGS arguments when linking. +languages_using_cppflags = {'c', 'cpp', 'objc', 'objcpp'} # type: T.Set[str] soregex = re.compile(r'.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$') # Environment variables that each lang uses. @@ -86,16 +99,14 @@ 'fortran': 'FFLAGS', 'd': 'DFLAGS', 'vala': 'VALAFLAGS', - 'rust': 'RUSTFLAGS'} + 'rust': 'RUSTFLAGS'} # type: T.Dict[str, str] -unixy_compiler_internal_libs = ('m', 'c', 'pthread', 'dl', 'rt') -# execinfo is a compiler lib on FreeBSD and NetBSD -if mesonlib.is_freebsd() or mesonlib.is_netbsd(): - unixy_compiler_internal_libs += ('execinfo',) +cexe_mapping = {'c': 'CC', + 'cpp': 'CXX'} # All these are only for C-linkable languages; see `clink_langs` above. -def sort_clink(lang): +def sort_clink(lang: str) -> int: ''' Sorting function to sort the list of languages according to reversed(compilers.clink_langs) and append the unknown langs in the end. 
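As a usage illustration of the ordering that `sort_clink` establishes, a small sketch; the `clink_langs` tuple is copied from the definitions above, and the sample language list is invented:

```python
clink_langs = ('d', 'cuda', 'objcpp', 'cpp', 'objc', 'c', 'fortran')

def sort_clink(lang: str) -> int:
    # Unknown languages get a positive key so they sort after everything else;
    # known languages are ordered according to reversed(clink_langs).
    if lang not in clink_langs:
        return 1
    return -clink_langs.index(lang)

langs = ['c', 'vala', 'cpp', 'fortran', 'd']
print(sorted(langs, key=sort_clink))  # ['fortran', 'c', 'cpp', 'd', 'vala']
```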
@@ -106,36 +117,40 @@ return 1 return -clink_langs.index(lang) -def is_header(fname): - if hasattr(fname, 'fname'): +def is_header(fname: 'mesonlib.FileOrString') -> bool: + if isinstance(fname, mesonlib.File): fname = fname.fname suffix = fname.split('.')[-1] return suffix in header_suffixes -def is_source(fname): - if hasattr(fname, 'fname'): +def is_source(fname: 'mesonlib.FileOrString') -> bool: + if isinstance(fname, mesonlib.File): fname = fname.fname suffix = fname.split('.')[-1].lower() return suffix in clink_suffixes -def is_assembly(fname): - if hasattr(fname, 'fname'): +def is_assembly(fname: 'mesonlib.FileOrString') -> bool: + if isinstance(fname, mesonlib.File): fname = fname.fname return fname.split('.')[-1].lower() == 's' -def is_llvm_ir(fname): - if hasattr(fname, 'fname'): +def is_llvm_ir(fname: 'mesonlib.FileOrString') -> bool: + if isinstance(fname, mesonlib.File): fname = fname.fname return fname.split('.')[-1] == 'll' -def is_object(fname): - if hasattr(fname, 'fname'): - fname = fname.fname +@lru_cache(maxsize=None) +def cached_by_name(fname: 'mesonlib.FileOrString') -> bool: suffix = fname.split('.')[-1] return suffix in obj_suffixes -def is_library(fname): - if hasattr(fname, 'fname'): +def is_object(fname: 'mesonlib.FileOrString') -> bool: + if isinstance(fname, mesonlib.File): + fname = fname.fname + return cached_by_name(fname) + +def is_library(fname: 'mesonlib.FileOrString') -> bool: + if isinstance(fname, mesonlib.File): fname = fname.fname if soregex.match(fname): @@ -144,19 +159,34 @@ suffix = fname.split('.')[-1] return suffix in lib_suffixes +def is_known_suffix(fname: 'mesonlib.FileOrString') -> bool: + if isinstance(fname, mesonlib.File): + fname = fname.fname + suffix = fname.split('.')[-1] + + return suffix in all_suffixes + + +class CompileCheckMode(enum.Enum): + + PREPROCESS = 'preprocess' + COMPILE = 'compile' + LINK = 'link' + + cuda_buildtype_args = {'plain': [], 'debug': [], 'debugoptimized': [], 'release': [], 'minsize': [], - } + } # type: T.Dict[str, T.List[str]] java_buildtype_args = {'plain': [], 'debug': ['-g'], 'debugoptimized': ['-g'], 'release': [], 'minsize': [], 'custom': [], - } + } # type: T.Dict[str, T.List[str]] rust_buildtype_args = {'plain': [], 'debug': [], @@ -164,31 +194,31 @@ 'release': [], 'minsize': [], 'custom': [], - } + } # type: T.Dict[str, T.List[str]] d_gdc_buildtype_args = {'plain': [], 'debug': [], 'debugoptimized': ['-finline-functions'], - 'release': ['-frelease', '-finline-functions'], + 'release': ['-finline-functions'], 'minsize': [], 'custom': [], - } + } # type: T.Dict[str, T.List[str]] d_ldc_buildtype_args = {'plain': [], 'debug': [], 'debugoptimized': ['-enable-inlining', '-Hkeep-all-bodies'], - 'release': ['-release', '-enable-inlining', '-Hkeep-all-bodies'], + 'release': ['-enable-inlining', '-Hkeep-all-bodies'], 'minsize': [], 'custom': [], - } + } # type: T.Dict[str, T.List[str]] d_dmd_buildtype_args = {'plain': [], 'debug': [], 'debugoptimized': ['-inline'], - 'release': ['-release', '-inline'], + 'release': ['-inline'], 'minsize': [], 'custom': [], - } + } # type: T.Dict[str, T.List[str]] mono_buildtype_args = {'plain': [], 'debug': [], @@ -196,7 +226,7 @@ 'release': ['-optimize+'], 'minsize': [], 'custom': [], - } + } # type: T.Dict[str, T.List[str]] swift_buildtype_args = {'plain': [], 'debug': [], @@ -204,14 +234,14 @@ 'release': [], 'minsize': [], 'custom': [], - } + } # type: T.Dict[str, T.List[str]] gnu_winlibs = ['-lkernel32', '-luser32', '-lgdi32', '-lwinspool', '-lshell32', - '-lole32', 
'-loleaut32', '-luuid', '-lcomdlg32', '-ladvapi32'] + '-lole32', '-loleaut32', '-luuid', '-lcomdlg32', '-ladvapi32'] # type: T.List[str] msvc_winlibs = ['kernel32.lib', 'user32.lib', 'gdi32.lib', 'winspool.lib', 'shell32.lib', 'ole32.lib', 'oleaut32.lib', - 'uuid.lib', 'comdlg32.lib', 'advapi32.lib'] + 'uuid.lib', 'comdlg32.lib', 'advapi32.lib'] # type: T.List[str] clike_optimization_args = {'0': [], 'g': [], @@ -219,7 +249,7 @@ '2': ['-O2'], '3': ['-O3'], 's': ['-Os'], - } + } # type: T.Dict[str, T.List[str]] cuda_optimization_args = {'0': [], 'g': ['-O0'], @@ -227,13 +257,13 @@ '2': ['-O2'], '3': ['-O3'], 's': ['-O3'] - } + } # type: T.Dict[str, T.List[str]] cuda_debug_args = {False: [], - True: ['-g']} + True: ['-g']} # type: T.Dict[bool, T.List[str]] clike_debug_args = {False: [], - True: ['-g']} + True: ['-g']} # type: T.Dict[bool, T.List[str]] base_options = {'b_pch': coredata.UserBooleanOption('Use precompiled headers', True), 'b_lto': coredata.UserBooleanOption('Use link time optimization', False), @@ -259,20 +289,23 @@ 'b_bitcode': coredata.UserBooleanOption('Generate and embed bitcode (only macOS/iOS/tvOS)', False), 'b_vscrt': coredata.UserComboOption('VS run-time library type to use.', - ['none', 'md', 'mdd', 'mt', 'mtd', 'from_buildtype'], + ['none', 'md', 'mdd', 'mt', 'mtd', 'from_buildtype', 'static_from_buildtype'], 'from_buildtype'), - } + } # type: OptionDictType -def option_enabled(boptions, options, option): +def option_enabled(boptions: T.List[str], options: 'OptionDictType', + option: str) -> bool: try: if option not in boptions: return False - return options[option].value + ret = options[option].value + assert isinstance(ret, bool), 'must return bool' # could also be str + return ret except KeyError: return False -def get_base_compile_args(options, compiler): - args = [] +def get_base_compile_args(options: 'OptionDictType', compiler: 'Compiler') -> T.List[str]: + args = [] # type T.List[str] try: if options['b_lto'].value: args.extend(compiler.get_lto_compile_args()) @@ -303,7 +336,7 @@ if (options['b_ndebug'].value == 'true' or (options['b_ndebug'].value == 'if-release' and options['buildtype'].value in {'release', 'plain'})): - args += ['-DNDEBUG'] + args += compiler.get_disable_assert_args() except KeyError: pass # This does not need a try...except @@ -320,8 +353,9 @@ pass return args -def get_base_link_args(options, linker, is_shared_module): - args = [] +def get_base_link_args(options: 'OptionDictType', linker: 'Compiler', + is_shared_module: bool) -> T.List[str]: + args = [] # type: T.List[str] try: if options['b_lto'].value: args.extend(linker.get_lto_link_args()) @@ -355,9 +389,10 @@ # -Wl,-dead_strip_dylibs is incompatible with bitcode args.extend(linker.get_asneeded_args()) - # Apple's ld (the only one that supports bitcode) does not like any - # -undefined arguments at all, so don't pass these when using bitcode + # Apple's ld (the only one that supports bitcode) does not like -undefined + # arguments or -headerpad_max_install_names when bitcode is enabled if not bitcode: + args.extend(linker.headerpad_args()) if (not is_shared_module and option_enabled(linker.base_options, options, 'b_lundef')): args.extend(linker.no_undefined_link_args()) @@ -380,317 +415,59 @@ pass class RunResult: - def __init__(self, compiled, returncode=999, stdout='UNDEFINED', stderr='UNDEFINED'): + def __init__(self, compiled: bool, returncode: int = 999, + stdout: str = 'UNDEFINED', stderr: str = 'UNDEFINED'): self.compiled = compiled self.returncode = returncode self.stdout = 
stdout self.stderr = stderr -class CompilerArgs(collections.abc.MutableSequence): - ''' - List-like class that manages a list of compiler arguments. Should be used - while constructing compiler arguments from various sources. Can be - operated with ordinary lists, so this does not need to be used - everywhere. - - All arguments must be inserted and stored in GCC-style (-lfoo, -Idir, etc) - and can converted to the native type of each compiler by using the - .to_native() method to which you must pass an instance of the compiler or - the compiler class. - - New arguments added to this class (either with .append(), .extend(), or +=) - are added in a way that ensures that they override previous arguments. - For example: - - >>> a = ['-Lfoo', '-lbar'] - >>> a += ['-Lpho', '-lbaz'] - >>> print(a) - ['-Lpho', '-Lfoo', '-lbar', '-lbaz'] - - Arguments will also be de-duped if they can be de-duped safely. - - Note that because of all this, this class is not commutative and does not - preserve the order of arguments if it is safe to not. For example: - >>> ['-Ifoo', '-Ibar'] + ['-Ifez', '-Ibaz', '-Werror'] - ['-Ifez', '-Ibaz', '-Ifoo', '-Ibar', '-Werror'] - >>> ['-Ifez', '-Ibaz', '-Werror'] + ['-Ifoo', '-Ibar'] - ['-Ifoo', '-Ibar', '-Ifez', '-Ibaz', '-Werror'] - - ''' - # NOTE: currently this class is only for C-like compilers, but it can be - # extended to other languages easily. Just move the following to the - # compiler class and initialize when self.compiler is set. - - # Arg prefixes that override by prepending instead of appending - prepend_prefixes = ('-I', '-L') - # Arg prefixes and args that must be de-duped by returning 2 - dedup2_prefixes = ('-I', '-isystem', '-L', '-D', '-U') - dedup2_suffixes = () - dedup2_args = () - # Arg prefixes and args that must be de-duped by returning 1 - # - # NOTE: not thorough. A list of potential corner cases can be found in - # https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038 - dedup1_prefixes = ('-l', '-Wl,-l', '-Wl,--export-dynamic') - dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a') - # Match a .so of the form path/to/libfoo.so.0.1.0 - # Only UNIX shared libraries require this. Others have a fixed extension. 
- dedup1_regex = re.compile(r'([\/\\]|\A)lib.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$') - dedup1_args = ('-c', '-S', '-E', '-pipe', '-pthread') - # In generate_link() we add external libs without de-dup, but we must - # *always* de-dup these because they're special arguments to the linker - always_dedup_args = tuple('-l' + lib for lib in unixy_compiler_internal_libs) - - def __init__(self, compiler: T.Union['Compiler', StaticLinker], - iterable: T.Optional[T.Iterable[str]] = None): - self.compiler = compiler - self.__container = list(iterable) if iterable is not None else [] # type: T.List[str] - - @T.overload # noqa: F811 - def __getitem__(self, index: int) -> str: # noqa: F811 - pass - - @T.overload # noqa: F811 - def __getitem__(self, index: slice) -> T.List[str]: # noqa: F811 - pass - - def __getitem__(self, index): # noqa: F811 - return self.__container[index] - - @T.overload # noqa: F811 - def __setitem__(self, index: int, value: str) -> None: # noqa: F811 - pass - - @T.overload # noqa: F811 - def __setitem__(self, index: slice, value: T.List[str]) -> None: # noqa: F811 - pass - - def __setitem__(self, index, value) -> None: # noqa: F811 - self.__container[index] = value - - def __delitem__(self, index: T.Union[int, slice]) -> None: - del self.__container[index] - - def __len__(self) -> int: - return len(self.__container) - - def insert(self, index: int, value: str) -> None: - self.__container.insert(index, value) - - def copy(self) -> 'CompilerArgs': - return CompilerArgs(self.compiler, self.__container.copy()) - - @classmethod - def _can_dedup(cls, arg): - ''' - Returns whether the argument can be safely de-duped. This is dependent - on three things: - - a) Whether an argument can be 'overridden' by a later argument. For - example, -DFOO defines FOO and -UFOO undefines FOO. In this case, we - can safely remove the previous occurrence and add a new one. The same - is true for include paths and library paths with -I and -L. For - these we return `2`. See `dedup2_prefixes` and `dedup2_args`. - b) Arguments that once specified cannot be undone, such as `-c` or - `-pipe`. New instances of these can be completely skipped. For these - we return `1`. See `dedup1_prefixes` and `dedup1_args`. - c) Whether it matters where or how many times on the command-line - a particular argument is present. This can matter for symbol - resolution in static or shared libraries, so we cannot de-dup or - reorder them. For these we return `0`. This is the default. - - In addition to these, we handle library arguments specially. - With GNU ld, we surround library arguments with -Wl,--start/end-group - to recursively search for symbols in the libraries. This is not needed - with other linkers. - ''' - # A standalone argument must never be deduplicated because it is - # defined by what comes _after_ it. Thus dedupping this: - # -D FOO -D BAR - # would yield either - # -D FOO BAR - # or - # FOO -D BAR - # both of which are invalid. 
- if arg in cls.dedup2_prefixes: - return 0 - if arg in cls.dedup2_args or \ - arg.startswith(cls.dedup2_prefixes) or \ - arg.endswith(cls.dedup2_suffixes): - return 2 - if arg in cls.dedup1_args or \ - arg.startswith(cls.dedup1_prefixes) or \ - arg.endswith(cls.dedup1_suffixes) or \ - re.search(cls.dedup1_regex, arg): - return 1 - return 0 - - @classmethod - def _should_prepend(cls, arg): - if arg.startswith(cls.prepend_prefixes): - return True - return False - - def to_native(self, copy: bool = False) -> T.List[str]: - # Check if we need to add --start/end-group for circular dependencies - # between static libraries, and for recursively searching for symbols - # needed by static libraries that are provided by object files or - # shared libraries. - if copy: - new = self.copy() - else: - new = self - # This covers all ld.bfd, ld.gold, ld.gold, and xild on Linux, which - # all act like (or are) gnu ld - # TODO: this could probably be added to the DynamicLinker instead - if (isinstance(self.compiler, Compiler) and - self.compiler.linker is not None and - isinstance(self.compiler.linker, (GnuLikeDynamicLinkerMixin, SolarisDynamicLinker))): - group_start = -1 - group_end = -1 - for i, each in enumerate(new): - if not each.startswith(('-Wl,-l', '-l')) and not each.endswith('.a') and \ - not soregex.match(each): - continue - group_end = i - if group_start < 0: - # First occurrence of a library - group_start = i - if group_start >= 0: - # Last occurrence of a library - new.insert(group_end + 1, '-Wl,--end-group') - new.insert(group_start, '-Wl,--start-group') - # Remove system/default include paths added with -isystem - if hasattr(self.compiler, 'get_default_include_dirs'): - default_dirs = self.compiler.get_default_include_dirs() - bad_idx_list = [] # type: T.List[int] - for i, each in enumerate(new): - # Remove the -isystem and the path if the path is a default path - if (each == '-isystem' and - i < (len(new) - 1) and - new[i + 1] in default_dirs): - bad_idx_list += [i, i + 1] - elif each.startswith('-isystem=') and each[9:] in default_dirs: - bad_idx_list += [i] - elif each.startswith('-isystem') and each[8:] in default_dirs: - bad_idx_list += [i] - for i in reversed(bad_idx_list): - new.pop(i) - return self.compiler.unix_args_to_native(new.__container) - - def append_direct(self, arg: str) -> None: - ''' - Append the specified argument without any reordering or de-dup except - for absolute paths to libraries, etc, which can always be de-duped - safely. 
- ''' - if os.path.isabs(arg): - self.append(arg) - else: - self.__container.append(arg) - def extend_direct(self, iterable: T.Iterable[str]) -> None: - ''' - Extend using the elements in the specified iterable without any - reordering or de-dup except for absolute paths where the order of - include search directories is not relevant - ''' - for elem in iterable: - self.append_direct(elem) - - def extend_preserving_lflags(self, iterable: T.Iterable[str]) -> None: - normal_flags = [] - lflags = [] - for i in iterable: - if i not in self.always_dedup_args and (i.startswith('-l') or i.startswith('-L')): - lflags.append(i) - else: - normal_flags.append(i) - self.extend(normal_flags) - self.extend_direct(lflags) - - def __add__(self, args: T.Iterable[str]) -> 'CompilerArgs': - new = self.copy() - new += args - return new - - def __iadd__(self, args: T.Iterable[str]) -> 'CompilerArgs': - ''' - Add two CompilerArgs while taking into account overriding of arguments - and while preserving the order of arguments as much as possible - ''' - pre = [] # type: T.List[str] - post = [] # type: T.List[str] - if not isinstance(args, collections.abc.Iterable): - raise TypeError('can only concatenate Iterable[str] (not "{}") to CompilerArgs'.format(args)) - for arg in args: - # If the argument can be de-duped, do it either by removing the - # previous occurrence of it and adding a new one, or not adding the - # new occurrence. - dedup = self._can_dedup(arg) - if dedup == 1: - # Argument already exists and adding a new instance is useless - if arg in self or arg in pre or arg in post: - continue - if dedup == 2: - # Remove all previous occurrences of the arg and add it anew - if arg in self: - self.remove(arg) - if arg in pre: - pre.remove(arg) - if arg in post: - post.remove(arg) - if self._should_prepend(arg): - pre.append(arg) - else: - post.append(arg) - # Insert at the beginning - self[:0] = pre - # Append to the end - self.__container += post - return self - - def __radd__(self, args: T.Iterable[str]): - new = CompilerArgs(self.compiler, args) - new += self - return new - - def __eq__(self, other: T.Any) -> T.Union[bool, type(NotImplemented)]: - # Only allow equality checks against other CompilerArgs and lists instances - if isinstance(other, CompilerArgs): - return self.compiler == other.compiler and self.__container == other.__container - elif isinstance(other, list): - return self.__container == other - return NotImplemented +class CompileResult: - def append(self, arg: str) -> None: - self.__iadd__([arg]) + """The result of Compiler.compiles (and friends).""" - def extend(self, args: T.Iterable[str]) -> None: - self.__iadd__(args) + def __init__(self, stdo: T.Optional[str] = None, stde: T.Optional[str] = None, + args: T.Optional[T.List[str]] = None, + returncode: int = 999, pid: int = -1, + text_mode: bool = True, + input_name: T.Optional[str] = None, + output_name: T.Optional[str] = None, + command: T.Optional[T.List[str]] = None, cached: bool = False): + self.stdout = stdo + self.stderr = stde + self.input_name = input_name + self.output_name = output_name + self.command = command or [] + self.args = args or [] + self.cached = cached + self.returncode = returncode + self.pid = pid + self.text_mode = text_mode - def __repr__(self) -> str: - return 'CompilerArgs({!r}, {!r})'.format(self.compiler, self.__container) -class Compiler: +class Compiler(metaclass=abc.ABCMeta): # Libraries to ignore in find_library() since they are provided by the # compiler or the C library. Currently only used for MSVC. 
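The `CompileResult` class introduced above replaces the attributes that used to be monkey-patched onto the `Popen` object. A hedged usage sketch of a failed check, with the field values invented and only the constructor signature taken from the `__init__` shown above:

```python
# Assumes a Meson checkout where CompileResult lives in this module (>= 0.55).
from mesonbuild.compilers.compilers import CompileResult

result = CompileResult(
    stdo='',
    stde="testfile.c:1:10: fatal error: 'missing.h' file not found",
    args=['-I/opt/foo/include'],
    returncode=1,
    command=['cc', 'testfile.c', '-o', 'output.obj', '-c'],
    input_name='testfile.c',
)
succeeded = result.returncode == 0
print(succeeded, result.cached)  # False False
```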
- ignore_libs = () + ignore_libs = [] # type: T.List[str] # Libraries that are internal compiler implementations, and must not be # manually searched. - internal_libs = () + internal_libs = [] # type: T.List[str] LINKER_PREFIX = None # type: T.Union[None, str, T.List[str]] INVOKES_LINKER = True - def __init__(self, exelist, version, for_machine: MachineChoice, info: 'MachineInfo', - linker: T.Optional['DynamicLinker'] = None, **kwargs): - if isinstance(exelist, str): - self.exelist = [exelist] - elif isinstance(exelist, list): - self.exelist = exelist - else: - raise TypeError('Unknown argument to Compiler') + # TODO: these could be forward declarations once we drop 3.5 support + if T.TYPE_CHECKING: + language = 'unset' + id = '' + warn_args = {} # type: T.Dict[str, T.List[str]] + + def __init__(self, exelist: T.List[str], version: str, + for_machine: MachineChoice, info: 'MachineInfo', + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None, is_cross: bool = False): + self.exelist = exelist # In case it's been overridden by a child class already if not hasattr(self, 'file_suffixes'): self.file_suffixes = lang_suffixes[self.language] @@ -698,27 +475,24 @@ self.can_compile_suffixes = set(self.file_suffixes) self.default_suffix = self.file_suffixes[0] self.version = version - if 'full_version' in kwargs: - self.full_version = kwargs['full_version'] - else: - self.full_version = None + self.full_version = full_version self.for_machine = for_machine - self.base_options = [] + self.base_options = [] # type: T.List[str] self.linker = linker self.info = info + self.is_cross = is_cross - def __repr__(self): + def __repr__(self) -> str: repr_str = "<{0}: v{1} `{2}`>" return repr_str.format(self.__class__.__name__, self.version, ' '.join(self.exelist)) - def can_compile(self, src) -> bool: - if hasattr(src, 'fname'): + @lru_cache(maxsize=None) + def can_compile(self, src: 'mesonlib.FileOrString') -> bool: + if isinstance(src, mesonlib.File): src = src.fname suffix = os.path.splitext(src)[1].lower() - if suffix and suffix[1:] in self.can_compile_suffixes: - return True - return False + return bool(suffix) and suffix[1:] in self.can_compile_suffixes def get_id(self) -> str: return self.id @@ -748,42 +522,54 @@ def get_default_suffix(self) -> str: return self.default_suffix - def get_define(self, dname, prefix, env, extra_args, dependencies) -> T.Tuple[str, bool]: + def get_define(self, dname: str, prefix: str, env: 'Environment', + extra_args: T.List[str], dependencies: T.List['Dependency'], + disable_cache: bool = False) -> T.Tuple[str, bool]: raise EnvironmentException('%s does not support get_define ' % self.get_id()) - def compute_int(self, expression, low, high, guess, prefix, env, extra_args, dependencies) -> int: + def compute_int(self, expression: str, low: T.Optional[int], high: T.Optional[int], + guess: T.Optional[int], prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]], dependencies: T.Optional[T.List['Dependency']]) -> int: raise EnvironmentException('%s does not support compute_int ' % self.get_id()) - def compute_parameters_with_absolute_paths(self, parameter_list, build_dir): + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], + build_dir: str) -> T.List[str]: raise EnvironmentException('%s does not support compute_parameters_with_absolute_paths ' % self.get_id()) - def has_members(self, typename, membernames, prefix, env, *, - extra_args=None, dependencies=None) -> T.Tuple[bool, bool]: + def 
has_members(self, typename: str, membernames: T.List[str], + prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: raise EnvironmentException('%s does not support has_member(s) ' % self.get_id()) - def has_type(self, typename, prefix, env, extra_args, *, - dependencies=None) -> T.Tuple[bool, bool]: + def has_type(self, typename: str, prefix: str, env: 'Environment', + extra_args: T.List[str], *, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: raise EnvironmentException('%s does not support has_type ' % self.get_id()) - def symbols_have_underscore_prefix(self, env) -> bool: + def symbols_have_underscore_prefix(self, env: 'Environment') -> bool: raise EnvironmentException('%s does not support symbols_have_underscore_prefix ' % self.get_id()) - def get_exelist(self): - return self.exelist[:] + def get_exelist(self) -> T.List[str]: + return self.exelist.copy() def get_linker_exelist(self) -> T.List[str]: return self.linker.get_exelist() + @abc.abstractmethod + def get_output_args(self, outputname: str) -> T.List[str]: + pass + def get_linker_output_args(self, outputname: str) -> T.List[str]: return self.linker.get_output_args(outputname) - def get_builtin_define(self, *args, **kwargs): + def get_builtin_define(self, define: str) -> T.Optional[str]: raise EnvironmentException('%s does not support get_builtin_define.' % self.id) - def has_builtin_define(self, *args, **kwargs): + def has_builtin_define(self, define: str) -> bool: raise EnvironmentException('%s does not support has_builtin_define.' % self.id) - def get_always_args(self): + def get_always_args(self) -> T.List[str]: return [] def can_linker_accept_rsp(self) -> bool: @@ -792,78 +578,119 @@ """ return self.linker.get_accepts_rsp() - def get_linker_always_args(self): + def get_linker_always_args(self) -> T.List[str]: return self.linker.get_always_args() - def get_linker_lib_prefix(self): + def get_linker_lib_prefix(self) -> str: return self.linker.get_lib_prefix() - def gen_import_library_args(self, implibname): + def gen_import_library_args(self, implibname: str) -> T.List[str]: """ Used only on Windows for libraries that need an import library. This currently means C, C++, Fortran. """ return [] - def get_linker_args_from_envvars(self) -> T.List[str]: - return self.linker.get_args_from_envvars() + def get_linker_args_from_envvars(self, + for_machine: MachineChoice, + is_cross: bool) -> T.List[str]: + return self.linker.get_args_from_envvars(for_machine, is_cross) - def get_options(self) -> T.Dict[str, coredata.UserOption]: + def get_options(self) -> 'OptionDictType': return {} - def get_option_compile_args(self, options): + def get_option_compile_args(self, options: 'OptionDictType') -> T.List[str]: return [] def get_option_link_args(self, options: 'OptionDictType') -> T.List[str]: return self.linker.get_option_args(options) - def check_header(self, *args, **kwargs) -> T.Tuple[bool, bool]: + def check_header(self, hname: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + """Check that header is usable. + + Returns a two item tuple of bools. The first bool is whether the + check succeeded, the second is whether the result was cached (True) + or run fresh (False). + """ raise EnvironmentException('Language %s does not support header checks.' 
% self.get_display_language()) - def has_header(self, *args, **kwargs) -> T.Tuple[bool, bool]: + def has_header(self, hname: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None, + disable_cache: bool = False) -> T.Tuple[bool, bool]: + """Check that the header exists. + + This check will return true if the file exists, even if it contains: + + ```c + # error "You thought you could use this, LOLZ!" + ``` + + Use check_header if your header only works in some cases. + + Returns a two item tuple of bools. The first bool is whether the + check succeeded, the second is whether the result was cached (True) + or run fresh (False). + """ raise EnvironmentException('Language %s does not support header checks.' % self.get_display_language()) - def has_header_symbol(self, *args, **kwargs) -> T.Tuple[bool, bool]: + def has_header_symbol(self, hname: str, symbol: str, prefix: str, + env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: raise EnvironmentException('Language %s does not support header symbol checks.' % self.get_display_language()) - def compiles(self, *args, **kwargs) -> T.Tuple[bool, bool]: - raise EnvironmentException('Language %s does not support compile checks.' % self.get_display_language()) - - def links(self, *args, **kwargs) -> T.Tuple[bool, bool]: - raise EnvironmentException('Language %s does not support link checks.' % self.get_display_language()) - - def run(self, *args, **kwargs) -> RunResult: + def run(self, code: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> RunResult: raise EnvironmentException('Language %s does not support run checks.' % self.get_display_language()) - def sizeof(self, *args, **kwargs) -> int: + def sizeof(self, typename: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> int: raise EnvironmentException('Language %s does not support sizeof checks.' % self.get_display_language()) - def alignment(self, *args, **kwargs) -> int: + def alignment(self, typename: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> int: raise EnvironmentException('Language %s does not support alignment checks.' % self.get_display_language()) - def has_function(self, *args, **kwargs) -> T.Tuple[bool, bool]: + def has_function(self, funcname: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + """See if a function exists. + + Returns a two item tuple of bools. The first bool is whether the + check succeeded, the second is whether the result was cached (True) + or run fresh (False). + """ raise EnvironmentException('Language %s does not support function checks.'
% self.get_display_language()) - @classmethod - def unix_args_to_native(cls, args): + def unix_args_to_native(self, args: T.List[str]) -> T.List[str]: "Always returns a copy that can be independently mutated" - return args[:] + return args.copy() @classmethod def native_args_to_unix(cls, args: T.List[str]) -> T.List[str]: "Always returns a copy that can be independently mutated" - return args[:] + return args.copy() - def find_library(self, *args, **kwargs): + def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str], + libtype: LibType = LibType.PREFER_SHARED) -> T.Optional[T.List[str]]: raise EnvironmentException('Language {} does not support library finding.'.format(self.get_display_language())) - def get_library_dirs(self, *args, **kwargs): - return () + def get_library_naming(self, env: 'Environment', libtype: LibType, + strict: bool = False) -> T.Optional[T.Tuple[str, ...]]: + raise EnvironmentException( + 'Language {} does not support get_library_naming.'.format( + self.get_display_language())) - def get_program_dirs(self, *args, **kwargs): + def get_program_dirs(self, env: 'Environment') -> T.List[str]: return [] - def has_multi_arguments(self, args, env) -> T.Tuple[bool, bool]: + def has_multi_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]: raise EnvironmentException( 'Language {} does not support has_multi_arguments.'.format( self.get_display_language())) @@ -871,7 +698,8 @@ def has_multi_link_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]: return self.linker.has_multi_arguments(args, env) - def _get_compile_output(self, dirname, mode): + def _get_compile_output(self, dirname: str, mode: str) -> str: + # TODO: mode should really be an enum # In pre-processor mode, the output is sent to stdout and discarded if mode == 'preprocess': return None @@ -883,105 +711,120 @@ suffix = 'obj' return os.path.join(dirname, 'output.' + suffix) - def get_compiler_args_for_mode(self, mode): - args = [] + def get_compiler_args_for_mode(self, mode: CompileCheckMode) -> T.List[str]: + # TODO: mode should really be an enum + args = [] # type: T.List[str] args += self.get_always_args() - if mode == 'compile': + if mode is CompileCheckMode.COMPILE: args += self.get_compile_only_args() - if mode == 'preprocess': + elif mode is CompileCheckMode.PREPROCESS: args += self.get_preprocess_only_args() + else: + assert mode is CompileCheckMode.LINK return args + def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> CompilerArgs: + """Return an appropriate CompilerArgs instance for this class.""" + return CompilerArgs(self, args) + @contextlib.contextmanager - def compile(self, code, extra_args=None, *, mode='link', want_output=False, temp_dir=None): + def compile(self, code: 'mesonlib.FileOrString', + extra_args: T.Union[None, CompilerArgs, T.List[str]] = None, + *, mode: str = 'link', want_output: bool = False, + temp_dir: T.Optional[str] = None) -> T.Iterator[T.Optional[CompileResult]]: + # TODO: there isn't really any reason for this to be a contextmanager if extra_args is None: extra_args = [] try: with tempfile.TemporaryDirectory(dir=temp_dir) as tmpdirname: + no_ccache = False if isinstance(code, str): srcname = os.path.join(tmpdirname, 'testfile.' 
+ self.default_suffix) with open(srcname, 'w') as ofile: ofile.write(code) + # ccache would result in a cache miss + no_ccache = True + contents = code elif isinstance(code, mesonlib.File): srcname = code.fname + with open(code.fname, 'r') as f: + contents = f.read() # Construct the compiler command-line - commands = CompilerArgs(self) + commands = self.compiler_args() commands.append(srcname) # Preprocess mode outputs to stdout, so no output args if mode != 'preprocess': output = self._get_compile_output(tmpdirname, mode) commands += self.get_output_args(output) - commands.extend(self.get_compiler_args_for_mode(mode)) + commands.extend(self.get_compiler_args_for_mode(CompileCheckMode(mode))) # extra_args must be last because it could contain '/link' to # pass args to VisualStudio's linker. In that case everything # in the command line after '/link' is given to the linker. commands += extra_args # Generate full command-line with the exelist - commands = self.get_exelist() + commands.to_native() + command_list = self.get_exelist() + commands.to_native() mlog.debug('Running compile:') mlog.debug('Working directory: ', tmpdirname) - mlog.debug('Command line: ', ' '.join(commands), '\n') - mlog.debug('Code:\n', code) + mlog.debug('Command line: ', ' '.join(command_list), '\n') + mlog.debug('Code:\n', contents) os_env = os.environ.copy() os_env['LC_ALL'] = 'C' - p, p.stdo, p.stde = Popen_safe(commands, cwd=tmpdirname, env=os_env) - mlog.debug('Compiler stdout:\n', p.stdo) - mlog.debug('Compiler stderr:\n', p.stde) - p.commands = commands - p.input_name = srcname + if no_ccache: + os_env['CCACHE_DISABLE'] = '1' + p, stdo, stde = Popen_safe(command_list, cwd=tmpdirname, env=os_env) + mlog.debug('Compiler stdout:\n', stdo) + mlog.debug('Compiler stderr:\n', stde) + + result = CompileResult(stdo, stde, list(commands), p.returncode, p.pid, input_name=srcname) if want_output: - p.output_name = output - p.cached = False # Make sure that the cached attribute always exists - yield p + result.output_name = output + yield result except OSError: # On Windows antivirus programs and the like hold on to files so # they can't be deleted. There's not much to do in this case. Also, # catch OSError because the directory is then no longer empty. - pass + return @contextlib.contextmanager - def cached_compile(self, code, cdata: coredata.CoreData, *, extra_args=None, mode: str = 'link', temp_dir=None): - assert(isinstance(cdata, coredata.CoreData)) + def cached_compile(self, code: str, cdata: coredata.CoreData, *, + extra_args: T.Union[None, T.List[str], CompilerArgs] = None, + mode: str = 'link', + temp_dir: T.Optional[str] = None) -> T.Iterator[T.Optional[CompileResult]]: + # TODO: There's isn't really any reason for this to be a context manager # Calculate the key - textra_args = tuple(extra_args) if extra_args is not None else None - key = (tuple(self.exelist), self.version, code, textra_args, mode) + textra_args = tuple(extra_args) if extra_args is not None else tuple() # type: T.Tuple[str, ...] 
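The reworked `cached_compile` above keys its cache on the full compiler invocation (the `key` tuple built just below). A stand-alone sketch of the same idea, with a plain dict standing in for `coredata.compiler_check_cache`; the helper names here are invented:

```python
# Hypothetical stand-in for the coredata-backed compiler check cache.
check_cache = {}

def cached_check(exelist, version, code, extra_args, mode, run_check):
    # Same shape as the key built below: executable, version, source, args, mode.
    key = (tuple(exelist), version, code, tuple(extra_args or ()), mode)
    if key in check_cache:
        result = check_cache[key]
        result['cached'] = True   # served from the cache, like CompileResult.cached
        return result
    result = run_check(code, extra_args, mode)
    result['cached'] = False
    check_cache[key] = result
    return result

res = cached_check(['cc'], '10.2', 'int main(void){return 0;}', [], 'link',
                   lambda code, args, mode: {'returncode': 0})
print(res['cached'])  # False on the first run, True on a repeated call
```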
+ key = (tuple(self.exelist), self.version, code, textra_args, mode) # type: coredata.CompilerCheckCacheKey - # Check if not cached - if key not in cdata.compiler_check_cache: + # Check if not cached, and generate, otherwise get from the cache + if key in cdata.compiler_check_cache: + p = cdata.compiler_check_cache[key] # type: CompileResult + p.cached = True + mlog.debug('Using cached compile:') + mlog.debug('Cached command line: ', ' '.join(p.command), '\n') + mlog.debug('Code:\n', code) + mlog.debug('Cached compiler stdout:\n', p.stdout) + mlog.debug('Cached compiler stderr:\n', p.stderr) + yield p + else: with self.compile(code, extra_args=extra_args, mode=mode, want_output=False, temp_dir=temp_dir) as p: - # Remove all attributes except the following - # This way the object can be serialized - tokeep = ['args', 'commands', 'input_name', 'output_name', - 'pid', 'returncode', 'stdo', 'stde', 'text_mode'] - todel = [x for x in vars(p).keys() if x not in tokeep] - for i in todel: - delattr(p, i) - p.cached = False cdata.compiler_check_cache[key] = p yield p - return - # Return cached - p = cdata.compiler_check_cache[key] - p.cached = True - mlog.debug('Using cached compile:') - mlog.debug('Cached command line: ', ' '.join(p.commands), '\n') - mlog.debug('Code:\n', code) - mlog.debug('Cached compiler stdout:\n', p.stdo) - mlog.debug('Cached compiler stderr:\n', p.stde) - yield p - - def get_colorout_args(self, colortype): + def get_colorout_args(self, colortype: str) -> T.List[str]: + # TODO: colortype can probably be an enum return [] # Some compilers (msvc) write debug info to a separate file. # These args specify where it should be written. - def get_compile_debugfile_args(self, rel_obj, **kwargs): + def get_compile_debugfile_args(self, rel_obj: str, pch: bool = False) -> T.List[str]: return [] + def get_link_debugfile_name(self, targetfile: str) -> str: + return self.linker.get_debugfile_name(targetfile) + def get_link_debugfile_args(self, targetfile: str) -> T.List[str]: return self.linker.get_debugfile_args(targetfile) @@ -1000,49 +843,64 @@ def no_undefined_link_args(self) -> T.List[str]: return self.linker.no_undefined_args() - # Compiler arguments needed to enable the given instruction set. - # May be [] meaning nothing needed or None meaning the given set - # is not supported. - def get_instruction_set_args(self, instruction_set): + def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[str]]: + """Compiler arguments needed to enable the given instruction set. + + Return type may be an empty list meaning nothing needed or None + meaning the given set is not supported. + """ return None def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str, rpath_paths: str, build_rpath: str, - install_rpath: str) -> T.List[str]: + install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]: return self.linker.build_rpath_args( env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath) - def thread_flags(self, env): + def thread_flags(self, env: 'Environment') -> T.List[str]: return [] - def openmp_flags(self): + def thread_link_flags(self, env: 'Environment') -> T.List[str]: + return self.linker.thread_flags(env) + + def openmp_flags(self) -> T.List[str]: raise EnvironmentException('Language %s does not support OpenMP flags.'
% self.get_display_language()) - def language_stdlib_only_link_flags(self): + def openmp_link_flags(self) -> T.List[str]: + return self.openmp_flags() + + def language_stdlib_only_link_flags(self) -> T.List[str]: return [] - def gnu_symbol_visibility_args(self, vistype): + def gnu_symbol_visibility_args(self, vistype: str) -> T.List[str]: return [] - def get_gui_app_args(self, value): + def get_gui_app_args(self, value: bool) -> T.List[str]: return [] - def has_func_attribute(self, name, env): + def get_win_subsystem_args(self, value: str) -> T.List[str]: + # This returns an empty array rather than throwing to simplify the code. + # Otherwise we would have to check whenever calling this function whether + # the target is for Windows. There are also many cases where this is + # a meaningless choice, such as with Java or C#. + return [] + + def has_func_attribute(self, name: str, env: 'Environment') -> T.Tuple[bool, bool]: raise EnvironmentException( 'Language {} does not support function attributes.'.format(self.get_display_language())) - def get_pic_args(self): + def get_pic_args(self) -> T.List[str]: m = 'Language {} does not support position-independent code' raise EnvironmentException(m.format(self.get_display_language())) - def get_pie_args(self): + def get_pie_args(self) -> T.List[str]: m = 'Language {} does not support position-independent executable' raise EnvironmentException(m.format(self.get_display_language())) def get_pie_link_args(self) -> T.List[str]: return self.linker.get_pie_args() - def get_argument_syntax(self): + def get_argument_syntax(self) -> str: """Returns the argument family type. Compilers fall into families if they try to emulate the command line @@ -1053,22 +911,19 @@ """ return 'other' - def get_profile_generate_args(self): + def get_profile_generate_args(self) -> T.List[str]: raise EnvironmentException( '%s does not support get_profile_generate_args ' % self.get_id()) - def get_profile_use_args(self): + def get_profile_use_args(self) -> T.List[str]: raise EnvironmentException( '%s does not support get_profile_use_args ' % self.get_id()) - def get_undefined_link_args(self) -> T.List[str]: - return self.linker.get_undefined_link_args() - - def remove_linkerlike_args(self, args): + def remove_linkerlike_args(self, args: T.List[str]) -> T.List[str]: rm_exact = ('-headerpad_max_install_names',) rm_prefixes = ('-Wl,', '-L',) - rm_next = ('-L',) - ret = [] + rm_next = ('-L', '-framework',) + ret = [] # T.List[str] iargs = iter(args) for arg in iargs: # Remove this argument @@ -1100,11 +955,14 @@ def get_asneeded_args(self) -> T.List[str]: return self.linker.get_asneeded_args() + def headerpad_args(self) -> T.List[str]: + return self.linker.headerpad_args() + def bitcode_args(self) -> T.List[str]: return self.linker.bitcode_args() - def get_linker_debug_crt_args(self) -> T.List[str]: - return self.linker.get_debug_crt_args() + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + raise EnvironmentException('{} does not implement get_buildtype_args'.format(self.id)) def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]: return self.linker.get_buildtype_args(buildtype) @@ -1117,13 +975,13 @@ env, prefix, shlib_name, suffix, soversion, darwin_versions, is_shared_module) - def get_target_link_args(self, target): + def get_target_link_args(self, target: 'BuildTarget') -> T.List[str]: return target.link_args - def get_dependency_compile_args(self, dep): + def get_dependency_compile_args(self, dep: 'Dependency') -> T.List[str]: return
dep.get_compile_args() - def get_dependency_link_args(self, dep): + def get_dependency_link_args(self, dep: 'Dependency') -> T.List[str]: return dep.get_link_args() @classmethod @@ -1132,60 +990,243 @@ """ return [] + def get_coverage_args(self) -> T.List[str]: + return [] -def get_largefile_args(compiler): - ''' - Enable transparent large-file-support for 32-bit UNIX systems - ''' - if not (compiler.info.is_windows() or compiler.info.is_darwin()): - # Enable large-file support unconditionally on all platforms other - # than macOS and Windows. macOS is now 64-bit-only so it doesn't - # need anything special, and Windows doesn't have automatic LFS. - # You must use the 64-bit counterparts explicitly. - # glibc, musl, and uclibc, and all BSD libcs support this. On Android, - # support for transparent LFS is available depending on the version of - # Bionic: https://github.com/android/platform_bionic#32-bit-abi-bugs - # https://code.google.com/p/android/issues/detail?id=64613 - # - # If this breaks your code, fix it! It's been 20+ years! - return ['-D_FILE_OFFSET_BITS=64'] - # We don't enable -D_LARGEFILE64_SOURCE since that enables - # transitionary features and must be enabled by programs that use - # those features explicitly. - return [] + def get_coverage_link_args(self) -> T.List[str]: + return self.linker.get_coverage_args() + + def get_disable_assert_args(self) -> T.List[str]: + return [] + + def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]: + raise EnvironmentError('This compiler does not support Windows CRT selection') + + def get_crt_link_args(self, crt_val: str, buildtype: str) -> T.List[str]: + raise EnvironmentError('This compiler does not support Windows CRT selection') + + def get_compile_only_args(self) -> T.List[str]: + return [] + + def get_preprocess_only_args(self) -> T.List[str]: + raise EnvironmentError('This compiler does not have a preprocessor') + + def get_default_include_dirs(self) -> T.List[str]: + return [] + + def get_largefile_args(self) -> T.List[str]: + '''Enable transparent large-file-support for 32-bit UNIX systems''' + if not (self.get_argument_syntax() == 'msvc' or self.info.is_darwin()): + # Enable large-file support unconditionally on all platforms other + # than macOS and MSVC. macOS is now 64-bit-only so it doesn't + # need anything special, and MSVC doesn't have automatic LFS. + # You must use the 64-bit counterparts explicitly. + # glibc, musl, and uclibc, and all BSD libcs support this. On Android, + # support for transparent LFS is available depending on the version of + # Bionic: https://github.com/android/platform_bionic#32-bit-abi-bugs + # https://code.google.com/p/android/issues/detail?id=64613 + # + # If this breaks your code, fix it! It's been 20+ years! + return ['-D_FILE_OFFSET_BITS=64'] + # We don't enable -D_LARGEFILE64_SOURCE since that enables + # transitionary features and must be enabled by programs that use + # those features explicitly. 
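In practice the relocated `get_largefile_args` boils down to a single flag on 32-bit Unix toolchains. A condensed sketch of the rule described in the comment above, with the platform probing reduced to two plain parameters (these are illustrative, not the Meson API):

```python
def largefile_args(argument_syntax: str, is_darwin: bool) -> list:
    # Mirrors the rule above: everything except MSVC-style compilers and macOS
    # gets transparent large-file support via -D_FILE_OFFSET_BITS=64.
    if argument_syntax == 'msvc' or is_darwin:
        return []
    return ['-D_FILE_OFFSET_BITS=64']

print(largefile_args('gcc', is_darwin=False))   # ['-D_FILE_OFFSET_BITS=64']
print(largefile_args('msvc', is_darwin=False))  # []
```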
+ return [] + + def get_library_dirs(self, env: 'Environment', + elf_class: T.Optional[int] = None) -> T.List[str]: + return [] + + def find_framework_paths(self, env: 'Environment') -> T.List[str]: + raise EnvironmentException('{} does not support find_framework_paths'.format(self.id)) + + def attribute_check_func(self, name: str) -> str: + raise EnvironmentException('{} does not support attribute checks'.format(self.id)) + + def get_pch_suffix(self) -> str: + raise EnvironmentException('{} does not support pre compiled headers'.format(self.id)) + + def get_pch_name(self, name: str) -> str: + raise EnvironmentException('{} does not support pre compiled headers'.format(self.id)) + + def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: + raise EnvironmentException('{} does not support pre compiled headers'.format(self.id)) + + def get_has_func_attribute_extra_args(self, name: str) -> T.List[str]: + raise EnvironmentException('{} does not support function attributes'.format(self.id)) + + def name_string(self) -> str: + return ' '.join(self.exelist) + + @abc.abstractmethod + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: + """Check that this compiler actually works. + + This should provide a simple compile/link test. Somthing as simple as: + ```python + main(): return 0 + ``` + is good enough here. + """ + + def split_shlib_to_parts(self, fname: str) -> T.Tuple[T.Optional[str], str]: + return None, fname + + def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]: + return [] + + def get_std_exe_link_args(self) -> T.List[str]: + # TODO: is this a linker property? + return [] + + def get_include_args(self, path: str, is_system: bool) -> T.List[str]: + return [] + + def depfile_for_object(self, objfile: str) -> str: + return objfile + '.' + self.get_depfile_suffix() + + def get_depfile_suffix(self) -> str: + raise EnvironmentError('{} does not implement get_depfile_suffix'.format(self.id)) + + def get_no_stdinc_args(self) -> T.List[str]: + """Arguments to turn off default inclusion of standard libraries.""" + return [] + + def get_warn_args(self, level: str) -> T.List[str]: + return [] + + def get_werror_args(self) -> T.List[str]: + return [] + + @abc.abstractmethod + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + pass + + def get_module_incdir_args(self) -> T.Tuple[str, ...]: + raise EnvironmentError('{} does not implement get_module_incdir_args'.format(self.id)) + + def get_module_outdir_args(self, path: str) -> T.List[str]: + raise EnvironmentError('{} does not implement get_module_outdir_args'.format(self.id)) + + def module_name_to_filename(self, module_name: str) -> str: + raise EnvironmentError('{} does not implement module_name_to_filename'.format(self.id)) + + def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]: + """Arguments to pass the compiler and/or linker for checks. + The default implementation turns off optimizations. 
mode should be + one of: -def get_args_from_envvars(lang: str, use_linker_args: bool) -> T.Tuple[T.List[str], T.List[str]]: + Examples of things that go here: + - extra arguments for error checking + """ + return self.get_no_optimization_args() + + def get_no_optimization_args(self) -> T.List[str]: + """Arguments to the compiler to turn off all optimizations.""" + return [] + + def build_wrapper_args(self, env: 'Environment', + extra_args: T.Union[None, CompilerArgs, T.List[str]], + dependencies: T.Optional[T.List['Dependency']], + mode: CompileCheckMode = CompileCheckMode.COMPILE) -> CompilerArgs: + """Arguments to pass the build_wrapper helper. + + This generally needs to be set on a per-language baises. It provides + a hook for languages to handle dependencies and extra args. The base + implementation handles the most common cases, namely adding the + check_arguments, unwrapping dependencies, and appending extra args. + """ + if callable(extra_args): + extra_args = extra_args(mode) + if extra_args is None: + extra_args = [] + if dependencies is None: + dependencies = [] + + # Collect compiler arguments + args = self.compiler_args(self.get_compiler_check_args(mode)) + for d in dependencies: + # Add compile flags needed by dependencies + args += d.get_compile_args() + if mode is CompileCheckMode.LINK: + # Add link flags needed to find dependencies + args += d.get_link_args() + + if mode is CompileCheckMode.COMPILE: + # Add DFLAGS from the env + args += env.coredata.get_external_args(self.for_machine, self.language) + elif mode is CompileCheckMode.LINK: + # Add LDFLAGS from the env + args += env.coredata.get_external_link_args(self.for_machine, self.language) + # extra_args must override all other arguments, so we add them last + args += extra_args + return args + + @contextlib.contextmanager + def _build_wrapper(self, code: str, env: 'Environment', + extra_args: T.Union[None, CompilerArgs, T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None, + mode: str = 'compile', want_output: bool = False, + disable_cache: bool = False, + temp_dir: str = None) -> T.Iterator[T.Optional[CompileResult]]: + """Helper for getting a cacched value when possible. + + This method isn't meant to be called externally, it's mean to be + wrapped by other methods like compiles() and links(). 
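An illustrative usage sketch (not part of the patch) for the compiles()/links() entry points defined just below; `cc` and `env` stand in for an already-detected Compiler instance and the current Environment.

```python
code = 'int main(void) { return 0; }'
ok, cached = cc.compiles(code, env)                    # compile-only probe
ok, cached = cc.links(code, env, extra_args=['-lm'])   # compile-and-link probe
# Each call returns (success, served_from_cache); pass disable_cache=True
# to bypass the cached_compile() path and force a fresh run.
```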
+ """ + args = self.build_wrapper_args(env, extra_args, dependencies, CompileCheckMode(mode)) + if disable_cache or want_output: + with self.compile(code, extra_args=args, mode=mode, want_output=want_output, temp_dir=env.scratch_dir) as r: + yield r + else: + with self.cached_compile(code, env.coredata, extra_args=args, mode=mode, temp_dir=env.scratch_dir) as r: + yield r + + def compiles(self, code: str, env: 'Environment', *, + extra_args: T.Union[None, T.List[str], CompilerArgs] = None, + dependencies: T.Optional[T.List['Dependency']] = None, + mode: str = 'compile', + disable_cache: bool = False) -> T.Tuple[bool, bool]: + with self._build_wrapper(code, env, extra_args, dependencies, mode, disable_cache=disable_cache) as p: + return p.returncode == 0, p.cached + + + def links(self, code: str, env: 'Environment', *, + extra_args: T.Union[None, T.List[str], CompilerArgs] = None, + dependencies: T.Optional[T.List['Dependency']] = None, + mode: str = 'compile', + disable_cache: bool = False) -> T.Tuple[bool, bool]: + return self.compiles(code, env, extra_args=extra_args, + dependencies=dependencies, mode='link', disable_cache=disable_cache) + + def get_feature_args(self, kwargs: T.Dict[str, T.Any], build_to_src: str) -> T.List[str]: + """Used by D for extra language features.""" + # TODO: using a TypeDict here would improve this + raise EnvironmentError('{} does not implement get_feature_args'.format(self.id)) + + +def get_args_from_envvars(lang: str, + for_machine: MachineChoice, + is_cross: bool, + use_linker_args: bool) -> T.Tuple[T.List[str], T.List[str]]: """ Returns a tuple of (compile_flags, link_flags) for the specified language from the inherited environment """ - def log_var(var, val: T.Optional[str]): - if val: - mlog.log('Appending {} from environment: {!r}'.format(var, val)) - else: - mlog.debug('No {} in the environment, not changing global flags.'.format(var)) - if lang not in cflags_mapping: return [], [] compile_flags = [] # type: T.List[str] link_flags = [] # type: T.List[str] - env_compile_flags = os.environ.get(cflags_mapping[lang]) - log_var(cflags_mapping[lang], env_compile_flags) + env_compile_flags = get_env_var(for_machine, is_cross, cflags_mapping[lang]) if env_compile_flags is not None: compile_flags += split_args(env_compile_flags) # Link flags (same for all languages) if lang in languages_using_ldflags: - # This is duplicated between the linkers, but I'm not sure how else - # to handle this - env_link_flags = split_args(os.environ.get('LDFLAGS', '')) - else: - env_link_flags = [] - log_var('LDFLAGS', env_link_flags) - link_flags += env_link_flags + link_flags += LinkerEnvVarsMixin.get_args_from_envvars(for_machine, is_cross) if use_linker_args: # When the compiler is used as a wrapper around the linker (such as # with GCC and Clang), the compile flags can be needed while linking @@ -1194,43 +1235,40 @@ link_flags = compile_flags + link_flags # Pre-processor flags for certain languages - if lang in {'c', 'cpp', 'objc', 'objcpp'}: - env_preproc_flags = os.environ.get('CPPFLAGS') - log_var('CPPFLAGS', env_preproc_flags) + if lang in languages_using_cppflags: + env_preproc_flags = get_env_var(for_machine, is_cross, 'CPPFLAGS') if env_preproc_flags is not None: compile_flags += split_args(env_preproc_flags) return compile_flags, link_flags -def get_global_options(lang: str, comp: T.Type[Compiler], - properties: Properties) -> T.Dict[str, coredata.UserOption]: +def get_global_options(lang: str, + comp: T.Type[Compiler], + for_machine: MachineChoice, + is_cross: bool) 
-> 'OptionDictType': """Retreive options that apply to all compilers for a given language.""" description = 'Extra arguments passed to the {}'.format(lang) opts = { - lang + '_args': coredata.UserArrayOption( + 'args': coredata.UserArrayOption( description + ' compiler', [], split_args=True, user_input=True, allow_dups=True), - lang + '_link_args': coredata.UserArrayOption( + 'link_args': coredata.UserArrayOption( description + ' linker', [], split_args=True, user_input=True, allow_dups=True), - } + } # type: OptionDictType - if properties.fallback: - # Get from env vars. - # XXX: True here is a hack - compile_args, link_args = get_args_from_envvars(lang, comp.INVOKES_LINKER) - else: - compile_args = [] - link_args = [] + # Get from env vars. + compile_args, link_args = get_args_from_envvars( + lang, + for_machine, + is_cross, + comp.INVOKES_LINKER) for k, o in opts.items(): - if k in properties: - # Get from configuration files. - o.set_value(properties[k]) - elif k == lang + '_args': + if k == 'args': o.set_value(compile_args) - elif k == lang + '_link_args': + elif k == 'link_args': o.set_value(link_args) return opts diff -Nru meson-0.53.2/mesonbuild/compilers/cpp.py meson-0.57.0+really0.56.2/mesonbuild/compilers/cpp.py --- meson-0.53.2/mesonbuild/compilers/cpp.py 2020-02-25 18:00:46.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/compilers/cpp.py 2021-01-06 10:39:48.000000000 +0000 @@ -21,29 +21,38 @@ from .. import mlog from ..mesonlib import MesonException, MachineChoice, version_compare +from ..linkers import LinkerEnvVarsMixin from .compilers import ( gnu_winlibs, msvc_winlibs, Compiler, + CompileCheckMode, ) from .c_function_attributes import CXX_FUNC_ATTRIBUTES, C_FUNC_ATTRIBUTES from .mixins.clike import CLikeCompiler from .mixins.ccrx import CcrxCompiler +from .mixins.c2000 import C2000Compiler from .mixins.arm import ArmCompiler, ArmclangCompiler -from .mixins.visualstudio import VisualStudioLikeCompiler +from .mixins.visualstudio import MSVCCompiler, ClangClCompiler from .mixins.gnu import GnuCompiler from .mixins.intel import IntelGnuLikeCompiler, IntelVisualStudioLikeCompiler from .mixins.clang import ClangCompiler from .mixins.elbrus import ElbrusCompiler from .mixins.pgi import PGICompiler -from .mixins.islinker import BasicLinkerIsCompilerMixin, LinkerEnvVarsMixin from .mixins.emscripten import EmscriptenMixin if T.TYPE_CHECKING: + from ..coredata import OptionDictType + from ..dependencies import Dependency, ExternalProgram from ..envconfig import MachineInfo + from ..environment import Environment + from ..linkers import DynamicLinker + from .mixins.clike import CLikeCompiler as CompilerMixinBase +else: + CompilerMixinBase = object -def non_msvc_eh_options(eh, args): +def non_msvc_eh_options(eh: str, args: T.List[str]) -> None: if eh == 'none': args.append('-fno-exceptions') elif eh == 's' or eh == 'c': @@ -53,7 +62,7 @@ class CPPCompiler(CLikeCompiler, Compiler): @classmethod - def attribute_check_func(cls, name): + def attribute_check_func(cls, name: str) -> str: try: return CXX_FUNC_ATTRIBUTES.get(name, C_FUNC_ATTRIBUTES[name]) except KeyError: @@ -61,30 +70,37 @@ language = 'cpp' - def __init__(self, exelist, version, for_machine: MachineChoice, is_cross: bool, - info: 'MachineInfo', exe_wrap: T.Optional[str] = None, **kwargs): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + 
full_version: T.Optional[str] = None): # If a child ObjCPP class has already set it, don't set it ourselves - Compiler.__init__(self, exelist, version, for_machine, info, **kwargs) - CLikeCompiler.__init__(self, is_cross, exe_wrap) + Compiler.__init__(self, exelist, version, for_machine, info, + is_cross=is_cross, linker=linker, + full_version=full_version) + CLikeCompiler.__init__(self, exe_wrapper) @staticmethod - def get_display_language(): + def get_display_language() -> str: return 'C++' - def get_no_stdinc_args(self): + def get_no_stdinc_args(self) -> T.List[str]: return ['-nostdinc++'] - def sanity_check(self, work_dir, environment): + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: code = 'class breakCCompiler;int main(void) { return 0; }\n' - return self.sanity_check_impl(work_dir, environment, 'sanitycheckcpp.cc', code) + return self._sanity_check_impl(work_dir, environment, 'sanitycheckcpp.cc', code) - def get_compiler_check_args(self): + def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]: # -fpermissive allows non-conforming code to compile which is necessary # for many C++ checks. Particularly, the has_header_symbol check is # too strict without this and always fails. - return super().get_compiler_check_args() + ['-fpermissive'] + return super().get_compiler_check_args(mode) + ['-fpermissive'] - def has_header_symbol(self, hname, symbol, prefix, env, *, extra_args=None, dependencies=None): + def has_header_symbol(self, hname: str, symbol: str, prefix: str, + env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: # Check if it's a C-like symbol found, cached = super().has_header_symbol(hname, symbol, prefix, env, extra_args=extra_args, @@ -102,7 +118,7 @@ return self.compiles(t.format(**fargs), env, extra_args=extra_args, dependencies=dependencies) - def _test_cpp_std_arg(self, cpp_std_value): + def _test_cpp_std_arg(self, cpp_std_value: str) -> bool: # Test whether the compiler understands a -std=XY argument assert(cpp_std_value.startswith('-std=')) @@ -121,7 +137,7 @@ return False @functools.lru_cache() - def _find_best_cpp_std(self, cpp_std): + def _find_best_cpp_std(self, cpp_std: str) -> str: # The initial version mapping approach to make falling back # from '-std=c++14' to '-std=c++1y' was too brittle. 
For instance, # Apple's Clang uses a different versioning scheme to upstream LLVM, @@ -153,76 +169,105 @@ class ClangCPPCompiler(ClangCompiler, CPPCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrapper=None, **kwargs): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + full_version: T.Optional[str] = None): CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, - info, exe_wrapper, **kwargs) - ClangCompiler.__init__(self) + info, exe_wrapper, linker=linker, full_version=full_version) + ClangCompiler.__init__(self, defines) default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor'] self.warn_args = {'0': [], '1': default_warn_args, '2': default_warn_args + ['-Wextra'], '3': default_warn_args + ['-Wextra', '-Wpedantic']} - def get_options(self): + def get_options(self) -> 'OptionDictType': opts = CPPCompiler.get_options(self) - opts.update({'cpp_eh': coredata.UserComboOption('C++ exception handling type.', - ['none', 'default', 'a', 's', 'sc'], - 'default'), - 'cpp_rtti': coredata.UserBooleanOption('Enable RTTI', True), - 'cpp_std': coredata.UserComboOption('C++ language standard to use', - ['none', 'c++98', 'c++03', 'c++11', 'c++14', 'c++17', 'c++1z', 'c++2a', - 'gnu++11', 'gnu++14', 'gnu++17', 'gnu++1z', 'gnu++2a'], - 'none')}) + opts.update({ + 'eh': coredata.UserComboOption( + 'C++ exception handling type.', + ['none', 'default', 'a', 's', 'sc'], + 'default', + ), + 'rtti': coredata.UserBooleanOption('Enable RTTI', True), + 'std': coredata.UserComboOption( + 'C++ language standard to use', + ['none', 'c++98', 'c++03', 'c++11', 'c++14', 'c++17', 'c++1z', 'c++2a', + 'gnu++11', 'gnu++14', 'gnu++17', 'gnu++1z', 'gnu++2a'], + 'none', + ), + }) + if self.info.is_windows() or self.info.is_cygwin(): + opts.update({ + 'winlibs': coredata.UserArrayOption( + 'Standard Win libraries to link against', + gnu_winlibs, + ), + }) return opts - def get_option_compile_args(self, options): + def get_option_compile_args(self, options: 'OptionDictType') -> T.List[str]: args = [] - std = options['cpp_std'] + std = options['std'] if std.value != 'none': args.append(self._find_best_cpp_std(std.value)) - non_msvc_eh_options(options['cpp_eh'].value, args) + non_msvc_eh_options(options['eh'].value, args) - if not options['cpp_rtti'].value: + if not options['rtti'].value: args.append('-fno-rtti') return args - def get_option_link_args(self, options): + def get_option_link_args(self, options: 'OptionDictType') -> T.List[str]: + if self.info.is_windows() or self.info.is_cygwin(): + # without a typedict mypy can't understand this. 
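An illustrative aside (not part of the patch): the copy-and-assert pattern used in the winlibs hunk below, isolated into a helper so the intent is clear. Copying keeps callers from mutating the stored option value and the asserts narrow the type for mypy; the helper name is invented here.

```python
import typing as T

def checked_str_list(value: T.List[str]) -> T.List[str]:
    libs = value.copy()              # never hand out the stored list itself
    assert isinstance(libs, list)    # narrow the type for mypy
    for l in libs:
        assert isinstance(l, str)
    return libs

assert checked_str_list(['kernel32', 'user32']) == ['kernel32', 'user32']
```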
+ libs = options['winlibs'].value.copy() + assert isinstance(libs, list) + for l in libs: + assert isinstance(l, str) + return libs return [] - def language_stdlib_only_link_flags(self): + def language_stdlib_only_link_flags(self) -> T.List[str]: return ['-lstdc++'] class AppleClangCPPCompiler(ClangCPPCompiler): - - pass + def language_stdlib_only_link_flags(self) -> T.List[str]: + return ['-lc++'] -class EmscriptenCPPCompiler(LinkerEnvVarsMixin, EmscriptenMixin, BasicLinkerIsCompilerMixin, ClangCPPCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross: bool, info: 'MachineInfo', exe_wrapper=None, **kwargs): +class EmscriptenCPPCompiler(EmscriptenMixin, LinkerEnvVarsMixin, ClangCPPCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + full_version: T.Optional[str] = None): if not is_cross: raise MesonException('Emscripten compiler can only be used for cross compilation.') - ClangCPPCompiler.__init__(self, exelist=exelist, version=version, - for_machine=for_machine, is_cross=is_cross, - info=info, exe_wrapper=exe_wrapper, **kwargs) + ClangCPPCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper=exe_wrapper, linker=linker, + defines=defines, full_version=full_version) self.id = 'emscripten' - def get_option_compile_args(self, options): + def get_option_compile_args(self, options: 'OptionDictType') -> T.List[str]: args = [] - std = options['cpp_std'] + std = options['std'] if std.value != 'none': args.append(self._find_best_cpp_std(std.value)) return args class ArmclangCPPCompiler(ArmclangCompiler, CPPCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrapper=None, **kwargs): - CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, exe_wrapper, **kwargs) + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) ArmclangCompiler.__init__(self) default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor'] self.warn_args = {'0': [], @@ -230,35 +275,47 @@ '2': default_warn_args + ['-Wextra'], '3': default_warn_args + ['-Wextra', '-Wpedantic']} - def get_options(self): + def get_options(self) -> 'OptionDictType': opts = CPPCompiler.get_options(self) - opts.update({'cpp_eh': coredata.UserComboOption('C++ exception handling type.', - ['none', 'default', 'a', 's', 'sc'], - 'default'), - 'cpp_std': coredata.UserComboOption('C++ language standard to use', - ['none', 'c++98', 'c++03', 'c++11', 'c++14', 'c++17', - 'gnu++98', 'gnu++03', 'gnu++11', 'gnu++14', 'gnu++17'], - 'none')}) + opts.update({ + 'eh': coredata.UserComboOption( + 'C++ exception handling type.', + ['none', 'default', 'a', 's', 'sc'], + 'default', + ), + 'std': coredata.UserComboOption( + 'C++ language standard to use', + [ + 'none', 'c++98', 'c++03', 'c++11', 'c++14', 'c++17', + 'gnu++98', 'gnu++03', 'gnu++11', 'gnu++14', 'gnu++17', + ], + 'none', + ), + }) return opts - def get_option_compile_args(self, options): + def 
get_option_compile_args(self, options: 'OptionDictType') -> T.List[str]: args = [] - std = options['cpp_std'] + std = options['std'] if std.value != 'none': args.append('-std=' + std.value) - non_msvc_eh_options(options['cpp_eh'].value, args) + non_msvc_eh_options(options['eh'].value, args) return args - def get_option_link_args(self, options): + def get_option_link_args(self, options: 'OptionDictType') -> T.List[str]: return [] class GnuCPPCompiler(GnuCompiler, CPPCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrap, defines, **kwargs): - CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, info, exe_wrap, **kwargs) + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) GnuCompiler.__init__(self, defines) default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor'] self.warn_args = {'0': [], @@ -266,84 +323,137 @@ '2': default_warn_args + ['-Wextra'], '3': default_warn_args + ['-Wextra', '-Wpedantic']} - def get_options(self): + def get_options(self) -> 'OptionDictType': opts = CPPCompiler.get_options(self) - opts.update({'cpp_eh': coredata.UserComboOption('C++ exception handling type.', - ['none', 'default', 'a', 's', 'sc'], - 'default'), - 'cpp_rtti': coredata.UserBooleanOption('Enable RTTI', True), - 'cpp_std': coredata.UserComboOption('C++ language standard to use', - ['none', 'c++98', 'c++03', 'c++11', 'c++14', 'c++17', 'c++1z', 'c++2a', - 'gnu++03', 'gnu++11', 'gnu++14', 'gnu++17', 'gnu++1z', 'gnu++2a'], - 'none'), - 'cpp_debugstl': coredata.UserBooleanOption('STL debug mode', - False)}) + opts.update({ + 'eh': coredata.UserComboOption( + 'C++ exception handling type.', + ['none', 'default', 'a', 's', 'sc'], + 'default', + ), + 'rtti': coredata.UserBooleanOption('Enable RTTI', True), + 'std': coredata.UserComboOption( + 'C++ language standard to use', + ['none', 'c++98', 'c++03', 'c++11', 'c++14', 'c++17', 'c++1z', 'c++2a', + 'gnu++03', 'gnu++11', 'gnu++14', 'gnu++17', 'gnu++1z', 'gnu++2a'], + 'none', + ), + 'debugstl': coredata.UserBooleanOption( + 'STL debug mode', + False, + ) + }) if self.info.is_windows() or self.info.is_cygwin(): opts.update({ - 'cpp_winlibs': coredata.UserArrayOption('Standard Win libraries to link against', - gnu_winlibs), }) + 'winlibs': coredata.UserArrayOption( + 'Standard Win libraries to link against', + gnu_winlibs, + ), + }) return opts - def get_option_compile_args(self, options): + def get_option_compile_args(self, options: 'OptionDictType') -> T.List[str]: args = [] - std = options['cpp_std'] + std = options['std'] if std.value != 'none': args.append(self._find_best_cpp_std(std.value)) - non_msvc_eh_options(options['cpp_eh'].value, args) + non_msvc_eh_options(options['eh'].value, args) - if not options['cpp_rtti'].value: + if not options['rtti'].value: args.append('-fno-rtti') - if options['cpp_debugstl'].value: + if options['debugstl'].value: args.append('-D_GLIBCXX_DEBUG=1') return args - def get_option_link_args(self, options): + def get_option_link_args(self, options: 'OptionDictType') -> T.List[str]: if self.info.is_windows() or self.info.is_cygwin(): - return 
options['cpp_winlibs'].value[:] + # without a typedict mypy can't understand this. + libs = options['winlibs'].value.copy() + assert isinstance(libs, list) + for l in libs: + assert isinstance(l, str) + return libs return [] - def get_pch_use_args(self, pch_dir, header): + def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: return ['-fpch-preprocess', '-include', os.path.basename(header)] - def language_stdlib_only_link_flags(self): + def language_stdlib_only_link_flags(self) -> T.List[str]: return ['-lstdc++'] class PGICPPCompiler(PGICompiler, CPPCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrapper=None, **kwargs): - CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, info, exe_wrapper, **kwargs) + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + PGICompiler.__init__(self) + + +class NvidiaHPC_CPPCompiler(PGICompiler, CPPCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) PGICompiler.__init__(self) + self.id = 'nvidia_hpc' + class ElbrusCPPCompiler(GnuCPPCompiler, ElbrusCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrapper=None, - defines=None, **kwargs): - GnuCPPCompiler.__init__(self, exelist, version, for_machine, - is_cross, info, exe_wrapper, defines, - **kwargs) + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + full_version: T.Optional[str] = None): + GnuCPPCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, + full_version=full_version, defines=defines) ElbrusCompiler.__init__(self) - # It does not support c++/gnu++ 17 and 1z, but still does support 0x, 1y, and gnu++98. 
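An illustrative aside (not part of the patch): the version_compare() gating pattern that the rewritten Elbrus get_options() below uses to grow the list of accepted C++ standards with newer compiler releases. The standalone function here is invented for the sketch; version_compare is meson's own helper.

```python
from mesonbuild.mesonlib import version_compare

def elbrus_like_cpp_stds(compiler_version: str) -> list:
    stds = ['none', 'c++98', 'c++03', 'c++11', 'c++14']
    if version_compare(compiler_version, '>=1.24.00'):
        stds += ['c++1z', 'c++17']
    if version_compare(compiler_version, '>=1.25.00'):
        stds += ['c++2a']
    return stds

assert 'c++17' in elbrus_like_cpp_stds('1.24.01')
assert 'c++2a' not in elbrus_like_cpp_stds('1.24.01')
```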
- def get_options(self): + def get_options(self) -> 'OptionDictType': opts = CPPCompiler.get_options(self) - opts.update({'cpp_eh': coredata.UserComboOption('C++ exception handling type.', - ['none', 'default', 'a', 's', 'sc'], - 'default'), - 'cpp_std': coredata.UserComboOption('C++ language standard to use', - ['none', 'c++98', 'c++03', 'c++0x', 'c++11', 'c++14', 'c++1y', - 'gnu++98', 'gnu++03', 'gnu++0x', 'gnu++11', 'gnu++14', 'gnu++1y'], - 'none'), - 'cpp_debugstl': coredata.UserBooleanOption('STL debug mode', - False)}) + + cpp_stds = [ + 'none', 'c++98', 'c++03', 'c++0x', 'c++11', 'c++14', 'c++1y', + 'gnu++98', 'gnu++03', 'gnu++0x', 'gnu++11', 'gnu++14', 'gnu++1y', + ] + + if version_compare(self.version, '>=1.24.00'): + cpp_stds += [ 'c++1z', 'c++17', 'gnu++1z', 'gnu++17' ] + + if version_compare(self.version, '>=1.25.00'): + cpp_stds += [ 'c++2a', 'gnu++2a' ] + + opts.update({ + 'eh': coredata.UserComboOption( + 'C++ exception handling type.', + ['none', 'default', 'a', 's', 'sc'], + 'default', + ), + 'std': coredata.UserComboOption( + 'C++ language standard to use', + cpp_stds, + 'none', + ), + 'debugstl': coredata.UserBooleanOption( + 'STL debug mode', + False, + ), + }) return opts # Elbrus C++ compiler does not have lchmod, but there is only linker warning, not compiler error. # So we should explicitly fail at this case. - def has_function(self, funcname, prefix, env, *, extra_args=None, dependencies=None): + def has_function(self, funcname: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: if funcname == 'lchmod': return False, False else: @@ -352,24 +462,26 @@ dependencies=dependencies) # Elbrus C++ compiler does not support RTTI, so don't check for it. - def get_option_compile_args(self, options): + def get_option_compile_args(self, options: 'OptionDictType') -> T.List[str]: args = [] - std = options['cpp_std'] + std = options['std'] if std.value != 'none': args.append(self._find_best_cpp_std(std.value)) - non_msvc_eh_options(options['cpp_eh'].value, args) + non_msvc_eh_options(options['eh'].value, args) - if options['cpp_debugstl'].value: + if options['debugstl'].value: args.append('-D_GLIBCXX_DEBUG=1') return args class IntelCPPCompiler(IntelGnuLikeCompiler, CPPCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrap, **kwargs): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, - info, exe_wrap, **kwargs) + info, exe_wrapper, linker=linker, full_version=full_version) IntelGnuLikeCompiler.__init__(self) self.lang_header = 'c++-header' default_warn_args = ['-Wall', '-w3', '-diag-disable:remark', @@ -379,7 +491,7 @@ '2': default_warn_args + ['-Wextra'], '3': default_warn_args + ['-Wextra']} - def get_options(self): + def get_options(self) -> 'OptionDictType': opts = CPPCompiler.get_options(self) # Every Unix compiler under the sun seems to accept -std=c++03, # with the exception of ICC. 
Instead of preventing the user from @@ -393,39 +505,47 @@ c_stds += ['c++17'] if version_compare(self.version, '>=17.0.0'): g_stds += ['gnu++14'] - opts.update({'cpp_eh': coredata.UserComboOption('C++ exception handling type.', - ['none', 'default', 'a', 's', 'sc'], - 'default'), - 'cpp_rtti': coredata.UserBooleanOption('Enable RTTI', True), - 'cpp_std': coredata.UserComboOption('C++ language standard to use', - ['none'] + c_stds + g_stds, - 'none'), - 'cpp_debugstl': coredata.UserBooleanOption('STL debug mode', - False)}) + if version_compare(self.version, '>=19.1.0'): + c_stds += ['c++2a'] + g_stds += ['gnu++2a'] + opts.update({ + 'eh': coredata.UserComboOption( + 'C++ exception handling type.', + ['none', 'default', 'a', 's', 'sc'], + 'default', + ), + 'rtti': coredata.UserBooleanOption('Enable RTTI', True), + 'std': coredata.UserComboOption( + 'C++ language standard to use', + ['none'] + c_stds + g_stds, + 'none', + ), + 'debugstl': coredata.UserBooleanOption('STL debug mode', False), + }) return opts - def get_option_compile_args(self, options): + def get_option_compile_args(self, options: 'OptionDictType') -> T.List[str]: args = [] - std = options['cpp_std'] + std = options['std'] if std.value != 'none': remap_cpp03 = { 'c++03': 'c++98', 'gnu++03': 'gnu++98' } args.append('-std=' + remap_cpp03.get(std.value, std.value)) - if options['cpp_eh'].value == 'none': + if options['eh'].value == 'none': args.append('-fno-exceptions') - if not options['cpp_rtti'].value: + if not options['rtti'].value: args.append('-fno-rtti') - if options['cpp_debugstl'].value: + if options['debugstl'].value: args.append('-D_GLIBCXX_DEBUG=1') return args - def get_option_link_args(self, options): + def get_option_link_args(self, options: 'OptionDictType') -> T.List[str]: return [] -class VisualStudioLikeCPPCompilerMixin: +class VisualStudioLikeCPPCompilerMixin(CompilerMixinBase): """Mixin for C++ specific method overrides in MSVC-like compilers.""" @@ -441,25 +561,34 @@ 'c++latest': (False, "latest"), } - def get_option_link_args(self, options): - return options['cpp_winlibs'].value[:] - - def _get_options_impl(self, opts, cpp_stds: T.List[str]): - opts.update({'cpp_eh': coredata.UserComboOption('C++ exception handling type.', - ['none', 'default', 'a', 's', 'sc'], - 'default'), - 'cpp_rtti': coredata.UserBooleanOption('Enable RTTI', True), - 'cpp_std': coredata.UserComboOption('C++ language standard to use', - cpp_stds, - 'none'), - 'cpp_winlibs': coredata.UserArrayOption('Windows libs to link against.', - msvc_winlibs)}) + def get_option_link_args(self, options: 'OptionDictType') -> T.List[str]: + # need a typeddict for this + return T.cast(T.List[str], options['winlibs'].value[:]) + + def _get_options_impl(self, opts: 'OptionDictType', cpp_stds: T.List[str]) -> 'OptionDictType': + opts.update({ + 'eh': coredata.UserComboOption( + 'C++ exception handling type.', + ['none', 'default', 'a', 's', 'sc'], + 'default', + ), + 'rtti': coredata.UserBooleanOption('Enable RTTI', True), + 'std': coredata.UserComboOption( + 'C++ language standard to use', + cpp_stds, + 'none', + ), + 'winlibs': coredata.UserArrayOption( + 'Windows libs to link against.', + msvc_winlibs, + ), + }) return opts - def get_option_compile_args(self, options): + def get_option_compile_args(self, options: 'OptionDictType') -> T.List[str]: args = [] - eh = options['cpp_eh'] + eh = options['eh'] if eh.value == 'default': args.append('/EHsc') elif eh.value == 'none': @@ -467,10 +596,10 @@ else: args.append('/EH' + eh.value) - if not 
options['cpp_rtti'].value: + if not options['rtti'].value: args.append('/GR-') - permissive, ver = self.VC_VERSION_MAP[options['cpp_std'].value] + permissive, ver = self.VC_VERSION_MAP[options['std'].value] if ver is not None: args.append('/std:c++{}'.format(ver)) @@ -480,46 +609,50 @@ return args - def get_compiler_check_args(self): + def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]: # XXX: this is a hack because so much GnuLike stuff is in the base CPPCompiler class. - return CLikeCompiler.get_compiler_check_args(self) + return Compiler.get_compiler_check_args(self, mode) -class CPP11AsCPP14Mixin: +class CPP11AsCPP14Mixin(CompilerMixinBase): """Mixin class for VisualStudio and ClangCl to replace C++11 std with C++14. This is a limitation of Clang and MSVC that ICL doesn't share. """ - def get_option_compile_args(self, options): + def get_option_compile_args(self, options: 'OptionDictType') -> T.List[str]: # Note: there is no explicit flag for supporting C++11; we attempt to do the best we can # which means setting the C++ standard version to C++14, in compilers that support it # (i.e., after VS2015U3) # if one is using anything before that point, one cannot set the standard. - if options['cpp_std'].value in {'vc++11', 'c++11'}: + if options['std'].value in {'vc++11', 'c++11'}: mlog.warning(self.id, 'does not support C++11;', 'attempting best effort; setting the standard to C++14', once=True) # Don't mutate anything we're going to change, we need to use # deepcopy since we're messing with members, and we can't simply # copy the members because the option proxy doesn't support it. options = copy.deepcopy(options) - if options['cpp_std'].value == 'vc++11': - options['cpp_std'].value = 'vc++14' + if options['std'].value == 'vc++11': + options['std'].value = 'vc++14' else: - options['cpp_std'].value = 'c++14' + options['std'].value = 'c++14' return super().get_option_compile_args(options) -class VisualStudioCPPCompiler(CPP11AsCPP14Mixin, VisualStudioLikeCPPCompilerMixin, VisualStudioLikeCompiler, CPPCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross: bool, info: 'MachineInfo', exe_wrap, target, **kwargs): - CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, info, exe_wrap, **kwargs) - VisualStudioLikeCompiler.__init__(self, target) - self.base_options = ['b_pch', 'b_vscrt'] # FIXME add lto, pgo and the like +class VisualStudioCPPCompiler(CPP11AsCPP14Mixin, VisualStudioLikeCPPCompilerMixin, MSVCCompiler, CPPCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', target: str, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + MSVCCompiler.__init__(self, target) + self.base_options = ['b_pch', 'b_vscrt', 'b_ndebug'] # FIXME add lto, pgo and the like self.id = 'msvc' - def get_options(self): + def get_options(self) -> 'OptionDictType': cpp_stds = ['none', 'c++11', 'vc++11'] # Visual Studio 2015 and later if version_compare(self.version, '>=19'): @@ -529,11 +662,11 @@ cpp_stds.extend(['vc++14', 'c++17', 'vc++17']) return self._get_options_impl(super().get_options(), cpp_stds) - def get_option_compile_args(self, options): - if options['cpp_std'].value != 'none' and version_compare(self.version, '<19.00.24210'): + def 
get_option_compile_args(self, options: 'OptionDictType') -> T.List[str]: + if options['std'].value != 'none' and version_compare(self.version, '<19.00.24210'): mlog.warning('This version of MSVC does not support cpp_std arguments') options = copy.copy(options) - options['cpp_std'].value = 'none' + options['std'].value = 'none' args = super().get_option_compile_args(options) @@ -545,85 +678,137 @@ del args[i] return args -class ClangClCPPCompiler(CPP11AsCPP14Mixin, VisualStudioLikeCPPCompilerMixin, VisualStudioLikeCompiler, CPPCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrap, target, **kwargs): +class ClangClCPPCompiler(CPP11AsCPP14Mixin, VisualStudioLikeCPPCompilerMixin, ClangClCompiler, CPPCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', target: str, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, - info, exe_wrap, **kwargs) - VisualStudioLikeCompiler.__init__(self, target) + info, exe_wrapper, linker=linker, full_version=full_version) + ClangClCompiler.__init__(self, target) self.id = 'clang-cl' - def get_options(self): + def get_options(self) -> 'OptionDictType': cpp_stds = ['none', 'c++11', 'vc++11', 'c++14', 'vc++14', 'c++17', 'vc++17', 'c++latest'] return self._get_options_impl(super().get_options(), cpp_stds) class IntelClCPPCompiler(VisualStudioLikeCPPCompilerMixin, IntelVisualStudioLikeCompiler, CPPCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrap, target, **kwargs): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', target: str, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, - info, exe_wrap, **kwargs) + info, exe_wrapper, linker=linker, full_version=full_version) IntelVisualStudioLikeCompiler.__init__(self, target) - def get_options(self): + def get_options(self) -> 'OptionDictType': # This has only been tested with version 19.0, cpp_stds = ['none', 'c++11', 'vc++11', 'c++14', 'vc++14', 'c++17', 'vc++17', 'c++latest'] return self._get_options_impl(super().get_options(), cpp_stds) + def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]: + # XXX: this is a hack because so much GnuLike stuff is in the base CPPCompiler class. 
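An illustrative aside (not part of the patch): the explicit base-class call made on the next line skips CPPCompiler's override, which appends '-fpermissive' (a GCC-style flag that MSVC-like drivers reject). A minimal model of that dispatch trick, with invented class names:

```python
class Base:
    def check_args(self):
        return []

class CppLayer(Base):
    def check_args(self):
        # GCC-style relaxation added by the C++ layer.
        return super().check_args() + ['-fpermissive']

class MsvcLike(CppLayer):
    def check_args(self):
        # Call the grandparent directly so the '-fpermissive' step is skipped.
        return Base.check_args(self)

assert MsvcLike().check_args() == []
assert CppLayer().check_args() == ['-fpermissive']
```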
+ return IntelVisualStudioLikeCompiler.get_compiler_check_args(self, mode) + class ArmCPPCompiler(ArmCompiler, CPPCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrap=None, **kwargs): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, - info, exe_wrap, **kwargs) + info, exe_wrapper, linker=linker, full_version=full_version) ArmCompiler.__init__(self) - def get_options(self): + def get_options(self) -> 'OptionDictType': opts = CPPCompiler.get_options(self) - opts.update({'cpp_std': coredata.UserComboOption('C++ language standard to use', - ['none', 'c++03', 'c++11'], - 'none')}) + opts.update({ + 'std': coredata.UserComboOption( + 'C++ language standard to use', + ['none', 'c++03', 'c++11'], + 'none', + ), + }) return opts - def get_option_compile_args(self, options): + def get_option_compile_args(self, options: 'OptionDictType') -> T.List[str]: args = [] - std = options['cpp_std'] + std = options['std'] if std.value == 'c++11': args.append('--cpp11') elif std.value == 'c++03': args.append('--cpp') return args - def get_option_link_args(self, options): + def get_option_link_args(self, options: 'OptionDictType') -> T.List[str]: return [] - def get_compiler_check_args(self): + def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]: return [] class CcrxCPPCompiler(CcrxCompiler, CPPCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrap=None, **kwargs): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, - info, exe_wrap, **kwargs) + info, exe_wrapper, linker=linker, full_version=full_version) CcrxCompiler.__init__(self) # Override CCompiler.get_always_args - def get_always_args(self): + def get_always_args(self) -> T.List[str]: + return ['-nologo', '-lang=cpp'] + + def get_option_compile_args(self, options: 'OptionDictType') -> T.List[str]: + return [] + + def get_compile_only_args(self) -> T.List[str]: + return [] + + def get_output_args(self, target: str) -> T.List[str]: + return ['-output=obj=%s' % target] + + def get_option_link_args(self, options: 'OptionDictType') -> T.List[str]: + return [] + + def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]: + return [] + +class C2000CPPCompiler(C2000Compiler, CPPCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + C2000Compiler.__init__(self) + + def get_options(self) -> 'OptionDictType': + opts = CPPCompiler.get_options(self) + opts.update({'cpp_std': coredata.UserComboOption('C++ language standard to use', + ['none', 'c++03'], + 'none')}) + return opts + + def get_always_args(self) -> 
T.List[str]: return ['-nologo', '-lang=cpp'] - def get_option_compile_args(self, options): + def get_option_compile_args(self, options: 'OptionDictType') -> T.List[str]: return [] - def get_compile_only_args(self): + def get_compile_only_args(self) -> T.List[str]: return [] - def get_output_args(self, target): + def get_output_args(self, target: str) -> T.List[str]: return ['-output=obj=%s' % target] - def get_option_link_args(self, options): + def get_option_link_args(self, options: 'OptionDictType') -> T.List[str]: return [] - def get_compiler_check_args(self): + def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]: return [] diff -Nru meson-0.53.2/mesonbuild/compilers/c.py meson-0.57.0+really0.56.2/mesonbuild/compilers/c.py --- meson-0.53.2/mesonbuild/compilers/c.py 2020-02-25 18:00:46.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/compilers/c.py 2021-01-06 10:39:48.000000000 +0000 @@ -17,17 +17,20 @@ from .. import coredata from ..mesonlib import MachineChoice, MesonException, mlog, version_compare +from ..linkers import LinkerEnvVarsMixin from .c_function_attributes import C_FUNC_ATTRIBUTES from .mixins.clike import CLikeCompiler from .mixins.ccrx import CcrxCompiler +from .mixins.xc16 import Xc16Compiler +from .mixins.compcert import CompCertCompiler +from .mixins.c2000 import C2000Compiler from .mixins.arm import ArmCompiler, ArmclangCompiler -from .mixins.visualstudio import VisualStudioLikeCompiler +from .mixins.visualstudio import MSVCCompiler, ClangClCompiler from .mixins.gnu import GnuCompiler from .mixins.intel import IntelGnuLikeCompiler, IntelVisualStudioLikeCompiler from .mixins.clang import ClangCompiler from .mixins.elbrus import ElbrusCompiler from .mixins.pgi import PGICompiler -from .mixins.islinker import BasicLinkerIsCompilerMixin, LinkerEnvVarsMixin from .mixins.emscripten import EmscriptenMixin from .compilers import ( gnu_winlibs, @@ -36,13 +39,22 @@ ) if T.TYPE_CHECKING: + from ..coredata import OptionDictType + from ..dependencies import Dependency, ExternalProgram from ..envconfig import MachineInfo + from ..environment import Environment + from ..linkers import DynamicLinker + + CompilerMixinBase = Compiler +else: + CompilerMixinBase = object + class CCompiler(CLikeCompiler, Compiler): @staticmethod - def attribute_check_func(name): + def attribute_check_func(name: str) -> str: try: return C_FUNC_ATTRIBUTES[name] except KeyError: @@ -50,20 +62,26 @@ language = 'c' - def __init__(self, exelist, version, for_machine: MachineChoice, is_cross: bool, - info: 'MachineInfo', exe_wrapper: T.Optional[str] = None, **kwargs): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): # If a child ObjC or CPP class has already set it, don't set it ourselves - Compiler.__init__(self, exelist, version, for_machine, info, **kwargs) - CLikeCompiler.__init__(self, is_cross, exe_wrapper) + Compiler.__init__(self, exelist, version, for_machine, info, + is_cross=is_cross, full_version=full_version, linker=linker) + CLikeCompiler.__init__(self, exe_wrapper) - def get_no_stdinc_args(self): + def get_no_stdinc_args(self) -> T.List[str]: return ['-nostdinc'] - def sanity_check(self, work_dir, environment): + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: code = 'int main(void) { int class=0; return class; }\n' - return 
self.sanity_check_impl(work_dir, environment, 'sanitycheckc.c', code) + return self._sanity_check_impl(work_dir, environment, 'sanitycheckc.c', code) - def has_header_symbol(self, hname, symbol, prefix, env, *, extra_args=None, dependencies=None): + def has_header_symbol(self, hname: str, symbol: str, prefix: str, + env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: fargs = {'prefix': prefix, 'header': hname, 'symbol': symbol} t = '''{prefix} #include <{header}> @@ -82,18 +100,22 @@ _C17_VERSION = '>=6.0.0' _C18_VERSION = '>=8.0.0' + _C2X_VERSION = '>=9.0.0' - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrapper=None, **kwargs): - CCompiler.__init__(self, exelist, version, for_machine, is_cross, info, exe_wrapper, **kwargs) - ClangCompiler.__init__(self) + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, exelist, version, for_machine, is_cross, info, exe_wrapper, linker=linker, full_version=full_version) + ClangCompiler.__init__(self, defines) default_warn_args = ['-Wall', '-Winvalid-pch'] self.warn_args = {'0': [], '1': default_warn_args, '2': default_warn_args + ['-Wextra'], '3': default_warn_args + ['-Wextra', '-Wpedantic']} - def get_options(self): + def get_options(self) -> 'OptionDictType': opts = CCompiler.get_options(self) c_stds = ['c89', 'c99', 'c11'] g_stds = ['gnu89', 'gnu99', 'gnu11'] @@ -105,19 +127,40 @@ if version_compare(self.version, self._C18_VERSION): c_stds += ['c18'] g_stds += ['gnu18'] - opts.update({'c_std': coredata.UserComboOption('C language standard to use', - ['none'] + c_stds + g_stds, - 'none')}) + if version_compare(self.version, self._C2X_VERSION): + c_stds += ['c2x'] + g_stds += ['gnu2x'] + opts.update({ + 'std': coredata.UserComboOption( + 'C language standard to use', + ['none'] + c_stds + g_stds, + 'none', + ), + }) + if self.info.is_windows() or self.info.is_cygwin(): + opts.update({ + 'winlibs': coredata.UserArrayOption( + 'Standard Win libraries to link against', + gnu_winlibs, + ), + }) return opts - def get_option_compile_args(self, options): + def get_option_compile_args(self, options: 'OptionDictType') -> T.List[str]: args = [] - std = options['c_std'] + std = options['std'] if std.value != 'none': args.append('-std=' + std.value) return args - def get_option_link_args(self, options): + def get_option_link_args(self, options: 'OptionDictType') -> T.List[str]: + if self.info.is_windows() or self.info.is_cygwin(): + # without a typedict mypy can't understand this. 
+ libs = options['winlibs'].value.copy() + assert isinstance(libs, list) + for l in libs: + assert isinstance(l, str) + return libs return [] @@ -131,24 +174,30 @@ _C17_VERSION = '>=10.0.0' _C18_VERSION = '>=11.0.0' + _C2X_VERSION = '>=11.0.0' -class EmscriptenCCompiler(LinkerEnvVarsMixin, EmscriptenMixin, BasicLinkerIsCompilerMixin, ClangCCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross: bool, info: 'MachineInfo', exe_wrapper=None, **kwargs): +class EmscriptenCCompiler(EmscriptenMixin, LinkerEnvVarsMixin, ClangCCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + full_version: T.Optional[str] = None): if not is_cross: raise MesonException('Emscripten compiler can only be used for cross compilation.') - ClangCCompiler.__init__(self, exelist=exelist, version=version, - for_machine=for_machine, is_cross=is_cross, - info=info, exe_wrapper=exe_wrapper, **kwargs) + ClangCCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper=exe_wrapper, linker=linker, + defines=defines, full_version=full_version) self.id = 'emscripten' class ArmclangCCompiler(ArmclangCompiler, CCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrapper=None, **kwargs): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): CCompiler.__init__(self, exelist, version, for_machine, is_cross, - info, exe_wrapper, **kwargs) + info, exe_wrapper, linker=linker, full_version=full_version) ArmclangCompiler.__init__(self) default_warn_args = ['-Wall', '-Winvalid-pch'] self.warn_args = {'0': [], @@ -156,31 +205,39 @@ '2': default_warn_args + ['-Wextra'], '3': default_warn_args + ['-Wextra', '-Wpedantic']} - def get_options(self): + def get_options(self) -> 'OptionDictType': opts = CCompiler.get_options(self) - opts.update({'c_std': coredata.UserComboOption('C language standard to use', - ['none', 'c90', 'c99', 'c11', - 'gnu90', 'gnu99', 'gnu11'], - 'none')}) + opts.update({ + 'std': coredata.UserComboOption( + 'C language standard to use', + ['none', 'c90', 'c99', 'c11', 'gnu90', 'gnu99', 'gnu11'], + 'none', + ), + }) return opts - def get_option_compile_args(self, options): + def get_option_compile_args(self, options: 'OptionDictType') -> T.List[str]: args = [] - std = options['c_std'] + std = options['std'] if std.value != 'none': args.append('-std=' + std.value) return args - def get_option_link_args(self, options): + def get_option_link_args(self, options: 'OptionDictType') -> T.List[str]: return [] class GnuCCompiler(GnuCompiler, CCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrapper=None, - defines=None, **kwargs): - CCompiler.__init__(self, exelist, version, for_machine, is_cross, - info, exe_wrapper, **kwargs) + + _C18_VERSION = '>=8.0.0' + _C2X_VERSION = '>=9.0.0' + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + defines: 
T.Optional[T.Dict[str, str]] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, exelist, version, for_machine, is_cross, info, exe_wrapper, linker=linker, full_version=full_version) GnuCompiler.__init__(self, defines) default_warn_args = ['-Wall', '-Winvalid-pch'] self.warn_args = {'0': [], @@ -188,68 +245,106 @@ '2': default_warn_args + ['-Wextra'], '3': default_warn_args + ['-Wextra', '-Wpedantic']} - def get_options(self): + def get_options(self) -> 'OptionDictType': opts = CCompiler.get_options(self) c_stds = ['c89', 'c99', 'c11'] g_stds = ['gnu89', 'gnu99', 'gnu11'] - v = '>=8.0.0' - if version_compare(self.version, v): + if version_compare(self.version, self._C18_VERSION): c_stds += ['c17', 'c18'] g_stds += ['gnu17', 'gnu18'] - opts.update({'c_std': coredata.UserComboOption('C language standard to use', - ['none'] + c_stds + g_stds, - 'none')}) + if version_compare(self.version, self._C2X_VERSION): + c_stds += ['c2x'] + g_stds += ['gnu2x'] + opts.update({ + 'std': coredata.UserComboOption( + 'C language standard to use', + ['none'] + c_stds + g_stds, + 'none', + ), + }) if self.info.is_windows() or self.info.is_cygwin(): opts.update({ - 'c_winlibs': coredata.UserArrayOption('Standard Win libraries to link against', - gnu_winlibs), }) + 'winlibs': coredata.UserArrayOption( + 'Standard Win libraries to link against', + gnu_winlibs, + ), + }) return opts - def get_option_compile_args(self, options): + def get_option_compile_args(self, options: 'OptionDictType') -> T.List[str]: args = [] - std = options['c_std'] + std = options['std'] if std.value != 'none': args.append('-std=' + std.value) return args - def get_option_link_args(self, options): + def get_option_link_args(self, options: 'OptionDictType') -> T.List[str]: if self.info.is_windows() or self.info.is_cygwin(): - return options['c_winlibs'].value[:] + # without a typeddict mypy can't figure this out + libs = options['winlibs'].value.copy() + assert isinstance(libs, list) + for l in libs: + assert isinstance(l, str) + return libs return [] - def get_pch_use_args(self, pch_dir, header): + def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: return ['-fpch-preprocess', '-include', os.path.basename(header)] class PGICCompiler(PGICompiler, CCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrapper=None, **kwargs): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + PGICompiler.__init__(self) + + +class NvidiaHPC_CCompiler(PGICompiler, CCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): CCompiler.__init__(self, exelist, version, for_machine, is_cross, - info, exe_wrapper, **kwargs) + info, exe_wrapper, linker=linker, full_version=full_version) PGICompiler.__init__(self) + self.id = 'nvidia_hpc' class ElbrusCCompiler(GnuCCompiler, ElbrusCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrapper=None, - 
defines=None, **kwargs): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + full_version: T.Optional[str] = None): GnuCCompiler.__init__(self, exelist, version, for_machine, is_cross, - info, exe_wrapper, defines, **kwargs) + info, exe_wrapper, defines=defines, + linker=linker, full_version=full_version) ElbrusCompiler.__init__(self) # It does support some various ISO standards and c/gnu 90, 9x, 1x in addition to those which GNU CC supports. - def get_options(self): + def get_options(self) -> 'OptionDictType': opts = CCompiler.get_options(self) - opts.update({'c_std': coredata.UserComboOption('C language standard to use', - ['none', 'c89', 'c90', 'c9x', 'c99', 'c1x', 'c11', - 'gnu89', 'gnu90', 'gnu9x', 'gnu99', 'gnu1x', 'gnu11', - 'iso9899:2011', 'iso9899:1990', 'iso9899:199409', 'iso9899:1999'], - 'none')}) + opts.update({ + 'std': coredata.UserComboOption( + 'C language standard to use', + [ + 'none', 'c89', 'c90', 'c9x', 'c99', 'c1x', 'c11', + 'gnu89', 'gnu90', 'gnu9x', 'gnu99', 'gnu1x', 'gnu11', + 'iso9899:2011', 'iso9899:1990', 'iso9899:199409', 'iso9899:1999', + ], + 'none', + ), + }) return opts # Elbrus C compiler does not have lchmod, but there is only linker warning, not compiler error. # So we should explicitly fail at this case. - def has_function(self, funcname, prefix, env, *, extra_args=None, dependencies=None): + def has_function(self, funcname: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: if funcname == 'lchmod': return False, False else: @@ -259,10 +354,12 @@ class IntelCCompiler(IntelGnuLikeCompiler, CCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrapper=None, **kwargs): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): CCompiler.__init__(self, exelist, version, for_machine, is_cross, - info, exe_wrapper, **kwargs) + info, exe_wrapper, linker=linker, full_version=full_version) IntelGnuLikeCompiler.__init__(self) self.lang_header = 'c-header' default_warn_args = ['-Wall', '-w3', '-diag-disable:remark'] @@ -271,80 +368,149 @@ '2': default_warn_args + ['-Wextra'], '3': default_warn_args + ['-Wextra']} - def get_options(self): + def get_options(self) -> 'OptionDictType': opts = CCompiler.get_options(self) c_stds = ['c89', 'c99'] g_stds = ['gnu89', 'gnu99'] if version_compare(self.version, '>=16.0.0'): c_stds += ['c11'] - opts.update({'c_std': coredata.UserComboOption('C language standard to use', - ['none'] + c_stds + g_stds, - 'none')}) + opts.update({ + 'std': coredata.UserComboOption( + 'C language standard to use', + ['none'] + c_stds + g_stds, + 'none', + ), + }) return opts - def get_option_compile_args(self, options): + def get_option_compile_args(self, options: 'OptionDictType') -> T.List[str]: args = [] - std = options['c_std'] + std = options['std'] if std.value != 'none': args.append('-std=' + std.value) return args -class VisualStudioLikeCCompilerMixin: +class VisualStudioLikeCCompilerMixin(CompilerMixinBase): """Shared methods that apply to MSVC-like 
C compilers.""" - def get_options(self): + def get_options(self) -> 'OptionDictType': opts = super().get_options() - opts.update({'c_winlibs': coredata.UserArrayOption('Windows libs to link against.', - msvc_winlibs)}) + opts.update({ + 'winlibs': coredata.UserArrayOption( + 'Windows libs to link against.', + msvc_winlibs, + ), + }) return opts - def get_option_link_args(self, options): - return options['c_winlibs'].value[:] - + def get_option_link_args(self, options: 'OptionDictType') -> T.List[str]: + # need a TypeDict to make this work + libs = options['winlibs'].value.copy() + assert isinstance(libs, list) + for l in libs: + assert isinstance(l, str) + return libs + + +class VisualStudioCCompiler(MSVCCompiler, VisualStudioLikeCCompilerMixin, CCompiler): + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', target: str, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, + full_version=full_version) + MSVCCompiler.__init__(self, target) -class VisualStudioCCompiler(VisualStudioLikeCompiler, VisualStudioLikeCCompilerMixin, CCompiler): + def get_options(self) -> 'OptionDictType': + opts = super().get_options() + c_stds = ['none', 'c89', 'c99', 'c11', + # Need to have these to be compatible with projects + # that set c_std to e.g. gnu99. + # https://github.com/mesonbuild/meson/issues/7611 + 'gnu89', 'gnu90', 'gnu9x', 'gnu99', 'gnu1x', 'gnu11'] + opts.update({ + 'std': coredata.UserComboOption( + 'C language standard to use', + c_stds, + 'none', + ), + }) + return opts - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrap, target: str, - **kwargs): - CCompiler.__init__(self, exelist, version, for_machine, is_cross, - info, exe_wrap, **kwargs) - VisualStudioLikeCompiler.__init__(self, target) - self.id = 'msvc' + def get_option_compile_args(self, options: 'OptionDictType') -> T.List[str]: + args = [] + std = options['std'] + # As of MVSC 16.7, /std:c11 is the only valid C standard option. + if std.value in {'c11', 'gnu11'}: + args.append('/std:c11') + return args -class ClangClCCompiler(VisualStudioLikeCompiler, VisualStudioLikeCCompilerMixin, CCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrap, target, **kwargs): +class ClangClCCompiler(ClangClCompiler, VisualStudioLikeCCompilerMixin, CCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', target: str, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): CCompiler.__init__(self, exelist, version, for_machine, is_cross, - info, exe_wrap, **kwargs) - VisualStudioLikeCompiler.__init__(self, target) - self.id = 'clang-cl' + info, exe_wrapper, linker=linker, + full_version=full_version) + ClangClCompiler.__init__(self, target) + + def get_options(self) -> 'OptionDictType': + # Clang-cl can compile up to c99, but doesn't have a std-swtich for + # them. Unlike recent versions of MSVC it doesn't (as of 10.0.1) + # support c11 + opts = super().get_options() + c_stds = ['none', 'c89', 'c99', + # Need to have these to be compatible with projects + # that set c_std to e.g. gnu99. 
+ # https://github.com/mesonbuild/meson/issues/7611 + 'gnu89', 'gnu90', 'gnu9x', 'gnu99'] + opts.update({ + 'std': coredata.UserComboOption( + 'C language standard to use', + c_stds, + 'none', + ), + }) + return opts class IntelClCCompiler(IntelVisualStudioLikeCompiler, VisualStudioLikeCCompilerMixin, CCompiler): """Intel "ICL" compiler abstraction.""" - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrap, target, **kwargs): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', target: str, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): CCompiler.__init__(self, exelist, version, for_machine, is_cross, - info, exe_wrap, **kwargs) + info, exe_wrapper, linker=linker, + full_version=full_version) IntelVisualStudioLikeCompiler.__init__(self, target) - def get_options(self): + def get_options(self) -> 'OptionDictType': opts = super().get_options() c_stds = ['none', 'c89', 'c99', 'c11'] - opts.update({'c_std': coredata.UserComboOption('C language standard to use', - c_stds, - 'none')}) + opts.update({ + 'std': coredata.UserComboOption( + 'C language standard to use', + c_stds, + 'none', + ), + }) return opts - def get_option_compile_args(self, options): + def get_option_compile_args(self, options: 'OptionDictType') -> T.List[str]: args = [] - std = options['c_std'] + std = options['std'] if std.value == 'c89': mlog.warning("ICL doesn't explicitly implement c89, setting the standard to 'none', which is close.", once=True) elif std.value != 'none': @@ -353,70 +519,213 @@ class ArmCCompiler(ArmCompiler, CCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrapper=None, **kwargs): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): CCompiler.__init__(self, exelist, version, for_machine, is_cross, - info, exe_wrapper, **kwargs) + info, exe_wrapper, linker=linker, + full_version=full_version) ArmCompiler.__init__(self) - def get_options(self): + def get_options(self) -> 'OptionDictType': opts = CCompiler.get_options(self) - opts.update({'c_std': coredata.UserComboOption('C language standard to use', - ['none', 'c90', 'c99'], - 'none')}) + opts.update({ + 'std': coredata.UserComboOption( + 'C language standard to use', + ['none', 'c90', 'c99'], + 'none', + ), + }) return opts - def get_option_compile_args(self, options): + def get_option_compile_args(self, options: 'OptionDictType') -> T.List[str]: args = [] - std = options['c_std'] + std = options['std'] if std.value != 'none': args.append('--' + std.value) return args class CcrxCCompiler(CcrxCompiler, CCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrapper=None, **kwargs): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): CCompiler.__init__(self, exelist, version, for_machine, is_cross, - info, exe_wrapper, **kwargs) + info, exe_wrapper, linker=linker, full_version=full_version) 
CcrxCompiler.__init__(self) # Override CCompiler.get_always_args - def get_always_args(self): + def get_always_args(self) -> T.List[str]: return ['-nologo'] - def get_options(self): + def get_options(self) -> 'OptionDictType': opts = CCompiler.get_options(self) - opts.update({'c_std': coredata.UserComboOption('C language standard to use', - ['none', 'c89', 'c99'], - 'none')}) + opts.update({ + 'std': coredata.UserComboOption( + 'C language standard to use', + ['none', 'c89', 'c99'], + 'none', + ), + }) return opts - def get_no_stdinc_args(self): + def get_no_stdinc_args(self) -> T.List[str]: return [] - def get_option_compile_args(self, options): + def get_option_compile_args(self, options: 'OptionDictType') -> T.List[str]: args = [] - std = options['c_std'] + std = options['std'] if std.value == 'c89': args.append('-lang=c') elif std.value == 'c99': args.append('-lang=c99') return args - def get_compile_only_args(self): + def get_compile_only_args(self) -> T.List[str]: return [] - def get_no_optimization_args(self): + def get_no_optimization_args(self) -> T.List[str]: return ['-optimize=0'] - def get_output_args(self, target): + def get_output_args(self, target: str) -> T.List[str]: return ['-output=obj=%s' % target] - def get_werror_args(self): + def get_werror_args(self) -> T.List[str]: return ['-change_message=error'] - def get_include_args(self, path, is_system): + def get_include_args(self, path: str, is_system: bool) -> T.List[str]: if path == '': path = '.' return ['-include=' + path] + + +class Xc16CCompiler(Xc16Compiler, CCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + Xc16Compiler.__init__(self) + + def get_options(self) -> 'OptionDictType': + opts = CCompiler.get_options(self) + opts.update({'c_std': coredata.UserComboOption('C language standard to use', + ['none', 'c89', 'c99', 'gnu89', 'gnu99'], + 'none')}) + return opts + + def get_no_stdinc_args(self) -> T.List[str]: + return [] + + def get_option_compile_args(self, options: 'OptionDictType') -> T.List[str]: + args = [] + std = options['c_std'] + if std.value != 'none': + args.append('-ansi') + args.append('-std=' + std.value) + return args + + def get_compile_only_args(self) -> T.List[str]: + return [] + + def get_no_optimization_args(self) -> T.List[str]: + return ['-O0'] + + def get_output_args(self, target: str) -> T.List[str]: + return ['-o%s' % target] + + def get_werror_args(self) -> T.List[str]: + return ['-change_message=error'] + + def get_include_args(self, path: str, is_system: bool) -> T.List[str]: + if path == '': + path = '.' 
+ return ['-I' + path] + +class CompCertCCompiler(CompCertCompiler, CCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + CompCertCompiler.__init__(self) + + def get_options(self) -> 'OptionDictType': + opts = CCompiler.get_options(self) + opts.update({'c_std': coredata.UserComboOption('C language standard to use', + ['none', 'c89', 'c99'], + 'none')}) + return opts + + def get_option_compile_args(self, options: 'OptionDictType') -> T.List[str]: + return [] + + def get_no_optimization_args(self) -> T.List[str]: + return ['-O0'] + + def get_output_args(self, target: str) -> T.List[str]: + return ['-o{}'.format(target)] + + def get_werror_args(self) -> T.List[str]: + return ['-Werror'] + + def get_include_args(self, path: str, is_system: bool) -> T.List[str]: + if path == '': + path = '.' + return ['-I' + path] + +class C2000CCompiler(C2000Compiler, CCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + C2000Compiler.__init__(self) + + # Override CCompiler.get_always_args + def get_always_args(self) -> T.List[str]: + return [] + + def get_options(self) -> 'OptionDictType': + opts = CCompiler.get_options(self) + opts.update({'c_std': coredata.UserComboOption('C language standard to use', + ['none', 'c89', 'c99', 'c11'], + 'none')}) + return opts + + def get_no_stdinc_args(self) -> T.List[str]: + return [] + + def get_option_compile_args(self, options: 'OptionDictType') -> T.List[str]: + args = [] + std = options['c_std'] + if std.value != 'none': + args.append('--' + std.value) + return args + + def get_compile_only_args(self) -> T.List[str]: + return [] + + def get_no_optimization_args(self) -> T.List[str]: + return ['-Ooff'] + + def get_output_args(self, target: str) -> T.List[str]: + return ['--output_file=%s' % target] + + def get_werror_args(self) -> T.List[str]: + return ['-change_message=error'] + + def get_include_args(self, path: str, is_system: bool) -> T.List[str]: + if path == '': + path = '.' + return ['--include_path=' + path] diff -Nru meson-0.53.2/mesonbuild/compilers/cs.py meson-0.57.0+really0.56.2/mesonbuild/compilers/cs.py --- meson-0.53.2/mesonbuild/compilers/cs.py 2020-01-23 12:51:19.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/compilers/cs.py 2020-10-18 21:29:13.000000000 +0000 @@ -13,6 +13,7 @@ # limitations under the License. 
import os.path, subprocess +import textwrap import typing as T from ..mesonlib import EnvironmentException @@ -22,6 +23,7 @@ if T.TYPE_CHECKING: from ..envconfig import MachineInfo + from ..environment import Environment cs_optimization_args = {'0': [], 'g': [], @@ -29,64 +31,43 @@ '2': ['-optimize+'], '3': ['-optimize+'], 's': ['-optimize+'], - } + } # type: T.Dict[str, T.List[str]] class CsCompiler(BasicLinkerIsCompilerMixin, Compiler): language = 'cs' - def __init__(self, exelist, version, for_machine: MachineChoice, - info: 'MachineInfo', comp_id, runner=None): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + info: 'MachineInfo', comp_id: str, runner: T.Optional[str] = None): super().__init__(exelist, version, for_machine, info) self.id = comp_id - self.is_cross = False self.runner = runner @classmethod - def get_display_language(cls): + def get_display_language(cls) -> str: return 'C sharp' - def get_always_args(self): + def get_always_args(self) -> T.List[str]: return ['/nologo'] - def get_linker_always_args(self): + def get_linker_always_args(self) -> T.List[str]: return ['/nologo'] - def get_output_args(self, fname): + def get_output_args(self, fname: str) -> T.List[str]: return ['-out:' + fname] - def get_link_args(self, fname): + def get_link_args(self, fname: str) -> T.List[str]: return ['-r:' + fname] - def get_werror_args(self): + def get_werror_args(self) -> T.List[str]: return ['-warnaserror'] - def split_shlib_to_parts(self, fname): - return None, fname - - def get_dependency_gen_args(self, outtarget, outfile): - return [] - - def get_linker_exelist(self): - return self.exelist[:] - - def get_compile_only_args(self): + def get_pic_args(self) -> T.List[str]: return [] - def get_coverage_args(self): - return [] - - def get_std_exe_link_args(self): - return [] - - def get_include_args(self, path): - return [] - - def get_pic_args(self): - return [] - - def compute_parameters_with_absolute_paths(self, parameter_list, build_dir): + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], + build_dir: str) -> T.List[str]: for idx, i in enumerate(parameter_list): if i[:2] == '-L': parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:])) @@ -95,29 +76,27 @@ return parameter_list - def name_string(self): - return ' '.join(self.exelist) - - def get_pch_use_args(self, pch_dir, header): + def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: return [] - def get_pch_name(self, header_name): + def get_pch_name(self, header_name: str) -> str: return '' - def sanity_check(self, work_dir, environment): + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: src = 'sanity.cs' obj = 'sanity.exe' source_name = os.path.join(work_dir, src) with open(source_name, 'w') as ofile: - ofile.write('''public class Sanity { - static public void Main () { - } -} -''') + ofile.write(textwrap.dedent(''' + public class Sanity { + static public void Main () { + } + } + ''')) pc = subprocess.Popen(self.exelist + self.get_always_args() + [src], cwd=work_dir) pc.wait() if pc.returncode != 0: - raise EnvironmentException('Mono compiler %s can not compile programs.' % self.name_string()) + raise EnvironmentException('C# compiler %s can not compile programs.' % self.name_string()) if self.runner: cmdlist = [self.runner, obj] else: @@ -127,32 +106,32 @@ if pe.returncode != 0: raise EnvironmentException('Executables created by Mono compiler %s are not runnable.' 
% self.name_string()) - def needs_static_linker(self): + def needs_static_linker(self) -> bool: return False - def get_buildtype_args(self, buildtype): + def get_buildtype_args(self, buildtype: str) -> T.List[str]: return mono_buildtype_args[buildtype] - def get_debug_args(self, is_debug): + def get_debug_args(self, is_debug: bool) -> T.List[str]: return ['-debug'] if is_debug else [] - def get_optimization_args(self, optimization_level): + def get_optimization_args(self, optimization_level: str) -> T.List[str]: return cs_optimization_args[optimization_level] class MonoCompiler(CsCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, info: 'MachineInfo'): super().__init__(exelist, version, for_machine, info, 'mono', runner='mono') class VisualStudioCsCompiler(CsCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, info: 'MachineInfo'): super().__init__(exelist, version, for_machine, info, 'csc') - def get_buildtype_args(self, buildtype): + def get_buildtype_args(self, buildtype: str) -> T.List[str]: res = mono_buildtype_args[buildtype] if not self.info.is_windows(): tmp = [] diff -Nru meson-0.53.2/mesonbuild/compilers/cuda.py meson-0.57.0+really0.56.2/mesonbuild/compilers/cuda.py --- meson-0.53.2/mesonbuild/compilers/cuda.py 2020-01-23 12:51:19.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/compilers/cuda.py 2021-01-06 10:39:48.000000000 +0000 @@ -12,9 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +import enum import os.path import typing as T -from functools import partial from .. import coredata from .. 
import mlog @@ -23,8 +23,18 @@ cuda_debug_args) if T.TYPE_CHECKING: + from ..build import BuildTarget + from ..coredata import OptionDictType + from ..dependencies import Dependency, ExternalProgram from ..environment import Environment # noqa: F401 from ..envconfig import MachineInfo + from ..linkers import DynamicLinker + + +class _Phase(enum.Enum): + + COMPILER = 'compiler' + LINKER = 'linker' class CudaCompiler(Compiler): @@ -32,12 +42,14 @@ LINKER_PREFIX = '-Xlinker=' language = 'cuda' - _universal_flags = {'compiler': ['-I', '-D', '-U', '-E'], 'linker': ['-l', '-L']} + _universal_flags = {_Phase.COMPILER: ['-I', '-D', '-U', '-E'], _Phase.LINKER: ['-l', '-L']} # type: T.Dict[_Phase, T.List[str]] - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, exe_wrapper, host_compiler, info: 'MachineInfo', **kwargs): - super().__init__(exelist, version, for_machine, info, **kwargs) - self.is_cross = is_cross + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, exe_wrapper: T.Optional['ExternalProgram'], + host_compiler: Compiler, info: 'MachineInfo', + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + super().__init__(exelist, version, for_machine, info, linker=linker, full_version=full_version, is_cross=is_cross) self.exe_wrapper = exe_wrapper self.host_compiler = host_compiler self.base_options = host_compiler.base_options @@ -45,29 +57,23 @@ self.warn_args = {level: self._to_host_flags(flags) for level, flags in host_compiler.warn_args.items()} @classmethod - def _to_host_flags(cls, flags, phase='compiler'): - return list(map(partial(cls._to_host_flag, phase=phase), flags)) + def _to_host_flags(cls, flags: T.List[str], phase: _Phase = _Phase.COMPILER) -> T.List[str]: + return [cls._to_host_flag(f, phase=phase) for f in flags] @classmethod - def _to_host_flag(cls, flag, phase): + def _to_host_flag(cls, flag: str, phase: _Phase) -> str: if not flag[0] in ['-', '/'] or flag[:2] in cls._universal_flags[phase]: return flag - return '-X{}={}'.format(phase, flag) + return '-X{}={}'.format(phase.value, flag) - def needs_static_linker(self): + def needs_static_linker(self) -> bool: return False - def get_always_args(self): - return [] - - def get_no_stdinc_args(self): - return [] - - def thread_link_flags(self, environment): + def thread_link_flags(self, environment: 'Environment') -> T.List[str]: return self._to_host_flags(self.host_compiler.thread_link_flags(environment)) - def sanity_check(self, work_dir, environment): + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: mlog.debug('Sanity testing ' + self.get_display_language() + ' compiler:', ' '.join(self.exelist)) mlog.debug('Is cross compiler: %s.' 
% str(self.is_cross)) @@ -138,7 +144,7 @@ if self.exe_wrapper is None: return else: - cmdlist = self.exe_wrapper + [binary_name] + cmdlist = self.exe_wrapper.get_command() + [binary_name] else: cmdlist = self.exelist + ['--run', '"' + binary_name + '"'] mlog.debug('Sanity check run command line: ', ' '.join(cmdlist)) @@ -160,31 +166,47 @@ else: mlog.debug('cudaGetDeviceCount() returned ' + stde) - def has_header_symbol(self, hname, symbol, prefix, env, extra_args=None, dependencies=None): - result, cached = super().has_header_symbol(hname, symbol, prefix, env, extra_args, dependencies) - if result: - return True, cached + def has_header_symbol(self, hname: str, symbol: str, prefix: str, + env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: if extra_args is None: extra_args = [] fargs = {'prefix': prefix, 'header': hname, 'symbol': symbol} + # Check if it's a C-like symbol + t = '''{prefix} + #include <{header}> + int main(void) {{ + /* If it's not defined as a macro, try to use as a symbol */ + #ifndef {symbol} + {symbol}; + #endif + return 0; + }}''' + found, cached = self.compiles(t.format_map(fargs), env, extra_args=extra_args, dependencies=dependencies) + if found: + return True, cached + # Check if it's a class or a template t = '''{prefix} #include <{header}> using {symbol}; - int main(void) {{ return 0; }}''' - return self.compiles(t.format(**fargs), env, extra_args, dependencies) + int main(void) {{ + return 0; + }}''' + return self.compiles(t.format_map(fargs), env, extra_args=extra_args, dependencies=dependencies) - def get_options(self): + def get_options(self) -> 'OptionDictType': opts = super().get_options() opts.update({'cuda_std': coredata.UserComboOption('C++ language standard to use', ['none', 'c++03', 'c++11', 'c++14'], 'none')}) return opts - def _to_host_compiler_options(self, options): - overrides = {name: opt.value for name, opt in options.copy().items()} + def _to_host_compiler_options(self, options: 'OptionDictType') -> 'OptionDictType': + overrides = {name: opt.value for name, opt in options.items()} return OptionOverrideProxy(overrides, self.host_compiler.get_options()) - def get_option_compile_args(self, options): + def get_option_compile_args(self, options: 'OptionDictType') -> T.List[str]: args = [] # On Windows, the version of the C++ standard used by nvcc is dictated by # the combination of CUDA version and MSVC version; the --std= is thus ignored @@ -205,83 +227,78 @@ arg = arg.replace('-Wl,', '', 1) arg = arg.replace(' ', '\\') # espace whitespace cooked.append(arg) - return cls._to_host_flags(cooked, 'linker') + return cls._to_host_flags(cooked, _Phase.LINKER) - def get_option_link_args(self, options): + def get_option_link_args(self, options: 'OptionDictType') -> T.List[str]: return self._cook_link_args(self.host_compiler.get_option_link_args(self._to_host_compiler_options(options))) - def name_string(self): - return ' '.join(self.exelist) - - def get_soname_args(self, *args): - return self._cook_link_args(self.host_compiler.get_soname_args(*args)) + def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str, + suffix: str, soversion: str, + darwin_versions: T.Tuple[str, str], + is_shared_module: bool) -> T.List[str]: + return self._cook_link_args(self.host_compiler.get_soname_args( + env, prefix, shlib_name, suffix, soversion, darwin_versions, + is_shared_module)) - def get_dependency_gen_args(self, outtarget, outfile): - return [] - - def 
get_compile_only_args(self): + def get_compile_only_args(self) -> T.List[str]: return ['-c'] - def get_no_optimization_args(self): + def get_no_optimization_args(self) -> T.List[str]: return ['-O0'] - def get_optimization_args(self, optimization_level): + def get_optimization_args(self, optimization_level: str) -> T.List[str]: # alternatively, consider simply redirecting this to the host compiler, which would # give us more control over options like "optimize for space" (which nvcc doesn't support): # return self._to_host_flags(self.host_compiler.get_optimization_args(optimization_level)) return cuda_optimization_args[optimization_level] - def get_debug_args(self, is_debug): + def get_debug_args(self, is_debug: bool) -> T.List[str]: return cuda_debug_args[is_debug] - def get_werror_args(self): + def get_werror_args(self) -> T.List[str]: return ['-Werror=cross-execution-space-call,deprecated-declarations,reorder'] - def get_warn_args(self, level): + def get_warn_args(self, level: str) -> T.List[str]: return self.warn_args[level] - def get_buildtype_args(self, buildtype): + def get_buildtype_args(self, buildtype: str) -> T.List[str]: # nvcc doesn't support msvc's "Edit and Continue" PDB format; "downgrade" to # a regular PDB to avoid cl's warning to that effect (D9025 : overriding '/ZI' with '/Zi') host_args = ['/Zi' if arg == '/ZI' else arg for arg in self.host_compiler.get_buildtype_args(buildtype)] return cuda_buildtype_args[buildtype] + self._to_host_flags(host_args) - def get_include_args(self, path, is_system): + def get_include_args(self, path: str, is_system: bool) -> T.List[str]: if path == '': path = '.' return ['-I' + path] - def get_compile_debugfile_args(self, rel_obj, **kwargs): - return self._to_host_flags(self.host_compiler.get_compile_debugfile_args(rel_obj, **kwargs)) + def get_compile_debugfile_args(self, rel_obj: str, pch: bool = False) -> T.List[str]: + return self._to_host_flags(self.host_compiler.get_compile_debugfile_args(rel_obj, pch)) - def get_link_debugfile_args(self, targetfile): + def get_link_debugfile_args(self, targetfile: str) -> T.List[str]: return self._cook_link_args(self.host_compiler.get_link_debugfile_args(targetfile)) - def depfile_for_object(self, objfile): - return objfile + '.' 
+ self.get_depfile_suffix() - - def get_depfile_suffix(self): + def get_depfile_suffix(self) -> str: return 'd' - def get_linker_debug_crt_args(self) -> T.List[str]: - return self._cook_link_args(self.host_compiler.get_linker_debug_crt_args()) - - def get_buildtype_linker_args(self, buildtype): + def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]: return self._cook_link_args(self.host_compiler.get_buildtype_linker_args(buildtype)) def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str, rpath_paths: str, build_rpath: str, - install_rpath: str) -> T.List[str]: - return self._cook_link_args(self.host_compiler.build_rpath_args( - env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath)) + install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]: + (rpath_args, rpath_dirs_to_remove) = self.host_compiler.build_rpath_args( + env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath) + return (self._cook_link_args(rpath_args), rpath_dirs_to_remove) - def linker_to_compiler_args(self, args): + def linker_to_compiler_args(self, args: T.List[str]) -> T.List[str]: return args - def get_pic_args(self): + def get_pic_args(self) -> T.List[str]: return self._to_host_flags(self.host_compiler.get_pic_args()) - def compute_parameters_with_absolute_paths(self, parameter_list, build_dir): + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], + build_dir: str) -> T.List[str]: return [] def get_output_args(self, target: str) -> T.List[str]: @@ -290,13 +307,14 @@ def get_std_exe_link_args(self) -> T.List[str]: return self._cook_link_args(self.host_compiler.get_std_exe_link_args()) - def find_library(self, libname, env, extra_dirs, libtype: LibType = LibType.PREFER_SHARED): + def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str], + libtype: LibType = LibType.PREFER_SHARED) -> T.Optional[T.List[str]]: return ['-l' + libname] # FIXME - def get_crt_compile_args(self, crt_val, buildtype): + def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]: return self._to_host_flags(self.host_compiler.get_crt_compile_args(crt_val, buildtype)) - def get_crt_link_args(self, crt_val, buildtype): + def get_crt_link_args(self, crt_val: str, buildtype: str) -> T.List[str]: # nvcc defaults to static, release version of msvc runtime and provides no # native option to override it; override it with /NODEFAULTLIB host_link_arg_overrides = [] @@ -305,11 +323,11 @@ host_link_arg_overrides += ['/NODEFAULTLIB:LIBCMT.lib'] return self._cook_link_args(host_link_arg_overrides + self.host_compiler.get_crt_link_args(crt_val, buildtype)) - def get_target_link_args(self, target): + def get_target_link_args(self, target: 'BuildTarget') -> T.List[str]: return self._cook_link_args(super().get_target_link_args(target)) - def get_dependency_compile_args(self, dep): + def get_dependency_compile_args(self, dep: 'Dependency') -> T.List[str]: return self._to_host_flags(super().get_dependency_compile_args(dep)) - def get_dependency_link_args(self, dep): + def get_dependency_link_args(self, dep: 'Dependency') -> T.List[str]: return self._cook_link_args(super().get_dependency_link_args(dep)) diff -Nru meson-0.53.2/mesonbuild/compilers/d.py meson-0.57.0+really0.56.2/mesonbuild/compilers/d.py --- meson-0.53.2/mesonbuild/compilers/d.py 2020-01-23 12:51:19.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/compilers/d.py 2021-01-06 10:39:48.000000000 +0000 @@ -19,19 +19,24 @@ EnvironmentException, MachineChoice, version_compare, ) 
+from ..arglist import CompilerArgs from .compilers import ( d_dmd_buildtype_args, d_gdc_buildtype_args, d_ldc_buildtype_args, clike_debug_args, Compiler, - CompilerArgs, ) from .mixins.gnu import GnuCompiler -from .mixins.islinker import LinkerEnvVarsMixin, BasicLinkerIsCompilerMixin if T.TYPE_CHECKING: + from .compilers import Compiler as CompilerMixinBase + from ..dependencies import Dependency, ExternalProgram from ..envconfig import MachineInfo + from ..environment import Environment + from ..linkers import DynamicLinker +else: + CompilerMixinBase = object d_feature_args = {'gcc': {'unittest': '-funittest', 'debug': '-fdebug', @@ -48,7 +53,7 @@ 'version': '-version', 'import_dir': '-J' } - } + } # type: T.Dict[str, T.Dict[str, str]] ldc_optimization_args = {'0': [], 'g': [], @@ -56,7 +61,7 @@ '2': ['-O2'], '3': ['-O3'], 's': ['-Os'], - } + } # type: T.Dict[str, T.List[str]] dmd_optimization_args = {'0': [], 'g': [], @@ -64,23 +69,35 @@ '2': ['-O'], '3': ['-O'], 's': ['-O'], - } + } # type: T.Dict[str, T.List[str]] -class DmdLikeCompilerMixin: +class DmdLikeCompilerMixin(CompilerMixinBase): - LINKER_PREFIX = '-L' + """Mixin class for DMD and LDC. - def get_output_args(self, target): - return ['-of=' + target] + LDC has a number of DMD like arguments, and this class allows for code + sharing between them as makes sense. + """ - def get_linker_output_args(self, target): - return ['-of=' + target] + if T.TYPE_CHECKING: + mscrt_args = {} # type: T.Dict[str, T.List[str]] - def get_include_args(self, path, is_system): + def _get_target_arch_args(self) -> T.List[str]: ... + + LINKER_PREFIX = '-L=' + + def get_output_args(self, outputname: str) -> T.List[str]: + return ['-of=' + outputname] + + def get_linker_output_args(self, outputname: str) -> T.List[str]: + return ['-of=' + outputname] + + def get_include_args(self, path: str, is_system: bool) -> T.List[str]: return ['-I=' + path] - def compute_parameters_with_absolute_paths(self, parameter_list, build_dir): + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], + build_dir: str) -> T.List[str]: for idx, i in enumerate(parameter_list): if i[:3] == '-I=': parameter_list[idx] = i[:3] + os.path.normpath(os.path.join(build_dir, i[3:])) @@ -93,37 +110,34 @@ return parameter_list - def get_warn_args(self, level): + def get_warn_args(self, level: str) -> T.List[str]: return ['-wi'] - def get_werror_args(self): + def get_werror_args(self) -> T.List[str]: return ['-w'] - def get_dependency_gen_args(self, outtarget, outfile): - # DMD and LDC does not currently return Makefile-compatible dependency info. - return [] - - def get_coverage_args(self): + def get_coverage_args(self) -> T.List[str]: return ['-cov'] - def get_preprocess_only_args(self): + def get_coverage_link_args(self) -> T.List[str]: + return [] + + def get_preprocess_only_args(self) -> T.List[str]: return ['-E'] - def get_compile_only_args(self): + def get_compile_only_args(self) -> T.List[str]: return ['-c'] - def depfile_for_object(self, objfile): - return objfile + '.' 
+ self.get_depfile_suffix() - - def get_depfile_suffix(self): + def get_depfile_suffix(self) -> str: return 'deps' - def get_pic_args(self): + def get_pic_args(self) -> T.List[str]: if self.info.is_windows(): return [] return ['-fPIC'] - def get_feature_args(self, kwargs, build_to_src): + def get_feature_args(self, kwargs: T.Dict[str, T.Any], build_to_src: str) -> T.List[str]: + # TODO: using a TypeDict here would improve this res = [] if 'unittest' in kwargs: unittest = kwargs.pop('unittest') @@ -205,37 +219,41 @@ return res - def get_buildtype_linker_args(self, buildtype): + def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]: if buildtype != 'plain': - return self.get_target_arch_args() - return [] - - def get_std_exe_link_args(self): + return self._get_target_arch_args() return [] - def gen_import_library_args(self, implibname): - return ['-Wl,--out-implib=' + implibname] + def gen_import_library_args(self, implibname: str) -> T.List[str]: + return self.linker.import_library_args(implibname) - def build_rpath_args(self, env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath): + def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str, + rpath_paths: str, build_rpath: str, + install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]: if self.info.is_windows(): - return [] + return ([], set()) - # This method is to be used by LDC and DMD. - # GDC can deal with the verbatim flags. - if not rpath_paths and not install_rpath: - return [] - paths = ':'.join([os.path.join(build_dir, p) for p in rpath_paths]) - if build_rpath != '': - paths += ':' + build_rpath - if len(paths) < len(install_rpath): - padding = 'X' * (len(install_rpath) - len(paths)) - if not paths: - paths = padding - else: - paths = paths + ':' + padding - return ['-Wl,-rpath,{}'.format(paths)] + # GNU ld, solaris ld, and lld acting like GNU ld + if self.linker.id.startswith('ld'): + # The way that dmd and ldc pass rpath to gcc is different than we would + # do directly, each argument -rpath and the value to rpath, need to be + # split into two separate arguments both prefaced with the -L=. + args = [] + (rpath_args, rpath_dirs_to_remove) = super().build_rpath_args( + env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath) + for r in rpath_args: + if ',' in r: + a, b = r.split(',', maxsplit=1) + args.append(a) + args.append(self.LINKER_PREFIX + b) + else: + args.append(r) + return (args, rpath_dirs_to_remove) + + return super().build_rpath_args( + env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath) - def translate_args_to_nongnu(self, args): + def _translate_args_to_nongnu(self, args: T.List[str]) -> T.List[str]: dcargs = [] # Translate common arguments to flags the LDC/DMD compilers # can understand. @@ -243,11 +261,11 @@ # and are therefore out of the user's control. for arg in args: # Translate OS specific arguments first. - osargs = [] + osargs = [] # type: T.List[str] if self.info.is_windows(): osargs = self.translate_arg_to_windows(arg) elif self.info.is_darwin(): - osargs = self.translate_arg_to_osx(arg) + osargs = self._translate_arg_to_osx(arg) if osargs: dcargs.extend(osargs) continue @@ -326,7 +344,7 @@ return dcargs @classmethod - def translate_arg_to_windows(cls, arg): + def translate_arg_to_windows(cls, arg: str) -> T.List[str]: args = [] if arg.startswith('-Wl,'): # Translate linker arguments here. 
@@ -352,57 +370,92 @@ return args @classmethod - def translate_arg_to_osx(cls, arg): + def _translate_arg_to_osx(cls, arg: str) -> T.List[str]: args = [] if arg.startswith('-install_name'): args.append('-L=' + arg) return args - def get_debug_args(self, is_debug): + def get_debug_args(self, is_debug: bool) -> T.List[str]: ddebug_args = [] if is_debug: ddebug_args = [d_feature_args[self.id]['debug']] return clike_debug_args[is_debug] + ddebug_args - def get_crt_args(self, crt_val, buildtype): + def _get_crt_args(self, crt_val: str, buildtype: str) -> T.List[str]: if not self.info.is_windows(): return [] if crt_val in self.mscrt_args: return self.mscrt_args[crt_val] - assert(crt_val == 'from_buildtype') + assert(crt_val in ['from_buildtype', 'static_from_buildtype']) + + dbg = 'mdd' + rel = 'md' + if crt_val == 'static_from_buildtype': + dbg = 'mtd' + rel = 'mt' # Match what build type flags used to do. if buildtype == 'plain': return [] elif buildtype == 'debug': - return self.mscrt_args['mdd'] + return self.mscrt_args[dbg] elif buildtype == 'debugoptimized': - return self.mscrt_args['md'] + return self.mscrt_args[rel] elif buildtype == 'release': - return self.mscrt_args['md'] + return self.mscrt_args[rel] elif buildtype == 'minsize': - return self.mscrt_args['md'] + return self.mscrt_args[rel] else: assert(buildtype == 'custom') raise EnvironmentException('Requested C runtime based on buildtype, but buildtype is "custom".') - def get_soname_args(self, *args, **kwargs) -> T.List[str]: + def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str, + suffix: str, soversion: str, + darwin_versions: T.Tuple[str, str], + is_shared_module: bool) -> T.List[str]: + sargs = super().get_soname_args(env, prefix, shlib_name, suffix, + soversion, darwin_versions, is_shared_module) + # LDC and DMD actually do use a linker, but they proxy all of that with # their own arguments - soargs = [] - for arg in Compiler.get_soname_args(self, *args, **kwargs): - soargs.append('-L=' + arg) - return soargs + if self.linker.id.startswith('ld.'): + soargs = [] + for arg in sargs: + a, b = arg.split(',', maxsplit=1) + soargs.append(a) + soargs.append(self.LINKER_PREFIX + b) + return soargs + elif self.linker.id.startswith('ld64'): + soargs = [] + for arg in sargs: + if not arg.startswith(self.LINKER_PREFIX): + soargs.append(self.LINKER_PREFIX + arg) + else: + soargs.append(arg) + return soargs + else: + return sargs def get_allow_undefined_link_args(self) -> T.List[str]: - args = [] - for arg in self.linker.get_allow_undefined_args(): - args.append('-L=' + arg) + args = self.linker.get_allow_undefined_args() + if self.info.is_darwin(): + # On macOS we're passing these options to the C compiler, but + # they're linker options and need -Wl, so clang/gcc knows what to + # do with them. I'm assuming, but don't know for certain, that + # ldc/dmd do some kind of mapping internally for arguments they + # understand, but pass arguments they don't understand directly. 
+ args = [a.replace('-L=', '-Xcc=-Wl,') for a in args] return args +class DCompilerArgs(CompilerArgs): + prepend_prefixes = ('-I', '-L') + dedup2_prefixes = ('-I', ) + + class DCompiler(Compiler): mscrt_args = { 'none': ['-mscrtlib='], @@ -414,20 +467,23 @@ language = 'd' - def __init__(self, exelist, version, for_machine: MachineChoice, - info: 'MachineInfo', arch, is_cross, exe_wrapper, **kwargs): - super().__init__(exelist, version, for_machine, info, **kwargs) - self.id = 'unknown' + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + info: 'MachineInfo', arch: str, *, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None, + is_cross: bool = False): + super().__init__(exelist, version, for_machine, info, linker=linker, + full_version=full_version, is_cross=is_cross) self.arch = arch self.exe_wrapper = exe_wrapper - self.is_cross = is_cross - def sanity_check(self, work_dir, environment): + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: source_name = os.path.join(work_dir, 'sanity.d') output_name = os.path.join(work_dir, 'dtest') with open(source_name, 'w') as ofile: ofile.write('''void main() { }''') - pc = subprocess.Popen(self.exelist + self.get_output_args(output_name) + self.get_target_arch_args() + [source_name], cwd=work_dir) + pc = subprocess.Popen(self.exelist + self.get_output_args(output_name) + self._get_target_arch_args() + [source_name], cwd=work_dir) pc.wait() if pc.returncode != 0: raise EnvironmentException('D compiler %s can not compile programs.' % self.name_string()) @@ -441,21 +497,19 @@ if subprocess.call(cmdlist) != 0: raise EnvironmentException('Executables created by D compiler %s are not runnable.' % self.name_string()) - def needs_static_linker(self): + def needs_static_linker(self) -> bool: return True - def depfile_for_object(self, objfile): - return objfile + '.' 
+ self.get_depfile_suffix() - - def get_depfile_suffix(self): + def get_depfile_suffix(self) -> str: return 'deps' - def get_pic_args(self): + def get_pic_args(self) -> T.List[str]: if self.info.is_windows(): return [] return ['-fPIC'] - def get_feature_args(self, kwargs, build_to_src): + def get_feature_args(self, kwargs: T.Dict[str, T.Any], build_to_src: str) -> T.List[str]: + # TODO: using a TypeDict here would improve this res = [] if 'unittest' in kwargs: unittest = kwargs.pop('unittest') @@ -537,54 +591,18 @@ return res - def get_buildtype_linker_args(self, buildtype): + def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]: if buildtype != 'plain': - return self.get_target_arch_args() + return self._get_target_arch_args() return [] - def get_std_exe_link_args(self): - return [] - - def _get_compiler_check_args(self, env, extra_args, dependencies, mode='compile'): - if callable(extra_args): - extra_args = extra_args(mode) - if extra_args is None: - extra_args = [] - elif isinstance(extra_args, str): - extra_args = [extra_args] - if dependencies is None: - dependencies = [] - elif not isinstance(dependencies, list): - dependencies = [dependencies] - # Collect compiler arguments - args = CompilerArgs(self) - for d in dependencies: - # Add compile flags needed by dependencies - args += d.get_compile_args() - if mode == 'link': - # Add link flags needed to find dependencies - args += d.get_link_args() - - if mode == 'compile': - # Add DFLAGS from the env - args += env.coredata.get_external_args(self.for_machine, self.language) - elif mode == 'link': - # Add LDFLAGS from the env - args += env.coredata.get_external_link_args(self.for_machine, self.language) - # extra_args must override all other arguments, so we add them last - args += extra_args - return args - - def compiles(self, code, env, *, extra_args=None, dependencies=None, mode='compile'): - args = self._get_compiler_check_args(env, extra_args, dependencies, mode) + def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> DCompilerArgs: + return DCompilerArgs(self, args) - with self.cached_compile(code, env.coredata, extra_args=args, mode=mode) as p: - return p.returncode == 0, p.cached - - def has_multi_arguments(self, args, env): + def has_multi_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]: return self.compiles('int i;\n', env, extra_args=args) - def get_target_arch_args(self): + def _get_target_arch_args(self) -> T.List[str]: # LDC2 on Windows targets to current OS architecture, but # it should follow the target specified by the MSVC toolchain. 
if self.info.is_windows(): @@ -593,27 +611,27 @@ return ['-m32'] return [] - def get_crt_compile_args(self, crt_val, buildtype): + def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]: return [] - def get_crt_link_args(self, crt_val, buildtype): + def get_crt_link_args(self, crt_val: str, buildtype: str) -> T.List[str]: return [] - def thread_link_flags(self, env): - return ['-pthread'] - - def name_string(self): - return ' '.join(self.exelist) - -class GnuDCompiler(DCompiler, GnuCompiler): +class GnuDCompiler(GnuCompiler, DCompiler): # we mostly want DCompiler, but that gives us the Compiler.LINKER_PREFIX instead LINKER_PREFIX = GnuCompiler.LINKER_PREFIX - def __init__(self, exelist, version, for_machine: MachineChoice, - info: 'MachineInfo', is_cross, exe_wrapper, arch, **kwargs): - DCompiler.__init__(self, exelist, version, for_machine, info, is_cross, exe_wrapper, arch, **kwargs) + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + info: 'MachineInfo', arch: str, *, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None, + is_cross: bool = False): + DCompiler.__init__(self, exelist, version, for_machine, info, arch, + exe_wrapper=exe_wrapper, linker=linker, + full_version=full_version, is_cross=is_cross) GnuCompiler.__init__(self, {}) self.id = 'gcc' default_warn_args = ['-Wall', '-Wdeprecated'] @@ -621,33 +639,32 @@ '1': default_warn_args, '2': default_warn_args + ['-Wextra'], '3': default_warn_args + ['-Wextra', '-Wpedantic']} - self.base_options = ['b_colorout', 'b_sanitize', 'b_staticpic', 'b_vscrt'] + self.base_options = ['b_colorout', 'b_sanitize', 'b_staticpic', + 'b_vscrt', 'b_coverage', 'b_pgo', 'b_ndebug'] self._has_color_support = version_compare(self.version, '>=4.9') # dependencies were implemented before, but broken - support was fixed in GCC 7.1+ # (and some backported versions) self._has_deps_support = version_compare(self.version, '>=7.1') - def get_colorout_args(self, colortype): + def get_colorout_args(self, colortype: str) -> T.List[str]: if self._has_color_support: super().get_colorout_args(colortype) return [] - def get_dependency_gen_args(self, outtarget, outfile): + def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]: if self._has_deps_support: return super().get_dependency_gen_args(outtarget, outfile) return [] - def get_warn_args(self, level): + def get_warn_args(self, level: str) -> T.List[str]: return self.warn_args[level] - def get_coverage_args(self): - return [] - - def get_buildtype_args(self, buildtype): + def get_buildtype_args(self, buildtype: str) -> T.List[str]: return d_gdc_buildtype_args[buildtype] - def compute_parameters_with_absolute_paths(self, parameter_list, build_dir): + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], + build_dir: str) -> T.List[str]: for idx, i in enumerate(parameter_list): if i[:2] == '-I' or i[:2] == '-L': parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:])) @@ -657,68 +674,98 @@ def get_allow_undefined_link_args(self) -> T.List[str]: return self.linker.get_allow_undefined_args() + def get_linker_always_args(self) -> T.List[str]: + args = super().get_linker_always_args() + if self.info.is_windows(): + return args + return args + ['-shared-libphobos'] + + def get_disable_assert_args(self) -> T.List[str]: + return ['-frelease'] + -class LLVMDCompiler(DmdLikeCompilerMixin, LinkerEnvVarsMixin, 
BasicLinkerIsCompilerMixin, DCompiler): +class LLVMDCompiler(DmdLikeCompilerMixin, DCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - info: 'MachineInfo', arch, **kwargs): - DCompiler.__init__(self, exelist, version, for_machine, info, arch, False, None, **kwargs) + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + info: 'MachineInfo', arch: str, *, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None, + is_cross: bool = False): + DCompiler.__init__(self, exelist, version, for_machine, info, arch, + exe_wrapper=exe_wrapper, linker=linker, + full_version=full_version, is_cross=is_cross) self.id = 'llvm' - self.base_options = ['b_coverage', 'b_colorout', 'b_vscrt'] + self.base_options = ['b_coverage', 'b_colorout', 'b_vscrt', 'b_ndebug'] - def get_colorout_args(self, colortype): + def get_colorout_args(self, colortype: str) -> T.List[str]: if colortype == 'always': return ['-enable-color'] return [] - def get_warn_args(self, level): - if level == '2' or level == '3': + def get_warn_args(self, level: str) -> T.List[str]: + if level in {'2', '3'}: return ['-wi', '-dw'] elif level == '1': return ['-wi'] - else: - return [] + return [] - def get_buildtype_args(self, buildtype): + def get_buildtype_args(self, buildtype: str) -> T.List[str]: if buildtype != 'plain': - return self.get_target_arch_args() + d_ldc_buildtype_args[buildtype] + return self._get_target_arch_args() + d_ldc_buildtype_args[buildtype] return d_ldc_buildtype_args[buildtype] - def get_pic_args(self): + def get_pic_args(self) -> T.List[str]: return ['-relocation-model=pic'] - def get_std_shared_lib_link_args(self): - return ['-shared'] - - def get_crt_link_args(self, crt_val, buildtype): - return self.get_crt_args(crt_val, buildtype) + def get_crt_link_args(self, crt_val: str, buildtype: str) -> T.List[str]: + return self._get_crt_args(crt_val, buildtype) - def unix_args_to_native(self, args): - return self.translate_args_to_nongnu(args) + def unix_args_to_native(self, args: T.List[str]) -> T.List[str]: + return self._translate_args_to_nongnu(args) - def get_optimization_args(self, optimization_level): + def get_optimization_args(self, optimization_level: str) -> T.List[str]: return ldc_optimization_args[optimization_level] + @classmethod + def use_linker_args(cls, linker: str) -> T.List[str]: + return ['-linker={}'.format(linker)] + + def get_linker_always_args(self) -> T.List[str]: + args = super().get_linker_always_args() + if self.info.is_windows(): + return args + return args + ['-link-defaultlib-shared'] + + def get_disable_assert_args(self) -> T.List[str]: + return ['--release'] + -class DmdDCompiler(DmdLikeCompilerMixin, LinkerEnvVarsMixin, BasicLinkerIsCompilerMixin, DCompiler): +class DmdDCompiler(DmdLikeCompilerMixin, DCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - info: 'MachineInfo', arch, **kwargs): - DCompiler.__init__(self, exelist, version, for_machine, info, arch, False, None, **kwargs) + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + info: 'MachineInfo', arch: str, *, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None, + is_cross: bool = False): + DCompiler.__init__(self, exelist, version, for_machine, info, arch, + exe_wrapper=exe_wrapper, linker=linker, + full_version=full_version, is_cross=is_cross) 
self.id = 'dmd' - self.base_options = ['b_coverage', 'b_colorout', 'b_vscrt'] + self.base_options = ['b_coverage', 'b_colorout', 'b_vscrt', 'b_ndebug'] - def get_colorout_args(self, colortype): + def get_colorout_args(self, colortype: str) -> T.List[str]: if colortype == 'always': return ['-color=on'] return [] - def get_buildtype_args(self, buildtype): + def get_buildtype_args(self, buildtype: str) -> T.List[str]: if buildtype != 'plain': - return self.get_target_arch_args() + d_dmd_buildtype_args[buildtype] + return self._get_target_arch_args() + d_dmd_buildtype_args[buildtype] return d_dmd_buildtype_args[buildtype] - def get_std_exe_link_args(self): + def get_std_exe_link_args(self) -> T.List[str]: if self.info.is_windows(): # DMD links against D runtime only when main symbol is found, # so these needs to be inserted when linking static D libraries. @@ -729,7 +776,7 @@ return ['phobos.lib'] return [] - def get_std_shared_lib_link_args(self): + def get_std_shared_lib_link_args(self) -> T.List[str]: libname = 'libphobos2.so' if self.info.is_windows(): if self.arch == 'x86_64': @@ -740,7 +787,7 @@ libname = 'phobos.lib' return ['-shared', '-defaultlib=' + libname] - def get_target_arch_args(self): + def _get_target_arch_args(self) -> T.List[str]: # DMD32 and DMD64 on 64-bit Windows defaults to 32-bit (OMF). # Force the target to 64-bit in order to stay consistent # across the different platforms. @@ -752,11 +799,23 @@ return ['-m32'] return [] - def get_crt_compile_args(self, crt_val, buildtype): - return self.get_crt_args(crt_val, buildtype) + def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]: + return self._get_crt_args(crt_val, buildtype) - def unix_args_to_native(self, args): - return self.translate_args_to_nongnu(args) + def unix_args_to_native(self, args: T.List[str]) -> T.List[str]: + return self._translate_args_to_nongnu(args) - def get_optimization_args(self, optimization_level): + def get_optimization_args(self, optimization_level: str) -> T.List[str]: return dmd_optimization_args[optimization_level] + + def can_linker_accept_rsp(self) -> bool: + return False + + def get_linker_always_args(self) -> T.List[str]: + args = super().get_linker_always_args() + if self.info.is_windows(): + return args + return args + ['-defaultlib=phobos2', '-debuglib=phobos2'] + + def get_disable_assert_args(self) -> T.List[str]: + return ['-release'] diff -Nru meson-0.53.2/mesonbuild/compilers/fortran.py meson-0.57.0+really0.56.2/mesonbuild/compilers/fortran.py --- meson-0.53.2/mesonbuild/compilers/fortran.py 2020-02-25 18:00:46.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/compilers/fortran.py 2021-01-06 10:39:48.000000000 +0000 @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from pathlib import Path +from .._pathlib import Path import typing as T import subprocess, os @@ -29,38 +29,41 @@ from .mixins.clang import ClangCompiler from .mixins.elbrus import ElbrusCompiler from .mixins.pgi import PGICompiler -from .. 
import mlog from mesonbuild.mesonlib import ( version_compare, EnvironmentException, MesonException, MachineChoice, LibType ) if T.TYPE_CHECKING: + from ..coredata import OptionDictType + from ..dependencies import Dependency, ExternalProgram from ..envconfig import MachineInfo + from ..environment import Environment + from ..linkers import DynamicLinker class FortranCompiler(CLikeCompiler, Compiler): language = 'fortran' - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrapper=None, **kwargs): - Compiler.__init__(self, exelist, version, for_machine, info, **kwargs) - CLikeCompiler.__init__(self, is_cross, exe_wrapper) - self.id = 'unknown' - - def has_function(self, funcname, prefix, env, *, extra_args=None, dependencies=None): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + Compiler.__init__(self, exelist, version, for_machine, info, + is_cross=is_cross, full_version=full_version, linker=linker) + CLikeCompiler.__init__(self, exe_wrapper) + + def has_function(self, funcname: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: raise MesonException('Fortran does not have "has_function" capability.\n' 'It is better to test if a Fortran capability is working like:\n\n' "meson.get_compiler('fortran').links('block; end block; end program')\n\n" 'that example is to see if the compiler has Fortran 2008 Block element.') - def sanity_check(self, work_dir: Path, environment): - """ - Check to be sure a minimal program can compile and execute - with this compiler & platform. - """ - work_dir = Path(work_dir) + def sanity_check(self, work_dir_: str, environment: 'Environment') -> None: + work_dir = Path(work_dir_) source_name = work_dir / 'sanitycheckf.f90' binary_name = work_dir / 'sanitycheckf' if binary_name.is_file(): @@ -68,7 +71,8 @@ source_name.write_text('print *, "Fortran compilation is working."; end') - extra_flags = environment.coredata.get_external_args(self.for_machine, self.language) + extra_flags = [] + extra_flags += environment.coredata.get_external_args(self.for_machine, self.language) extra_flags += environment.coredata.get_external_link_args(self.for_machine, self.language) extra_flags += self.get_always_args() # %% build the test executable "sanitycheckf" @@ -84,7 +88,7 @@ if self.exe_wrapper is None: # Can't check if the binaries run so we have to assume they do return - cmdlist = self.exe_wrapper + [str(binary_name)] + cmdlist = self.exe_wrapper.get_command() + [str(binary_name)] else: cmdlist = [str(binary_name)] # %% Run the test executable @@ -95,31 +99,26 @@ except OSError: raise EnvironmentException('Executables created by Fortran compiler %s are not runnable.' 
% self.name_string()) - def get_std_warn_args(self, level): - return FortranCompiler.std_warn_args - - def get_buildtype_args(self, buildtype): + def get_buildtype_args(self, buildtype: str) -> T.List[str]: return gnulike_buildtype_args[buildtype] - def get_optimization_args(self, optimization_level): + def get_optimization_args(self, optimization_level: str) -> T.List[str]: return gnu_optimization_args[optimization_level] - def get_debug_args(self, is_debug): + def get_debug_args(self, is_debug: bool) -> T.List[str]: return clike_debug_args[is_debug] - def get_dependency_gen_args(self, outtarget, outfile): - return [] - - def get_preprocess_only_args(self): + def get_preprocess_only_args(self) -> T.List[str]: return ['-cpp'] + super().get_preprocess_only_args() - def get_module_incdir_args(self): + def get_module_incdir_args(self) -> T.Tuple[str, ...]: return ('-I', ) - def get_module_outdir_args(self, path): + def get_module_outdir_args(self, path: str) -> T.List[str]: return ['-module', path] - def compute_parameters_with_absolute_paths(self, parameter_list, build_dir): + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], + build_dir: str) -> T.List[str]: for idx, i in enumerate(parameter_list): if i[:2] == '-I' or i[:2] == '-L': parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:])) @@ -140,36 +139,28 @@ return filename - def find_library(self, libname, env, extra_dirs, libtype: LibType = LibType.PREFER_SHARED): - code = '''stop; end program''' - return self.find_library_impl(libname, env, extra_dirs, code, libtype) - - def has_multi_arguments(self, args, env): - for arg in args[:]: - # some compilers, e.g. GCC, don't warn for unsupported warning-disable - # flags, so when we are testing a flag like "-Wno-forgotten-towel", also - # check the equivalent enable flag too "-Wforgotten-towel" - if arg.startswith('-Wno-'): - args.append('-W' + arg[5:]) - if arg.startswith('-Wl,'): - mlog.warning('{} looks like a linker argument, ' - 'but has_argument and other similar methods only ' - 'support checking compiler arguments. Using them ' - 'to check linker arguments are never supported, ' - 'and results are likely to be wrong regardless of ' - 'the compiler you are using. has_link_argument or ' - 'other similar method can be used instead.' 
- .format(arg)) + def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str], + libtype: LibType = LibType.PREFER_SHARED) -> T.Optional[T.List[str]]: code = 'stop; end program' - return self.has_arguments(args, env, code, mode='compile') + return self._find_library_impl(libname, env, extra_dirs, code, libtype) + + def has_multi_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]: + return self._has_multi_arguments(args, env, 'stop; end program') + + def has_multi_link_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]: + return self._has_multi_link_arguments(args, env, 'stop; end program') class GnuFortranCompiler(GnuCompiler, FortranCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrapper=None, - defines=None, **kwargs): + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): FortranCompiler.__init__(self, exelist, version, for_machine, - is_cross, info, exe_wrapper, **kwargs) + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) GnuCompiler.__init__(self, defines) default_warn_args = ['-Wall'] self.warn_args = {'0': [], @@ -177,54 +168,78 @@ '2': default_warn_args + ['-Wextra'], '3': default_warn_args + ['-Wextra', '-Wpedantic', '-fimplicit-none']} - def get_options(self): + def get_options(self) -> 'OptionDictType': opts = FortranCompiler.get_options(self) fortran_stds = ['legacy', 'f95', 'f2003'] if version_compare(self.version, '>=4.4.0'): fortran_stds += ['f2008'] if version_compare(self.version, '>=8.0.0'): fortran_stds += ['f2018'] - opts.update({'fortran_std': coredata.UserComboOption('Fortran language standard to use', - ['none'] + fortran_stds, - 'none')}) + opts.update({ + 'std': coredata.UserComboOption( + 'Fortran language standard to use', + ['none'] + fortran_stds, + 'none', + ), + }) return opts - def get_option_compile_args(self, options) -> T.List[str]: + def get_option_compile_args(self, options: 'OptionDictType') -> T.List[str]: args = [] - std = options['fortran_std'] + std = options['std'] if std.value != 'none': args.append('-std=' + std.value) return args - def get_dependency_gen_args(self, outtarget, outfile): + def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]: # Disabled until this is fixed: # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=62162 # return ['-cpp', '-MD', '-MQ', outtarget] return [] - def get_module_outdir_args(self, path): + def get_module_outdir_args(self, path: str) -> T.List[str]: return ['-J' + path] - def language_stdlib_only_link_flags(self): + def language_stdlib_only_link_flags(self) -> T.List[str]: return ['-lgfortran', '-lm'] + def has_header(self, hname: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None, + disable_cache: bool = False) -> T.Tuple[bool, bool]: + ''' + Derived from mixins/clike.py:has_header, but without C-style usage of + __has_include which breaks with GCC-Fortran 10: + https://github.com/mesonbuild/meson/issues/7017 + ''' + fargs = {'prefix': prefix, 'header': hname} + code = '{prefix}\n#include <{header}>' + return self.compiles(code.format(**fargs), env, extra_args=extra_args, + 
dependencies=dependencies, mode='preprocess', disable_cache=disable_cache) + + class ElbrusFortranCompiler(GnuFortranCompiler, ElbrusCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrapper=None, - defines=None, **kwargs): - GnuFortranCompiler.__init__(self, exelist, version, for_machine, - is_cross, info, exe_wrapper, defines, - **kwargs) + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + GnuFortranCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, defines=defines, + linker=linker, full_version=full_version) ElbrusCompiler.__init__(self) class G95FortranCompiler(FortranCompiler): LINKER_PREFIX = '-Wl,' - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrapper=None, **kwargs): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): FortranCompiler.__init__(self, exelist, version, for_machine, - is_cross, info, exe_wrapper, **kwargs) + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) self.id = 'g95' default_warn_args = ['-Wall'] self.warn_args = {'0': [], @@ -232,10 +247,10 @@ '2': default_warn_args + ['-Wextra'], '3': default_warn_args + ['-Wextra', '-pedantic']} - def get_module_outdir_args(self, path): + def get_module_outdir_args(self, path: str) -> T.List[str]: return ['-fmod=' + path] - def get_no_warn_args(self): + def get_no_warn_args(self) -> T.List[str]: # FIXME: Confirm that there's no compiler option to disable all warnings return [] @@ -244,39 +259,45 @@ LINKER_PREFIX = '-Wl,' - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrapper=None, - **kwargs): - FortranCompiler.__init__(self, exelist, version, for_machine, is_cross, info, exe_wrapper, **kwargs) + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + FortranCompiler.__init__(self, exelist, version, for_machine, + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) self.id = 'sun' - def get_dependency_gen_args(self, outtarget, outfile): + def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]: return ['-fpp'] - def get_always_args(self): + def get_always_args(self) -> T.List[str]: return [] - def get_warn_args(self, level): + def get_warn_args(self, level: str) -> T.List[str]: return [] - def get_module_incdir_args(self): + def get_module_incdir_args(self) -> T.Tuple[str, ...]: return ('-M', ) - def get_module_outdir_args(self, path): + def get_module_outdir_args(self, path: str) -> T.List[str]: return ['-moddir=' + path] - def openmp_flags(self): + def openmp_flags(self) -> T.List[str]: return ['-xopenmp'] class IntelFortranCompiler(IntelGnuLikeCompiler, FortranCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', 
exe_wrapper=None, - **kwargs): - self.file_suffixes = ('f90', 'f', 'for', 'ftn', 'fpp') + file_suffixes = ('f90', 'f', 'for', 'ftn', 'fpp', ) + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): FortranCompiler.__init__(self, exelist, version, for_machine, - is_cross, info, exe_wrapper, **kwargs) + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) # FIXME: Add support for OS X and Windows in detect_fortran_compiler so # we are sent the type of compiler IntelGnuLikeCompiler.__init__(self) @@ -287,32 +308,36 @@ '2': default_warn_args + ['-warn', 'unused'], '3': ['-warn', 'all']} - def get_options(self): + def get_options(self) -> 'OptionDictType': opts = FortranCompiler.get_options(self) fortran_stds = ['legacy', 'f95', 'f2003', 'f2008', 'f2018'] - opts.update({'fortran_std': coredata.UserComboOption('Fortran language standard to use', - ['none'] + fortran_stds, - 'none')}) + opts.update({ + 'std': coredata.UserComboOption( + 'Fortran language standard to use', + ['none'] + fortran_stds, + 'none', + ), + }) return opts - def get_option_compile_args(self, options) -> T.List[str]: + def get_option_compile_args(self, options: 'OptionDictType') -> T.List[str]: args = [] - std = options['fortran_std'] + std = options['std'] stds = {'legacy': 'none', 'f95': 'f95', 'f2003': 'f03', 'f2008': 'f08', 'f2018': 'f18'} if std.value != 'none': args.append('-stand=' + stds[std.value]) return args - def get_preprocess_only_args(self): + def get_preprocess_only_args(self) -> T.List[str]: return ['-cpp', '-EP'] - def get_always_args(self): + def get_always_args(self) -> T.List[str]: """Ifort doesn't have -pipe.""" val = super().get_always_args() val.remove('-pipe') return val - def language_stdlib_only_link_flags(self): + def language_stdlib_only_link_flags(self) -> T.List[str]: return ['-lifcore', '-limf'] def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]: @@ -321,14 +346,17 @@ class IntelClFortranCompiler(IntelVisualStudioLikeCompiler, FortranCompiler): - file_suffixes = ['f90', 'f', 'for', 'ftn', 'fpp'] + file_suffixes = ('f90', 'f', 'for', 'ftn', 'fpp', ) always_args = ['/nologo'] - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, target: str, info: 'MachineInfo', exe_wrapper=None, - **kwargs): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', target: str, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): FortranCompiler.__init__(self, exelist, version, for_machine, - is_cross, info, exe_wrapper, **kwargs) + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) IntelVisualStudioLikeCompiler.__init__(self, target) default_warn_args = ['/warn:general', '/warn:truncated_source'] @@ -337,32 +365,39 @@ '2': default_warn_args + ['/warn:unused'], '3': ['/warn:all']} - def get_options(self): + def get_options(self) -> 'OptionDictType': opts = FortranCompiler.get_options(self) fortran_stds = ['legacy', 'f95', 'f2003', 'f2008', 'f2018'] - opts.update({'fortran_std': coredata.UserComboOption('Fortran language standard to use', - ['none'] + fortran_stds, - 'none')}) + opts.update({ + 'std': coredata.UserComboOption( + 'Fortran language standard to use', 
+ ['none'] + fortran_stds, + 'none', + ), + }) return opts - def get_option_compile_args(self, options) -> T.List[str]: + def get_option_compile_args(self, options: 'OptionDictType') -> T.List[str]: args = [] - std = options['fortran_std'] + std = options['std'] stds = {'legacy': 'none', 'f95': 'f95', 'f2003': 'f03', 'f2008': 'f08', 'f2018': 'f18'} if std.value != 'none': args.append('/stand:' + stds[std.value]) return args - def get_module_outdir_args(self, path) -> T.List[str]: + def get_module_outdir_args(self, path: str) -> T.List[str]: return ['/module:' + path] class PathScaleFortranCompiler(FortranCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrapper=None, - **kwargs): + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): FortranCompiler.__init__(self, exelist, version, for_machine, - is_cross, info, exe_wrapper, **kwargs) + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) self.id = 'pathscale' default_warn_args = ['-fullwarn'] self.warn_args = {'0': [], @@ -370,16 +405,19 @@ '2': default_warn_args, '3': default_warn_args} - def openmp_flags(self): + def openmp_flags(self) -> T.List[str]: return ['-mp'] class PGIFortranCompiler(PGICompiler, FortranCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrapper=None, - **kwargs): + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): FortranCompiler.__init__(self, exelist, version, for_machine, - is_cross, info, exe_wrapper, **kwargs) + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) PGICompiler.__init__(self) default_warn_args = ['-Minform=inform'] @@ -392,13 +430,36 @@ return ['-lpgf90rtl', '-lpgf90', '-lpgf90_rpm1', '-lpgf902', '-lpgf90rtl', '-lpgftnrtl', '-lrt'] + +class NvidiaHPC_FortranCompiler(PGICompiler, FortranCompiler): + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + FortranCompiler.__init__(self, exelist, version, for_machine, + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) + PGICompiler.__init__(self) + + self.id = 'nvidia_hpc' + default_warn_args = ['-Minform=inform'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args, + '3': default_warn_args + ['-Mdclchk']} + + class FlangFortranCompiler(ClangCompiler, FortranCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrapper=None, - **kwargs): + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): FortranCompiler.__init__(self, exelist, version, for_machine, - is_cross, info, exe_wrapper, **kwargs) - ClangCompiler.__init__(self) + is_cross, info, 
exe_wrapper, linker=linker, + full_version=full_version) + ClangCompiler.__init__(self, {}) self.id = 'flang' default_warn_args = ['-Minform=inform'] self.warn_args = {'0': [], @@ -410,11 +471,14 @@ return ['-lflang', '-lpgmath'] class Open64FortranCompiler(FortranCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrapper=None, - **kwargs): + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): FortranCompiler.__init__(self, exelist, version, for_machine, - is_cross, info, exe_wrapper, **kwargs) + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) self.id = 'open64' default_warn_args = ['-fullwarn'] self.warn_args = {'0': [], @@ -422,23 +486,26 @@ '2': default_warn_args, '3': default_warn_args} - def openmp_flags(self): + def openmp_flags(self) -> T.List[str]: return ['-mp'] class NAGFortranCompiler(FortranCompiler): - def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrapper=None, - **kwargs): + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): FortranCompiler.__init__(self, exelist, version, for_machine, - is_cross, info, exe_wrapper, **kwargs) + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) self.id = 'nagfor' - def get_warn_args(self, level): + def get_warn_args(self, level: str) -> T.List[str]: return [] - def get_module_outdir_args(self, path): + def get_module_outdir_args(self, path: str) -> T.List[str]: return ['-mdir', path] - def openmp_flags(self): + def openmp_flags(self) -> T.List[str]: return ['-openmp'] diff -Nru meson-0.53.2/mesonbuild/compilers/__init__.py meson-0.57.0+really0.56.2/mesonbuild/compilers/__init__.py --- meson-0.53.2/mesonbuild/compilers/__init__.py 2019-12-04 18:45:50.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/compilers/__init__.py 2020-09-17 22:00:51.000000000 +0000 @@ -30,11 +30,14 @@ 'is_llvm_ir', 'is_object', 'is_source', + 'is_known_suffix', 'lang_suffixes', 'sort_clink', 'AppleClangCCompiler', 'AppleClangCPPCompiler', + 'AppleClangObjCCompiler', + 'AppleClangObjCPPCompiler', 'ArmCCompiler', 'ArmCPPCompiler', 'ArmclangCCompiler', @@ -47,7 +50,6 @@ 'ClangObjCPPCompiler', 'ClangClCCompiler', 'ClangClCPPCompiler', - 'CompilerArgs', 'CPPCompiler', 'DCompiler', 'DmdDCompiler', @@ -57,6 +59,7 @@ 'ElbrusCCompiler', 'EmscriptenCCompiler', 'GnuCompiler', + 'GnuLikeCompiler', 'GnuCPPCompiler', 'ElbrusCPPCompiler', 'EmscriptenCPPCompiler', @@ -84,12 +87,19 @@ 'ObjCPPCompiler', 'Open64FortranCompiler', 'PathScaleFortranCompiler', + 'NvidiaHPC_CCompiler', + 'NvidiaHPC_CPPCompiler', + 'NvidiaHPC_FortranCompiler', 'PGICCompiler', 'PGICPPCompiler', 'PGIFortranCompiler', 'RustCompiler', 'CcrxCCompiler', 'CcrxCPPCompiler', + 'Xc16CCompiler', + 'CompCertCCompiler', + 'C2000CCompiler', + 'C2000CPPCompiler', 'SunFortranCompiler', 'SwiftCompiler', 'ValaCompiler', @@ -115,9 +125,10 @@ is_llvm_ir, is_object, is_library, + is_known_suffix, lang_suffixes, + languages_using_ldflags, sort_clink, - CompilerArgs, ) from .c import ( CCompiler, @@ -131,8 +142,12 @@ EmscriptenCCompiler, 
IntelCCompiler, IntelClCCompiler, + NvidiaHPC_CCompiler, PGICCompiler, CcrxCCompiler, + Xc16CCompiler, + CompCertCCompiler, + C2000CCompiler, VisualStudioCCompiler, ) from .cpp import ( @@ -147,8 +162,10 @@ EmscriptenCPPCompiler, IntelCPPCompiler, IntelClCPPCompiler, + NvidiaHPC_CPPCompiler, PGICPPCompiler, CcrxCPPCompiler, + C2000CPPCompiler, VisualStudioCPPCompiler, ) from .cs import MonoCompiler, VisualStudioCsCompiler @@ -170,17 +187,20 @@ NAGFortranCompiler, Open64FortranCompiler, PathScaleFortranCompiler, + NvidiaHPC_FortranCompiler, PGIFortranCompiler, SunFortranCompiler, ) from .java import JavaCompiler from .objc import ( ObjCCompiler, + AppleClangObjCCompiler, ClangObjCCompiler, GnuObjCCompiler, ) from .objcpp import ( ObjCPPCompiler, + AppleClangObjCPPCompiler, ClangObjCPPCompiler, GnuObjCPPCompiler, ) @@ -188,6 +208,6 @@ from .swift import SwiftCompiler from .vala import ValaCompiler from .mixins.visualstudio import VisualStudioLikeCompiler -from .mixins.gnu import GnuCompiler +from .mixins.gnu import GnuCompiler, GnuLikeCompiler from .mixins.intel import IntelGnuLikeCompiler, IntelVisualStudioLikeCompiler from .mixins.clang import ClangCompiler diff -Nru meson-0.53.2/mesonbuild/compilers/java.py meson-0.57.0+really0.56.2/mesonbuild/compilers/java.py --- meson-0.53.2/mesonbuild/compilers/java.py 2020-01-23 12:51:19.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/compilers/java.py 2020-10-18 21:29:13.000000000 +0000 @@ -15,6 +15,7 @@ import os.path import shutil import subprocess +import textwrap import typing as T from ..mesonlib import EnvironmentException, MachineChoice @@ -23,60 +24,40 @@ if T.TYPE_CHECKING: from ..envconfig import MachineInfo + from ..environment import Environment class JavaCompiler(BasicLinkerIsCompilerMixin, Compiler): language = 'java' - def __init__(self, exelist, version, for_machine: MachineChoice, - info: 'MachineInfo'): - super().__init__(exelist, version, for_machine, info) + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + info: 'MachineInfo', full_version: T.Optional[str] = None): + super().__init__(exelist, version, for_machine, info, full_version=full_version) self.id = 'unknown' - self.is_cross = False self.javarunner = 'java' - def get_werror_args(self): + def get_werror_args(self) -> T.List[str]: return ['-Werror'] - def split_shlib_to_parts(self, fname): - return None, fname - - def get_dependency_gen_args(self, outtarget, outfile): - return [] - - def get_compile_only_args(self): - return [] - - def get_output_args(self, subdir): + def get_output_args(self, subdir: str) -> T.List[str]: if subdir == '': subdir = './' return ['-d', subdir, '-s', subdir] - def get_coverage_args(self): - return [] - - def get_std_exe_link_args(self): + def get_pic_args(self) -> T.List[str]: return [] - def get_include_args(self, path): + def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: return [] - def get_pic_args(self): - return [] - - def name_string(self): - return ' '.join(self.exelist) - - def get_pch_use_args(self, pch_dir, header): - return [] - - def get_pch_name(self, header_name): + def get_pch_name(self, name: str) -> str: return '' - def get_buildtype_args(self, buildtype): + def get_buildtype_args(self, buildtype: str) -> T.List[str]: return java_buildtype_args[buildtype] - def compute_parameters_with_absolute_paths(self, parameter_list, build_dir): + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], + build_dir: str) -> T.List[str]: for idx, i in 
enumerate(parameter_list): if i in ['-cp', '-classpath', '-sourcepath'] and idx + 1 < len(parameter_list): path_list = parameter_list[idx + 1].split(os.pathsep) @@ -85,17 +66,18 @@ return parameter_list - def sanity_check(self, work_dir, environment): + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: src = 'SanityCheck.java' obj = 'SanityCheck' source_name = os.path.join(work_dir, src) with open(source_name, 'w') as ofile: - ofile.write('''class SanityCheck { - public static void main(String[] args) { - int i; - } -} -''') + ofile.write(textwrap.dedent( + '''class SanityCheck { + public static void main(String[] args) { + int i; + } + } + ''')) pc = subprocess.Popen(self.exelist + [src], cwd=work_dir) pc.wait() if pc.returncode != 0: @@ -115,5 +97,8 @@ "all about it." raise EnvironmentException(m) - def needs_static_linker(self): + def needs_static_linker(self) -> bool: return False + + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + return [] diff -Nru meson-0.53.2/mesonbuild/compilers/mixins/arm.py meson-0.57.0+really0.56.2/mesonbuild/compilers/mixins/arm.py --- meson-0.53.2/mesonbuild/compilers/mixins/arm.py 2020-02-25 18:00:46.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/compilers/mixins/arm.py 2021-01-06 10:39:48.000000000 +0000 @@ -1,4 +1,4 @@ -# Copyright 2012-2019 The Meson development team +# Copyright 2012-2020 Meson development team # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,22 +15,29 @@ """Representations specific to the arm family of compilers.""" import os -import re import typing as T from ... import mesonlib +from ...linkers import ArmClangDynamicLinker from ..compilers import clike_debug_args from .clang import clang_color_args if T.TYPE_CHECKING: from ...environment import Environment + from ...compilers.compilers import Compiler +else: + # This is a bit clever, for mypy we pretend that these mixins descend from + # Compiler, so we get all of the methods and attributes defined for us, but + # for runtime we make them descend from object (which all classes normally + # do). This gives up DRYer type checking, with no runtime impact + Compiler = object arm_buildtype_args = { 'plain': [], - 'debug': ['-O0', '--debug'], - 'debugoptimized': ['-O1', '--debug'], - 'release': ['-O3', '-Otime'], - 'minsize': ['-O3', '-Ospace'], + 'debug': [], + 'debugoptimized': [], + 'release': [], + 'minsize': [], 'custom': [], } # type: T.Dict[str, T.List[str]] @@ -38,33 +45,35 @@ '0': ['-O0'], 'g': ['-g'], '1': ['-O1'], - '2': ['-O2'], - '3': ['-O3'], - 's': [], + '2': [], # Compiler defaults to -O2 + '3': ['-O3', '-Otime'], + 's': ['-O3'], # Compiler defaults to -Ospace } # type: T.Dict[str, T.List[str]] armclang_buildtype_args = { 'plain': [], - 'debug': ['-O0', '-g'], - 'debugoptimized': ['-O1', '-g'], - 'release': ['-Os'], - 'minsize': ['-Oz'], + 'debug': [], + 'debugoptimized': [], + 'release': [], + 'minsize': [], 'custom': [], } # type: T.Dict[str, T.List[str]] armclang_optimization_args = { - '0': ['-O0'], + '0': [], # Compiler defaults to -O0 'g': ['-g'], '1': ['-O1'], '2': ['-O2'], '3': ['-O3'], - 's': ['-Os'] + 's': ['-Oz'] } # type: T.Dict[str, T.List[str]] -class ArmCompiler: - # Functionality that is common to all ARM family compilers. 
- def __init__(self): +class ArmCompiler(Compiler): + + """Functionality that is common to all ARM family compilers.""" + + def __init__(self) -> None: if not self.is_cross: raise mesonlib.EnvironmentException('armcc supports only cross-compilation.') self.id = 'arm' @@ -72,7 +81,7 @@ self.warn_args = {'0': [], '1': default_warn_args, '2': default_warn_args + [], - '3': default_warn_args + []} + '3': default_warn_args + []} # type: T.Dict[str, T.List[str]] # Assembly self.can_compile_suffixes.add('s') @@ -87,9 +96,8 @@ def get_always_args(self) -> T.List[str]: return [] - # Override CCompiler.get_dependency_gen_args def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]: - return [] + return ['--depend_target', outtarget, '--depend', outfile, '--depend_single_line'] def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: # FIXME: Add required arguments @@ -126,32 +134,15 @@ return parameter_list -class ArmclangCompiler: - def __init__(self): +class ArmclangCompiler(Compiler): + + def __init__(self) -> None: if not self.is_cross: raise mesonlib.EnvironmentException('armclang supports only cross-compilation.') # Check whether 'armlink' is available in path - self.linker_exe = 'armlink' - args = '--vsn' - try: - p, stdo, stderr = mesonlib.Popen_safe(self.linker_exe, args) - except OSError as e: - err_msg = 'Unknown linker\nRunning "{0}" gave \n"{1}"'.format(' '.join([self.linker_exe] + [args]), e) - raise mesonlib.EnvironmentException(err_msg) - # Verify the armlink version - ver_str = re.search('.*Component.*', stdo) - if ver_str: - ver_str = ver_str.group(0) - else: - raise mesonlib.EnvironmentException('armlink version string not found') - assert ver_str # makes mypy happy - # Using the regular expression from environment.search_version, - # which is used for searching compiler version - version_regex = r'(? T.List[str]: - return [] + return ['-MD', '-MT', outtarget, '-MF', outfile] def get_optimization_args(self, optimization_level: str) -> T.List[str]: return armclang_optimization_args[optimization_level] diff -Nru meson-0.53.2/mesonbuild/compilers/mixins/c2000.py meson-0.57.0+really0.56.2/mesonbuild/compilers/mixins/c2000.py --- meson-0.53.2/mesonbuild/compilers/mixins/c2000.py 1970-01-01 00:00:00.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/compilers/mixins/c2000.py 2020-10-18 21:29:13.000000000 +0000 @@ -0,0 +1,124 @@ +# Copyright 2012-2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Representations specific to the Texas Instruments C2000 compiler family.""" + +import os +import typing as T + +from ...mesonlib import EnvironmentException + +if T.TYPE_CHECKING: + from ...environment import Environment + from ...compilers.compilers import Compiler +else: + # This is a bit clever, for mypy we pretend that these mixins descend from + # Compiler, so we get all of the methods and attributes defined for us, but + # for runtime we make them descend from object (which all classes normally + # do). 
This gives up DRYer type checking, with no runtime impact + Compiler = object + +c2000_buildtype_args = { + 'plain': [], + 'debug': [], + 'debugoptimized': [], + 'release': [], + 'minsize': [], + 'custom': [], +} # type: T.Dict[str, T.List[str]] + +c2000_optimization_args = { + '0': ['-O0'], + 'g': ['-Ooff'], + '1': ['-O1'], + '2': ['-O2'], + '3': ['-O3'], + 's': ['-04'] +} # type: T.Dict[str, T.List[str]] + +c2000_debug_args = { + False: [], + True: [] +} # type: T.Dict[bool, T.List[str]] + + +class C2000Compiler(Compiler): + + def __init__(self) -> None: + if not self.is_cross: + raise EnvironmentException('c2000 supports only cross-compilation.') + self.id = 'c2000' + # Assembly + self.can_compile_suffixes.add('asm') + default_warn_args = [] # type: T.List[str] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args + [], + '3': default_warn_args + []} # type: T.Dict[str, T.List[str]] + + def get_pic_args(self) -> T.List[str]: + # PIC support is not enabled by default for c2000, + # if users want to use it, they need to add the required arguments explicitly + return [] + + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + return c2000_buildtype_args[buildtype] + + def get_pch_suffix(self) -> str: + return 'pch' + + def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: + return [] + + def thread_flags(self, env: 'Environment') -> T.List[str]: + return [] + + def get_coverage_args(self) -> T.List[str]: + return [] + + def get_no_stdinc_args(self) -> T.List[str]: + return [] + + def get_no_stdlib_link_args(self) -> T.List[str]: + return [] + + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + return c2000_optimization_args[optimization_level] + + def get_debug_args(self, is_debug: bool) -> T.List[str]: + return c2000_debug_args[is_debug] + + @classmethod + def unix_args_to_native(cls, args: T.List[str]) -> T.List[str]: + result = [] + for i in args: + if i.startswith('-D'): + i = '-define=' + i[2:] + if i.startswith('-I'): + i = '-include=' + i[2:] + if i.startswith('-Wl,-rpath='): + continue + elif i == '--print-search-dirs': + continue + elif i.startswith('-L'): + continue + result.append(i) + return result + + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]: + for idx, i in enumerate(parameter_list): + if i[:9] == '-include=': + parameter_list[idx] = i[:9] + os.path.normpath(os.path.join(build_dir, i[9:])) + + return parameter_list diff -Nru meson-0.53.2/mesonbuild/compilers/mixins/ccrx.py meson-0.57.0+really0.56.2/mesonbuild/compilers/mixins/ccrx.py --- meson-0.53.2/mesonbuild/compilers/mixins/ccrx.py 2020-02-25 18:00:46.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/compilers/mixins/ccrx.py 2020-10-18 21:29:13.000000000 +0000 @@ -21,6 +21,13 @@ if T.TYPE_CHECKING: from ...environment import Environment + from ...compilers.compilers import Compiler +else: + # This is a bit clever, for mypy we pretend that these mixins descend from + # Compiler, so we get all of the methods and attributes defined for us, but + # for runtime we make them descend from object (which all classes normally + # do). 
This gives up DRYer type checking, with no runtime impact + Compiler = object ccrx_buildtype_args = { 'plain': [], @@ -46,8 +53,13 @@ } # type: T.Dict[bool, T.List[str]] -class CcrxCompiler: - def __init__(self): +class CcrxCompiler(Compiler): + + if T.TYPE_CHECKING: + is_cross = True + can_compile_suffixes = set() # type: T.Set[str] + + def __init__(self) -> None: if not self.is_cross: raise EnvironmentException('ccrx supports only cross-compilation.') self.id = 'ccrx' @@ -57,7 +69,7 @@ self.warn_args = {'0': [], '1': default_warn_args, '2': default_warn_args + [], - '3': default_warn_args + []} + '3': default_warn_args + []} # type: T.Dict[str, T.List[str]] def get_pic_args(self) -> T.List[str]: # PIC support is not enabled by default for CCRX, @@ -73,10 +85,6 @@ def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: return [] - # Override CCompiler.get_dependency_gen_args - def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]: - return [] - def thread_flags(self, env: 'Environment') -> T.List[str]: return [] diff -Nru meson-0.53.2/mesonbuild/compilers/mixins/clang.py meson-0.57.0+really0.56.2/mesonbuild/compilers/mixins/clang.py --- meson-0.53.2/mesonbuild/compilers/mixins/clang.py 2020-01-23 12:51:19.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/compilers/mixins/clang.py 2021-01-06 10:39:48.000000000 +0000 @@ -15,11 +15,12 @@ """Abstractions for the LLVM/Clang compiler family.""" import os +import shutil import typing as T from ... import mesonlib from ...linkers import AppleDynamicLinker -from ..compilers import clike_optimization_args +from ..compilers import CompileCheckMode from .gnu import GnuLikeCompiler if T.TYPE_CHECKING: @@ -32,11 +33,21 @@ 'never': ['-Xclang', '-fno-color-diagnostics'], } # type: T.Dict[str, T.List[str]] +clang_optimization_args = { + '0': [], + 'g': ['-Og'], + '1': ['-O1'], + '2': ['-O2'], + '3': ['-O3'], + 's': ['-Os'], +} # type: T.Dict[str, T.List[str]] class ClangCompiler(GnuLikeCompiler): - def __init__(self): + + def __init__(self, defines: T.Optional[T.Dict[str, str]]): super().__init__() self.id = 'clang' + self.defines = defines or {} self.base_options.append('b_colorout') # TODO: this really should be part of the linker base_options, but # linkers don't have base_options. @@ -48,8 +59,14 @@ def get_colorout_args(self, colortype: str) -> T.List[str]: return clang_color_args[colortype][:] + def has_builtin_define(self, define: str) -> bool: + return define in self.defines + + def get_builtin_define(self, define: str) -> T.Optional[str]: + return self.defines.get(define) + def get_optimization_args(self, optimization_level: str) -> T.List[str]: - return clike_optimization_args[optimization_level] + return clang_optimization_args[optimization_level] def get_pch_suffix(self) -> str: return 'pch' @@ -60,17 +77,17 @@ # so it might change semantics at any time. 
return ['-include-pch', os.path.join(pch_dir, self.get_pch_name(header))] - def has_multi_arguments(self, args: T.List[str], env: 'Environment') -> T.List[str]: - myargs = ['-Werror=unknown-warning-option', '-Werror=unused-command-line-argument'] - if mesonlib.version_compare(self.version, '>=3.6.0'): - myargs.append('-Werror=ignored-optimization-argument') - return super().has_multi_arguments( - myargs + args, - env) + def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]: + myargs = [] # type: T.List[str] + if mode is CompileCheckMode.COMPILE: + myargs.extend(['-Werror=unknown-warning-option', '-Werror=unused-command-line-argument']) + if mesonlib.version_compare(self.version, '>=3.6.0'): + myargs.append('-Werror=ignored-optimization-argument') + return super().get_compiler_check_args(mode) + myargs def has_function(self, funcname: str, prefix: str, env: 'Environment', *, extra_args: T.Optional[T.List[str]] = None, - dependencies: T.Optional[T.List['Dependency']] = None) -> bool: + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: if extra_args is None: extra_args = [] # Starting with XCode 8, we need to pass this to force linker @@ -81,7 +98,7 @@ if isinstance(self.linker, AppleDynamicLinker) and mesonlib.version_compare(self.version, '>=8.0'): extra_args.append('-Wl,-no_weak_imports') return super().has_function(funcname, prefix, env, extra_args=extra_args, - dependencies=dependencies) + dependencies=dependencies) def openmp_flags(self) -> T.List[str]: if mesonlib.version_compare(self.version, '>=3.8.0'): @@ -91,3 +108,29 @@ else: # Shouldn't work, but it'll be checked explicitly in the OpenMP dependency. return [] + + @classmethod + def use_linker_args(cls, linker: str) -> T.List[str]: + # Clang additionally can use a linker specified as a path, which GCC + # (and other gcc-like compilers) cannot. This is becuse clang (being + # llvm based) is retargetable, while GCC is not. + # + + # qcld: Qualcomm Snapdragon linker, based on LLVM + if linker == 'qcld': + return ['-fuse-ld=qcld'] + + if shutil.which(linker): + if not shutil.which(linker): + raise mesonlib.MesonException( + 'Cannot find linker {}.'.format(linker)) + return ['-fuse-ld={}'.format(linker)] + return super().use_linker_args(linker) + + def get_has_func_attribute_extra_args(self, name: str) -> T.List[str]: + # Clang only warns about unknown or ignored attributes, so force an + # error. + return ['-Werror=attributes'] + + def get_coverage_link_args(self) -> T.List[str]: + return ['--coverage'] diff -Nru meson-0.53.2/mesonbuild/compilers/mixins/clike.py meson-0.57.0+really0.56.2/mesonbuild/compilers/mixins/clike.py --- meson-0.53.2/mesonbuild/compilers/mixins/clike.py 2020-01-23 12:51:19.000000000 +0000 +++ meson-0.57.0+really0.56.2/mesonbuild/compilers/mixins/clike.py 2021-01-06 10:39:48.000000000 +0000 @@ -20,6 +20,7 @@ standalone, they only work through inheritance. """ +import collections import functools import glob import itertools @@ -27,107 +28,169 @@ import re import subprocess import typing as T -from pathlib import Path +from ..._pathlib import Path +from ... import arglist from ... import mesonlib -from ...mesonlib import LibType from ... import mlog +from ...linkers import GnuLikeDynamicLinkerMixin, SolarisDynamicLinker, CompCertDynamicLinker +from ...mesonlib import LibType from .. 
import compilers +from ..compilers import CompileCheckMode from .visualstudio import VisualStudioLikeCompiler if T.TYPE_CHECKING: + from ...dependencies import Dependency, ExternalProgram from ...environment import Environment + from ...compilers.compilers import Compiler +else: + # This is a bit clever, for mypy we pretend that these mixins descend from + # Compiler, so we get all of the methods and attributes defined for us, but + # for runtime we make them descend from object (which all classes normally + # do). This gives up DRYer type checking, with no runtime impact + Compiler = object + +GROUP_FLAGS = re.compile(r'''\.so (?:\.[0-9]+)? (?:\.[0-9]+)? (?:\.[0-9]+)?$ | + ^(?:-Wl,)?-l | + \.a$''', re.X) + +class CLikeCompilerArgs(arglist.CompilerArgs): + prepend_prefixes = ('-I', '-L') + dedup2_prefixes = ('-I', '-isystem', '-L', '-D', '-U') + + # NOTE: not thorough. A list of potential corner cases can be found in + # https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038 + dedup1_prefixes = ('-l', '-Wl,-l', '-Wl,--export-dynamic') + dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a') + dedup1_args = ('-c', '-S', '-E', '-pipe', '-pthread') + + def to_native(self, copy: bool = False) -> T.List[str]: + # This seems to be allowed, but could never work? + assert isinstance(self.compiler, compilers.Compiler), 'How did you get here' + + # Check if we need to add --start/end-group for circular dependencies + # between static libraries, and for recursively searching for symbols + # needed by static libraries that are provided by object files or + # shared libraries. + self.flush_pre_post() + if copy: + new = self.copy() + else: + new = self + # This covers all ld.bfd, ld.gold, ld.gold, and xild on Linux, which + # all act like (or are) gnu ld + # TODO: this could probably be added to the DynamicLinker instead + if isinstance(self.compiler.linker, (GnuLikeDynamicLinkerMixin, SolarisDynamicLinker, CompCertDynamicLinker)): + group_start = -1 + group_end = -1 + for i, each in enumerate(new): + if not GROUP_FLAGS.search(each): + continue + group_end = i + if group_start < 0: + # First occurrence of a library + group_start = i + if group_start >= 0: + # Last occurrence of a library + new.insert(group_end + 1, '-Wl,--end-group') + new.insert(group_start, '-Wl,--start-group') + # Remove system/default include paths added with -isystem + default_dirs = self.compiler.get_default_include_dirs() + if default_dirs: + bad_idx_list = [] # type: T.List[int] + for i, each in enumerate(new): + if not each.startswith('-isystem'): + continue + + # Remove the -isystem and the path if the path is a default path + if (each == '-isystem' and + i < (len(new) - 1) and + new[i + 1] in default_dirs): + bad_idx_list += [i, i + 1] + elif each.startswith('-isystem=') and each[9:] in default_dirs: + bad_idx_list += [i] + elif each[8:] in default_dirs: + bad_idx_list += [i] + for i in reversed(bad_idx_list): + new.pop(i) + return self.compiler.unix_args_to_native(new._container) + + def __repr__(self) -> str: + self.flush_pre_post() + return 'CLikeCompilerArgs({!r}, {!r})'.format(self.compiler, self._container) -class CLikeCompiler: +class CLikeCompiler(Compiler): """Shared bits for the C and CPP Compilers.""" + if T.TYPE_CHECKING: + warn_args = {} # type: T.Dict[str, T.List[str]] + # TODO: Replace this manual cache with functools.lru_cache - library_dirs_cache = {} - program_dirs_cache = {} - find_library_cache = {} - find_framework_cache = {} - internal_libs = compilers.unixy_compiler_internal_libs + 
find_library_cache = {} # type: T.Dict[T.Tuple[T.Tuple[str, ...], str, T.Tuple[str, ...], str, LibType], T.Optional[T.List[str]]] + find_framework_cache = {} # type: T.Dict[T.Tuple[T.Tuple[str, ...], str, T.Tuple[str, ...], bool], T.Optional[T.List[str]]] + internal_libs = arglist.UNIXY_COMPILER_INTERNAL_LIBS - def __init__(self, is_cross: bool, exe_wrapper: T.Optional[str] = None): + def __init__(self, exe_wrapper: T.Optional['ExternalProgram'] = None): # If a child ObjC or CPP class has already set it, don't set it ourselves - self.is_cross = is_cross self.can_compile_suffixes.add('h') # If the exe wrapper was not found, pretend it wasn't set so that the # sanity check is skipped and compiler checks use fallbacks. - if not exe_wrapper or not exe_wrapper.found(): + if not exe_wrapper or not exe_wrapper.found() or not exe_wrapper.get_command(): self.exe_wrapper = None else: - self.exe_wrapper = exe_wrapper.get_command() + self.exe_wrapper = exe_wrapper + + def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> CLikeCompilerArgs: + # This is correct, mypy just doesn't understand co-operative inheritance + return CLikeCompilerArgs(self, args) - def needs_static_linker(self): + def needs_static_linker(self) -> bool: return True # When compiling static libraries, so yes. - def get_always_args(self): + def get_always_args(self) -> T.List[str]: ''' Args that are always-on for all C compilers other than MSVC ''' - return ['-pipe'] + compilers.get_largefile_args(self) + return ['-pipe'] + self.get_largefile_args() - def get_no_stdinc_args(self): + def get_no_stdinc_args(self) -> T.List[str]: return ['-nostdinc'] - def get_no_stdlib_link_args(self): + def get_no_stdlib_link_args(self) -> T.List[str]: return ['-nostdlib'] - def get_warn_args(self, level): + def get_warn_args(self, level: str) -> T.List[str]: + # TODO: this should be an enum return self.warn_args[level] - def get_no_warn_args(self): + def get_no_warn_args(self) -> T.List[str]: # Almost every compiler uses this for disabling warnings return ['-w'] - def split_shlib_to_parts(self, fname): - return None, fname - - def depfile_for_object(self, objfile): - return objfile + '.' + self.get_depfile_suffix() - - def get_depfile_suffix(self): + def get_depfile_suffix(self) -> str: return 'd' - def get_exelist(self): - return self.exelist[:] + def get_exelist(self) -> T.List[str]: + return self.exelist.copy() - def get_preprocess_only_args(self): + def get_preprocess_only_args(self) -> T.List[str]: return ['-E', '-P'] - def get_compile_only_args(self): + def get_compile_only_args(self) -> T.List[str]: return ['-c'] - def get_no_optimization_args(self): + def get_no_optimization_args(self) -> T.List[str]: return ['-O0'] - def get_compiler_check_args(self): - ''' - Get arguments useful for compiler checks such as being permissive in - the code quality and not doing any optimization. - ''' - return self.get_no_optimization_args() - - def get_output_args(self, target): + def get_output_args(self, target: str) -> T.List[str]: return ['-o', target] - def get_coverage_args(self): - return ['--coverage'] - - def get_coverage_link_args(self) -> T.List[str]: - return self.linker.get_coverage_args() - - def get_werror_args(self): + def get_werror_args(self) -> T.List[str]: return ['-Werror'] - def get_std_exe_link_args(self): - # TODO: is this a linker property? - return [] - - def get_include_args(self, path, is_system): + def get_include_args(self, path: str, is_system: bool) -> T.List[str]: if path == '': path = '.' 
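A small self-contained sketch of the static-library grouping idea behind CLikeCompilerArgs.to_native above: locate the first and last library-like argument and bracket that span with -Wl,--start-group / -Wl,--end-group so GNU-style linkers re-scan the archives and resolve circular symbol dependencies. The regex here is a simplified stand-in for the real GROUP_FLAGS pattern:

import re
import typing as T

GROUP_FLAGS = re.compile(r'\.so(\.[0-9]+)*$|^(-Wl,)?-l|\.a$')

def add_group_flags(args: T.List[str]) -> T.List[str]:
    new = list(args)
    first = last = -1
    for i, a in enumerate(new):
        if GROUP_FLAGS.search(a):
            last = i
            if first < 0:
                first = i          # first library-like argument
    if first >= 0:
        new.insert(last + 1, '-Wl,--end-group')
        new.insert(first, '-Wl,--start-group')
    return new

print(add_group_flags(['-O2', '-lfoo', 'bar.a', '-o', 'prog']))
# ['-O2', '-Wl,--start-group', '-lfoo', 'bar.a', '-Wl,--end-group', '-o', 'prog']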
if is_system: @@ -141,7 +204,9 @@ return [] @functools.lru_cache() - def get_library_dirs(self, env, elf_class = None): + def _get_library_dirs(self, env: 'Environment', + elf_class: T.Optional[int] = None) -> T.List[str]: + # TODO: replace elf_class with enum dirs = self.get_compiler_dirs(env, 'libraries') if elf_class is None or elf_class == 0: return dirs @@ -158,31 +223,47 @@ if not files: retval.append(d) continue - file_to_check = os.path.join(d, files[0]) - with open(file_to_check, 'rb') as fd: - header = fd.read(5) - # if file is not an ELF file, it's weird, but accept dir - # if it is elf, and the class matches, accept dir - if header[1:4] != b'ELF' or int(header[4]) == elf_class: - retval.append(d) - # at this point, it's an ELF file which doesn't match the - # appropriate elf_class, so skip this one - return tuple(retval) + + for f in files: + file_to_check = os.path.join(d, f) + try: + with open(file_to_check, 'rb') as fd: + header = fd.read(5) + # if file is not an ELF file, it's weird, but accept dir + # if it is elf, and the class matches, accept dir + if header[1:4] != b'ELF' or int(header[4]) == elf_class: + retval.append(d) + # at this point, it's an ELF file which doesn't match the + # appropriate elf_class, so skip this one + # stop scanning after the first sucessful read + break + except OSError: + # Skip the file if we can't read it + pass + + return retval + + def get_library_dirs(self, env: 'Environment', + elf_class: T.Optional[int] = None) -> T.List[str]: + """Wrap the lru_cache so that we return a new copy and don't allow + mutation of the cached value. + """ + return self._get_library_dirs(env, elf_class).copy() @functools.lru_cache() - def get_program_dirs(self, env): + def _get_program_dirs(self, env: 'Environment') -> T.List[str]: ''' Programs used by the compiler. Also where toolchain DLLs such as libstdc++-6.dll are found with MinGW. ''' return self.get_compiler_dirs(env, 'programs') + def get_program_dirs(self, env: 'Environment') -> T.List[str]: + return self._get_program_dirs(env).copy() + def get_pic_args(self) -> T.List[str]: return ['-fPIC'] - def name_string(self) -> str: - return ' '.join(self.exelist) - def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: return ['-include', os.path.basename(header)] @@ -192,7 +273,7 @@ def get_linker_search_args(self, dirname: str) -> T.List[str]: return self.linker.get_search_args(dirname) - def get_default_include_dirs(self): + def get_default_include_dirs(self) -> T.List[str]: return [] def gen_export_dynamic_link_args(self, env: 'Environment') -> T.List[str]: @@ -201,13 +282,14 @@ def gen_import_library_args(self, implibname: str) -> T.List[str]: return self.linker.import_library_args(implibname) - def sanity_check_impl(self, work_dir, environment, sname, code): + def _sanity_check_impl(self, work_dir: str, environment: 'Environment', + sname: str, code: str) -> None: mlog.debug('Sanity testing ' + self.get_display_language() + ' compiler:', ' '.join(self.exelist)) mlog.debug('Is cross compiler: %s.' % str(self.is_cross)) source_name = os.path.join(work_dir, sname) binname = sname.rsplit('.', 1)[0] - mode = 'link' + mode = CompileCheckMode.LINK if self.is_cross: binname += '_cross' if self.exe_wrapper is None: @@ -216,7 +298,7 @@ # on OSX the compiler binary is the same but you need # a ton of compiler flags to differentiate between # arm and x86_64. So just compile. 
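The _get_library_dirs/get_library_dirs split above follows a cache-then-copy pattern; a minimal sketch (with a stand-in for the expensive compiler query) of why the public wrapper returns a copy of the cached value:

import functools
import typing as T

class ToolchainDirs:
    @functools.lru_cache()
    def _get_library_dirs(self) -> T.List[str]:
        # Stand-in for an expensive query of the real compiler.
        return ['/usr/lib', '/usr/local/lib']

    def get_library_dirs(self) -> T.List[str]:
        # Hand callers a copy so they cannot mutate the cached list in place.
        return self._get_library_dirs().copy()

t = ToolchainDirs()
dirs = t.get_library_dirs()
dirs.append('/opt/lib')                       # only the caller's copy changes
assert t.get_library_dirs() == ['/usr/lib', '/usr/local/lib']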
- mode = 'compile' + mode = CompileCheckMode.COMPILE cargs, largs = self._get_basic_compiler_args(environment, mode) extra_flags = cargs + self.linker_to_compiler_args(largs) @@ -244,7 +326,7 @@ if self.exe_wrapper is None: # Can't check if the binaries run so we have to assume they do return - cmdlist = self.exe_wrapper + [binary_name] + cmdlist = self.exe_wrapper.get_command() + [binary_name] else: cmdlist = [binary_name] mlog.debug('Running test binary command: ' + ' '.join(cmdlist)) @@ -256,18 +338,23 @@ if pe.returncode != 0: raise mesonlib.EnvironmentException('Executables created by {0} compiler {1} are not runnable.'.format(self.language, self.name_string())) - def sanity_check(self, work_dir, environment): + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: code = 'int main(void) { int class=0; return class; }\n' - return self.sanity_check_impl(work_dir, environment, 'sanitycheckc.c', code) + return self._sanity_check_impl(work_dir, environment, 'sanitycheckc.c', code) - def check_header(self, hname, prefix, env, *, extra_args=None, dependencies=None): + def check_header(self, hname: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: fargs = {'prefix': prefix, 'header': hname} code = '''{prefix} #include <{header}>''' return self.compiles(code.format(**fargs), env, extra_args=extra_args, dependencies=dependencies) - def has_header(self, hname, prefix, env, *, extra_args=None, dependencies=None, disable_cache=False): + def has_header(self, hname: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None, + disable_cache: bool = False) -> T.Tuple[bool, bool]: fargs = {'prefix': prefix, 'header': hname} code = '''{prefix} #ifdef __has_include @@ -280,7 +367,10 @@ return self.compiles(code.format(**fargs), env, extra_args=extra_args, dependencies=dependencies, mode='preprocess', disable_cache=disable_cache) - def has_header_symbol(self, hname, symbol, prefix, env, *, extra_args=None, dependencies=None): + def has_header_symbol(self, hname: str, symbol: str, prefix: str, + env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: fargs = {'prefix': prefix, 'header': hname, 'symbol': symbol} t = '''{prefix} #include <{header}> @@ -294,11 +384,20 @@ return self.compiles(t.format(**fargs), env, extra_args=extra_args, dependencies=dependencies) - def _get_basic_compiler_args(self, env, mode): - cargs, largs = [], [] - # Select a CRT if needed since we're linking - if mode == 'link': - cargs += self.get_linker_debug_crt_args() + def _get_basic_compiler_args(self, env: 'Environment', mode: CompileCheckMode) -> T.Tuple[T.List[str], T.List[str]]: + cargs = [] # type: T.List[str] + largs = [] # type: T.List[str] + if mode is CompileCheckMode.LINK: + # Sometimes we need to manually select the CRT to use with MSVC. + # One example is when trying to do a compiler check that involves + # linking with static libraries since MSVC won't select a CRT for + # us in that case and will error out asking us to pick one. 
+ try: + crt_val = env.coredata.base_options['b_vscrt'].value + buildtype = env.coredata.builtins['buildtype'].value + cargs += self.get_crt_compile_args(crt_val, buildtype) + except (KeyError, AttributeError): + pass # Add CFLAGS/CXXFLAGS/OBJCFLAGS/OBJCXXFLAGS and CPPFLAGS from the env sys_args = env.coredata.get_external_args(self.for_machine, self.language) @@ -309,7 +408,11 @@ cleaned_sys_args = self.remove_linkerlike_args(sys_args) cargs += cleaned_sys_args - if mode == 'link': + if mode is CompileCheckMode.LINK: + ld_value = env.lookup_binary_entry(self.for_machine, self.language + '_ld') + if ld_value is not None: + largs += self.use_linker_args(ld_value[0]) + # Add LDFLAGS from the env sys_ld_args = env.coredata.get_external_link_args(self.for_machine, self.language) # CFLAGS and CXXFLAGS go to both linking and compiling, but we want them @@ -319,24 +422,30 @@ cargs += self.get_compiler_args_for_mode(mode) return cargs, largs - def _get_compiler_check_args(self, env, extra_args, dependencies, mode='compile'): + def build_wrapper_args(self, env: 'Environment', + extra_args: T.Union[None, arglist.CompilerArgs, T.List[str]], + dependencies: T.Optional[T.List['Dependency']], + mode: CompileCheckMode = CompileCheckMode.COMPILE) -> arglist.CompilerArgs: + # TODO: the caller should handle the listfing of these arguments if extra_args is None: extra_args = [] else: + # TODO: we want to do this in the caller extra_args = mesonlib.listify(extra_args) - extra_args = mesonlib.listify([e(mode) if callable(e) else e for e in extra_args]) + extra_args = mesonlib.listify([e(mode.value) if callable(e) else e for e in extra_args]) if dependencies is None: dependencies = [] - elif not isinstance(dependencies, list): - dependencies = [dependencies] + elif not isinstance(dependencies, collections.abc.Iterable): + # TODO: we want to ensure the front end does the listifing here + dependencies = [dependencies] # type: ignore # Collect compiler arguments - cargs = compilers.CompilerArgs(self) - largs = [] + cargs = self.compiler_args() # type: arglist.CompilerArgs + largs = [] # type: T.List[str] for d in dependencies: # Add compile flags needed by dependencies cargs += d.get_compile_args() - if mode == 'link': + if mode is CompileCheckMode.LINK: # Add link flags needed to find dependencies largs += d.get_link_args() @@ -344,7 +453,7 @@ cargs += ca largs += la - cargs += self.get_compiler_check_args() + cargs += self.get_compiler_check_args(mode) # on MSVC compiler and linker flags must be separated by the "/link" argument # at this point, the '/link' argument may already be part of extra_args, otherwise, it is added here @@ -354,22 +463,11 @@ args = cargs + extra_args + largs return args - def compiles(self, code, env, *, extra_args=None, dependencies=None, mode='compile', disable_cache=False): - with self._build_wrapper(code, env, extra_args, dependencies, mode, disable_cache=disable_cache) as p: - return p.returncode == 0, p.cached - - def _build_wrapper(self, code, env, extra_args, dependencies=None, mode='compile', want_output=False, disable_cache=False, temp_dir=None): - args = self._get_compiler_check_args(env, extra_args, dependencies, mode) - if disable_cache or want_output: - return self.compile(code, extra_args=args, mode=mode, want_output=want_output, temp_dir=env.scratch_dir) - return self.cached_compile(code, env.coredata, extra_args=args, mode=mode, temp_dir=env.scratch_dir) - - def links(self, code, env, *, extra_args=None, dependencies=None, disable_cache=False): - return 
self.compiles(code, env, extra_args=extra_args, - dependencies=dependencies, mode='link', disable_cache=disable_cache) - - def run(self, code: str, env, *, extra_args=None, dependencies=None): - if self.is_cross and self.exe_wrapper is None: + def run(self, code: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> compilers.RunResult: + need_exe_wrapper = env.need_exe_wrapper(self.for_machine) + if need_exe_wrapper and self.exe_wrapper is None: raise compilers.CrossNoRunException('Can not run test applications in this cross environment.') with self._build_wrapper(code, env, extra_args, dependencies, mode='link', want_output=True) as p: if p.returncode != 0: @@ -377,10 +475,10 @@ p.input_name, p.returncode)) return compilers.RunResult(False) - if self.is_cross: - cmdlist = self.exe_wrapper + [p.output_name] + if need_exe_wrapper: + cmdlist = self.exe_wrapper.get_command() + [p.output_name] else: - cmdlist = p.output_name + cmdlist = [p.output_name] try: pe, so, se = mesonlib.Popen_safe(cmdlist) except Exception as e: @@ -393,7 +491,9 @@ mlog.debug(se) return compilers.RunResult(True, pe.returncode, so, se) - def _compile_int(self, expression, prefix, env, extra_args, dependencies): + def _compile_int(self, expression: str, prefix: str, env: 'Environment', + extra_args: T.Optional[T.List[str]], + dependencies: T.Optional[T.List['Dependency']]) -> bool: fargs = {'prefix': prefix, 'expression': expression} t = '''#include {prefix} @@ -401,7 +501,10 @@ return self.compiles(t.format(**fargs), env, extra_args=extra_args, dependencies=dependencies)[0] - def cross_compute_int(self, expression, low, high, guess, prefix, env, extra_args, dependencies): + def cross_compute_int(self, expression: str, low: T.Optional[int], high: T.Optional[int], + guess: T.Optional[int], prefix: str, env: 'Environment', + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> int: # Try user's guess first if isinstance(guess, int): if self._compile_int('%s == %d' % (expression, guess), prefix, env, extra_args, dependencies): @@ -449,7 +552,10 @@ return low - def compute_int(self, expression, low, high, guess, prefix, env, *, extra_args=None, dependencies=None): + def compute_int(self, expression: str, low: T.Optional[int], high: T.Optional[int], + guess: T.Optional[int], prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> int: if extra_args is None: extra_args = [] if self.is_cross: @@ -469,7 +575,9 @@ raise mesonlib.EnvironmentException('Could not run compute_int test binary.') return int(res.stdout) - def cross_sizeof(self, typename, prefix, env, *, extra_args=None, dependencies=None): + def cross_sizeof(self, typename: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> int: if extra_args is None: extra_args = [] fargs = {'prefix': prefix, 'type': typename} @@ -484,7 +592,9 @@ return -1 return self.cross_compute_int('sizeof(%s)' % typename, None, None, None, prefix, env, extra_args, dependencies) - def sizeof(self, typename, prefix, env, *, extra_args=None, dependencies=None): + def sizeof(self, typename: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> int: if extra_args is None: extra_args = [] fargs = 
{'prefix': prefix, 'type': typename} @@ -505,7 +615,9 @@ raise mesonlib.EnvironmentException('Could not run sizeof test binary.') return int(res.stdout) - def cross_alignment(self, typename, prefix, env, *, extra_args=None, dependencies=None): + def cross_alignment(self, typename: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> int: if extra_args is None: extra_args = [] fargs = {'prefix': prefix, 'type': typename} @@ -526,7 +638,9 @@ }};''' return self.cross_compute_int('offsetof(struct tmp, target)', None, None, None, t.format(**fargs), env, extra_args, dependencies) - def alignment(self, typename, prefix, env, *, extra_args=None, dependencies=None): + def alignment(self, typename: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> int: if extra_args is None: extra_args = [] if self.is_cross: @@ -555,7 +669,10 @@ raise mesonlib.EnvironmentException('Could not determine alignment of %s. Sorry. You might want to file a bug.' % typename) return align - def get_define(self, dname, prefix, env, extra_args, dependencies, disable_cache=False): + def get_define(self, dname: str, prefix: str, env: 'Environment', + extra_args: T.Optional[T.List[str]], + dependencies: T.Optional[T.List['Dependency']], + disable_cache: bool = False) -> T.Tuple[str, bool]: delim = '"MESON_GET_DEFINE_DELIMITER"' fargs = {'prefix': prefix, 'define': dname, 'delim': delim} code = ''' @@ -564,11 +681,11 @@ # define {define} #endif {delim}\n{define}''' - args = self._get_compiler_check_args(env, extra_args, dependencies, - mode='preprocess').to_native() - func = lambda: self.cached_compile(code.format(**fargs), env.coredata, extra_args=args, mode='preprocess') + args = self.build_wrapper_args(env, extra_args, dependencies, + mode=CompileCheckMode.PREPROCESS).to_native() + func = functools.partial(self.cached_compile, code.format(**fargs), env.coredata, extra_args=args, mode='preprocess') if disable_cache: - func = lambda: self.compile(code.format(**fargs), extra_args=args, mode='preprocess', temp_dir=env.scratch_dir) + func = functools.partial(self.compile, code.format(**fargs), extra_args=args, mode='preprocess', temp_dir=env.scratch_dir) with func() as p: cached = p.cached if p.returncode != 0: @@ -576,9 +693,13 @@ # Get the preprocessed value after the delimiter, # minus the extra newline at the end and # merge string literals. - return self.concatenate_string_literals(p.stdo.split(delim + '\n')[-1][:-1]), cached + return self._concatenate_string_literals(p.stdout.split(delim + '\n')[-1][:-1]), cached - def get_return_value(self, fname, rtype, prefix, env, extra_args, dependencies): + def get_return_value(self, fname: str, rtype: str, prefix: str, + env: 'Environment', extra_args: T.Optional[T.List[str]], + dependencies: T.Optional[T.List['Dependency']]) -> T.Union[str, int]: + # TODO: rtype should be an enum. + # TODO: maybe we can use overload to tell mypy when this will return int vs str? 
if rtype == 'string': fmt = '%s' cast = '(char*)' @@ -606,9 +727,10 @@ except ValueError: m = 'Return value of {}() is not an int' raise mesonlib.EnvironmentException(m.format(fname)) + assert False, 'Unreachable' @staticmethod - def _no_prototype_templ(): + def _no_prototype_templ() -> T.Tuple[str, str]: """ Try to find the function without a prototype from a header by defining our own dummy prototype and trying to link with the C library (and @@ -643,7 +765,7 @@ return head, main @staticmethod - def _have_prototype_templ(): + def _have_prototype_templ() -> T.Tuple[str, str]: """ Returns a head-er and main() call that uses the headers listed by the user for the function prototype while checking if a function exists. @@ -658,13 +780,16 @@ # is not run so we don't care what the return value is. main = '''\nint main(void) {{ void *a = (void*) &{func}; - long b = (long) a; + long long b = (long long) a; return (int) b; }}''' return head, main - def has_function(self, funcname, prefix, env, *, extra_args=None, dependencies=None): - """ + def has_function(self, funcname: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + """Determine if a function exists. + First, this function looks for the symbol in the default libraries provided by the compiler (stdlib + a few others usually). If that fails, it checks if any of the headers specified in the prefix provide @@ -684,7 +809,11 @@ return val, False raise mesonlib.EnvironmentException('Cross variable {0} is not a boolean.'.format(varname)) - fargs = {'prefix': prefix, 'func': funcname} + # TODO: we really need a protocol for this, + # + # class StrProto(typing.Protocol): + # def __str__(self) -> str: ... + fargs = {'prefix': prefix, 'func': funcname} # type: T.Dict[str, T.Union[str, bool, int]] # glibc defines functions that are not available on Linux as stubs that # fail with ENOSYS (such as e.g. lchmod). In this case we want to fail @@ -727,30 +856,39 @@ # need to look for them differently. On nice compilers like clang, we # can just directly use the __has_builtin() macro. fargs['no_includes'] = '#include' not in prefix + is_builtin = funcname.startswith('__builtin_') + fargs['is_builtin'] = is_builtin + fargs['__builtin_'] = '' if is_builtin else '__builtin_' t = '''{prefix} int main(void) {{ + + /* With some toolchains (MSYS2/mingw for example) the compiler + * provides various builtins which are not really implemented and + * fall back to the stdlib where they aren't provided and fail at + * build/link time. In case the user provides a header, including + * the header didn't lead to the function being defined, and the + * function we are checking isn't a builtin itself we assume the + * builtin is not functional and we just error out. */ + #if !{no_includes:d} && !defined({func}) && !{is_builtin:d} + #error "No definition for {__builtin_}{func} found in the prefix" + #endif + #ifdef __has_builtin - #if !__has_builtin(__builtin_{func}) - #error "__builtin_{func} not found" + #if !__has_builtin({__builtin_}{func}) + #error "{__builtin_}{func} not found" #endif #elif ! defined({func}) - /* Check for __builtin_{func} only if no includes were added to the - * prefix above, which means no definition of {func} can be found. - * We would always check for this, but we get false positives on - * MSYS2 if we do. Their toolchain is broken, but we can at least - * give them a workaround. 
*/ - #if {no_includes:d} - __builtin_{func}; - #else - #error "No definition for __builtin_{func} found in the prefix" - #endif + {__builtin_}{func}; #endif return 0; }}''' return self.links(t.format(**fargs), env, extra_args=extra_args, dependencies=dependencies) - def has_members(self, typename, membernames, prefix, env, *, extra_args=None, dependencies=None): + def has_members(self, typename: str, membernames: T.List[str], + prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: if extra_args is None: extra_args = [] fargs = {'prefix': prefix, 'type': typename, 'name': 'foo'} @@ -767,7 +905,8 @@ return self.compiles(t.format(**fargs), env, extra_args=extra_args, dependencies=dependencies) - def has_type(self, typename, prefix, env, extra_args, dependencies=None): + def has_type(self, typename: str, prefix: str, env: 'Environment', extra_args: T.List[str], + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: fargs = {'prefix': prefix, 'type': typename} t = '''{prefix} void bar(void) {{ @@ -776,7 +915,7 @@ return self.compiles(t.format(**fargs), env, extra_args=extra_args, dependencies=dependencies) - def symbols_have_underscore_prefix(self, env): + def symbols_have_underscore_prefix(self, env: 'Environment') -> bool: ''' Check if the compiler prefixes an underscore to global C symbols ''' @@ -789,12 +928,12 @@ } #endif ''' - args = self.get_compiler_check_args() + args = self.get_compiler_check_args(CompileCheckMode.COMPILE) n = 'symbols_have_underscore_prefix' with self._build_wrapper(code, env, extra_args=args, mode='compile', want_output=True, temp_dir=env.scratch_dir) as p: if p.returncode != 0: m = 'BUG: Unable to compile {!r} check: {}' - raise RuntimeError(m.format(n, p.stdo)) + raise RuntimeError(m.format(n, p.stdout)) if not os.path.isfile(p.output_name): m = 'BUG: Can\'t find compiled test code for {!r} check' raise RuntimeError(m.format(n)) @@ -811,8 +950,8 @@ return False raise RuntimeError('BUG: {!r} check failed unexpectedly'.format(n)) - def _get_patterns(self, env, prefixes, suffixes, shared=False): - patterns = [] + def _get_patterns(self, env: 'Environment', prefixes: T.List[str], suffixes: T.List[str], shared: bool = False) -> T.List[str]: + patterns = [] # type: T.List[str] for p in prefixes: for s in suffixes: patterns.append(p + '{}.' 
+ s) @@ -828,7 +967,7 @@ patterns.append(p + '{}.so.[0-9]*.[0-9]*') return patterns - def get_library_naming(self, env, libtype: LibType, strict=False): + def get_library_naming(self, env: 'Environment', libtype: LibType, strict: bool = False) -> T.Tuple[str, ...]: ''' Get library prefixes and suffixes for the target platform ordered by priority @@ -875,8 +1014,8 @@ return tuple(patterns) @staticmethod - def _sort_shlibs_openbsd(libs): - filtered = [] + def _sort_shlibs_openbsd(libs: T.List[str]) -> T.List[str]: + filtered = [] # type: T.List[str] for lib in libs: # Validate file as a shared library of type libfoo.so.X.Y ret = lib.rsplit('.so.', maxsplit=1) @@ -891,7 +1030,7 @@ return sorted(filtered, key=float_cmp, reverse=True) @classmethod - def _get_trials_from_pattern(cls, pattern, directory, libname): + def _get_trials_from_pattern(cls, pattern: str, directory: str, libname: str) -> T.List[Path]: f = Path(directory) / pattern.format(libname) # Globbing for OpenBSD if '*' in pattern: @@ -901,7 +1040,7 @@ return [f] @staticmethod - def _get_file_from_list(env, files: T.List[str]) -> Path: + def _get_file_from_list(env: 'Environment', paths: T.List[Path]) -> Path: ''' We just check whether the library exists. We can't do a link check because the library might have unresolved symbols that require other @@ -909,31 +1048,30 @@ architecture. ''' # If not building on macOS for Darwin, do a simple file check - files = [Path(f) for f in files] if not env.machines.host.is_darwin() or not env.machines.build.is_darwin(): - for f in files: - if f.is_file(): - return f + for p in paths: + if p.is_file(): + return p # Run `lipo` and check if the library supports the arch we want - for f in files: - if not f.is_file(): + for p in paths: + if not p.is_file(): continue - archs = mesonlib.darwin_get_object_archs(f) + archs = mesonlib.darwin_get_object_archs(str(p)) if archs and env.machines.host.cpu_family in archs: - return f + return p else: mlog.debug('Rejected {}, supports {} but need {}' - .format(f, archs, env.machines.host.cpu_family)) + .format(p, archs, env.machines.host.cpu_family)) return None @functools.lru_cache() - def output_is_64bit(self, env): + def output_is_64bit(self, env: 'Environment') -> bool: ''' returns true if the output produced is 64-bit, false if 32-bit ''' return self.sizeof('void *', '', env) == 8 - def find_library_real(self, libname, env, extra_dirs, code, libtype: LibType): + def _find_library_real(self, libname: str, env: 'Environment', extra_dirs: T.List[str], code: str, libtype: LibType) -> T.Optional[T.List[str]]: # First try if we can just add the library as -l. # Gcc + co seem to prefer builtin lib dirs to -L dirs. # Only try to find std libs if no extra dirs specified. 
@@ -942,7 +1080,7 @@ if ((not extra_dirs and libtype is LibType.PREFER_SHARED) or libname in self.internal_libs): cargs = ['-l' + libname] - largs = self.get_allow_undefined_link_args() + largs = self.get_linker_always_args() + self.get_allow_undefined_link_args() extra_args = cargs + self.linker_to_compiler_args(largs) if self.links(code, env, extra_args=extra_args, disable_cache=True)[0]: @@ -966,16 +1104,17 @@ # Search in the specified dirs, and then in the system libraries for d in itertools.chain(extra_dirs, self.get_library_dirs(env, elf_class)): for p in patterns: - trial = self._get_trials_from_pattern(p, d, libname) - if not trial: + trials = self._get_trials_from_pattern(p, d, libname) + if not trials: continue - trial = self._get_file_from_list(env, trial) + trial = self._get_file_from_list(env, trials) if not trial: continue return [trial.as_posix()] return None - def find_library_impl(self, libname, env, extra_dirs, code, libtype: LibType): + def _find_library_impl(self, libname: str, env: 'Environment', extra_dirs: T.List[str], + code: str, libtype: LibType) -> T.Optional[T.List[str]]: # These libraries are either built-in or invalid if libname in self.ignore_libs: return [] @@ -983,24 +1122,26 @@ extra_dirs = [extra_dirs] key = (tuple(self.exelist), libname, tuple(extra_dirs), code, libtype) if key not in self.find_library_cache: - value = self.find_library_real(libname, env, extra_dirs, code, libtype) + value = self._find_library_real(libname, env, extra_dirs, code, libtype) self.find_library_cache[key] = value else: value = self.find_library_cache[key] if value is None: return None - return value[:] + return value.copy() - def find_library(self, libname, env, extra_dirs, libtype: LibType = LibType.PREFER_SHARED): - code = 'int main(void) { return 0; }' - return self.find_library_impl(libname, env, extra_dirs, code, libtype) + def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str], + libtype: LibType = LibType.PREFER_SHARED) -> T.Optional[T.List[str]]: + code = 'int main(void) { return 0; }\n' + return self._find_library_impl(libname, env, extra_dirs, code, libtype) - def find_framework_paths(self, env): + def find_framework_paths(self, env: 'Environment') -> T.List[str]: ''' These are usually /Library/Frameworks and /System/Library/Frameworks, unless you select a particular macOS SDK with the -isysroot flag. You can also add to this by setting -F in CFLAGS. ''' + # TODO: this really needs to be *AppleClang*, not just any clang. 
if self.id != 'clang': raise mesonlib.MesonException('Cannot find framework path with non-clang compiler') # Construct the compiler command-line @@ -1012,7 +1153,7 @@ os_env = os.environ.copy() os_env['LC_ALL'] = 'C' _, _, stde = mesonlib.Popen_safe(commands, env=os_env, stdin=subprocess.PIPE) - paths = [] + paths = [] # T.List[str] for line in stde.split('\n'): if '(framework directory)' not in line: continue @@ -1021,7 +1162,7 @@ paths.append(line[:-21].strip()) return paths - def find_framework_real(self, name, env, extra_dirs, allow_system): + def _find_framework_real(self, name: str, env: 'Environment', extra_dirs: T.List[str], allow_system: bool) -> T.Optional[T.List[str]]: code = 'int main(void) { return 0; }' link_args = [] for d in extra_dirs: @@ -1032,57 +1173,63 @@ link_args += ['-framework', name] if self.links(code, env, extra_args=(extra_args + link_args), disable_cache=True)[0]: return link_args + return None - def find_framework_impl(self, name, env, extra_dirs, allow_system): + def _find_framework_impl(self, name: str, env: 'Environment', extra_dirs: T.List[str], + allow_system: bool) -> T.Optional[T.List[str]]: if isinstance(extra_dirs, str): extra_dirs = [extra_dirs] key = (tuple(self.exelist), name, tuple(extra_dirs), allow_system) if key in self.find_framework_cache: value = self.find_framework_cache[key] else: - value = self.find_framework_real(name, env, extra_dirs, allow_system) + value = self._find_framework_real(name, env, extra_dirs, allow_system) self.find_framework_cache[key] = value if value is None: return None - return value[:] + return value.copy() - def find_framework(self, name, env, extra_dirs, allow_system=True): + def find_framework(self, name: str, env: 'Environment', extra_dirs: T.List[str], + allow_system: bool = True) -> T.Optional[T.List[str]]: ''' Finds the framework with the specified name, and returns link args for the same or returns None when the framework is not found. ''' + # TODO: maybe this belongs in clang? also, should probably check for macOS? if self.id != 'clang': raise mesonlib.MesonException('Cannot find frameworks with non-clang compiler') - return self.find_framework_impl(name, env, extra_dirs, allow_system) + return self._find_framework_impl(name, env, extra_dirs, allow_system) def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]: + # TODO: does this belong here or in GnuLike or maybe PosixLike? return [] def get_crt_link_args(self, crt_val: str, buildtype: str) -> T.List[str]: + # TODO: does this belong here or in GnuLike or maybe PosixLike? return [] - def thread_flags(self, env): + def thread_flags(self, env: 'Environment') -> T.List[str]: + # TODO: does this belong here or in GnuLike or maybe PosixLike? 
host_m = env.machines[self.for_machine] if host_m.is_haiku() or host_m.is_darwin(): return [] return ['-pthread'] - def thread_link_flags(self, env: 'Environment') -> T.List[str]: - return self.linker.thread_flags(env) + def linker_to_compiler_args(self, args: T.List[str]) -> T.List[str]: + return args.copy() - def linker_to_compiler_args(self, args): - return args - - def has_arguments(self, args, env, code, mode): + def has_arguments(self, args: T.List[str], env: 'Environment', code: str, + mode: str) -> T.Tuple[bool, bool]: return self.compiles(code, env, extra_args=args, mode=mode) - def has_multi_arguments(self, args, env): - for arg in args[:]: + def _has_multi_arguments(self, args: T.List[str], env: 'Environment', code: str) -> T.Tuple[bool, bool]: + new_args = [] # type: T.List[str] + for arg in args: # some compilers, e.g. GCC, don't warn for unsupported warning-disable # flags, so when we are testing a flag like "-Wno-forgotten-towel", also # check the equivalent enable flag too "-Wforgotten-towel" if arg.startswith('-Wno-'): - args.append('-W' + arg[5:]) + new_args.append('-W' + arg[5:]) if arg.startswith('-Wl,'): mlog.warning('{} looks like a linker argument, ' 'but has_argument and other similar methods only ' @@ -1092,20 +1239,25 @@ 'the compiler you are using. has_link_argument or ' 'other similar method can be used instead.' .format(arg)) - code = 'int i;\n' - return self.has_arguments(args, env, code, mode='compile') + new_args.append(arg) + return self.has_arguments(new_args, env, code, mode='compile') + + def has_multi_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]: + return self._has_multi_arguments(args, env, 'extern int i;\nint i;\n') - def has_multi_link_arguments(self, args, env): + def _has_multi_link_arguments(self, args: T.List[str], env: 'Environment', code: str) -> T.Tuple[bool, bool]: # First time we check for link flags we need to first check if we have # --fatal-warnings, otherwise some linker checks could give some # false positive. args = self.linker.fatal_warnings() + args args = self.linker_to_compiler_args(args) - code = 'int main(void) { return 0; }' return self.has_arguments(args, env, code, mode='link') + def has_multi_link_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]: + return self._has_multi_link_arguments(args, env, 'int main(void) { return 0; }\n') + @staticmethod - def concatenate_string_literals(s): + def _concatenate_string_literals(s: str) -> str: pattern = re.compile(r'(?P
<pre>.*([^\\]")|^")(?P<str1>([^\\"]|\\.)*)"\s+"(?P<str2>([^\\"]|\\.)*)(?P<post>".*)')
         ret = s
         m = pattern.match(ret)
@@ -1114,7 +1266,13 @@
             m = pattern.match(ret)
         return ret
 
-    def has_func_attribute(self, name, env):
+    def get_has_func_attribute_extra_args(self, name: str) -> T.List[str]:
+        # Most compilers (such as GCC and Clang) only warn about unknown or
+        # ignored attributes, so force an error. Overridden in GCC and Clang
+        # mixins.
+        return ['-Werror']
+
+    def has_func_attribute(self, name: str, env: 'Environment') -> T.Tuple[bool, bool]:
         # Just assume that if we're not on windows that dllimport and dllexport
         # don't work
         m = env.machines[self.for_machine]
@@ -1122,6 +1280,8 @@
             if name in ['dllimport', 'dllexport']:
                 return False, False
 
-        # Clang and GCC both return warnings if the __attribute__ is undefined,
-        # so set -Werror
-        return self.compiles(self.attribute_check_func(name), env, extra_args='-Werror')
+        return self.compiles(self.attribute_check_func(name), env,
+                             extra_args=self.get_has_func_attribute_extra_args(name))
+
+    def get_disable_assert_args(self) -> T.List[str]:
+        return ['-DNDEBUG']
diff -Nru meson-0.53.2/mesonbuild/compilers/mixins/compcert.py meson-0.57.0+really0.56.2/mesonbuild/compilers/mixins/compcert.py
--- meson-0.53.2/mesonbuild/compilers/mixins/compcert.py	1970-01-01 00:00:00.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/compilers/mixins/compcert.py	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,131 @@
+# Copyright 2012-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Representations specific to the CompCert C compiler family."""
+
+import os
+import re
+import typing as T
+
+if T.TYPE_CHECKING:
+    from ...environment import Environment
+    from ...compilers.compilers import Compiler
+else:
+    # This is a bit clever, for mypy we pretend that these mixins descend from
+    # Compiler, so we get all of the methods and attributes defined for us, but
+    # for runtime we make them descend from object (which all classes normally
+    # do). This gives us DRYer type checking, with no runtime impact
+    Compiler = object
+
+ccomp_buildtype_args = {
+    'plain': [''],
+    'debug': ['-O0', '-g'],
+    'debugoptimized': ['-O0', '-g'],
+    'release': ['-O3'],
+    'minsize': ['-Os'],
+    'custom': ['-Obranchless'],
+}  # type: T.Dict[str, T.List[str]]
+
+ccomp_optimization_args = {
+    '0': ['-O0'],
+    'g': ['-O0'],
+    '1': ['-O1'],
+    '2': ['-O2'],
+    '3': ['-O3'],
+    's': ['-Os']
+}  # type: T.Dict[str, T.List[str]]
+
+ccomp_debug_args = {
+    False: [],
+    True: ['-g']
+}  # type: T.Dict[bool, T.List[str]]
+
+# As of CompCert 20.04, these arguments should be passed to the underlying gcc linker (via -WUl,)
+# There are probably (many) more, but these are those used by picolibc
+ccomp_args_to_wul = [
+        r"^-ffreestanding$",
+        r"^-r$"
+] # type: T.List[str]
+
+class CompCertCompiler(Compiler):
+
+    def __init__(self) -> None:
+        self.id = 'ccomp'
+        # Assembly
+        self.can_compile_suffixes.add('s')
+        default_warn_args = []  # type: T.List[str]
+        self.warn_args = {'0': [],
+                          '1': default_warn_args,
+                          '2': default_warn_args + [],
+                          '3': default_warn_args + []}  # type: T.Dict[str, T.List[str]]
+
+    def get_always_args(self) -> T.List[str]:
+        return []
+
+    def get_pic_args(self) -> T.List[str]:
+        # As of now, CompCert does not support PIC
+        return []
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        return ccomp_buildtype_args[buildtype]
+
+    def get_pch_suffix(self) -> str:
+        return 'pch'
+
+    def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+        return []
+
+    def unix_args_to_native(self, args: T.List[str]) -> T.List[str]:
+        "Always returns a copy that can be independently mutated"
+        patched_args = []  # type: T.List[str]
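+        # Arguments matching one of the ccomp_args_to_wul patterns are wrapped in
+        # -WUl, so that CompCert forwards them to the underlying gcc linker.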
+        for arg in args:
+            added = 0
+            for ptrn in ccomp_args_to_wul:
+                if re.match(ptrn, arg):
+                    patched_args.append('-WUl,' + arg)
+                    added = 1
+            if not added:
+                patched_args.append(arg)
+        return patched_args
+
+    def thread_flags(self, env: 'Environment') -> T.List[str]:
+        return []
+
+    def get_preprocess_only_args(self) -> T.List[str]:
+        return ['-E']
+
+    def get_compile_only_args(self) -> T.List[str]:
+        return ['-c']
+
+    def get_coverage_args(self) -> T.List[str]:
+        return []
+
+    def get_no_stdinc_args(self) -> T.List[str]:
+        return ['-nostdinc']
+
+    def get_no_stdlib_link_args(self) -> T.List[str]:
+        return ['-nostdlib']
+
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        return ccomp_optimization_args[optimization_level]
+
+    def get_debug_args(self, is_debug: bool) -> T.List[str]:
+        return ccomp_debug_args[is_debug]
+
+    def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
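+        # Rewrite relative -I include paths so they are anchored at the build directory.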
+        for idx, i in enumerate(parameter_list):
+            if i[:2] == '-I':
+                parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+
+        return parameter_list
diff -Nru meson-0.53.2/mesonbuild/compilers/mixins/elbrus.py meson-0.57.0+really0.56.2/mesonbuild/compilers/mixins/elbrus.py
--- meson-0.53.2/mesonbuild/compilers/mixins/elbrus.py	2020-02-25 18:00:46.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/compilers/mixins/elbrus.py	2021-01-06 10:39:48.000000000 +0000
@@ -30,7 +30,8 @@
 class ElbrusCompiler(GnuLikeCompiler):
     # Elbrus compiler is nearly like GCC, but does not support
     # PCH, LTO, sanitizers and color output as of version 1.21.x.
-    def __init__(self):
+
+    def __init__(self) -> None:
         super().__init__()
         self.id = 'lcc'
         self.base_options = ['b_pgo', 'b_coverage',
diff -Nru meson-0.53.2/mesonbuild/compilers/mixins/emscripten.py meson-0.57.0+really0.56.2/mesonbuild/compilers/mixins/emscripten.py
--- meson-0.53.2/mesonbuild/compilers/mixins/emscripten.py	2020-01-23 12:51:19.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/compilers/mixins/emscripten.py	2021-01-06 10:39:48.000000000 +0000
@@ -17,22 +17,22 @@
 import os.path
 import typing as T
 
-from ...mesonlib import MesonException
+from ... import coredata
 
-class EmscriptenMixin:
-    def get_option_link_args(self, options):
-        return []
+if T.TYPE_CHECKING:
+    from ...environment import Environment
+    from ...compilers.compilers import Compiler
+else:
+    # This is a bit clever, for mypy we pretend that these mixins descend from
+    # Compiler, so we get all of the methods and attributes defined for us, but
+    # for runtime we make them descend from object (which all classes normally
+    # do). This gives us DRYer type checking, with no runtime impact
+    Compiler = object
 
-    def get_soname_args(self, *args, **kwargs):
-        raise MesonException('Emscripten does not support shared libraries.')
 
-    def get_allow_undefined_link_args(self) -> T.List[str]:
-        return ['-s', 'ERROR_ON_UNDEFINED_SYMBOLS=0']
+class EmscriptenMixin(Compiler):
 
-    def get_linker_output_args(self, output: str) -> T.List[str]:
-        return ['-o', output]
-
-    def _get_compile_output(self, dirname, mode):
+    def _get_compile_output(self, dirname: str, mode: str) -> str:
         # In pre-processor mode, the output is sent to stdout and discarded
         if mode == 'preprocess':
             return None
@@ -44,3 +44,24 @@
         else:
             suffix = 'wasm'
         return os.path.join(dirname, 'output.' + suffix)
+
+    def thread_flags(self, env: 'Environment') -> T.List[str]:
+        return ['-s', 'USE_PTHREADS=1']
+
+    def thread_link_flags(self, env: 'Environment') -> T.List[str]:
+        args = ['-s', 'USE_PTHREADS=1']
+        count = env.coredata.compiler_options[self.for_machine][self.language]['thread_count'].value  # type: int
+        if count:
+            args.extend(['-s', 'PTHREAD_POOL_SIZE={}'.format(count)])
+        return args
+
+    def get_options(self) -> 'coredata.OptionDictType':
+        opts = super().get_options()
+        opts.update({
+            'thread_count': coredata.UserIntegerOption(
+                'Number of threads to use in web assembly, set to 0 to disable',
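+                # The value tuple is (minimum, maximum, default).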
+                (0, None, 4),  # Default was picked at random
+            ),
+        })
+
+        return opts
diff -Nru meson-0.53.2/mesonbuild/compilers/mixins/gnu.py meson-0.57.0+really0.56.2/mesonbuild/compilers/mixins/gnu.py
--- meson-0.53.2/mesonbuild/compilers/mixins/gnu.py	2020-01-23 12:51:19.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/compilers/mixins/gnu.py	2021-01-06 10:39:48.000000000 +0000
@@ -26,8 +26,14 @@
 from ... import mlog
 
 if T.TYPE_CHECKING:
-    from ...coredata import UserOption  # noqa: F401
     from ...environment import Environment
+    from ..compilers import Compiler
+else:
+    # This is a bit clever, for mypy we pretend that these mixins descend from
+    # Compiler, so we get all of the methods and attributes defined for us, but
+    # for runtime we make them descend from object (which all classes normally
+    # do). This gives us DRYer type checking, with no runtime impact
+    Compiler = object
 
 # XXX: prevent circular references.
 # FIXME: this really is a posix interface not a c-like interface
@@ -106,7 +112,7 @@
     )
     stdout = p.stdout.read().decode('utf-8', errors='replace')
     parse_state = 0
-    paths = []
+    paths = []  # type: T.List[str]
     for line in stdout.split('\n'):
         line = line.strip(' \n\r\t')
         if parse_state == 0:
@@ -129,7 +135,7 @@
     return paths
 
 
-class GnuLikeCompiler(metaclass=abc.ABCMeta):
+class GnuLikeCompiler(Compiler, metaclass=abc.ABCMeta):
     """
     GnuLikeCompiler is a common interface to all compilers implementing
     the GNU-style commandline interface. This includes GCC, Clang
@@ -139,13 +145,15 @@
 
     LINKER_PREFIX = '-Wl,'
 
-    def __init__(self):
-        self.base_options = ['b_pch', 'b_lto', 'b_pgo', 'b_sanitize', 'b_coverage',
+    def __init__(self) -> None:
+        self.base_options = ['b_pch', 'b_lto', 'b_pgo', 'b_coverage',
                              'b_ndebug', 'b_staticpic', 'b_pie']
         if not (self.info.is_windows() or self.info.is_cygwin() or self.info.is_openbsd()):
             self.base_options.append('b_lundef')
         if not self.info.is_windows() or self.info.is_cygwin():
             self.base_options.append('b_asneeded')
+        if not self.info.is_hurd():
+            self.base_options.append('b_sanitize')
         # All GCC-like backends can do assembly
         self.can_compile_suffixes.add('s')
 
@@ -162,14 +170,14 @@
 
     @abc.abstractmethod
     def get_optimization_args(self, optimization_level: str) -> T.List[str]:
-        raise NotImplementedError("get_optimization_args not implemented")
+        pass
 
     def get_debug_args(self, is_debug: bool) -> T.List[str]:
         return clike_debug_args[is_debug]
 
     @abc.abstractmethod
     def get_pch_suffix(self) -> str:
-        raise NotImplementedError("get_pch_suffix not implemented")
+        pass
 
     def split_shlib_to_parts(self, fname: str) -> T.Tuple[str, str]:
         return os.path.dirname(fname), fname
@@ -182,7 +190,7 @@
 
     @abc.abstractmethod
     def openmp_flags(self) -> T.List[str]:
-        raise NotImplementedError("openmp_flags not implemented")
+        pass
 
     def gnu_symbol_visibility_args(self, vistype: str) -> T.List[str]:
         return gnu_symbol_visibility_args[vistype]
@@ -211,6 +219,20 @@
             return ['-mwindows' if value else '-mconsole']
         return []
 
+    def get_win_subsystem_args(self, value: str) -> T.List[str]:
+        args = []
+        if self.info.is_windows() or self.info.is_cygwin():
+            if 'windows' in value:
+                args = ['-Wl,--subsystem,windows']
+            elif 'console' in value:
+                args = ['-Wl,--subsystem,console']
+            else:
+                raise mesonlib.MesonException('Only "windows" and "console" are supported for win_subsystem with MinGW, not "{}".'.format(value))
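+        # A subsystem version may follow after a comma (for example "windows,6.0"),
+        # which maps to --subsystem,windows:6.0.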
+        if ',' in value:
+            args[-1] = args[-1] + ':' + value.split(',')[1]
+        return args
+
+
     def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
         for idx, i in enumerate(parameter_list):
             if i[:2] == '-I' or i[:2] == '-L':
@@ -221,12 +243,10 @@
     @functools.lru_cache()
     def _get_search_dirs(self, env: 'Environment') -> str:
         extra_args = ['--print-search-dirs']
-        stdo = None
         with self._build_wrapper('', env, extra_args=extra_args,
                                  dependencies=None, mode='compile',
                                  want_output=True) as p:
-            stdo = p.stdo
-        return stdo
+            return p.stdout
 
     def _split_fetch_real_dirs(self, pathstr: str) -> T.List[str]:
         # We need to use the path separator used by the compiler for printing
@@ -289,7 +309,7 @@
     def get_output_args(self, target: str) -> T.List[str]:
         return ['-o', target]
 
-    def get_dependency_gen_args(self, outtarget, outfile):
+    def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
         return ['-MD', '-MQ', outtarget, '-MF', outfile]
 
     def get_compile_only_args(self) -> T.List[str]:
@@ -304,8 +324,15 @@
 
     @classmethod
     def use_linker_args(cls, linker: str) -> T.List[str]:
+        if linker not in {'gold', 'bfd', 'lld'}:
+            raise mesonlib.MesonException(
+                'Unsupported linker, only bfd, gold, and lld are supported, '
+                'not {}.'.format(linker))
         return ['-fuse-ld={}'.format(linker)]
 
+    def get_coverage_args(self) -> T.List[str]:
+        return ['--coverage']
+
 
 class GnuCompiler(GnuLikeCompiler):
     """
@@ -313,7 +340,7 @@
     Compilers imitating GCC (Clang/Intel) should use the GnuLikeCompiler ABC.
     """
 
-    def __init__(self, defines: T.Dict[str, str]):
+    def __init__(self, defines: T.Optional[T.Dict[str, str]]):
         super().__init__()
         self.id = 'gcc'
         self.defines = defines or {}
@@ -325,6 +352,7 @@
         return []
 
     def get_warn_args(self, level: str) -> T.List[str]:
+        # Mypy doesn't understand cooperative inheritance
         args = super().get_warn_args(level)
         if mesonlib.version_compare(self.version, '<4.8.0') and '-Wpedantic' in args:
             # -Wpedantic was added in 4.8.0
@@ -349,14 +377,20 @@
     def openmp_flags(self) -> T.List[str]:
         return ['-fopenmp']
 
-    def has_arguments(self, args, env, code, mode):
+    def has_arguments(self, args: T.List[str], env: 'Environment', code: str,
+                      mode: str) -> T.Tuple[bool, bool]:
         # For some compiler command line arguments, the GNU compilers will
         # emit a warning on stderr indicating that an option is valid for
         # another language, but still completes with exit_success
-        with self._build_wrapper(code, env, args, None, mode, disable_cache=False, want_output=True) as p:
+        with self._build_wrapper(code, env, args, None, mode) as p:
             result = p.returncode == 0
-            if self.language in {'cpp', 'objcpp'} and 'is valid for C/ObjC' in p.stde:
+            if self.language in {'cpp', 'objcpp'} and 'is valid for C/ObjC' in p.stderr:
                 result = False
-            if self.language in {'c', 'objc'} and 'is valid for C++/ObjC++' in p.stde:
+            if self.language in {'c', 'objc'} and 'is valid for C++/ObjC++' in p.stderr:
                 result = False
         return result, p.cached
+
+    def get_has_func_attribute_extra_args(self, name: str) -> T.List[str]:
+        # GCC only warns about unknown or ignored attributes, so force an
+        # error.
+        return ['-Werror=attributes']
diff -Nru meson-0.53.2/mesonbuild/compilers/mixins/intel.py meson-0.57.0+really0.56.2/mesonbuild/compilers/mixins/intel.py
--- meson-0.53.2/mesonbuild/compilers/mixins/intel.py	2020-01-23 12:51:19.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/compilers/mixins/intel.py	2021-01-06 10:39:48.000000000 +0000
@@ -24,11 +24,14 @@
 import typing as T
 
 from ... import mesonlib
+from ..compilers import CompileCheckMode
 from .gnu import GnuLikeCompiler
 from .visualstudio import VisualStudioLikeCompiler
 
 if T.TYPE_CHECKING:
-    import subprocess  # noqa: F401
+    from ...arglist import CompilerArgs
+    from ...dependencies import Dependency
+    from ...environment import Environment
 
 # XXX: avoid circular dependencies
 # TODO: this belongs in a posix compiler class
@@ -69,7 +72,7 @@
         's': ['-Os'],
     }
 
-    def __init__(self):
+    def __init__(self) -> None:
         super().__init__()
         # As of 19.0.0 ICC doesn't have sanitizer, color, or lto support.
         #
@@ -97,21 +100,16 @@
         else:
             return ['-openmp']
 
-    def compiles(self, *args, **kwargs) -> T.Tuple[bool, bool]:
-        # This covers a case that .get('foo', []) doesn't, that extra_args is
-        # defined and is None
-        extra_args = kwargs.get('extra_args') or []
-        kwargs['extra_args'] = [
-            extra_args,
+    def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]:
+        extra_args = [
             '-diag-error', '10006',  # ignoring unknown option
             '-diag-error', '10148',  # Option not supported
             '-diag-error', '10155',  # ignoring argument required
             '-diag-error', '10156',  # ignoring not argument allowed
             '-diag-error', '10157',  # Ignoring argument of the wrong type
             '-diag-error', '10158',  # Argument must be separate. Can be hit by trying an option like -foo-bar=foo when -foo=bar is a valid option but -foo-bar isn't
-            '-diag-error', '1292',   # unknown __attribute__
         ]
-        return super().compiles(*args, **kwargs)
+        return super().get_compiler_check_args(mode) + extra_args
 
     def get_profile_generate_args(self) -> T.List[str]:
         return ['-prof-gen=threadsafe']
@@ -125,6 +123,9 @@
     def get_optimization_args(self, optimization_level: str) -> T.List[str]:
         return self.OPTIM_ARGS[optimization_level]
 
+    def get_has_func_attribute_extra_args(self, name: str) -> T.List[str]:
+        return ['-diag-error', '1292']
+
 
 class IntelVisualStudioLikeCompiler(VisualStudioLikeCompiler):
 
@@ -140,23 +141,22 @@
     }  # type: T.Dict[str, T.List[str]]
 
     OPTIM_ARGS = {
-        '0': ['/O0'],
-        'g': ['/O0'],
+        '0': ['/Od'],
+        'g': ['/Od'],
         '1': ['/O1'],
         '2': ['/O2'],
         '3': ['/O3'],
         's': ['/Os'],
     }
 
-    def __init__(self, target: str):
+    def __init__(self, target: str) -> None:
         super().__init__(target)
         self.id = 'intel-cl'
 
-    def compile(self, code, *, extra_args: T.Optional[T.List[str]] = None, **kwargs) -> T.Iterator['subprocess.Popen']:
-        # This covers a case that .get('foo', []) doesn't, that extra_args is
-        if kwargs.get('mode', 'compile') != 'link':
-            extra_args = extra_args.copy() if extra_args is not None else []
-            extra_args.extend([
+    def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]:
+        args = super().get_compiler_check_args(mode)
+        if mode is not CompileCheckMode.LINK:
+            args.extend([
                 '/Qdiag-error:10006',  # ignoring unknown option
                 '/Qdiag-error:10148',  # Option not supported
                 '/Qdiag-error:10155',  # ignoring argument required
@@ -164,7 +164,7 @@
                 '/Qdiag-error:10157',  # Ignoring argument of the wrong type
                 '/Qdiag-error:10158',  # Argument must be separate. Can be hit by trying an option like -foo-bar=foo when -foo=bar is a valid option but -foo-bar isn't
             ])
-        return super().compile(code, extra_args, **kwargs)
+        return args
 
     def get_toolset_version(self) -> T.Optional[str]:
         # Avoid circular dependencies....
@@ -186,3 +186,6 @@
 
     def get_optimization_args(self, optimization_level: str) -> T.List[str]:
         return self.OPTIM_ARGS[optimization_level]
+
+    def get_pch_base_name(self, header: str) -> str:
+        return os.path.basename(header)
\ No newline at end of file
diff -Nru meson-0.53.2/mesonbuild/compilers/mixins/islinker.py meson-0.57.0+really0.56.2/mesonbuild/compilers/mixins/islinker.py
--- meson-0.53.2/mesonbuild/compilers/mixins/islinker.py	2020-01-23 12:51:19.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/compilers/mixins/islinker.py	2021-01-06 10:39:48.000000000 +0000
@@ -20,7 +20,6 @@
 classes for those cases.
 """
 
-import os
 import typing as T
 
 from ... import mesonlib
@@ -28,20 +27,16 @@
 if T.TYPE_CHECKING:
     from ...coredata import OptionDictType
     from ...environment import Environment
+    from ...compilers.compilers import Compiler
+else:
+    # This is a bit clever, for mypy we pretend that these mixins descend from
+    # Compiler, so we get all of the methods and attributes defined for us, but
+    # for runtime we make them descend from object (which all classes normally
+    # do). This gives us DRYer type checking, with no runtime impact
+    Compiler = object
 
 
-class LinkerEnvVarsMixin:
-
-    """Mixin reading LDFLAGS from the environment."""
-
-    def get_linker_args_from_envvars(self) -> T.List[str]:
-        flags = os.environ.get('LDFLAGS')
-        if not flags:
-            return []
-        return mesonlib.split_args(flags)
-
-
-class BasicLinkerIsCompilerMixin:
+class BasicLinkerIsCompilerMixin(Compiler):
 
     """Provides a baseline of methods that a linker would implement.
 
@@ -102,8 +97,7 @@
         return []
 
     def get_coverage_link_args(self) -> T.List[str]:
-        m = "Linker {} doesn't implement coverage data generation.".format(self.id)
-        raise mesonlib.EnvironmentException(m)
+        return []
 
     def no_undefined_link_args(self) -> T.List[str]:
         return []
@@ -111,22 +105,28 @@
     def bitcode_args(self) -> T.List[str]:
         raise mesonlib.MesonException("This linker doesn't support bitcode bundles")
 
-    def get_soname_args(self, for_machine: 'mesonlib.MachineChoice',
-                        prefix: str, shlib_name: str, suffix: str, soversion: str,
+    def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+                        suffix: str, soversion: str,
                         darwin_versions: T.Tuple[str, str],
                         is_shared_module: bool) -> T.List[str]:
         raise mesonlib.MesonException("This linker doesn't support soname args")
 
     def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
                          rpath_paths: str, build_rpath: str,
-                         install_rpath: str) -> T.List[str]:
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        return ([], set())
+
+    def get_asneeded_args(self) -> T.List[str]:
         return []
 
-    def get_linker_debug_crt_args(self) -> T.List[str]:
+    def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]:
         return []
 
-    def get_asneeded_args(self) -> T.List[str]:
+    def get_link_debugfile_name(self, target: str) -> str:
+        return ''
+
+    def thread_flags(self, env: 'Environment') -> T.List[str]:
         return []
 
-    def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]:
+    def thread_link_flags(self, env: 'Environment') -> T.List[str]:
         return []
diff -Nru meson-0.53.2/mesonbuild/compilers/mixins/pgi.py meson-0.57.0+really0.56.2/mesonbuild/compilers/mixins/pgi.py
--- meson-0.53.2/mesonbuild/compilers/mixins/pgi.py	2020-01-23 12:51:19.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/compilers/mixins/pgi.py	2021-01-06 10:39:48.000000000 +0000
@@ -16,10 +16,20 @@
 
 import typing as T
 import os
-from pathlib import Path
+from ..._pathlib import Path
 
 from ..compilers import clike_debug_args, clike_optimization_args
 
+if T.TYPE_CHECKING:
+    from ...environment import Environment
+    from ...compilers.compilers import Compiler
+else:
+    # This is a bit clever, for mypy we pretend that these mixins descend from
+    # Compiler, so we get all of the methods and attributes defined for us, but
+    # for runtime we make them descend from object (which all classes normally
+    # do). This gives us DRYer type checking, with no runtime impact
+    Compiler = object
+
 pgi_buildtype_args = {
     'plain': [],
     'debug': [],
@@ -30,8 +40,9 @@
 }  # type: T.Dict[str, T.List[str]]
 
 
-class PGICompiler:
-    def __init__(self):
+class PGICompiler(Compiler):
+
+    def __init__(self) -> None:
         self.base_options = ['b_pch']
         self.id = 'pgi'
 
@@ -39,7 +50,8 @@
         self.warn_args = {'0': [],
                           '1': default_warn_args,
                           '2': default_warn_args,
-                          '3': default_warn_args}
+                          '3': default_warn_args
+        }  # type: T.Dict[str, T.List[str]]
 
     def get_module_incdir_args(self) -> T.Tuple[str]:
         return ('-module', )
@@ -74,9 +86,6 @@
                 parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
         return parameter_list
 
-    def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
-        return []
-
     def get_always_args(self) -> T.List[str]:
         return []
 
@@ -94,6 +103,6 @@
         else:
             return []
 
-    def thread_flags(self, env):
+    def thread_flags(self, env: 'Environment') -> T.List[str]:
         # PGI cannot accept -pthread, it's already threaded
         return []
diff -Nru meson-0.53.2/mesonbuild/compilers/mixins/visualstudio.py meson-0.57.0+really0.56.2/mesonbuild/compilers/mixins/visualstudio.py
--- meson-0.53.2/mesonbuild/compilers/mixins/visualstudio.py	2020-01-23 12:51:19.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/compilers/mixins/visualstudio.py	2021-01-06 10:39:48.000000000 +0000
@@ -20,11 +20,19 @@
 import os
 import typing as T
 
+from ... import arglist
 from ... import mesonlib
 from ... import mlog
 
 if T.TYPE_CHECKING:
     from ...environment import Environment
+    from .clike import CLikeCompiler as Compiler
+else:
+    # This is a bit clever, for mypy we pretend that these mixins descend from
+    # Compiler, so we get all of the methods and attributes defined for us, but
+    # for runtime we make them descend from object (which all classes normally
+    # do). This gives up DRYer type checking, with no runtime impact
+    Compiler = object
 
 vs32_instruction_set_args = {
     'mmx': ['/arch:SSE'], # There does not seem to be a flag just for MMX
@@ -54,29 +62,38 @@
 
 msvc_buildtype_args = {
     'plain': [],
-    'debug': ["/ZI", "/Ob0", "/Od", "/RTC1"],
-    'debugoptimized': ["/Zi", "/Ob1"],
-    'release': ["/Ob2", "/Gw"],
-    'minsize': ["/Zi", "/Gw"],
+    'debug': ["/ZI", "/RTC1"],
+    'debugoptimized': [],
+    'release': [],
+    'minsize': [],
     'custom': [],
 }  # type: T.Dict[str, T.List[str]]
 
+# Clang-cl doesn't have /ZI, and /Zi and /Z7 do the same thing
+# quoting the docs (https://clang.llvm.org/docs/MSVCCompatibility.html):
+#
+# Clang emits relatively complete CodeView debug information if /Z7 or /Zi is
+# passed. Microsoft’s link.exe will transform the CodeView debug information
+# into a PDB
+clangcl_buildtype_args = msvc_buildtype_args.copy()
+clangcl_buildtype_args['debug'] = ['/Zi', '/Ob0', '/Od', '/RTC1']
+
 msvc_optimization_args = {
-    '0': [],
+    '0': ['/Od', '/Ob0'],
     'g': ['/O0'],
     '1': ['/O1'],
-    '2': ['/O2'],
-    '3': ['/O2'],
-    's': ['/O1'], # Implies /Os.
+    '2': ['/O2', '/Ob1'],
+    '3': ['/O2', '/Ob2', '/Gw'],
+    's': ['/O1', '/Gw'], # Implies /Os.
 }  # type: T.Dict[str, T.List[str]]
 
 msvc_debug_args = {
     False: [],
-    True: []  # Fixme!
+    True: ['/Zi']
 }  # type: T.Dict[bool, T.List[str]]
 
 
-class VisualStudioLikeCompiler(metaclass=abc.ABCMeta):
+class VisualStudioLikeCompiler(Compiler, metaclass=abc.ABCMeta):
 
     """A common interface for all compilers implementing an MSVC-style
     interface.
@@ -88,10 +105,8 @@
 
     std_warn_args = ['/W3']
     std_opt_args = ['/O2']
-    # XXX: this is copied in this patch only to avoid circular dependencies
-    #ignore_libs = unixy_compiler_internal_libs
-    ignore_libs = ('m', 'c', 'pthread', 'dl', 'rt', 'execinfo')
-    internal_libs = ()
+    ignore_libs = arglist.UNIXY_COMPILER_INTERNAL_LIBS + ['execinfo']
+    internal_libs = []  # type: T.List[str]
 
     crt_args = {
         'none': [],
@@ -105,7 +120,7 @@
     # See: https://ninja-build.org/manual.html#_deps
     always_args = ['/nologo', '/showIncludes']
     warn_args = {
-        '0': ['/W1'],
+        '0': [],
         '1': ['/W2'],
         '2': ['/W3'],
         '3': ['/W4'],
@@ -122,20 +137,19 @@
             self.machine = 'x64'
         elif '86' in target:
             self.machine = 'x86'
+        elif 'aarch64' in target:
+            self.machine = 'arm64'
+        elif 'arm' in target:
+            self.machine = 'arm'
         else:
             self.machine = target
+        assert self.linker is not None
         self.linker.machine = self.machine
 
     # Override CCompiler.get_always_args
     def get_always_args(self) -> T.List[str]:
         return self.always_args
 
-    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
-        args = msvc_buildtype_args[buildtype]
-        if self.id == 'msvc' and mesonlib.version_compare(self.version, '<18.0'):
-            args = [arg for arg in args if arg != '/Gw']
-        return args
-
     def get_pch_suffix(self) -> str:
         return 'pch'
 
@@ -145,10 +159,12 @@
         pchname = '.'.join(chopped)
         return pchname
 
+    def get_pch_base_name(self, header: str) -> str:
+        # This needs to be implemented by inheriting classes
+        raise NotImplementedError
+
     def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
-        base = os.path.basename(header)
-        if self.id == 'clang-cl':
-            base = header
+        base = self.get_pch_base_name(header)
         pchname = self.get_pch_name(header)
         return ['/FI' + base, '/Yu' + base, '/Fp' + os.path.join(pch_dir, pchname)]
 
@@ -166,14 +182,17 @@
             return ['/Fe' + target]
         return ['/Fo' + target]
 
-    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
-        return msvc_optimization_args[optimization_level]
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        return msvc_buildtype_args[buildtype]
 
     def get_debug_args(self, is_debug: bool) -> T.List[str]:
         return msvc_debug_args[is_debug]
 
-    def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
-        return []
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        args = msvc_optimization_args[optimization_level]
+        if mesonlib.version_compare(self.version, '<18.0'):
+            args = [arg for arg in args if arg != '/Gw']
+        return args
 
     def linker_to_compiler_args(self, args: T.List[str]) -> T.List[str]:
         return ['/link'] + args
@@ -186,6 +205,9 @@
         else:
             return ['/SUBSYSTEM:CONSOLE']
 
+    def get_win_subsystem_args(self, value: str) -> T.List[str]:
+        return ['/SUBSYSTEM:' + value.upper()]
+
     def get_pic_args(self) -> T.List[str]:
         return [] # PIC is handled by the loader on Windows
 
@@ -200,13 +222,12 @@
         objname = os.path.splitext(pchname)[0] + '.obj'
         return objname, ['/Yc' + header, '/Fp' + pchname, '/Fo' + objname]
 
-    def gen_import_library_args(self, implibname: str) -> T.List[str]:
-        "The name of the outputted import library"
-        return ['/IMPLIB:' + implibname]
-
     def openmp_flags(self) -> T.List[str]:
         return ['/openmp']
 
+    def openmp_link_flags(self) -> T.List[str]:
+        return []
+
     # FIXME, no idea what these should be.
     def thread_flags(self, env: 'Environment') -> T.List[str]:
         return []
@@ -283,37 +304,22 @@
     # Visual Studio is special. It ignores some arguments it does not
     # understand and you can't tell it to error out on those.
     # http://stackoverflow.com/questions/15259720/how-can-i-make-the-microsoft-c-compiler-treat-unknown-flags-as-errors-rather-t
-    def has_arguments(self, args: T.List[str], env: 'Environment', code, mode: str) -> T.Tuple[bool, bool]:
+    def has_arguments(self, args: T.List[str], env: 'Environment', code: str, mode: str) -> T.Tuple[bool, bool]:
         warning_text = '4044' if mode == 'link' else '9002'
-        if self.id == 'clang-cl' and mode != 'link':
-            args = args + ['-Werror=unknown-argument']
         with self._build_wrapper(code, env, extra_args=args, mode=mode) as p:
             if p.returncode != 0:
                 return False, p.cached
-            return not(warning_text in p.stde or warning_text in p.stdo), p.cached
+            return not(warning_text in p.stderr or warning_text in p.stdout), p.cached
 
     def get_compile_debugfile_args(self, rel_obj: str, pch: bool = False) -> T.List[str]:
         pdbarr = rel_obj.split('.')[:-1]
         pdbarr += ['pdb']
         args = ['/Fd' + '.'.join(pdbarr)]
-        # When generating a PDB file with PCH, all compile commands write
-        # to the same PDB file. Hence, we need to serialize the PDB
-        # writes using /FS since we do parallel builds. This slows down the
-        # build obviously, which is why we only do this when PCH is on.
-        # This was added in Visual Studio 2013 (MSVC 18.0). Before that it was
-        # always on: https://msdn.microsoft.com/en-us/library/dn502518.aspx
-        if pch and self.id == 'msvc' and mesonlib.version_compare(self.version, '>=18.0'):
-            args = ['/FS'] + args
         return args
 
     def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[str]]:
         if self.is_64:
             return vs64_instruction_set_args.get(instruction_set, None)
-        if self.id == 'msvc' and self.version.split('.')[0] == '16' and instruction_set == 'avx':
-            # VS documentation says that this exists and should work, but
-            # it does not. The headers do not contain AVX intrinsics
-            # and the can not be called.
-            return None
         return vs32_instruction_set_args.get(instruction_set, None)
 
     def _calculate_toolset_version(self, version: int) -> T.Optional[str]:
@@ -341,10 +347,6 @@
         return None
 
     def get_toolset_version(self) -> T.Optional[str]:
-        if self.id == 'clang-cl':
-            # I have no idea
-            return '14.1'
-
         # See boost/config/compiler/visualc.cpp for up to date mapping
         try:
             version = int(''.join(self.version.split('.')[0:2]))
@@ -360,18 +362,23 @@
     def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]:
         if crt_val in self.crt_args:
             return self.crt_args[crt_val]
-        assert(crt_val == 'from_buildtype')
+        assert(crt_val in ['from_buildtype', 'static_from_buildtype'])
+        dbg = 'mdd'
+        rel = 'md'
+        if crt_val == 'static_from_buildtype':
+            dbg = 'mtd'
+            rel = 'mt'
         # Match what build type flags used to do.
         if buildtype == 'plain':
             return []
         elif buildtype == 'debug':
-            return self.crt_args['mdd']
+            return self.crt_args[dbg]
         elif buildtype == 'debugoptimized':
-            return self.crt_args['md']
+            return self.crt_args[rel]
         elif buildtype == 'release':
-            return self.crt_args['md']
+            return self.crt_args[rel]
         elif buildtype == 'minsize':
-            return self.crt_args['md']
+            return self.crt_args[rel]
         else:
             assert(buildtype == 'custom')
             raise mesonlib.EnvironmentException('Requested C runtime based on buildtype, but buildtype is "custom".')
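The 'static_from_buildtype' branch above only changes which pair of CRT keys feeds the existing buildtype switch. A compact sketch of the resulting mapping, assuming the usual MSVC CRT switches (/MD, /MDd, /MT, /MTd) behind those keys:

import typing as T

crt_args = {
    'md': ['/MD'], 'mdd': ['/MDd'], 'mt': ['/MT'], 'mtd': ['/MTd'],
}  # type: T.Dict[str, T.List[str]]

def crt_from_buildtype(crt_val: str, buildtype: str) -> T.List[str]:
    # 'static_from_buildtype' selects the static CRT pair, otherwise the dynamic one.
    dbg, rel = ('mtd', 'mt') if crt_val == 'static_from_buildtype' else ('mdd', 'md')
    if buildtype == 'plain':
        return []
    if buildtype == 'debug':
        return crt_args[dbg]
    return crt_args[rel]  # debugoptimized, release, minsize

print(crt_from_buildtype('from_buildtype', 'debug'))            # ['/MDd']
print(crt_from_buildtype('static_from_buildtype', 'release'))   # ['/MT']
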
@@ -384,6 +391,58 @@
     def get_argument_syntax(self) -> str:
         return 'msvc'
 
-    @classmethod
-    def use_linker_args(cls, linker: str) -> T.List[str]:
-        return []
+
+class MSVCCompiler(VisualStudioLikeCompiler):
+
+    """Spcific to the Microsoft Compilers."""
+
+    def __init__(self, target: str):
+        super().__init__(target)
+        self.id = 'msvc'
+
+    def get_compile_debugfile_args(self, rel_obj: str, pch: bool = False) -> T.List[str]:
+        args = super().get_compile_debugfile_args(rel_obj, pch)
+        # When generating a PDB file with PCH, all compile commands write
+        # to the same PDB file. Hence, we need to serialize the PDB
+        # writes using /FS since we do parallel builds. This slows down the
+        # build obviously, which is why we only do this when PCH is on.
+        # This was added in Visual Studio 2013 (MSVC 18.0). Before that it was
+        # always on: https://msdn.microsoft.com/en-us/library/dn502518.aspx
+        if pch and mesonlib.version_compare(self.version, '>=18.0'):
+            args = ['/FS'] + args
+        return args
+
+    def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[str]]:
+        if self.version.split('.')[0] == '16' and instruction_set == 'avx':
+            # VS documentation says that this exists and should work, but
+            # it does not. The headers do not contain AVX intrinsics
+            # and they can not be called.
+            return None
+        return super().get_instruction_set_args(instruction_set)
+
+    def get_pch_base_name(self, header: str) -> str:
+        return os.path.basename(header)
+
+
+class ClangClCompiler(VisualStudioLikeCompiler):
+
+    """Spcific to Clang-CL."""
+
+    def __init__(self, target: str):
+        super().__init__(target)
+        self.id = 'clang-cl'
+
+    def has_arguments(self, args: T.List[str], env: 'Environment', code: str, mode: str) -> T.Tuple[bool, bool]:
+        if mode != 'link':
+            args = args + ['-Werror=unknown-argument']
+        return super().has_arguments(args, env, code, mode)
+
+    def get_toolset_version(self) -> T.Optional[str]:
+        # XXX: what is the right thing to do here?
+        return '14.1'
+
+    def get_pch_base_name(self, header: str) -> str:
+        return header
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        return clangcl_buildtype_args[buildtype]
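The new MSVCCompiler / ClangClCompiler split replaces the `self.id == 'msvc'` / `'clang-cl'` branches of the shared class with hooks that each subclass overrides. A minimal standalone sketch of that pattern; the class names and the simplified PCH file naming here are illustrative, not Meson's real code:

import os
import typing as T

class VisualStudioLike:
    def get_pch_base_name(self, header: str) -> str:
        # Each concrete compiler decides how the PCH header is referenced.
        raise NotImplementedError

    def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
        base = self.get_pch_base_name(header)
        return ['/FI' + base, '/Yu' + base, '/Fp' + os.path.join(pch_dir, base + '.pch')]

class MsvcLike(VisualStudioLike):
    def get_pch_base_name(self, header: str) -> str:
        return os.path.basename(header)  # cl wants just the file name

class ClangClLike(VisualStudioLike):
    def get_pch_base_name(self, header: str) -> str:
        return header                    # clang-cl wants the path as given

print(MsvcLike().get_pch_use_args('pch', 'inc/common.h'))
print(ClangClLike().get_pch_use_args('pch', 'inc/common.h'))
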
diff -Nru meson-0.53.2/mesonbuild/compilers/mixins/xc16.py meson-0.57.0+really0.56.2/mesonbuild/compilers/mixins/xc16.py
--- meson-0.53.2/mesonbuild/compilers/mixins/xc16.py	1970-01-01 00:00:00.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/compilers/mixins/xc16.py	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,127 @@
+# Copyright 2012-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Representations specific to the Microchip XC16 C compiler family."""
+
+import os
+import typing as T
+
+from ...mesonlib import EnvironmentException
+
+if T.TYPE_CHECKING:
+    from ...environment import Environment
+    from ...compilers.compilers import Compiler
+else:
+    # This is a bit clever, for mypy we pretend that these mixins descend from
+    # Compiler, so we get all of the methods and attributes defined for us, but
+    # for runtime we make them descend from object (which all classes normally
+    # do). This gives us DRYer type checking, with no runtime impact
+    Compiler = object
+
+xc16_buildtype_args = {
+    'plain': [],
+    'debug': [],
+    'debugoptimized': [],
+    'release': [],
+    'minsize': [],
+    'custom': [],
+}  # type: T.Dict[str, T.List[str]]
+
+xc16_optimization_args = {
+    '0': ['-O0'],
+    'g': ['-O0'],
+    '1': ['-O1'],
+    '2': ['-O2'],
+    '3': ['-O3'],
+    's': ['-Os']
+}  # type: T.Dict[str, T.List[str]]
+
+xc16_debug_args = {
+    False: [],
+    True: []
+}  # type: T.Dict[bool, T.List[str]]
+
+
+class Xc16Compiler(Compiler):
+
+    def __init__(self) -> None:
+        if not self.is_cross:
+            raise EnvironmentException('xc16 supports only cross-compilation.')
+        self.id = 'xc16'
+        # Assembly
+        self.can_compile_suffixes.add('s')
+        default_warn_args = []  # type: T.List[str]
+        self.warn_args = {'0': [],
+                          '1': default_warn_args,
+                          '2': default_warn_args + [],
+                          '3': default_warn_args + []}  # type: T.Dict[str, T.List[str]]
+
+    def get_always_args(self) -> T.List[str]:
+        return []
+
+    def get_pic_args(self) -> T.List[str]:
+        # PIC support is not enabled by default for xc16;
+        # if users want it, they need to add the required arguments explicitly.
+        return []
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        return xc16_buildtype_args[buildtype]
+
+    def get_pch_suffix(self) -> str:
+        return 'pch'
+
+    def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+        return []
+
+    def thread_flags(self, env: 'Environment') -> T.List[str]:
+        return []
+
+    def get_coverage_args(self) -> T.List[str]:
+        return []
+
+    def get_no_stdinc_args(self) -> T.List[str]:
+        return ['-nostdinc']
+
+    def get_no_stdlib_link_args(self) -> T.List[str]:
+        return ['--nostdlib']
+
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        return xc16_optimization_args[optimization_level]
+
+    def get_debug_args(self, is_debug: bool) -> T.List[str]:
+        return xc16_debug_args[is_debug]
+
+    @classmethod
+    def unix_args_to_native(cls, args: T.List[str]) -> T.List[str]:
+        result = []
+        for i in args:
+            if i.startswith('-D'):
+                i = '-D' + i[2:]
+            if i.startswith('-I'):
+                i = '-I' + i[2:]
+            if i.startswith('-Wl,-rpath='):
+                continue
+            elif i == '--print-search-dirs':
+                continue
+            elif i.startswith('-L'):
+                continue
+            result.append(i)
+        return result
+
+    def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
+        for idx, i in enumerate(parameter_list):
+            if i[:2] == '-I':
+                parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+
+        return parameter_list
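The `if T.TYPE_CHECKING:` block at the top of this new mixin uses a common trick: mypy sees the mixin as inheriting from the real Compiler class, while at runtime it inherits from plain `object`. A self-contained illustration of the idea, with made-up class names:

import typing as T

if T.TYPE_CHECKING:
    class Base:            # what the type checker sees
        version = ''
else:
    Base = object          # what actually runs

class VersionMixin(Base):
    def describe(self) -> str:
        # mypy resolves `self.version` through Base; at runtime the attribute
        # comes from whichever concrete class the mixin is combined with.
        return 'version ' + self.version

class Concrete(VersionMixin):
    version = '2.0'

print(Concrete().describe())   # version 2.0
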
diff -Nru meson-0.53.2/mesonbuild/compilers/objcpp.py meson-0.57.0+really0.56.2/mesonbuild/compilers/objcpp.py
--- meson-0.53.2/mesonbuild/compilers/objcpp.py	2020-01-23 12:51:19.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/compilers/objcpp.py	2021-01-06 10:39:48.000000000 +0000
@@ -23,27 +23,35 @@
 from .mixins.clang import ClangCompiler
 
 if T.TYPE_CHECKING:
+    from ..dependencies import ExternalProgram
     from ..envconfig import MachineInfo
+    from ..environment import Environment
+    from ..linkers import DynamicLinker
 
 class ObjCPPCompiler(CLikeCompiler, Compiler):
 
     language = 'objcpp'
 
-    def __init__(self, exelist, version, for_machine: MachineChoice,
+    def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
                  is_cross: bool, info: 'MachineInfo',
-                 exe_wrap: T.Optional[str], **kwargs):
-        Compiler.__init__(self, exelist, version, for_machine, info, **kwargs)
-        CLikeCompiler.__init__(self, is_cross, exe_wrap)
+                 exe_wrap: T.Optional['ExternalProgram'],
+                 linker: T.Optional['DynamicLinker'] = None,
+                 full_version: T.Optional[str] = None):
+        Compiler.__init__(self, exelist, version, for_machine, info,
+                          is_cross=is_cross, full_version=full_version,
+                          linker=linker)
+        CLikeCompiler.__init__(self, exe_wrap)
 
     @staticmethod
-    def get_display_language():
+    def get_display_language() -> str:
         return 'Objective-C++'
 
-    def sanity_check(self, work_dir, environment):
+    def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
         # TODO try to use sanity_check_impl instead of duplicated code
         source_name = os.path.join(work_dir, 'sanitycheckobjcpp.mm')
         binary_name = os.path.join(work_dir, 'sanitycheckobjcpp')
-        extra_flags = environment.coredata.get_external_args(self.for_machine, self.language)
+        extra_flags = []
+        extra_flags += environment.coredata.get_external_args(self.for_machine, self.language)
         if self.is_cross:
             extra_flags += self.get_compile_only_args()
         else:
@@ -66,10 +74,14 @@
 
 
 class GnuObjCPPCompiler(GnuCompiler, ObjCPPCompiler):
-    def __init__(self, exelist, version, for_machine: MachineChoice,
-                 is_cross, info: 'MachineInfo', exe_wrapper=None,
-                 defines=None, **kwargs):
-        ObjCPPCompiler.__init__(self, exelist, version, for_machine, is_cross, info, exe_wrapper, **kwargs)
+    def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+                 is_cross: bool, info: 'MachineInfo',
+                 exe_wrapper: T.Optional['ExternalProgram'] = None,
+                 defines: T.Optional[T.Dict[str, str]] = None,
+                 linker: T.Optional['DynamicLinker'] = None,
+                 full_version: T.Optional[str] = None):
+        ObjCPPCompiler.__init__(self, exelist, version, for_machine, is_cross,
+                              info, exe_wrapper, linker=linker, full_version=full_version)
         GnuCompiler.__init__(self, defines)
         default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor']
         self.warn_args = {'0': [],
@@ -79,13 +91,24 @@
 
 
 class ClangObjCPPCompiler(ClangCompiler, ObjCPPCompiler):
-    def __init__(self, exelist, version, for_machine: MachineChoice,
-                 is_cross, info: 'MachineInfo', exe_wrapper=None,
-                 **kwargs):
-        ObjCPPCompiler.__init__(self, exelist, version, for_machine, is_cross, info, exe_wrapper, **kwargs)
-        ClangCompiler.__init__(self)
+
+    def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+                 is_cross: bool, info: 'MachineInfo',
+                 exe_wrapper: T.Optional['ExternalProgram'] = None,
+                 defines: T.Optional[T.Dict[str, str]] = None,
+                 linker: T.Optional['DynamicLinker'] = None,
+                 full_version: T.Optional[str] = None):
+        ObjCPPCompiler.__init__(self, exelist, version, for_machine, is_cross,
+                              info, exe_wrapper, linker=linker, full_version=full_version)
+        ClangCompiler.__init__(self, defines)
         default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor']
         self.warn_args = {'0': [],
                           '1': default_warn_args,
                           '2': default_warn_args + ['-Wextra'],
                           '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+
+
+
+class AppleClangObjCPPCompiler(ClangObjCPPCompiler):
+
+    """Handle the differences between Apple's clang and vanilla clang."""
diff -Nru meson-0.53.2/mesonbuild/compilers/objc.py meson-0.57.0+really0.56.2/mesonbuild/compilers/objc.py
--- meson-0.53.2/mesonbuild/compilers/objc.py	2020-01-23 12:51:19.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/compilers/objc.py	2021-01-06 10:39:48.000000000 +0000
@@ -23,28 +23,36 @@
 from .mixins.clang import ClangCompiler
 
 if T.TYPE_CHECKING:
+    from ..dependencies import ExternalProgram
     from ..envconfig import MachineInfo
+    from ..environment import Environment
+    from ..linkers import DynamicLinker
 
 
 class ObjCCompiler(CLikeCompiler, Compiler):
 
     language = 'objc'
 
-    def __init__(self, exelist, version, for_machine: MachineChoice,
+    def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
                  is_cross: bool, info: 'MachineInfo',
-                 exe_wrap: T.Optional[str], **kwargs):
-        Compiler.__init__(self, exelist, version, for_machine, info, **kwargs)
-        CLikeCompiler.__init__(self, is_cross, exe_wrap)
+                 exe_wrap: T.Optional['ExternalProgram'],
+                 linker: T.Optional['DynamicLinker'] = None,
+                 full_version: T.Optional[str] = None):
+        Compiler.__init__(self, exelist, version, for_machine, info,
+                          is_cross=is_cross, full_version=full_version,
+                          linker=linker)
+        CLikeCompiler.__init__(self, exe_wrap)
 
     @staticmethod
-    def get_display_language():
+    def get_display_language() -> str:
         return 'Objective-C'
 
-    def sanity_check(self, work_dir, environment):
+    def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
         # TODO try to use sanity_check_impl instead of duplicated code
         source_name = os.path.join(work_dir, 'sanitycheckobjc.m')
         binary_name = os.path.join(work_dir, 'sanitycheckobjc')
-        extra_flags = environment.coredata.get_external_args(self.for_machine, self.language)
+        extra_flags = []
+        extra_flags += environment.coredata.get_external_args(self.for_machine, self.language)
         if self.is_cross:
             extra_flags += self.get_compile_only_args()
         else:
@@ -66,11 +74,14 @@
 
 
 class GnuObjCCompiler(GnuCompiler, ObjCCompiler):
-    def __init__(self, exelist, version, for_machine: MachineChoice,
-                 is_cross, info: 'MachineInfo', exe_wrapper=None,
-                 defines=None, **kwargs):
+    def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+                 is_cross: bool, info: 'MachineInfo',
+                 exe_wrapper: T.Optional['ExternalProgram'] = None,
+                 defines: T.Optional[T.Dict[str, str]] = None,
+                 linker: T.Optional['DynamicLinker'] = None,
+                 full_version: T.Optional[str] = None):
         ObjCCompiler.__init__(self, exelist, version, for_machine, is_cross,
-                              info, exe_wrapper, **kwargs)
+                              info, exe_wrapper, linker=linker, full_version=full_version)
         GnuCompiler.__init__(self, defines)
         default_warn_args = ['-Wall', '-Winvalid-pch']
         self.warn_args = {'0': [],
@@ -80,14 +91,22 @@
 
 
 class ClangObjCCompiler(ClangCompiler, ObjCCompiler):
-    def __init__(self, exelist, version, for_machine: MachineChoice,
-                 is_cross, info: 'MachineInfo', exe_wrapper=None,
-                 **kwargs):
+    def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+                 is_cross: bool, info: 'MachineInfo',
+                 exe_wrapper: T.Optional['ExternalProgram'] = None,
+                 defines: T.Optional[T.Dict[str, str]] = None,
+                 linker: T.Optional['DynamicLinker'] = None,
+                 full_version: T.Optional[str] = None):
         ObjCCompiler.__init__(self, exelist, version, for_machine, is_cross,
-                              info, exe_wrapper, **kwargs)
-        ClangCompiler.__init__(self)
+                              info, exe_wrapper, linker=linker, full_version=full_version)
+        ClangCompiler.__init__(self, defines)
         default_warn_args = ['-Wall', '-Winvalid-pch']
         self.warn_args = {'0': [],
                           '1': default_warn_args,
                           '2': default_warn_args + ['-Wextra'],
                           '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+
+
+class AppleClangObjCCompiler(ClangObjCCompiler):
+
+    """Handle the differences between Apple's clang and vanilla clang."""
diff -Nru meson-0.53.2/mesonbuild/compilers/rust.py meson-0.57.0+really0.56.2/mesonbuild/compilers/rust.py
--- meson-0.53.2/mesonbuild/compilers/rust.py	2020-01-23 12:51:19.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/compilers/rust.py	2021-01-06 10:39:48.000000000 +0000
@@ -13,55 +13,63 @@
 # limitations under the License.
 
 import subprocess, os.path
+import textwrap
 import typing as T
 
 from ..mesonlib import EnvironmentException, MachineChoice, Popen_safe
 from .compilers import Compiler, rust_buildtype_args, clike_debug_args
 
 if T.TYPE_CHECKING:
+    from ..dependencies import ExternalProgram
     from ..envconfig import MachineInfo
     from ..environment import Environment  # noqa: F401
+    from ..linkers import DynamicLinker
 
-rust_optimization_args = {'0': [],
-                          'g': ['-C', 'opt-level=0'],
-                          '1': ['-C', 'opt-level=1'],
-                          '2': ['-C', 'opt-level=2'],
-                          '3': ['-C', 'opt-level=3'],
-                          's': ['-C', 'opt-level=s'],
-                          }
+rust_optimization_args = {
+    '0': [],
+    'g': ['-C', 'opt-level=0'],
+    '1': ['-C', 'opt-level=1'],
+    '2': ['-C', 'opt-level=2'],
+    '3': ['-C', 'opt-level=3'],
+    's': ['-C', 'opt-level=s'],
+}  # type: T.Dict[str, T.List[str]]
 
 class RustCompiler(Compiler):
 
     # rustc doesn't invoke the compiler itself, it doesn't need a LINKER_PREFIX
     language = 'rust'
 
-    def __init__(self, exelist, version, for_machine: MachineChoice,
-                 is_cross, info: 'MachineInfo', exe_wrapper=None, **kwargs):
-        super().__init__(exelist, version, for_machine, info, **kwargs)
+    def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+                 is_cross: bool, info: 'MachineInfo',
+                 exe_wrapper: T.Optional['ExternalProgram'] = None,
+                 full_version: T.Optional[str] = None,
+                 linker: T.Optional['DynamicLinker'] = None):
+        super().__init__(exelist, version, for_machine, info,
+                         is_cross=is_cross, full_version=full_version,
+                         linker=linker)
         self.exe_wrapper = exe_wrapper
         self.id = 'rustc'
-        self.is_cross = is_cross
 
-    def needs_static_linker(self):
+    def needs_static_linker(self) -> bool:
         return False
 
-    def name_string(self):
-        return ' '.join(self.exelist)
-
-    def sanity_check(self, work_dir, environment):
+    def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
         source_name = os.path.join(work_dir, 'sanity.rs')
         output_name = os.path.join(work_dir, 'rusttest')
         with open(source_name, 'w') as ofile:
-            ofile.write('''fn main() {
-}
-''')
+            ofile.write(textwrap.dedent(
+                '''fn main() {
+                }
+                '''))
         pc = subprocess.Popen(self.exelist + ['-o', output_name, source_name],
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               cwd=work_dir)
-        stdo, stde = pc.communicate()
-        stdo = stdo.decode('utf-8', errors='replace')
-        stde = stde.decode('utf-8', errors='replace')
+        _stdo, _stde = pc.communicate()
+        assert isinstance(_stdo, bytes)
+        assert isinstance(_stde, bytes)
+        stdo = _stdo.decode('utf-8', errors='replace')
+        stde = _stde.decode('utf-8', errors='replace')
         if pc.returncode != 0:
             raise EnvironmentException('Rust compiler %s can not compile programs.\n%s\n%s' % (
                 self.name_string(),
@@ -71,7 +79,7 @@
             if self.exe_wrapper is None:
                 # Can't check if the binaries run so we have to assume they do
                 return
-            cmdlist = self.exe_wrapper + [output_name]
+            cmdlist = self.exe_wrapper.get_command() + [output_name]
         else:
             cmdlist = [output_name]
         pe = subprocess.Popen(cmdlist, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
@@ -79,24 +87,25 @@
         if pe.returncode != 0:
             raise EnvironmentException('Executables created by Rust compiler %s are not runnable.' % self.name_string())
 
-    def get_dependency_gen_args(self, outfile):
+    def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
         return ['--dep-info', outfile]
 
-    def get_buildtype_args(self, buildtype):
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
         return rust_buildtype_args[buildtype]
 
-    def get_sysroot(self):
+    def get_sysroot(self) -> str:
         cmd = self.exelist + ['--print', 'sysroot']
         p, stdo, stde = Popen_safe(cmd)
         return stdo.split('\n')[0]
 
-    def get_debug_args(self, is_debug):
+    def get_debug_args(self, is_debug: bool) -> T.List[str]:
         return clike_debug_args[is_debug]
 
-    def get_optimization_args(self, optimization_level):
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
         return rust_optimization_args[optimization_level]
 
-    def compute_parameters_with_absolute_paths(self, parameter_list, build_dir):
+    def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
+                                               build_dir: str) -> T.List[str]:
         for idx, i in enumerate(parameter_list):
             if i[:2] == '-L':
                 for j in ['dependency', 'crate', 'native', 'framework', 'all']:
@@ -107,8 +116,12 @@
 
         return parameter_list
 
-    def get_std_exe_link_args(self):
-        return []
+    def get_output_args(self, outputname: str) -> T.List[str]:
+        return ['-o', outputname]
+
+    @classmethod
+    def use_linker_args(cls, linker: str) -> T.List[str]:
+        return ['-C', 'linker={}'.format(linker)]
 
     # Rust does not have a use_linker_args because it dispatches to a gcc-like
     # C compiler for dynamic linking, as such we invoke the C compiler's
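The added `use_linker_args` lets Meson hand rustc a linker driver via `-C linker=...`; together with the optimization table above and the `-o` output flag, a compile command is assembled roughly like this. The helper below is a sketch, not Meson's actual code path; the debug flag mirrors `clike_debug_args`:

import typing as T

rust_optimization_args = {
    '0': [], 'g': ['-C', 'opt-level=0'], '1': ['-C', 'opt-level=1'],
    '2': ['-C', 'opt-level=2'], '3': ['-C', 'opt-level=3'], 's': ['-C', 'opt-level=s'],
}  # type: T.Dict[str, T.List[str]]

def rustc_command(source: str, output: str, opt: str, debug: bool,
                  linker: T.Optional[str] = None) -> T.List[str]:
    cmd = ['rustc', source, '-o', output]
    cmd += rust_optimization_args[opt]
    if debug:
        cmd.append('-g')
    if linker is not None:
        cmd += ['-C', 'linker={}'.format(linker)]
    return cmd

print(rustc_command('main.rs', 'main', '2', True, linker='cc'))
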
diff -Nru meson-0.53.2/mesonbuild/compilers/swift.py meson-0.57.0+really0.56.2/mesonbuild/compilers/swift.py
--- meson-0.53.2/mesonbuild/compilers/swift.py	2020-01-23 12:51:19.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/compilers/swift.py	2021-01-06 10:39:48.000000000 +0000
@@ -21,84 +21,88 @@
 
 if T.TYPE_CHECKING:
     from ..envconfig import MachineInfo
+    from ..environment import Environment
+    from ..linkers import DynamicLinker
 
-swift_optimization_args = {'0': [],
-                           'g': [],
-                           '1': ['-O'],
-                           '2': ['-O'],
-                           '3': ['-O'],
-                           's': ['-O'],
-                           }
+swift_optimization_args = {
+    '0': [],
+    'g': [],
+    '1': ['-O'],
+    '2': ['-O'],
+    '3': ['-O'],
+    's': ['-O'],
+}  # type: T.Dict[str, T.List[str]]
 
 class SwiftCompiler(Compiler):
 
     LINKER_PREFIX = ['-Xlinker']
     language = 'swift'
 
-    def __init__(self, exelist, version, for_machine: MachineChoice,
-                 is_cross, info: 'MachineInfo', **kwargs):
-        super().__init__(exelist, version, for_machine, info, **kwargs)
+    def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+                 is_cross: bool, info: 'MachineInfo', full_version: T.Optional[str] = None,
+                 linker: T.Optional['DynamicLinker'] = None):
+        super().__init__(exelist, version, for_machine, info,
+                         is_cross=is_cross, full_version=full_version,
+                         linker=linker)
         self.version = version
         self.id = 'llvm'
-        self.is_cross = is_cross
 
-    def name_string(self):
-        return ' '.join(self.exelist)
-
-    def needs_static_linker(self):
+    def needs_static_linker(self) -> bool:
         return True
 
-    def get_werror_args(self):
+    def get_werror_args(self) -> T.List[str]:
         return ['--fatal-warnings']
 
-    def get_dependency_gen_args(self, outtarget, outfile):
+    def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
         return ['-emit-dependencies']
 
-    def depfile_for_object(self, objfile):
+    def depfile_for_object(self, objfile: str) -> str:
         return os.path.splitext(objfile)[0] + '.' + self.get_depfile_suffix()
 
-    def get_depfile_suffix(self):
+    def get_depfile_suffix(self) -> str:
         return 'd'
 
-    def get_output_args(self, target):
+    def get_output_args(self, target: str) -> T.List[str]:
         return ['-o', target]
 
-    def get_header_import_args(self, headername):
+    def get_header_import_args(self, headername: str) -> T.List[str]:
         return ['-import-objc-header', headername]
 
-    def get_warn_args(self, level):
+    def get_warn_args(self, level: str) -> T.List[str]:
         return []
 
-    def get_buildtype_args(self, buildtype):
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
         return swift_buildtype_args[buildtype]
 
-    def get_std_exe_link_args(self):
+    def get_std_exe_link_args(self) -> T.List[str]:
         return ['-emit-executable']
 
-    def get_module_args(self, modname):
+    def get_module_args(self, modname: str) -> T.List[str]:
         return ['-module-name', modname]
 
-    def get_mod_gen_args(self):
+    def get_mod_gen_args(self) -> T.List[str]:
         return ['-emit-module']
 
-    def get_include_args(self, dirname):
-        return ['-I' + dirname]
+    def get_include_args(self, path: str, is_system: bool) -> T.List[str]:
+        return ['-I' + path]
 
-    def get_compile_only_args(self):
+    def get_compile_only_args(self) -> T.List[str]:
         return ['-c']
 
-    def compute_parameters_with_absolute_paths(self, parameter_list, build_dir):
+    def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
+                                               build_dir: str) -> T.List[str]:
         for idx, i in enumerate(parameter_list):
             if i[:2] == '-I' or i[:2] == '-L':
                 parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
 
         return parameter_list
 
-    def sanity_check(self, work_dir, environment):
+    def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
         src = 'swifttest.swift'
         source_name = os.path.join(work_dir, src)
         output_name = os.path.join(work_dir, 'swifttest')
-        extra_flags = environment.coredata.get_external_args(self.for_machine, self.language)
+        extra_flags = []
+        extra_flags += environment.coredata.get_external_args(self.for_machine, self.language)
         if self.is_cross:
             extra_flags += self.get_compile_only_args()
         else:
@@ -116,8 +120,8 @@
         if subprocess.call(output_name) != 0:
             raise EnvironmentException('Executables created by Swift compiler %s are not runnable.' % self.name_string())
 
-    def get_debug_args(self, is_debug):
+    def get_debug_args(self, is_debug: bool) -> T.List[str]:
         return clike_debug_args[is_debug]
 
-    def get_optimization_args(self, optimization_level):
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
         return swift_optimization_args[optimization_level]
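Swift's `compute_parameters_with_absolute_paths` follows the same pattern as the other compilers in this patch: any relative path behind `-I` or `-L` is rebased onto the build directory. A small standalone version of that rewrite:

import os
import typing as T

def rebase_search_args(args: T.List[str], build_dir: str) -> T.List[str]:
    out = []
    for arg in args:
        if arg[:2] in ('-I', '-L'):
            # os.path.join leaves already-absolute paths untouched.
            arg = arg[:2] + os.path.normpath(os.path.join(build_dir, arg[2:]))
        out.append(arg)
    return out

print(rebase_search_args(['-Iinclude', '-Lsub/libs', '-DFOO'], '/tmp/build'))
# ['-I/tmp/build/include', '-L/tmp/build/sub/libs', '-DFOO']
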
diff -Nru meson-0.53.2/mesonbuild/compilers/vala.py meson-0.57.0+really0.56.2/mesonbuild/compilers/vala.py
--- meson-0.53.2/mesonbuild/compilers/vala.py	2020-01-23 12:51:19.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/compilers/vala.py	2021-01-06 10:39:48.000000000 +0000
@@ -18,68 +18,66 @@
 from .. import mlog
 from ..mesonlib import EnvironmentException, MachineChoice, version_compare
 
-from .compilers import Compiler
+from .compilers import Compiler, LibType
 
 if T.TYPE_CHECKING:
     from ..envconfig import MachineInfo
+    from ..environment import Environment
 
 class ValaCompiler(Compiler):
 
     language = 'vala'
 
-    def __init__(self, exelist, version, for_machine: MachineChoice,
-                 is_cross, info: 'MachineInfo'):
-        super().__init__(exelist, version, for_machine, info)
+    def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+                 is_cross: bool, info: 'MachineInfo'):
+        super().__init__(exelist, version, for_machine, info, is_cross=is_cross)
         self.version = version
-        self.is_cross = is_cross
         self.id = 'valac'
         self.base_options = ['b_colorout']
 
-    def name_string(self):
-        return ' '.join(self.exelist)
-
-    def needs_static_linker(self):
+    def needs_static_linker(self) -> bool:
         return False # Because compiles into C.
 
-    def get_optimization_args(self, optimization_level):
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
         return []
 
-    def get_debug_args(self, is_debug):
+    def get_debug_args(self, is_debug: bool) -> T.List[str]:
         return ['--debug'] if is_debug else []
 
-    def get_output_args(self, target):
+    def get_output_args(self, target: str) -> T.List[str]:
         return [] # Because compiles into C.
 
-    def get_compile_only_args(self):
+    def get_compile_only_args(self) -> T.List[str]:
         return [] # Because compiles into C.
 
-    def get_pic_args(self):
+    def get_pic_args(self) -> T.List[str]:
         return []
 
-    def get_pie_args(self):
+    def get_pie_args(self) -> T.List[str]:
         return []
 
-    def get_pie_link_args(self):
+    def get_pie_link_args(self) -> T.List[str]:
         return []
 
-    def get_always_args(self):
+    def get_always_args(self) -> T.List[str]:
         return ['-C']
 
-    def get_warn_args(self, warning_level):
+    def get_warn_args(self, warning_level: str) -> T.List[str]:
         return []
 
-    def get_no_warn_args(self):
+    def get_no_warn_args(self) -> T.List[str]:
         return ['--disable-warnings']
 
-    def get_werror_args(self):
+    def get_werror_args(self) -> T.List[str]:
         return ['--fatal-warnings']
 
-    def get_colorout_args(self, colortype):
+    def get_colorout_args(self, colortype: str) -> T.List[str]:
         if version_compare(self.version, '>=0.37.1'):
             return ['--color=' + colortype]
         return []
 
-    def compute_parameters_with_absolute_paths(self, parameter_list, build_dir):
+    def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
+                                               build_dir: str) -> T.List[str]:
         for idx, i in enumerate(parameter_list):
             if i[:9] == '--girdir=':
                 parameter_list[idx] = i[:9] + os.path.normpath(os.path.join(build_dir, i[9:]))
@@ -92,32 +90,34 @@
 
         return parameter_list
 
-    def sanity_check(self, work_dir, environment):
+    def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
         code = 'class MesonSanityCheck : Object { }'
-        extra_flags = environment.coredata.get_external_args(self.for_machine, self.language)
+        extra_flags = []
+        extra_flags += environment.coredata.get_external_args(self.for_machine, self.language)
         if self.is_cross:
             extra_flags += self.get_compile_only_args()
         else:
             extra_flags += environment.coredata.get_external_link_args(self.for_machine, self.language)
         with self.cached_compile(code, environment.coredata, extra_args=extra_flags, mode='compile') as p:
             if p.returncode != 0:
-                msg = 'Vala compiler {!r} can not compile programs' \
-                      ''.format(self.name_string())
+                msg = 'Vala compiler {!r} can not compile programs'.format(self.name_string())
                 raise EnvironmentException(msg)
 
-    def get_buildtype_args(self, buildtype):
-        if buildtype == 'debug' or buildtype == 'debugoptimized' or buildtype == 'minsize':
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        if buildtype in {'debug', 'debugoptimized', 'minsize'}:
             return ['--debug']
         return []
 
-    def find_library(self, libname, env, extra_dirs, *args):
+    def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str],
+                     libtype: LibType = LibType.PREFER_SHARED) -> T.Optional[T.List[str]]:
         if extra_dirs and isinstance(extra_dirs, str):
             extra_dirs = [extra_dirs]
         # Valac always looks in the default vapi dir, so only search there if
         # no extra dirs are specified.
         if not extra_dirs:
             code = 'class MesonFindLibrary : Object { }'
-            args = env.coredata.get_external_args(self.for_machine, self.language)
+            args = []
+            args += env.coredata.get_external_args(self.for_machine, self.language)
             vapi_args = ['--pkg', libname]
             args += vapi_args
             with self.cached_compile(code, env.coredata, extra_args=args, mode='compile') as p:
@@ -131,8 +131,8 @@
         mlog.debug('Searched {!r} and {!r} wasn\'t found'.format(extra_dirs, libname))
         return None
 
-    def thread_flags(self, env):
+    def thread_flags(self, env: 'Environment') -> T.List[str]:
         return []
 
-    def thread_link_flags(self, env):
+    def thread_link_flags(self, env: 'Environment') -> T.List[str]:
         return []
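`get_colorout_args` is the usual version-gated flag: valac only understands `--color=` from 0.37.1 onwards, so the flag is dropped for older compilers. A tiny sketch of that check, using a naive version comparison in place of mesonlib.version_compare:

import typing as T

def colorout_args(valac_version: str, colortype: str) -> T.List[str]:
    def as_tuple(v: str) -> T.Tuple[int, ...]:
        return tuple(int(part) for part in v.split('.'))
    if as_tuple(valac_version) >= (0, 37, 1):
        return ['--color=' + colortype]
    return []

print(colorout_args('0.48.6', 'always'))  # ['--color=always']
print(colorout_args('0.36.0', 'always'))  # []
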
diff -Nru meson-0.53.2/mesonbuild/coredata.py meson-0.57.0+really0.56.2/mesonbuild/coredata.py
--- meson-0.53.2/mesonbuild/coredata.py	2020-02-25 18:00:46.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/coredata.py	2021-01-09 22:56:47.000000000 +0000
@@ -1,4 +1,4 @@
-# Copyright 2012-2019 The Meson development team
+# Copyright 2012-2020 The Meson development team
 
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,14 +12,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from . import mlog
+from . import mlog, mparser
 import pickle, os, uuid
 import sys
 from itertools import chain
-from pathlib import PurePath
-from collections import OrderedDict
+from ._pathlib import PurePath
+from collections import OrderedDict, defaultdict
 from .mesonlib import (
-    MesonException, MachineChoice, PerMachine,
+    MesonException, EnvironmentException, MachineChoice, PerMachine,
     default_libdir, default_libexecdir, default_prefix, split_args
 )
 from .wrap import WrapMode
@@ -32,12 +32,14 @@
 
 if T.TYPE_CHECKING:
     from . import dependencies
-    from .compilers import Compiler  # noqa: F401
+    from .compilers.compilers import Compiler, CompileResult  # noqa: F401
     from .environment import Environment
+    from .mesonlib import OptionOverrideProxy
 
-    OptionDictType = T.Dict[str, 'UserOption[T.Any]']
+    OptionDictType = T.Union[T.Dict[str, 'UserOption[T.Any]'], OptionOverrideProxy]
+    CompilerCheckCacheKey = T.Tuple[T.Tuple[str, ...], str, str, T.Tuple[str, ...], str]
 
-version = '0.53.2'
+version = '0.56.2'
 backendlist = ['ninja', 'vs', 'vs2010', 'vs2015', 'vs2017', 'vs2019', 'xcode']
 
 default_yielding = False
@@ -45,8 +47,18 @@
 # Can't bind this near the class method it seems, sadly.
 _T = T.TypeVar('_T')
 
+class MesonVersionMismatchException(MesonException):
+    '''Build directory was generated with a Meson version incompatible with the current version'''
+    def __init__(self, old_version: str, current_version: str) -> None:
+        super().__init__('Build directory has been generated with Meson version {}, '
+                         'which is incompatible with the current version {}.'
+                         .format(old_version, current_version))
+        self.old_version = old_version
+        self.current_version = current_version
+
+
 class UserOption(T.Generic[_T]):
-    def __init__(self, description, choices, yielding):
+    def __init__(self, description: str, choices: T.Optional[T.Union[str, T.List[_T]]], yielding: T.Optional[bool]):
         super().__init__()
         self.choices = choices
         self.description = description
@@ -56,7 +68,8 @@
             raise MesonException('Value of "yielding" must be a boolean.')
         self.yielding = yielding
 
-    def printable_value(self):
+    def printable_value(self) -> T.Union[str, int, bool, T.List[T.Union[str, int, bool]]]:
+        assert isinstance(self.value, (str, int, bool, list))
         return self.value
 
     # Check that the input is a valid value and return the
@@ -65,30 +78,32 @@
     def validate_value(self, value: T.Any) -> _T:
         raise RuntimeError('Derived option class did not override validate_value.')
 
-    def set_value(self, newvalue):
+    def set_value(self, newvalue: T.Any) -> None:
         self.value = self.validate_value(newvalue)
 
 class UserStringOption(UserOption[str]):
-    def __init__(self, description, value, choices=None, yielding=None):
-        super().__init__(description, choices, yielding)
+    def __init__(self, description: str, value: T.Any, yielding: T.Optional[bool] = None):
+        super().__init__(description, None, yielding)
         self.set_value(value)
 
-    def validate_value(self, value):
+    def validate_value(self, value: T.Any) -> str:
         if not isinstance(value, str):
             raise MesonException('Value "%s" for string option is not a string.' % str(value))
         return value
 
 class UserBooleanOption(UserOption[bool]):
-    def __init__(self, description, value, yielding=None):
+    def __init__(self, description: str, value, yielding: T.Optional[bool] = None) -> None:
         super().__init__(description, [True, False], yielding)
         self.set_value(value)
 
     def __bool__(self) -> bool:
         return self.value
 
-    def validate_value(self, value) -> bool:
+    def validate_value(self, value: T.Any) -> bool:
         if isinstance(value, bool):
             return value
+        if not isinstance(value, str):
+            raise MesonException('Value {} cannot be converted to a boolean'.format(value))
         if value.lower() == 'true':
             return True
         if value.lower() == 'false':
@@ -96,19 +111,20 @@
         raise MesonException('Value %s is not boolean (true or false).' % value)
 
 class UserIntegerOption(UserOption[int]):
-    def __init__(self, description, min_value, max_value, value, yielding=None):
-        super().__init__(description, [True, False], yielding)
+    def __init__(self, description: str, value: T.Any, yielding: T.Optional[bool] = None):
+        min_value, max_value, default_value = value
         self.min_value = min_value
         self.max_value = max_value
-        self.set_value(value)
         c = []
         if min_value is not None:
             c.append('>=' + str(min_value))
         if max_value is not None:
             c.append('<=' + str(max_value))
-        self.choices = ', '.join(c)
+        choices = ', '.join(c)
+        super().__init__(description, choices, yielding)
+        self.set_value(default_value)
 
-    def validate_value(self, value) -> int:
+    def validate_value(self, value: T.Any) -> int:
         if isinstance(value, str):
             value = self.toint(value)
         if not isinstance(value, int):
@@ -119,35 +135,35 @@
             raise MesonException('New value %d is more than maximum value %d.' % (value, self.max_value))
         return value
 
-    def toint(self, valuestring) -> int:
+    def toint(self, valuestring: str) -> int:
         try:
             return int(valuestring)
         except ValueError:
             raise MesonException('Value string "%s" is not convertible to an integer.' % valuestring)
 
 class UserUmaskOption(UserIntegerOption, UserOption[T.Union[str, int]]):
-    def __init__(self, description, value, yielding=None):
-        super().__init__(description, 0, 0o777, value, yielding)
+    def __init__(self, description: str, value: T.Any, yielding: T.Optional[bool] = None):
+        super().__init__(description, (0, 0o777, value), yielding)
         self.choices = ['preserve', '0000-0777']
 
-    def printable_value(self):
+    def printable_value(self) -> str:
         if self.value == 'preserve':
             return self.value
         return format(self.value, '04o')
 
-    def validate_value(self, value):
+    def validate_value(self, value: T.Any) -> T.Union[str, int]:
         if value is None or value == 'preserve':
             return 'preserve'
         return super().validate_value(value)
 
-    def toint(self, valuestring):
+    def toint(self, valuestring: T.Union[str, int]) -> int:
         try:
             return int(valuestring, 8)
         except ValueError as e:
             raise MesonException('Invalid mode: {}'.format(e))
 
 class UserComboOption(UserOption[str]):
-    def __init__(self, description, choices: T.List[str], value, yielding=None):
+    def __init__(self, description: str, choices: T.List[str], value: T.Any, yielding: T.Optional[bool] = None):
         super().__init__(description, choices, yielding)
         if not isinstance(self.choices, list):
             raise MesonException('Combo choices must be an array.')
@@ -156,20 +172,28 @@
                 raise MesonException('Combo choice elements must be strings.')
         self.set_value(value)
 
-    def validate_value(self, value):
+    def validate_value(self, value: T.Any) -> str:
         if value not in self.choices:
+            if isinstance(value, bool):
+                _type = 'boolean'
+            elif isinstance(value, (int, float)):
+                _type = 'number'
+            else:
+                _type = 'string'
             optionsstring = ', '.join(['"%s"' % (item,) for item in self.choices])
-            raise MesonException('Value "%s" for combo option is not one of the choices. Possible choices are: %s.' % (value, optionsstring))
+            raise MesonException('Value "{}" (of type "{}") for combo option "{}" is not one of the choices.'
+                                 ' Possible choices are (as string): {}.'.format(
+                                     value, _type, self.description, optionsstring))
         return value
 
 class UserArrayOption(UserOption[T.List[str]]):
-    def __init__(self, description, value, split_args=False, user_input=False, allow_dups=False, **kwargs):
+    def __init__(self, description: str, value: T.Union[str, T.List[str]], split_args: bool = False, user_input: bool = False, allow_dups: bool = False, **kwargs: T.Any) -> None:
         super().__init__(description, kwargs.get('choices', []), yielding=kwargs.get('yielding', None))
         self.split_args = split_args
         self.allow_dups = allow_dups
         self.value = self.validate_value(value, user_input=user_input)
 
-    def validate_value(self, value, user_input: bool = True) -> T.List[str]:
+    def validate_value(self, value: T.Union[str, T.List[str]], user_input: bool = True) -> T.List[str]:
         # User input is for options defined on the command line (via -D
         # options). Users can put their input in as a comma separated
         # string, but for defining options in meson_options.txt the format
@@ -213,26 +237,18 @@
 class UserFeatureOption(UserComboOption):
     static_choices = ['enabled', 'disabled', 'auto']
 
-    def __init__(self, description, value, yielding=None):
+    def __init__(self, description: str, value: T.Any, yielding: T.Optional[bool] = None):
         super().__init__(description, self.static_choices, value, yielding)
 
-    def is_enabled(self):
+    def is_enabled(self) -> bool:
         return self.value == 'enabled'
 
-    def is_disabled(self):
+    def is_disabled(self) -> bool:
         return self.value == 'disabled'
 
-    def is_auto(self):
+    def is_auto(self) -> bool:
         return self.value == 'auto'
 
-
-def load_configs(filenames: T.List[str]) -> configparser.ConfigParser:
-    """Load configuration files from a named subdirectory."""
-    config = configparser.ConfigParser()
-    config.read(filenames)
-    return config
-
-
 if T.TYPE_CHECKING:
     CacheKeyType = T.Tuple[T.Tuple[T.Any, ...], ...]
     SubCacheKeyType = T.Tuple[T.Any, ...]
@@ -351,7 +367,7 @@
 
 class CoreData:
 
-    def __init__(self, options: argparse.Namespace, scratch_dir: str):
+    def __init__(self, options: argparse.Namespace, scratch_dir: str, meson_command: T.List[str]):
         self.lang_guids = {
             'default': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942',
             'c': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942',
@@ -362,23 +378,30 @@
         self.test_guid = str(uuid.uuid4()).upper()
         self.regen_guid = str(uuid.uuid4()).upper()
         self.install_guid = str(uuid.uuid4()).upper()
+        self.meson_command = meson_command
         self.target_guids = {}
         self.version = version
-        self.init_builtins()
-        self.backend_options = {} # : T.Dict[str, UserOption]
-        self.user_options = {} # : T.Dict[str, UserOption]
-        self.compiler_options = PerMachine({}, {})
-        self.base_options = {} # : T.Dict[str, UserOption]
+        self.builtins = {} # type: OptionDictType
+        self.builtins_per_machine = PerMachine({}, {})
+        self.backend_options = {} # type: OptionDictType
+        self.user_options = {} # type: OptionDictType
+        self.compiler_options = PerMachine(
+            defaultdict(dict),
+            defaultdict(dict),
+        ) # type: PerMachine[T.defaultdict[str, OptionDictType]]
+        self.base_options = {} # type: OptionDictType
         self.cross_files = self.__load_config_files(options, scratch_dir, 'cross')
-        self.compilers = PerMachine(OrderedDict(), OrderedDict())
+        self.compilers = PerMachine(OrderedDict(), OrderedDict())  # type: PerMachine[T.Dict[str, Compiler]]
 
         build_cache = DependencyCache(self.builtins_per_machine, MachineChoice.BUILD)
         host_cache = DependencyCache(self.builtins_per_machine, MachineChoice.BUILD)
         self.deps = PerMachine(build_cache, host_cache)  # type: PerMachine[DependencyCache]
-        self.compiler_check_cache = OrderedDict()
+        self.compiler_check_cache = OrderedDict()  # type: T.Dict[CompilerCheckCacheKey, compiler.CompileResult]
+
         # Only to print a warning if it changes between Meson invocations.
         self.config_files = self.__load_config_files(options, scratch_dir, 'native')
-        self.libdir_cross_fixup()
+        self.builtin_options_libdir_cross_fixup()
+        self.init_builtins('')
 
     @staticmethod
     def __load_config_files(options: argparse.Namespace, scratch_dir: str, ftype: str) -> T.List[str]:
@@ -438,12 +461,12 @@
             raise MesonException('Cannot find specified {} file: {}'.format(ftype, f))
         return real
 
-    def libdir_cross_fixup(self):
+    def builtin_options_libdir_cross_fixup(self):
         # By default set libdir to "lib" when cross compiling since
         # getting the "system default" is always wrong on multiarch
         # platforms as it gets a value like lib/x86_64-linux-gnu.
         if self.cross_files:
-            self.builtins['libdir'].value = 'lib'
+            BUILTIN_OPTIONS['libdir'].default = 'lib'
 
     def sanitize_prefix(self, prefix):
         prefix = os.path.expanduser(prefix)
@@ -463,7 +486,7 @@
                 prefix = prefix[:-1]
         return prefix
 
-    def sanitize_dir_option_value(self, prefix, option, value):
+    def sanitize_dir_option_value(self, prefix: str, option: str, value: T.Any) -> T.Any:
         '''
         If the option is an installation directory option and the value is an
         absolute path, check that it resides within prefix and return the value
@@ -471,55 +494,79 @@
 
         This way everyone can do f.ex, get_option('libdir') and be sure to get
         the library directory relative to prefix.
+
+        .as_posix() keeps the posix-like file separators Meson uses.
         '''
-        if option.endswith('dir') and os.path.isabs(value) and \
+        try:
+            value = PurePath(value)
+        except TypeError:
+            return value
+        if option.endswith('dir') and value.is_absolute() and \
            option not in builtin_dir_noprefix_options:
             # Value must be a subdir of the prefix
             # commonpath will always return a path in the native format, so we
             # must use pathlib.PurePath to do the same conversion before
             # comparing.
-            if os.path.commonpath([value, prefix]) != str(PurePath(prefix)):
-                m = 'The value of the {!r} option is {!r} which must be a ' \
-                    'subdir of the prefix {!r}.\nNote that if you pass a ' \
-                    'relative path, it is assumed to be a subdir of prefix.'
-                raise MesonException(m.format(option, value, prefix))
-            # Convert path to be relative to prefix
-            skip = len(prefix) + 1
-            value = value[skip:]
-        return value
+            msg = ('The value of the {!r} option is \'{!s}\' which must be a '
+                   'subdir of the prefix {!r}.\nNote that if you pass a '
+                   'relative path, it is assumed to be a subdir of prefix.')
+            # os.path.commonpath doesn't understand case-insensitive filesystems,
+            # but PurePath().relative_to() does.
+            try:
+                value = value.relative_to(prefix)
+            except ValueError:
+                raise MesonException(msg.format(option, value, prefix))
+            if '..' in str(value):
+                raise MesonException(msg.format(option, value, prefix))
+        return value.as_posix()
 
-    def init_builtins(self):
+    def init_builtins(self, subproject: str):
         # Create builtin options with default values
-        self.builtins = {}
-        for key, opt in builtin_options.items():
-            self.builtins[key] = opt.init_option(key, default_prefix())
-        self.builtins_per_machine = PerMachine({}, {})
+        for key, opt in BUILTIN_OPTIONS.items():
+            self.add_builtin_option(self.builtins, key, opt, subproject)
         for for_machine in iter(MachineChoice):
-            for key, opt in builtin_options_per_machine.items():
-                self.builtins_per_machine[for_machine][key] = opt.init_option()
+            for key, opt in BUILTIN_OPTIONS_PER_MACHINE.items():
+                self.add_builtin_option(self.builtins_per_machine[for_machine], key, opt, subproject)
+
+    def add_builtin_option(self, opts_map, key, opt, subproject):
+        if subproject:
+            if opt.yielding:
+                # This option is global and not per-subproject
+                return
+            optname = subproject + ':' + key
+            value = opts_map[key].value
+        else:
+            optname = key
+            value = None
+        opts_map[optname] = opt.init_option(key, value, default_prefix())
 
-    def init_backend_options(self, backend_name):
+    def init_backend_options(self, backend_name: str) -> None:
         if backend_name == 'ninja':
             self.backend_options['backend_max_links'] = \
                 UserIntegerOption(
                     'Maximum number of linker processes to run or 0 for no '
                     'limit',
-                    0, None, 0)
+                    (0, None, 0))
         elif backend_name.startswith('vs'):
             self.backend_options['backend_startup_project'] = \
                 UserStringOption(
                     'Default project to execute in Visual Studio',
                     '')
 
-    def get_builtin_option(self, optname):
+    def get_builtin_option(self, optname: str, subproject: str = '') -> T.Union[str, int, bool]:
+        raw_optname = optname
+        if subproject:
+            optname = subproject + ':' + optname
         for opts in self._get_all_builtin_options():
             v = opts.get(optname)
+            if v is None or v.yielding:
+                v = opts.get(raw_optname)
             if v is None:
                 continue
-            if optname == 'wrap_mode':
+            if raw_optname == 'wrap_mode':
                 return WrapMode.from_string(v.value)
             return v.value
-        raise RuntimeError('Tried to get unknown builtin option %s.' % optname)
+        raise RuntimeError('Tried to get unknown builtin option %s.' % raw_optname)
 
     def _try_set_builtin_option(self, optname, value):
         for opts in self._get_all_builtin_options():
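The `sanitize_dir_option_value` rewrite in the hunk above replaces the `os.path.commonpath` check with `PurePath.relative_to`, which also copes with case-insensitive filesystems, and returns prefix-relative posix paths. A minimal sketch of that check using the standard pathlib:

from pathlib import PurePath

def dir_relative_to_prefix(value: str, prefix: str) -> str:
    p = PurePath(value)
    if not p.is_absolute():
        return p.as_posix()          # relative values are already prefix-relative
    try:
        rel = p.relative_to(prefix)  # raises ValueError if value is outside prefix
    except ValueError:
        raise ValueError('{!r} must be a subdir of the prefix {!r}'.format(value, prefix))
    return rel.as_posix()

print(dir_relative_to_prefix('/usr/lib/x86_64-linux-gnu', '/usr'))  # lib/x86_64-linux-gnu
print(dir_relative_to_prefix('share/man', '/usr'))                  # share/man
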
@@ -586,28 +633,59 @@
             mode = 'custom'
         self.builtins['buildtype'].set_value(mode)
 
-    @staticmethod
+    @classmethod
     def get_prefixed_options_per_machine(
+        cls,
         options_per_machine # : PerMachine[T.Dict[str, _V]]
-    ) -> T.Iterable[T.Dict[str, _V]]:
-        for for_machine in iter(MachineChoice):
-            prefix = for_machine.get_prefix()
-            yield {
-                prefix + k: v
-                for k, v in options_per_machine[for_machine].items()
-            }
+    ) -> T.Iterable[T.Tuple[str, _V]]:
+        return cls._flatten_pair_iterator(
+            (for_machine.get_prefix(), options_per_machine[for_machine])
+            for for_machine in iter(MachineChoice)
+        )
+
+    @classmethod
+    def flatten_lang_iterator(
+        cls,
+        outer # : T.Iterable[T.Tuple[str, T.Dict[str, _V]]]
+    ) -> T.Iterable[T.Tuple[str, _V]]:
+        return cls._flatten_pair_iterator((lang + '_', opts) for lang, opts in outer)
+
+    @staticmethod
+    def _flatten_pair_iterator(
+        outer # : T.Iterable[T.Tuple[str, T.Dict[str, _V]]]
+    ) -> T.Iterable[T.Tuple[str, _V]]:
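+        # Joins each outer key with its inner keys, e.g. (illustrative)
+        # ('build.', {'pkg_config_path': opt}) yields ('build.pkg_config_path', opt).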
+        for k0, v0 in outer:
+            for k1, v1 in v0.items():
+                yield (k0 + k1, v1)
+
+    @classmethod
+    def insert_build_prefix(cls, k):
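+        # Inserts the 'build.' machine prefix after any subproject prefix,
+        # e.g. (illustrative) 'c_args' -> 'build.c_args' and
+        # 'sub:c_args' -> 'sub:build.c_args'.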
+        idx = k.find(':')
+        if idx < 0:
+            return 'build.' + k
+        return k[:idx + 1] + 'build.' + k[idx + 1:]
+
+    @classmethod
+    def is_per_machine_option(cls, optname):
+        if optname in BUILTIN_OPTIONS_PER_MACHINE:
+            return True
+        from .compilers import compilers
+        for lang_prefix in [lang + '_' for lang in compilers.all_languages]:
+            if optname.startswith(lang_prefix):
+                return True
+        return False
 
     def _get_all_nonbuiltin_options(self) -> T.Iterable[T.Dict[str, UserOption]]:
         yield self.backend_options
         yield self.user_options
-        yield from self.get_prefixed_options_per_machine(self.compiler_options)
+        yield dict(self.flatten_lang_iterator(self.get_prefixed_options_per_machine(self.compiler_options)))
         yield self.base_options
 
-    def _get_all_builtin_options(self) -> T.Dict[str, UserOption]:
-        yield from self.get_prefixed_options_per_machine(self.builtins_per_machine)
+    def _get_all_builtin_options(self) -> T.Iterable[T.Dict[str, UserOption]]:
+        yield dict(self.get_prefixed_options_per_machine(self.builtins_per_machine))
         yield self.builtins
 
-    def get_all_options(self) -> T.Dict[str, UserOption]:
+    def get_all_options(self) -> T.Iterable[T.Dict[str, UserOption]]:
         yield from self._get_all_nonbuiltin_options()
         yield from self._get_all_builtin_options()
 
@@ -623,40 +701,56 @@
         raise MesonException('Tried to validate unknown option %s.' % option_name)
 
     def get_external_args(self, for_machine: MachineChoice, lang):
-        return self.compiler_options[for_machine][lang + '_args'].value
+        return self.compiler_options[for_machine][lang]['args'].value
 
     def get_external_link_args(self, for_machine: MachineChoice, lang):
-        return self.compiler_options[for_machine][lang + '_link_args'].value
+        return self.compiler_options[for_machine][lang]['link_args'].value
 
-    def merge_user_options(self, options):
+    def merge_user_options(self, options: T.Dict[str, UserOption[T.Any]]) -> None:
         for (name, value) in options.items():
             if name not in self.user_options:
                 self.user_options[name] = value
-            else:
-                oldval = self.user_options[name]
-                if type(oldval) != type(value):
-                    self.user_options[name] = value
+                continue
 
-    def is_cross_build(self) -> bool:
+            oldval = self.user_options[name]
+            if type(oldval) != type(value):
+                self.user_options[name] = value
+            elif oldval.choices != value.choices:
+                # If the choices have changed, use the new value, but attempt
+                # to keep the old options. If they are not valid keep the new
+                # defaults but warn.
+                self.user_options[name] = value
+                try:
+                    value.set_value(oldval.value)
+                except MesonException:
+                    mlog.warning('Old value(s) of {} are no longer valid, resetting to default ({}).'.format(name, value.value))
+
+    def is_cross_build(self, when_building_for: MachineChoice = MachineChoice.HOST) -> bool:
+        if when_building_for == MachineChoice.BUILD:
+            return False
         return len(self.cross_files) > 0
 
     def strip_build_option_names(self, options):
-        res = {}
+        res = OrderedDict()
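+        # When not cross compiling, a 'build.'-prefixed option falls back to
+        # its unprefixed name; if both are given, the unprefixed one wins.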
         for k, v in options.items():
             if k.startswith('build.'):
                 k = k.split('.', 1)[1]
-            res[k] = v
+                res.setdefault(k, v)
+            else:
+                res[k] = v
         return res
 
     def copy_build_options_from_regular_ones(self):
         assert(not self.is_cross_build())
         for k, o in self.builtins_per_machine.host.items():
             self.builtins_per_machine.build[k].set_value(o.value)
-        for k, o in self.compiler_options.host.items():
-            if k in self.compiler_options.build:
-                self.compiler_options.build[k].set_value(o.value)
+        for lang, host_opts in self.compiler_options.host.items():
+            build_opts = self.compiler_options.build[lang]
+            for k, o in host_opts.items():
+                if k in build_opts:
+                    build_opts[k].set_value(o.value)
 
-    def set_options(self, options, *, subproject='', warn_unknown=True):
+    def set_options(self, options: T.Dict[str, T.Any], subproject: str = '', warn_unknown: bool = True) -> None:
         if not self.is_cross_build():
             options = self.strip_build_option_names(options)
         # Set prefix first because it's needed to sanitize other options
@@ -665,7 +759,7 @@
             self.builtins['prefix'].set_value(prefix)
             for key in builtin_dir_noprefix_options:
                 if key not in options:
-                    self.builtins[key].set_value(builtin_options[key].prefixed_default(key, prefix))
+                    self.builtins[key].set_value(BUILTIN_OPTIONS[key].prefixed_default(key, prefix))
 
         unknown_options = []
         for k, v in options.items():
@@ -685,119 +779,167 @@
             unknown_options = ', '.join(sorted(unknown_options))
             sub = 'In subproject {}: '.format(subproject) if subproject else ''
             mlog.warning('{}Unknown options: "{}"'.format(sub, unknown_options))
+            mlog.log('The value of new options can be set with:')
+            mlog.log(mlog.bold('meson setup  --reconfigure -Dnew_option=new_value ...'))
         if not self.is_cross_build():
             self.copy_build_options_from_regular_ones()
 
-    def set_default_options(self, default_options, subproject, env):
-        # Set defaults first from conf files (cross or native), then
-        # override them as nec as necessary.
-        for k, v in env.paths.host:
-            if v is not None:
-                env.cmd_line_options.setdefault(k, v)
-
-        # Set default options as if they were passed to the command line.
-        # Subprojects can only define default for user options.
-        from . import optinterpreter
+    def set_default_options(self, default_options: 'T.OrderedDict[str, str]', subproject: str, env: 'Environment') -> None:
+        # Preserve order: if env.raw_options has 'buildtype' it must come after
+        # 'optimization' if it is in default_options.
+        raw_options = OrderedDict()
         for k, v in default_options.items():
             if subproject:
-                if optinterpreter.is_invalid_name(k, log=False):
-                    continue
                 k = subproject + ':' + k
-            env.cmd_line_options.setdefault(k, v)
+            raw_options[k] = v
+        raw_options.update(env.raw_options)
+        env.raw_options = raw_options
 
-        # Create a subset of cmd_line_options, keeping only options for this
-        # subproject. Also take builtin options if it's the main project.
+        # Create a subset of raw_options, keeping only project and builtin
+        # options for this subproject.
         # Language and backend specific options will be set later when adding
         # languages and setting the backend (builtin options must be set first
         # to know which backend we'll use).
-        options = {}
+        options = OrderedDict()
 
-        # Some options default to environment variables if they are
-        # unset, set those now. These will either be overwritten
-        # below, or they won't. These should only be set on the first run.
-        if env.first_invocation:
-            p_env = os.environ.get('PKG_CONFIG_PATH')
-            if p_env:
-                # PKG_CONFIG_PATH may contain duplicates, which must be
-                # removed, else a duplicates-in-array-option warning arises.
-                pkg_config_paths = []
-                for k in p_env.split(':'):
-                    if k not in pkg_config_paths:
-                        pkg_config_paths.append(k)
-                options['pkg_config_path'] = pkg_config_paths
-
-        for k, v in env.cmd_line_options.items():
+        from . import optinterpreter
+        for k, v in env.raw_options.items():
+            raw_optname = k
             if subproject:
+                # Subproject: skip options for other subprojects
                 if not k.startswith(subproject + ':'):
                     continue
-            elif k not in builtin_options.keys() \
-                    and 'build.' + k not in builtin_options_per_machine.keys() \
-                    and k not in builtin_options_per_machine.keys():
-                if ':' in k:
-                    continue
-                if optinterpreter.is_invalid_name(k, log=False):
-                    continue
+                raw_optname = k.split(':')[1]
+            elif ':' in k:
+                # Main project: skip options for subprojects
+                continue
+            # Skip base, compiler, and backend options; they are handled when
+            # adding languages and setting the backend.
+            if (k not in self.builtins and
+                k not in self.get_prefixed_options_per_machine(self.builtins_per_machine) and
+                optinterpreter.is_invalid_name(raw_optname, log=False)):
+                continue
             options[k] = v
 
         self.set_options(options, subproject=subproject)
 
+    def add_compiler_options(self, options, lang, for_machine, env):
+        # prefixed compiler options affect just this machine
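+        # e.g. (illustrative) '-Dbuild.c_args=...' only affects the build
+        # machine, while '-Dc_args=...' affects the host machine.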
+        opt_prefix = for_machine.get_prefix()
+        for k, o in options.items():
+            optname = opt_prefix + lang + '_' + k
+            value = env.raw_options.get(optname)
+            if value is not None:
+                o.set_value(value)
+            self.compiler_options[for_machine][lang].setdefault(k, o)
+
     def add_lang_args(self, lang: str, comp: T.Type['Compiler'],
                       for_machine: MachineChoice, env: 'Environment') -> None:
         """Add global language arguments that are needed before compiler/linker detection."""
         from .compilers import compilers
+        options = compilers.get_global_options(lang, comp, for_machine,
+                                               env.is_cross_build())
+        self.add_compiler_options(options, lang, for_machine, env)
 
-        optprefix = lang + '_'
-        for k, o in compilers.get_global_options(lang, comp, env.properties[for_machine]).items():
-            if not k.startswith(optprefix):
-                raise MesonException('Internal error, %s has incorrect prefix.' % k)
-            # prefixed compiler options affect just this machine
-            opt_prefix = for_machine.get_prefix()
-            if opt_prefix + k in env.cmd_line_options:
-                o.set_value(env.cmd_line_options[opt_prefix + k])
-            self.compiler_options[for_machine].setdefault(k, o)
-
-    def process_new_compiler(self, lang: str, comp: T.Type['Compiler'], env: 'Environment') -> None:
+    def process_new_compiler(self, lang: str, comp: 'Compiler', env: 'Environment') -> None:
         from . import compilers
 
         self.compilers[comp.for_machine][lang] = comp
-        enabled_opts = []
-
-        optprefix = lang + '_'
-        for k, o in comp.get_options().items():
-            if not k.startswith(optprefix):
-                raise MesonException('Internal error, %s has incorrect prefix.' % k)
-            # prefixed compiler options affect just this machine
-            opt_prefix = comp.for_machine.get_prefix()
-            if opt_prefix + k in env.cmd_line_options:
-                o.set_value(env.cmd_line_options[opt_prefix + k])
-            self.compiler_options[comp.for_machine].setdefault(k, o)
+        self.add_compiler_options(comp.get_options(), lang, comp.for_machine, env)
 
         enabled_opts = []
         for optname in comp.base_options:
             if optname in self.base_options:
                 continue
             oobj = compilers.base_options[optname]
-            if optname in env.cmd_line_options:
-                oobj.set_value(env.cmd_line_options[optname])
+            if optname in env.raw_options:
+                oobj.set_value(env.raw_options[optname])
                 enabled_opts.append(optname)
             self.base_options[optname] = oobj
         self.emit_base_options_warnings(enabled_opts)
 
     def emit_base_options_warnings(self, enabled_opts: list):
         if 'b_bitcode' in enabled_opts:
-            mlog.warning('Base option \'b_bitcode\' is enabled, which is incompatible with many linker options. Incompatible options such as such as \'b_asneeded\' have been disabled.')
-            mlog.warning('Please see https://mesonbuild.com/Builtin-options.html#Notes_about_Apple_Bitcode_support for more details.')
+            mlog.warning('Base option \'b_bitcode\' is enabled, which is incompatible with many linker options. Incompatible options such as \'b_asneeded\' have been disabled.', fatal=False)
+            mlog.warning('Please see https://mesonbuild.com/Builtin-options.html#Notes_about_Apple_Bitcode_support for more details.', fatal=False)
 
 class CmdLineFileParser(configparser.ConfigParser):
-    def __init__(self):
+    def __init__(self) -> None:
         # We don't want ':' as key delimiter, otherwise it would break when
         # storing subproject options like "subproject:option=value"
         super().__init__(delimiters=['='], interpolation=None)
 
-def get_cmd_line_file(build_dir):
+    def optionxform(self, option: str) -> str:
+        # Don't call str.lower() on keys
+        return option
+
+class MachineFileParser():
+    def __init__(self, filenames: T.List[str]) -> None:
+        self.parser = CmdLineFileParser()
+        self.constants = {'True': True, 'False': False}
+        self.sections = {}
+
+        self.parser.read(filenames)
+
+        # Parse [constants] first so they can be used in other sections
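+        # Example machine file snippet (illustrative):
+        #   [constants]
+        #   arch = 'aarch64-linux-gnu'
+        #   [binaries]
+        #   c = arch + '-gcc'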
+        if self.parser.has_section('constants'):
+            self.constants.update(self._parse_section('constants'))
+
+        for s in self.parser.sections():
+            if s == 'constants':
+                continue
+            self.sections[s] = self._parse_section(s)
+
+    def _parse_section(self, s):
+        self.scope = self.constants.copy()
+        section = {}
+        for entry, value in self.parser.items(s):
+            if ' ' in entry or '\t' in entry or "'" in entry or '"' in entry:
+                raise EnvironmentException('Malformed variable name {!r} in machine file.'.format(entry))
+            # Escape backslashes so Windows-style paths survive parsing.
+            value = value.replace('\\', '\\\\')
+            try:
+                ast = mparser.Parser(value, 'machinefile').parse()
+                res = self._evaluate_statement(ast.lines[0])
+            except MesonException:
+                raise EnvironmentException('Malformed value in machine file variable {!r}.'.format(entry))
+            except KeyError as e:
+                raise EnvironmentException('Undefined constant {!r} in machine file variable {!r}.'.format(e.args[0], entry))
+            section[entry] = res
+            self.scope[entry] = res
+        return section
+
+    def _evaluate_statement(self, node):
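+        # Evaluates a restricted subset of Meson syntax: string, boolean and
+        # number literals, arrays, previously defined constants, '+' for
+        # string/list concatenation, and '/' to join paths (via os.path.join).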
+        if isinstance(node, (mparser.StringNode)):
+            return node.value
+        elif isinstance(node, mparser.BooleanNode):
+            return node.value
+        elif isinstance(node, mparser.NumberNode):
+            return node.value
+        elif isinstance(node, mparser.ArrayNode):
+            return [self._evaluate_statement(arg) for arg in node.args.arguments]
+        elif isinstance(node, mparser.IdNode):
+            return self.scope[node.value]
+        elif isinstance(node, mparser.ArithmeticNode):
+            l = self._evaluate_statement(node.left)
+            r = self._evaluate_statement(node.right)
+            if node.operation == 'add':
+                if (isinstance(l, str) and isinstance(r, str)) or \
+                   (isinstance(l, list) and isinstance(r, list)):
+                    return l + r
+            elif node.operation == 'div':
+                if isinstance(l, str) and isinstance(r, str):
+                    return os.path.join(l, r)
+        raise EnvironmentException('Unsupported node type')
+
+def parse_machine_files(filenames):
+    parser = MachineFileParser(filenames)
+    return parser.sections
+
+def get_cmd_line_file(build_dir: str) -> str:
     return os.path.join(build_dir, 'meson-private', 'cmd_line.txt')
 
-def read_cmd_line_file(build_dir, options):
+def read_cmd_line_file(build_dir: str, options: argparse.Namespace) -> None:
     filename = get_cmd_line_file(build_dir)
     if not os.path.isfile(filename):
         return
@@ -819,14 +961,14 @@
         # literal_eval to get it into the list of strings.
         options.native_file = ast.literal_eval(properties.get('native_file', '[]'))
 
-def cmd_line_options_to_string(options):
+def cmd_line_options_to_string(options: argparse.Namespace) -> T.Dict[str, str]:
     return {k: str(v) for k, v in options.cmd_line_options.items()}
 
-def write_cmd_line_file(build_dir, options):
+def write_cmd_line_file(build_dir: str, options: argparse.Namespace) -> None:
     filename = get_cmd_line_file(build_dir)
     config = CmdLineFileParser()
 
-    properties = {}
+    properties = OrderedDict()
     if options.cross_file:
         properties['cross_file'] = options.cross_file
     if options.native_file:
@@ -837,7 +979,7 @@
     with open(filename, 'w') as f:
         config.write(f)
 
-def update_cmd_line_file(build_dir, options):
+def update_cmd_line_file(build_dir: str, options: argparse.Namespace):
     filename = get_cmd_line_file(build_dir)
     config = CmdLineFileParser()
     config.read(filename)
@@ -845,7 +987,7 @@
     with open(filename, 'w') as f:
         config.write(f)
 
-def get_cmd_line_options(build_dir, options):
+def get_cmd_line_options(build_dir: str, options: argparse.Namespace) -> str:
     copy = argparse.Namespace(**vars(options))
     read_cmd_line_file(build_dir, copy)
     cmdline = ['-D{}={}'.format(k, v) for k, v in copy.cmd_line_options.items()]
@@ -855,10 +997,10 @@
         cmdline += ['--native-file {}'.format(f) for f in options.native_file]
     return ' '.join([shlex.quote(x) for x in cmdline])
 
-def major_versions_differ(v1, v2):
+def major_versions_differ(v1: str, v2: str) -> bool:
     return v1.split('.')[0:2] != v2.split('.')[0:2]
 
-def load(build_dir):
+def load(build_dir: str) -> CoreData:
     filename = os.path.join(build_dir, 'meson-private', 'coredata.dat')
     load_fail_msg = 'Coredata file {!r} is corrupted. Try with a fresh build tree.'.format(filename)
     try:
@@ -874,12 +1016,10 @@
     if not isinstance(obj, CoreData):
         raise MesonException(load_fail_msg)
     if major_versions_differ(obj.version, version):
-        raise MesonException('Build directory has been generated with Meson version %s, '
-                             'which is incompatible with current version %s.\n' %
-                             (obj.version, version))
+        raise MesonVersionMismatchException(obj.version, version)
     return obj
 
-def save(obj, build_dir):
+def save(obj: CoreData, build_dir: str) -> str:
     filename = os.path.join(build_dir, 'meson-private', 'coredata.dat')
     prev_filename = filename + '.prev'
     tempfilename = filename + '~'
@@ -896,17 +1036,17 @@
     return filename
 
 
-def register_builtin_arguments(parser):
-    for n, b in builtin_options.items():
+def register_builtin_arguments(parser: argparse.ArgumentParser) -> None:
+    for n, b in BUILTIN_OPTIONS.items():
         b.add_to_argparse(n, parser, '', '')
-    for n, b in builtin_options_per_machine.items():
+    for n, b in BUILTIN_OPTIONS_PER_MACHINE.items():
         b.add_to_argparse(n, parser, '', ' (just for host machine)')
         b.add_to_argparse(n, parser, 'build.', ' (just for build machine)')
     parser.add_argument('-D', action='append', dest='projectoptions', default=[], metavar="option",
                         help='Set the value of an option, can be used several times to set multiple options.')
 
-def create_options_dict(options):
-    result = {}
+def create_options_dict(options: T.List[str]) -> T.Dict[str, str]:
+    result = OrderedDict()
     for o in options:
         try:
             (key, value) = o.split('=', 1)
@@ -915,14 +1055,14 @@
         result[key] = value
     return result
 
-def parse_cmd_line_options(args):
+def parse_cmd_line_options(args: argparse.Namespace) -> None:
     args.cmd_line_options = create_options_dict(args.projectoptions)
 
     # Merge builtin options set with --option into the dict.
     for name in chain(
-            builtin_options.keys(),
-            ('build.' + k for k in builtin_options_per_machine.keys()),
-            builtin_options_per_machine.keys(),
+            BUILTIN_OPTIONS.keys(),
+            ('build.' + k for k in BUILTIN_OPTIONS_PER_MACHINE.keys()),
+            BUILTIN_OPTIONS_PER_MACHINE.keys(),
     ):
         value = getattr(args, name, None)
         if value is not None:
@@ -940,10 +1080,10 @@
 
     """Class for a builtin option type.
 
-    Currently doesn't support UserIntegerOption, or a few other cases.
+    There are some cases that are not fully supported yet.
     """
 
-    def __init__(self, opt_type: T.Type[_U], description: str, default: T.Any, yielding: T.Optional[bool] = None, *,
+    def __init__(self, opt_type: T.Type[_U], description: str, default: T.Any, yielding: bool = True, *,
                  choices: T.Any = None):
         self.opt_type = opt_type
         self.description = description
@@ -951,17 +1091,20 @@
         self.choices = choices
         self.yielding = yielding
 
-    def init_option(self, name: str = 'prefix', prefix: str = '') -> _U:
+    def init_option(self, name: str, value: T.Optional[T.Any], prefix: str) -> _U:
         """Create an instance of opt_type and return it."""
-        keywords = {'yielding': self.yielding, 'value': self.prefixed_default(name, prefix)}
+        if value is None:
+            value = self.prefixed_default(name, prefix)
+        keywords = {'yielding': self.yielding, 'value': value}
         if self.choices:
             keywords['choices'] = self.choices
         return self.opt_type(self.description, **keywords)
 
     def _argparse_action(self) -> T.Optional[str]:
-        if self.default is True:
-            return 'store_false'
-        elif self.default is False:
+        # If the type is a boolean, the presence of the argument in --foo form
+        # is to enable it. Disabling happens by using -Dfoo=false, which is
+        # parsed under `args.projectoptions` and does not hit this codepath.
+        if isinstance(self.default, bool):
             return 'store_true'
         return None
 
@@ -989,7 +1132,7 @@
         return self.default
 
     def add_to_argparse(self, name: str, parser: argparse.ArgumentParser, prefix: str, help_suffix: str) -> None:
-        kwargs = {}
+        kwargs = OrderedDict()
 
         c = self._argparse_choices()
         b = self._argparse_action()
@@ -1006,29 +1149,32 @@
         cmdline_name = self.argparse_name_to_arg(prefix + name)
         parser.add_argument(cmdline_name, help=h + help_suffix, **kwargs)
 
+
 # Update `docs/markdown/Builtin-options.md` after changing the options below
-builtin_options = OrderedDict([
-    # Directories
-    ('prefix',     BuiltinOption(UserStringOption, 'Installation prefix', default_prefix())),
-    ('bindir',     BuiltinOption(UserStringOption, 'Executable directory', 'bin')),
-    ('datadir',    BuiltinOption(UserStringOption, 'Data file directory', 'share')),
-    ('includedir', BuiltinOption(UserStringOption, 'Header file directory', 'include')),
-    ('infodir',    BuiltinOption(UserStringOption, 'Info page directory', 'share/info')),
-    ('libdir',     BuiltinOption(UserStringOption, 'Library directory', default_libdir())),
-    ('libexecdir', BuiltinOption(UserStringOption, 'Library executable directory', default_libexecdir())),
-    ('localedir',  BuiltinOption(UserStringOption, 'Locale data directory', 'share/locale')),
+BUILTIN_DIR_OPTIONS = OrderedDict([
+    ('prefix',          BuiltinOption(UserStringOption, 'Installation prefix', default_prefix())),
+    ('bindir',          BuiltinOption(UserStringOption, 'Executable directory', 'bin')),
+    ('datadir',         BuiltinOption(UserStringOption, 'Data file directory', 'share')),
+    ('includedir',      BuiltinOption(UserStringOption, 'Header file directory', 'include')),
+    ('infodir',         BuiltinOption(UserStringOption, 'Info page directory', 'share/info')),
+    ('libdir',          BuiltinOption(UserStringOption, 'Library directory', default_libdir())),
+    ('libexecdir',      BuiltinOption(UserStringOption, 'Library executable directory', default_libexecdir())),
+    ('localedir',       BuiltinOption(UserStringOption, 'Locale data directory', 'share/locale')),
     ('localstatedir',   BuiltinOption(UserStringOption, 'Localstate data directory', 'var')),
     ('mandir',          BuiltinOption(UserStringOption, 'Manual page directory', 'share/man')),
     ('sbindir',         BuiltinOption(UserStringOption, 'System executable directory', 'sbin')),
     ('sharedstatedir',  BuiltinOption(UserStringOption, 'Architecture-independent data directory', 'com')),
     ('sysconfdir',      BuiltinOption(UserStringOption, 'Sysconf data directory', 'etc')),
-    # Core options
+])  # type: OptionDictType
+
+BUILTIN_CORE_OPTIONS = OrderedDict([
     ('auto_features',   BuiltinOption(UserFeatureOption, "Override value of all 'auto' features", 'auto')),
     ('backend',         BuiltinOption(UserComboOption, 'Backend to use', 'ninja', choices=backendlist)),
     ('buildtype',       BuiltinOption(UserComboOption, 'Build type to use', 'debug',
                                       choices=['plain', 'debug', 'debugoptimized', 'release', 'minsize', 'custom'])),
     ('debug',           BuiltinOption(UserBooleanOption, 'Debug', True)),
-    ('default_library', BuiltinOption(UserComboOption, 'Default library type', 'shared', choices=['shared', 'static', 'both'])),
+    ('default_library', BuiltinOption(UserComboOption, 'Default library type', 'shared', choices=['shared', 'static', 'both'],
+                                      yielding=False)),
     ('errorlogs',       BuiltinOption(UserBooleanOption, "Whether to print the logs from failing tests", True)),
     ('install_umask',   BuiltinOption(UserUmaskOption, 'Default umask to apply on permissions of installed files', '022')),
     ('layout',          BuiltinOption(UserComboOption, 'Build directory layout', 'mirror', choices=['mirror', 'flat'])),
@@ -1036,14 +1182,18 @@
     ('stdsplit',        BuiltinOption(UserBooleanOption, 'Split stdout and stderr in test logs', True)),
     ('strip',           BuiltinOption(UserBooleanOption, 'Strip targets on install', False)),
     ('unity',           BuiltinOption(UserComboOption, 'Unity build', 'off', choices=['on', 'off', 'subprojects'])),
-    ('warning_level',   BuiltinOption(UserComboOption, 'Compiler warning level to use', '1', choices=['0', '1', '2', '3'])),
-    ('werror',          BuiltinOption(UserBooleanOption, 'Treat warnings as errors', False)),
+    ('unity_size',      BuiltinOption(UserIntegerOption, 'Unity block size', (2, None, 4))),
+    ('warning_level',   BuiltinOption(UserComboOption, 'Compiler warning level to use', '1', choices=['0', '1', '2', '3'], yielding=False)),
+    ('werror',          BuiltinOption(UserBooleanOption, 'Treat warnings as errors', False, yielding=False)),
     ('wrap_mode',       BuiltinOption(UserComboOption, 'Wrap mode', 'default', choices=['default', 'nofallback', 'nodownload', 'forcefallback'])),
-])
+    ('force_fallback_for', BuiltinOption(UserArrayOption, 'Force fallback for those subprojects', [])),
+])  # type: OptionDictType
 
-builtin_options_per_machine = OrderedDict([
-    ('pkg_config_path', BuiltinOption(UserArrayOption, 'T.List of additional paths for pkg-config to search', [])),
-    ('cmake_prefix_path', BuiltinOption(UserArrayOption, 'T.List of additional prefixes for cmake to search', [])),
+BUILTIN_OPTIONS = OrderedDict(chain(BUILTIN_DIR_OPTIONS.items(), BUILTIN_CORE_OPTIONS.items()))
+
+BUILTIN_OPTIONS_PER_MACHINE = OrderedDict([
+    ('pkg_config_path', BuiltinOption(UserArrayOption, 'List of additional paths for pkg-config to search', [])),
+    ('cmake_prefix_path', BuiltinOption(UserArrayOption, 'List of additional prefixes for cmake to search', [])),
 ])
 
 # Special prefix-dependent defaults for installation directories that reside in
@@ -1054,7 +1204,7 @@
     'sharedstatedir': {'/usr': '/var/lib', '/usr/local': '/var/local/lib'},
 }
 
-forbidden_target_names = {'clean': None,
+FORBIDDEN_TARGET_NAMES = {'clean': None,
                           'clean-ctlist': None,
                           'clean-gcno': None,
                           'clean-gcda': None,
@@ -1075,3 +1225,4 @@
                           'dist': None,
                           'distcheck': None,
                           }
+
diff -Nru meson-0.53.2/mesonbuild/dependencies/base.py meson-0.57.0+really0.56.2/mesonbuild/dependencies/base.py
--- meson-0.53.2/mesonbuild/dependencies/base.py	2020-02-25 18:00:46.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/dependencies/base.py	2021-01-06 10:39:48.000000000 +0000
@@ -22,20 +22,27 @@
 import shlex
 import shutil
 import stat
+import sys
 import textwrap
 import platform
 import typing as T
 from enum import Enum
-from pathlib import Path, PurePath
+from .._pathlib import Path, PurePath
 
 from .. import mlog
 from .. import mesonlib
 from ..compilers import clib_langs
+from ..envconfig import get_env_var
 from ..environment import BinaryTable, Environment, MachineInfo
-from ..cmake import CMakeExecutor, CMakeTraceParser, CMakeException
+from ..cmake import CMakeExecutor, CMakeTraceParser, CMakeException, CMakeToolchain, CMakeExecScope, check_cmake_args
 from ..mesonlib import MachineChoice, MesonException, OrderedSet, PerMachine
 from ..mesonlib import Popen_safe, version_compare_many, version_compare, listify, stringlistify, extract_as_list, split_args
 from ..mesonlib import Version, LibType
+from ..mesondata import mesondata
+
+if T.TYPE_CHECKING:
+    from ..compilers.compilers import CompilerType  # noqa: F401
+    DependencyType = T.TypeVar('DependencyType', bound='Dependency')
 
 # These must be defined in this file to avoid cyclical references.
 packages = {}
@@ -69,39 +76,31 @@
     DUB = 'dub'
 
 
-class Dependency:
-    @classmethod
-    def _process_method_kw(cls, kwargs):
-        method = kwargs.get('method', 'auto')
-        if isinstance(method, DependencyMethods):
-            return method
-        if method not in [e.value for e in DependencyMethods]:
-            raise DependencyException('method {!r} is invalid'.format(method))
-        method = DependencyMethods(method)
-
-        # This sets per-tool config methods which are deprecated to to the new
-        # generic CONFIG_TOOL value.
-        if method in [DependencyMethods.SDLCONFIG, DependencyMethods.CUPSCONFIG,
-                      DependencyMethods.PCAPCONFIG, DependencyMethods.LIBWMFCONFIG]:
-            mlog.warning(textwrap.dedent("""\
-                Configuration method {} has been deprecated in favor of
-                'config-tool'. This will be removed in a future version of
-                meson.""".format(method)))
-            method = DependencyMethods.CONFIG_TOOL
-
-        # Set the detection method. If the method is set to auto, use any available method.
-        # If method is set to a specific string, allow only that detection method.
-        if method == DependencyMethods.AUTO:
-            methods = cls.get_methods()
-        elif method in cls.get_methods():
-            methods = [method]
-        else:
-            raise DependencyException(
-                'Unsupported detection method: {}, allowed methods are {}'.format(
-                    method.value,
-                    mlog.format_list([x.value for x in [DependencyMethods.AUTO] + cls.get_methods()])))
+def find_external_program(env: Environment, for_machine: MachineChoice, name: str,
+                          display_name: str, default_names: T.List[str],
+                          allow_default_for_cross: bool = True) -> T.Generator['ExternalProgram', None, None]:
+    """Find an external program, chcking the cross file plus any default options."""
+    # Lookup in cross or machine file.
+    potential_path = env.lookup_binary_entry(for_machine, name)
+    if potential_path is not None:
+        mlog.debug('{} binary for {} specified from cross file, native file, '
+                    'or env var as {}'.format(display_name, for_machine, potential_path))
+        yield ExternalProgram.from_entry(name, potential_path)
+        # We never fallback if the user-specified option is no good, so
+        # stop returning options.
+        return
+    mlog.debug('{} binary missing from cross or native file, or env var undefined.'.format(display_name))
+    # Fallback on hard-coded defaults, if a default binary is allowed for use
+    # with cross targets, or if this is not a cross target
+    if allow_default_for_cross or not (for_machine is MachineChoice.HOST and env.is_cross_build(for_machine)):
+        for potential_path in default_names:
+            mlog.debug('Trying a default {} fallback at'.format(display_name), potential_path)
+            yield ExternalProgram(potential_path, silent=True)
+    else:
+        mlog.debug('Default target is not allowed for cross use')
+
 
-        return methods
+class Dependency:
 
     @classmethod
     def _process_include_type_kw(cls, kwargs) -> str:
@@ -115,17 +114,17 @@
 
     def __init__(self, type_name, kwargs):
         self.name = "null"
-        self.version = None
+        self.version = None  # type: T.Optional[str]
         self.language = None # None means C-like
         self.is_found = False
         self.type_name = type_name
-        self.compile_args = []
+        self.compile_args = []  # type: T.List[str]
         self.link_args = []
         # Raw -L and -l arguments without manual library searching
         # If None, self.link_args will be used
         self.raw_link_args = None
         self.sources = []
-        self.methods = self._process_method_kw(kwargs)
+        self.methods = process_method_kw(self.get_methods(), kwargs)
         self.include_type = self._process_include_type_kw(kwargs)
         self.ext_deps = []  # type: T.List[Dependency]
 
@@ -133,7 +132,7 @@
         s = '<{0} {1}: {2}>'
         return s.format(self.__class__.__name__, self.name, self.is_found)
 
-    def get_compile_args(self):
+    def get_compile_args(self) -> T.List[str]:
         if self.include_type == 'system':
             converted = []
             for i in self.compile_args:
@@ -157,7 +156,7 @@
             return self.raw_link_args
         return self.link_args
 
-    def found(self):
+    def found(self) -> bool:
         return self.is_found
 
     def get_sources(self):
@@ -172,7 +171,7 @@
     def get_name(self):
         return self.name
 
-    def get_version(self):
+    def get_version(self) -> str:
         if self.version:
             return self.version
         else:
@@ -184,7 +183,7 @@
     def get_exe_args(self, compiler):
         return []
 
-    def get_pkgconfig_variable(self, variable_name, kwargs):
+    def get_pkgconfig_variable(self, variable_name: str, kwargs: T.Dict[str, T.Any]) -> str:
         raise DependencyException('{!r} is not a pkgconfig dependency'.format(self.name))
 
     def get_configtool_variable(self, variable_name):
@@ -208,22 +207,25 @@
         """
         raise RuntimeError('Unreachable code in partial_dependency called')
 
-    def _add_sub_dependency(self, dep_type: T.Type['Dependency'], env: Environment,
-                            kwargs: T.Dict[str, T.Any], *,
-                            method: DependencyMethods = DependencyMethods.AUTO) -> None:
-        """Add an internal dependency of of the given type.
-
-        This method is intended to simplify cases of adding a dependency on
-        another dependency type (such as threads). This will by default set
-        the method back to auto, but the 'method' keyword argument can be
-        used to overwrite this behavior.
+    def _add_sub_dependency(self, deplist: T.Iterable[T.Callable[[], 'Dependency']]) -> bool:
+        """Add an internal depdency from a list of possible dependencies.
+
+        This method is intended to make it easier to add additional
+        dependencies to another dependency internally.
+
+        Returns true if the dependency was successfully added, false
+        otherwise.
         """
-        kwargs = kwargs.copy()
-        kwargs['method'] = method
-        self.ext_deps.append(dep_type(env, kwargs))
+        for d in deplist:
+            dep = d()
+            if dep.is_found:
+                self.ext_deps.append(dep)
+                return True
+        return False
 
     def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None,
-                     configtool: T.Optional[str] = None, default_value: T.Optional[str] = None,
+                     configtool: T.Optional[str] = None, internal: T.Optional[str] = None,
+                     default_value: T.Optional[str] = None,
                      pkgconfig_define: T.Optional[T.List[str]] = None) -> T.Union[str, T.List[str]]:
         if default_value is not None:
             return default_value
@@ -235,7 +237,8 @@
         return new_dep
 
 class InternalDependency(Dependency):
-    def __init__(self, version, incdirs, compile_args, link_args, libraries, whole_libraries, sources, ext_deps):
+    def __init__(self, version, incdirs, compile_args, link_args, libraries,
+                 whole_libraries, sources, ext_deps, variables: T.Dict[str, T.Any]):
         super().__init__('internal', {})
         self.version = version
         self.is_found = True
@@ -246,8 +249,19 @@
         self.whole_libraries = whole_libraries
         self.sources = sources
         self.ext_deps = ext_deps
+        self.variables = variables
+
+    def __deepcopy__(self, memo: dict) -> 'InternalDependency':
+        result = self.__class__.__new__(self.__class__)
+        memo[id(self)] = result
+        for k, v in self.__dict__.items():
+            if k in ['libraries', 'whole_libraries']:
+                setattr(result, k, copy.copy(v))
+            else:
+                setattr(result, k, copy.deepcopy(v, memo))
+        return result
 
-    def get_pkgconfig_variable(self, variable_name, kwargs):
+    def get_pkgconfig_variable(self, variable_name: str, kwargs: T.Dict[str, T.Any]) -> str:
         raise DependencyException('Method "get_pkgconfig_variable()" is '
                                   'invalid for an internal dependency')
 
@@ -270,17 +284,32 @@
         return InternalDependency(
             self.version, final_includes, final_compile_args,
             final_link_args, final_libraries, final_whole_libraries,
-            final_sources, final_deps)
+            final_sources, final_deps, self.variables)
+
+    def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None,
+                     configtool: T.Optional[str] = None, internal: T.Optional[str] = None,
+                     default_value: T.Optional[str] = None,
+                     pkgconfig_define: T.Optional[T.List[str]] = None) -> T.Union[str, T.List[str]]:
+        val = self.variables.get(internal, default_value)
+        if val is not None:
+            return val
+        raise DependencyException('Could not get an internal variable and no default provided for {!r}'.format(self))
+
+    def generate_link_whole_dependency(self) -> T.Type['Dependency']:
+        new_dep = copy.deepcopy(self)
+        new_dep.whole_libraries += new_dep.libraries
+        new_dep.libraries = []
+        return new_dep
 
 class HasNativeKwarg:
-    def __init__(self, kwargs):
+    def __init__(self, kwargs: T.Dict[str, T.Any]):
         self.for_machine = self.get_for_machine_from_kwargs(kwargs)
 
-    def get_for_machine_from_kwargs(self, kwargs):
+    def get_for_machine_from_kwargs(self, kwargs: T.Dict[str, T.Any]) -> MachineChoice:
         return MachineChoice.BUILD if kwargs.get('native', False) else MachineChoice.HOST
 
 class ExternalDependency(Dependency, HasNativeKwarg):
-    def __init__(self, type_name, environment, language, kwargs):
+    def __init__(self, type_name, environment, kwargs, language: T.Optional[str] = None):
         Dependency.__init__(self, type_name, kwargs)
         self.env = environment
         self.name = type_name # default
@@ -296,25 +325,7 @@
             raise DependencyException('Static keyword must be boolean')
         # Is this dependency to be run on the build platform?
         HasNativeKwarg.__init__(self, kwargs)
-        self.clib_compiler = None
-        # Set the compiler that will be used by this dependency
-        # This is only used for configuration checks
-        compilers = self.env.coredata.compilers[self.for_machine]
-        # Set the compiler for this dependency if a language is specified,
-        # else try to pick something that looks usable.
-        if self.language:
-            if self.language not in compilers:
-                m = self.name.capitalize() + ' requires a {0} compiler, but ' \
-                    '{0} is not in the list of project languages'
-                raise DependencyException(m.format(self.language.capitalize()))
-            self.clib_compiler = compilers[self.language]
-        else:
-            # Try to find a compiler that can find C libraries for
-            # running compiler.find_library()
-            for lang in clib_langs:
-                self.clib_compiler = compilers.get(lang, None)
-                if self.clib_compiler:
-                    break
+        self.clib_compiler = detect_compiler(self.name, environment, self.for_machine, self.language)
 
     def get_compiler(self):
         return self.clib_compiler
@@ -396,27 +407,38 @@
 
 class ConfigToolDependency(ExternalDependency):
 
-    """Class representing dependencies found using a config tool."""
+    """Class representing dependencies found using a config tool.
+
+    Takes the following extra keys in kwargs that it uses internally:
+    :tools List[str]: A list of tool names to use
+    :version_arg str: The argument to pass to the tool to get its version
+    :returncode_value int: The return code that signals success,
+        since some tools do not return 0 on success
+    """
 
     tools = None
     tool_name = None
+    version_arg = '--version'
     __strip_version = re.compile(r'^[0-9][0-9.]+')
 
-    def __init__(self, name, environment, language, kwargs):
-        super().__init__('config-tool', environment, language, kwargs)
+    def __init__(self, name, environment, kwargs, language: T.Optional[str] = None):
+        super().__init__('config-tool', environment, kwargs, language=language)
         self.name = name
+        # You may want to overwrite the class version in some cases
         self.tools = listify(kwargs.get('tools', self.tools))
+        if not self.tool_name:
+            self.tool_name = self.tools[0]
+        if 'version_arg' in kwargs:
+            self.version_arg = kwargs['version_arg']
 
         req_version = kwargs.get('version', None)
-        tool, version = self.find_config(req_version)
+        tool, version = self.find_config(req_version, kwargs.get('returncode_value', 0))
         self.config = tool
         self.is_found = self.report_config(version, req_version)
         if not self.is_found:
             self.config = None
             return
         self.version = version
-        if getattr(self, 'finish_init', None):
-            self.finish_init(self)
 
     def _sanitize_version(self, version):
         """Remove any non-numeric, non-point version suffixes."""
@@ -427,63 +449,25 @@
             return m.group(0).rstrip('.')
         return version
 
-    @classmethod
-    def factory(cls, name, environment, language, kwargs, tools, tool_name, finish_init=None):
-        """Constructor for use in dependencies that can be found multiple ways.
-
-        In addition to the standard constructor values, this constructor sets
-        the tool_name and tools values of the instance.
-        """
-        # This deserves some explanation, because metaprogramming is hard.
-        # This uses type() to create a dynamic subclass of ConfigToolDependency
-        # with the tools and tool_name class attributes set, this class is then
-        # instantiated and returned. The reduce function (method) is also
-        # attached, since python's pickle module won't be able to do anything
-        # with this dynamically generated class otherwise.
-        def reduce(self):
-            return (cls._unpickle, (), self.__dict__)
-        sub = type('{}Dependency'.format(name.capitalize()), (cls, ),
-                   {'tools': tools, 'tool_name': tool_name, '__reduce__': reduce, 'finish_init': staticmethod(finish_init)})
-
-        return sub(name, environment, language, kwargs)
-
-    @classmethod
-    def _unpickle(cls):
-        return cls.__new__(cls)
-
-    def find_config(self, versions=None):
+    def find_config(self, versions: T.Optional[T.List[str]] = None, returncode: int = 0) \
+            -> T.Tuple[T.Optional[str], T.Optional[str]]:
         """Helper method that searches for config tool binaries in PATH and
         returns the one that best matches the given version requirements.
         """
         if not isinstance(versions, list) and versions is not None:
             versions = listify(versions)
-
-        tool = self.env.binaries[self.for_machine].lookup_entry(self.tool_name)
-        if tool is not None:
-            tools = [tool]
-        else:
-            if not self.env.machines.matches_build_machine(self.for_machine):
-                mlog.deprecation('No entry for {0} specified in your cross file. '
-                                 'Falling back to searching PATH. This may find a '
-                                 'native version of {0}! This will become a hard '
-                                 'error in a future version of meson'.format(self.tool_name))
-            tools = [[t] for t in self.tools]
-
-        best_match = (None, None)
-        for tool in tools:
-            if len(tool) == 1:
-                # In some situations the command can't be directly executed.
-                # For example Shell scripts need to be called through sh on
-                # Windows (see issue #1423).
-                potential_bin = ExternalProgram(tool[0], silent=True)
-                if not potential_bin.found():
-                    continue
-                tool = potential_bin.get_command()
+        best_match = (None, None)  # type: T.Tuple[T.Optional[str], T.Optional[str]]
+        for potential_bin in find_external_program(
+                self.env, self.for_machine, self.tool_name,
+                self.tool_name, self.tools, allow_default_for_cross=False):
+            if not potential_bin.found():
+                continue
+            tool = potential_bin.get_command()
             try:
-                p, out = Popen_safe(tool + ['--version'])[:2]
+                p, out = Popen_safe(tool + [self.version_arg])[:2]
             except (FileNotFoundError, PermissionError):
                 continue
-            if p.returncode != 0:
+            if p.returncode != returncode:
                 continue
 
             out = self._sanitize_version(out.strip())
@@ -525,7 +509,7 @@
 
         return self.config is not None
 
-    def get_config_value(self, args, stage):
+    def get_config_value(self, args: T.List[str], stage: str) -> T.List[str]:
         p, out, err = Popen_safe(self.config + args)
         if p.returncode != 0:
             if self.required:
@@ -554,7 +538,8 @@
         return self.type_name
 
     def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None,
-                     configtool: T.Optional[str] = None, default_value: T.Optional[str] = None,
+                     configtool: T.Optional[str] = None, internal: T.Optional[str] = None,
+                     default_value: T.Optional[str] = None,
                      pkgconfig_define: T.Optional[T.List[str]] = None) -> T.Union[str, T.List[str]]:
         if configtool:
             # In the not required case '' (empty string) will be returned if the
@@ -581,33 +566,14 @@
     # We cache all pkg-config subprocess invocations to avoid redundant calls
     pkgbin_cache = {}
 
-    def __init__(self, name, environment, kwargs, language=None):
-        super().__init__('pkgconfig', environment, language, kwargs)
+    def __init__(self, name, environment, kwargs, language: T.Optional[str] = None):
+        super().__init__('pkgconfig', environment, kwargs, language=language)
         self.name = name
         self.is_libtool = False
         # Store a copy of the pkg-config path on the object itself so it is
         # stored in the pickled coredata and recovered.
         self.pkgbin = None
 
-        # Create an iterator of options
-        def search():
-            # Lookup in cross or machine file.
-            potential_pkgpath = environment.binaries[self.for_machine].lookup_entry('pkgconfig')
-            if potential_pkgpath is not None:
-                mlog.debug('Pkg-config binary for {} specified from cross file, native file, '
-                           'or env var as {}'.format(self.for_machine, potential_pkgpath))
-                yield ExternalProgram.from_entry('pkgconfig', potential_pkgpath)
-                # We never fallback if the user-specified option is no good, so
-                # stop returning options.
-                return
-            mlog.debug('Pkg-config binary missing from cross or native file, or env var undefined.')
-            # Fallback on hard-coded defaults.
-            # TODO prefix this for the cross case instead of ignoring thing.
-            if environment.machines.matches_build_machine(self.for_machine):
-                for potential_pkgpath in environment.default_pkgconfig:
-                    mlog.debug('Trying a default pkg-config fallback at', potential_pkgpath)
-                    yield ExternalProgram(potential_pkgpath, silent=True)
-
         # Only search for pkg-config for each machine the first time and store
         # the result in the class definition
         if PkgConfigDependency.class_pkgbin[self.for_machine] is False:
@@ -617,9 +583,9 @@
         else:
             assert PkgConfigDependency.class_pkgbin[self.for_machine] is None
             mlog.debug('Pkg-config binary for %s is not cached.' % self.for_machine)
-            for potential_pkgbin in search():
-                mlog.debug('Trying pkg-config binary {} for machine {} at {}'
-                           .format(potential_pkgbin.name, self.for_machine, potential_pkgbin.command))
+            for potential_pkgbin in find_external_program(
+                    self.env, self.for_machine, 'pkgconfig', 'Pkg-config',
+                    environment.default_pkgconfig, allow_default_for_cross=False):
                 version_if_ok = self.check_pkgconfig(potential_pkgbin)
                 if not version_if_ok:
                     continue
@@ -681,6 +647,25 @@
         mlog.debug("Called `{}` -> {}\n{}".format(call, rc, out))
         return rc, out, err
 
+    @staticmethod
+    def setup_env(env, environment, for_machine, extra_path=None):
+        extra_paths = environment.coredata.builtins_per_machine[for_machine]['pkg_config_path'].value
+        if extra_path:
+            extra_paths.append(extra_path)
+        sysroot = environment.properties[for_machine].get_sys_root()
+        if sysroot:
+            env['PKG_CONFIG_SYSROOT_DIR'] = sysroot
+        new_pkg_config_path = ':'.join([p for p in extra_paths])
+        mlog.debug('PKG_CONFIG_PATH: ' + new_pkg_config_path)
+        env['PKG_CONFIG_PATH'] = new_pkg_config_path
+
+        pkg_config_libdir_prop = environment.properties[for_machine].get_pkg_config_libdir()
+        if pkg_config_libdir_prop:
+            new_pkg_config_libdir = ':'.join([p for p in pkg_config_libdir_prop])
+            env['PKG_CONFIG_LIBDIR'] = new_pkg_config_libdir
+            mlog.debug('PKG_CONFIG_LIBDIR: ' + new_pkg_config_libdir)
+
+
     def _call_pkgbin(self, args, env=None):
         # Always copy the environment since we're going to modify it
         # with pkg-config variables
@@ -689,13 +674,7 @@
         else:
             env = env.copy()
 
-        extra_paths = self.env.coredata.builtins_per_machine[self.for_machine]['pkg_config_path'].value
-        sysroot = self.env.properties[self.for_machine].get_sys_root()
-        if sysroot:
-            env['PKG_CONFIG_SYSROOT_DIR'] = sysroot
-        new_pkg_config_path = ':'.join([p for p in extra_paths])
-        mlog.debug('PKG_CONFIG_PATH: ' + new_pkg_config_path)
-        env['PKG_CONFIG_PATH'] = new_pkg_config_path
+        PkgConfigDependency.setup_env(env, self.env, self.for_machine)
 
         fenv = frozenset(env.items())
         targs = tuple(args)
@@ -797,7 +776,10 @@
         #
         # Only prefix_libpaths are reordered here because there should not be
         # too many system_libpaths to cause library version issues.
-        pkg_config_path = os.environ.get('PKG_CONFIG_PATH')
+        pkg_config_path = get_env_var(
+            self.for_machine,
+            self.env.is_cross_build(),
+            'PKG_CONFIG_PATH')
         if pkg_config_path:
             pkg_config_path = pkg_config_path.split(os.pathsep)
         else:
@@ -907,7 +889,7 @@
                                       (self.name, out_raw))
         self.link_args, self.raw_link_args = self._search_libs(out, out_raw)
 
-    def get_pkgconfig_variable(self, variable_name, kwargs):
+    def get_pkgconfig_variable(self, variable_name: str, kwargs: T.Dict[str, T.Any]) -> str:
         options = ['--variable=' + variable_name, self.name]
 
         if 'define_variable' in kwargs:
@@ -1007,7 +989,8 @@
         return self.type_name
 
     def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None,
-                     configtool: T.Optional[str] = None, default_value: T.Optional[str] = None,
+                     configtool: T.Optional[str] = None, internal: T.Optional[str] = None,
+                     default_value: T.Optional[str] = None,
                      pkgconfig_define: T.Optional[T.List[str]] = None) -> T.Union[str, T.List[str]]:
         if pkgconfig:
             kwargs = {}
@@ -1042,19 +1025,29 @@
     def _extra_cmake_opts(self) -> T.List[str]:
         return []
 
-    def _map_module_list(self, modules: T.List[T.Tuple[str, bool]]) -> T.List[T.Tuple[str, bool]]:
+    def _map_module_list(self, modules: T.List[T.Tuple[str, bool]], components: T.List[T.Tuple[str, bool]]) -> T.List[T.Tuple[str, bool]]:
         # Map the input module list to something else
         # This function will only be executed AFTER the initial CMake
         # interpreter pass has completed. Thus variables defined in the
         # CMakeLists.txt can be accessed here.
+        #
+        # Both the modules and components inputs contain the original lists.
         return modules
 
+    def _map_component_list(self, modules: T.List[T.Tuple[str, bool]], components: T.List[T.Tuple[str, bool]]) -> T.List[T.Tuple[str, bool]]:
+        # Map the input components list to something else. This
+        # function will be executed BEFORE the initial CMake interpreter
+        # pass. Thus variables from the CMakeLists.txt can NOT be accessed.
+        #
+        # Both the modules and components inputs contain the original lists.
+        return components
+
     def _original_module_name(self, module: str) -> str:
         # Reverse the module mapping done by _map_module_list for
         # one module
         return module
 
-    def __init__(self, name: str, environment: Environment, kwargs, language: str = None):
+    def __init__(self, name: str, environment: Environment, kwargs, language: T.Optional[str] = None):
         # Gather a list of all languages to support
         self.language_list = []  # type: T.List[str]
         if language is None:
@@ -1076,14 +1069,13 @@
         # Ensure that the list is unique
         self.language_list = list(set(self.language_list))
 
-        super().__init__('cmake', environment, language, kwargs)
+        super().__init__('cmake', environment, kwargs, language=language)
         self.name = name
         self.is_libtool = False
         # Store a copy of the CMake path on the object itself so it is
         # stored in the pickled coredata and recovered.
         self.cmakebin = None
         self.cmakeinfo = None
-        self.traceparser = CMakeTraceParser()
 
         # Where all CMake "build dirs" are located
         self.cmake_root_dir = environment.scratch_dir
@@ -1091,50 +1083,49 @@
         # T.List of successfully found modules
         self.found_modules = []
 
+        # Initialize with None before the first return to avoid
+        # AttributeError exceptions in derived classes
+        self.traceparser = None  # type: CMakeTraceParser
+
+        # TODO further evaluate always using MachineChoice.BUILD
         self.cmakebin = CMakeExecutor(environment, CMakeDependency.class_cmake_version, self.for_machine, silent=self.silent)
         if not self.cmakebin.found():
             self.cmakebin = None
-            msg = 'No CMake binary for machine %s not found. Giving up.' % self.for_machine
+            msg = 'No CMake binary for machine {} found. Giving up.'.format(self.for_machine)
             if self.required:
                 raise DependencyException(msg)
             mlog.debug(msg)
             return
 
+        # Setup the trace parser
+        self.traceparser = CMakeTraceParser(self.cmakebin.version(), self._get_build_dir())
+
+        cm_args = stringlistify(extract_as_list(kwargs, 'cmake_args'))
+        cm_args = check_cmake_args(cm_args)
         if CMakeDependency.class_cmakeinfo[self.for_machine] is None:
-            CMakeDependency.class_cmakeinfo[self.for_machine] = self._get_cmake_info()
+            CMakeDependency.class_cmakeinfo[self.for_machine] = self._get_cmake_info(cm_args)
         self.cmakeinfo = CMakeDependency.class_cmakeinfo[self.for_machine]
         if self.cmakeinfo is None:
             raise self._gen_exception('Unable to obtain CMake system information')
 
+        components = [(x, True) for x in stringlistify(extract_as_list(kwargs, 'components'))]
         modules = [(x, True) for x in stringlistify(extract_as_list(kwargs, 'modules'))]
         modules += [(x, False) for x in stringlistify(extract_as_list(kwargs, 'optional_modules'))]
         cm_path = stringlistify(extract_as_list(kwargs, 'cmake_module_path'))
         cm_path = [x if os.path.isabs(x) else os.path.join(environment.get_source_dir(), x) for x in cm_path]
-        cm_args = stringlistify(extract_as_list(kwargs, 'cmake_args'))
         if cm_path:
             cm_args.append('-DCMAKE_MODULE_PATH=' + ';'.join(cm_path))
-
-        pref_path = self.env.coredata.builtins_per_machine[self.for_machine]['cmake_prefix_path'].value
-        if 'CMAKE_PREFIX_PATH' in os.environ:
-            env_pref_path = os.environ['CMAKE_PREFIX_PATH'].split(os.pathsep)
-            env_pref_path = [x for x in env_pref_path if x]  # Filter out empty strings
-            if not pref_path:
-                pref_path = []
-            pref_path += env_pref_path
-        if pref_path:
-            cm_args.append('-DCMAKE_PREFIX_PATH={}'.format(';'.join(pref_path)))
-
-        if not self._preliminary_find_check(name, cm_path, pref_path, environment.machines[self.for_machine]):
+        if not self._preliminary_find_check(name, cm_path, self.cmakebin.get_cmake_prefix_paths(), environment.machines[self.for_machine]):
             mlog.debug('Preliminary CMake check failed. Aborting.')
             return
-        self._detect_dep(name, modules, cm_args)
+        self._detect_dep(name, modules, components, cm_args)
 
     def __repr__(self):
         s = '<{0} {1}: {2} {3}>'
         return s.format(self.__class__.__name__, self.name, self.is_found,
                         self.version_reqs)
 
-    def _get_cmake_info(self):
+    def _get_cmake_info(self, cm_args):
         mlog.debug("Extracting basic cmake information")
         res = {}
 
@@ -1146,11 +1137,16 @@
             gen_list += [CMakeDependency.class_working_generator]
         gen_list += CMakeDependency.class_cmake_generators
 
+        temp_parser = CMakeTraceParser(self.cmakebin.version(), self._get_build_dir())
+        toolchain = CMakeToolchain(self.env, self.for_machine, CMakeExecScope.DEPENDENCY, self._get_build_dir())
+        toolchain.write()
+
         for i in gen_list:
             mlog.debug('Try CMake generator: {}'.format(i if len(i) > 0 else 'auto'))
 
             # Prepare options
-            cmake_opts = ['--trace-expand', '.']
+            cmake_opts = temp_parser.trace_args() + toolchain.get_cmake_args() + ['.']
+            cmake_opts += cm_args
             if len(i) > 0:
                 cmake_opts = ['-G', i] + cmake_opts
 
@@ -1170,17 +1166,21 @@
             return None
 
         try:
-            temp_parser = CMakeTraceParser()
             temp_parser.parse(err1)
         except MesonException:
             return None
 
+        def process_paths(l: T.List[str]) -> T.Set[str]:
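+            # Some trace entries arrive as ':'-joined strings; split and flatten them,
+            # e.g. ['/usr/lib:/usr/local/lib', '/opt'] -> {'/usr/lib', '/usr/local/lib', '/opt'}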
+            l = [x.split(':') for x in l]
+            l = [x for sublist in l for x in sublist]
+            return set(l)
+
         # Extract the variables and sanity check them
-        root_paths = set(temp_parser.get_cmake_var('MESON_FIND_ROOT_PATH'))
-        root_paths.update(set(temp_parser.get_cmake_var('MESON_CMAKE_SYSROOT')))
+        root_paths = process_paths(temp_parser.get_cmake_var('MESON_FIND_ROOT_PATH'))
+        root_paths.update(process_paths(temp_parser.get_cmake_var('MESON_CMAKE_SYSROOT')))
         root_paths = sorted(root_paths)
         root_paths = list(filter(lambda x: os.path.isdir(x), root_paths))
-        module_paths = set(temp_parser.get_cmake_var('MESON_PATHS_LIST'))
+        module_paths = process_paths(temp_parser.get_cmake_var('MESON_PATHS_LIST'))
         rooted_paths = []
         for j in [Path(x) for x in root_paths]:
             for i in [Path(x) for x in module_paths]:
@@ -1304,7 +1304,7 @@
 
         return False
 
-    def _detect_dep(self, name: str, modules: T.List[T.Tuple[str, bool]], args: T.List[str]):
+    def _detect_dep(self, name: str, modules: T.List[T.Tuple[str, bool]], components: T.List[T.Tuple[str, bool]], args: T.List[str]):
         # Detect a dependency with CMake using the '--find-package' mode
         # and the trace output (stderr)
         #
@@ -1322,12 +1322,24 @@
             gen_list += [CMakeDependency.class_working_generator]
         gen_list += CMakeDependency.class_cmake_generators
 
+        # Map the components
+        comp_mapped = self._map_component_list(modules, components)
+        toolchain = CMakeToolchain(self.env, self.for_machine, CMakeExecScope.DEPENDENCY, self._get_build_dir())
+        toolchain.write()
+
         for i in gen_list:
             mlog.debug('Try CMake generator: {}'.format(i if len(i) > 0 else 'auto'))
 
             # Prepare options
-            cmake_opts = ['--trace-expand', '-DNAME={}'.format(name), '-DARCHS={}'.format(';'.join(self.cmakeinfo['archs']))] + args + ['.']
+            cmake_opts = []
+            cmake_opts += ['-DNAME={}'.format(name)]
+            cmake_opts += ['-DARCHS={}'.format(';'.join(self.cmakeinfo['archs']))]
+            cmake_opts += ['-DCOMPS={}'.format(';'.join([x[0] for x in comp_mapped]))]
+            cmake_opts += args
+            cmake_opts += self.traceparser.trace_args()
+            cmake_opts += toolchain.get_cmake_args()
             cmake_opts += self._extra_cmake_opts()
+            cmake_opts += ['.']
             if len(i) > 0:
                 cmake_opts = ['-G', i] + cmake_opts
 
@@ -1373,7 +1385,7 @@
 
         # Post-process module list. Used in derived classes to modify the
         # module list (append or prepend a string, etc.).
-        modules = self._map_module_list(modules)
+        modules = self._map_module_list(modules, components)
         autodetected_module_list = False
 
         # Try guessing a CMake target if none is provided
@@ -1410,7 +1422,9 @@
                                       'Valid targets are:\n{}'.format(name, list(self.traceparser.targets.keys())))
 
         # Set dependencies with CMake targets
-        reg_is_lib = re.compile(r'^(-l[a-zA-Z0-9_]+|-pthread)$')
+        # recognise arguments we should pass directly to the linker
+        reg_is_lib = re.compile(r'^(-l[a-zA-Z0-9_]+|-pthread|-delayload:[a-zA-Z0-9_\.]+|[a-zA-Z0-9_]+\.lib)$')
+        reg_is_maybe_bare_lib = re.compile(r'^[a-zA-Z0-9_]+$')
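+        # e.g. '-lz', '-pthread', '-delayload:foo.dll' and 'zlib.lib' match reg_is_lib,
+        # while a bare name such as 'version' only matches reg_is_maybe_bare_lib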
         processed_targets = []
         incDirs = []
         compileDefinitions = []
@@ -1455,8 +1469,20 @@
                     cfgs = [x for x in tgt.properties['IMPORTED_CONFIGURATIONS'] if x]
                     cfg = cfgs[0]
 
-                if 'RELEASE' in cfgs:
-                    cfg = 'RELEASE'
+                if 'b_vscrt' in self.env.coredata.base_options:
+                    is_debug = self.env.coredata.get_builtin_option('buildtype') == 'debug'
+                    if self.env.coredata.base_options['b_vscrt'].value in ('mdd', 'mtd'):
+                        is_debug = True
+                else:
+                    is_debug = self.env.coredata.get_builtin_option('debug')
+                if is_debug:
+                    if 'DEBUG' in cfgs:
+                        cfg = 'DEBUG'
+                    elif 'RELEASE' in cfgs:
+                        cfg = 'RELEASE'
+                else:
+                    if 'RELEASE' in cfgs:
+                        cfg = 'RELEASE'
 
                 if 'IMPORTED_IMPLIB_{}'.format(cfg) in tgt.properties:
                     libraries += [x for x in tgt.properties['IMPORTED_IMPLIB_{}'.format(cfg)] if x]
@@ -1478,8 +1504,20 @@
                 for j in otherDeps:
                     if j in self.traceparser.targets:
                         targets += [j]
-                    elif reg_is_lib.match(j) or os.path.exists(j):
+                    elif reg_is_lib.match(j):
                         libraries += [j]
+                    elif os.path.isabs(j) and os.path.exists(j):
+                        libraries += [j]
+                    elif mesonlib.is_windows() and reg_is_maybe_bare_lib.match(j):
+                        # On Windows, CMake library dependencies can be passed as bare library names,
+                        # e.g. 'version' should translate into 'version.lib'. CMake brute-forces
+                        # prefix/suffix combinations to find the right library; since we do not have
+                        # a compiler environment available here, we cannot do the same and instead
+                        # assume that any bare argument which is not also a CMake target is a system
+                        # library we should try to link against.
+                        libraries += ["{}.lib".format(j)]
+                    else:
+                        mlog.warning('CMake: Dependency', mlog.bold(j), 'for', mlog.bold(name), 'target', mlog.bold(self._original_module_name(curr)), 'was not found')
 
                 processed_targets += [curr]
 
@@ -1497,14 +1535,24 @@
         self.compile_args = compileOptions + compileDefinitions + ['-I{}'.format(x) for x in incDirs]
         self.link_args = libraries
 
-    def _setup_cmake_dir(self, cmake_file: str) -> str:
-        # Setup the CMake build environment and return the "build" directory
+    def _get_build_dir(self) -> Path:
         build_dir = Path(self.cmake_root_dir) / 'cmake_{}'.format(self.name)
         build_dir.mkdir(parents=True, exist_ok=True)
+        return build_dir
+
+    def _setup_cmake_dir(self, cmake_file: str) -> Path:
+        # Setup the CMake build environment and return the "build" directory
+        build_dir = self._get_build_dir()
+
+        # Remove old CMake cache so we can try out multiple generators
+        cmake_cache = build_dir / 'CMakeCache.txt'
+        cmake_files = build_dir / 'CMakeFiles'
+        if cmake_cache.exists():
+            cmake_cache.unlink()
+        shutil.rmtree(cmake_files.as_posix(), ignore_errors=True)
 
         # Insert language parameters into the CMakeLists.txt and write new CMakeLists.txt
-        src_cmake = Path(__file__).parent / 'data' / cmake_file
-        cmake_txt = src_cmake.read_text()
+        cmake_txt = mesondata['dependencies/data/' + cmake_file].data
 
         # In general, some Fortran CMake find_package() also require C language enabled,
         # even if nothing from C is directly used. An easy Fortran example that fails
@@ -1518,20 +1566,20 @@
         if not cmake_language:
             cmake_language += ['NONE']
 
-        cmake_txt = """
-cmake_minimum_required(VERSION ${{CMAKE_VERSION}})
-project(MesonTemp LANGUAGES {})
-""".format(' '.join(cmake_language)) + cmake_txt
+        cmake_txt = textwrap.dedent("""
+            cmake_minimum_required(VERSION ${{CMAKE_VERSION}})
+            project(MesonTemp LANGUAGES {})
+        """).format(' '.join(cmake_language)) + cmake_txt
 
         cm_file = build_dir / 'CMakeLists.txt'
         cm_file.write_text(cmake_txt)
         mlog.cmd_ci_include(cm_file.absolute().as_posix())
 
-        return str(build_dir)
+        return build_dir
 
     def _call_cmake(self, args, cmake_file: str, env=None):
         build_dir = self._setup_cmake_dir(cmake_file)
-        return self.cmakebin.call_with_fake_build(args, build_dir, env=env)
+        return self.cmakebin.call(args, build_dir, env=env)
 
     @staticmethod
     def get_methods():
@@ -1548,9 +1596,10 @@
         return ''
 
     def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None,
-                     configtool: T.Optional[str] = None, default_value: T.Optional[str] = None,
+                     configtool: T.Optional[str] = None, internal: T.Optional[str] = None,
+                     default_value: T.Optional[str] = None,
                      pkgconfig_define: T.Optional[T.List[str]] = None) -> T.Union[str, T.List[str]]:
-        if cmake:
+        if cmake and self.traceparser is not None:
             try:
                 v = self.traceparser.vars[cmake]
             except KeyError:
@@ -1568,7 +1617,7 @@
     class_dubbin = None
 
     def __init__(self, name, environment, kwargs):
-        super().__init__('dub', environment, 'd', kwargs)
+        super().__init__('dub', environment, kwargs, language='d')
         self.name = name
         self.compiler = super().get_compiler()
         self.module_path = None
@@ -1695,6 +1744,14 @@
             lib_file_name = os.path.basename(default_path)
         module_build_path = os.path.join(module_path, '.dub', 'build')
 
+        # If default_path points to a lib file and the lib's directory
+        # does not have a '.dub/build' subdirectory
+        if not os.path.isdir(module_build_path) and os.path.isfile(default_path):
+            if folder_only:
+                return module_path
+            else:
+                return default_path
+
         # Get D version implemented in the compiler
         # gdc doesn't support this
         ret, res = self._call_dubbin(['--version'])
@@ -1770,6 +1827,17 @@
         self.name = name
         if command is not None:
             self.command = listify(command)
+            if mesonlib.is_windows():
+                cmd = self.command[0]
+                args = self.command[1:]
+                # Check whether the specified cmd is a path to a script, in
+                # which case we need to insert the interpreter. If not, try to
+                # use it as-is.
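+                # (Illustrative: a 'foo.py' script whose first line is a Python
+                # shebang gets rewritten to run via that interpreter.)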
+                ret = self._shebang_to_cmd(cmd)
+                if ret:
+                    self.command = ret + args
+                else:
+                    self.command = [cmd] + args
         else:
             all_search_dirs = [search_dir]
             if extra_search_dirs:
@@ -1807,8 +1875,12 @@
         return ' '.join(self.command)
 
     @classmethod
-    def from_bin_list(cls, bt: BinaryTable, name):
-        command = bt.lookup_entry(name)
+    def from_bin_list(cls, env: Environment, for_machine: MachineChoice, name):
+        # There is a static `for_machine` for this class because the binary
+        # always runs on the build platform. (Its host platform is our build
+        # platform.) But some external programs have a target platform, so this
+        # is what we are specifying here.
+        command = env.lookup_binary_entry(for_machine, name)
         if command is None:
             return NonExistingExternalProgram()
         return cls.from_entry(name, command)
@@ -1819,14 +1891,22 @@
         # Ensure that we use USERPROFILE even when inside MSYS, MSYS2, Cygwin, etc.
         if 'USERPROFILE' not in os.environ:
             return path
-        # Ignore executables in the WindowsApps directory which are
-        # zero-sized wrappers that magically open the Windows Store to
-        # install the application.
+        # The WindowsApps directory is a bit of a problem. It contains
+        # some zero-sized .exe files which have "reparse points", that
+        # might either launch an installed application, or might open
+        # a page in the Windows Store to download the application.
+        #
+        # To handle the case where the python interpreter we're
+        # running on came from the Windows Store, if we see the
+        # WindowsApps path in the search path, replace it with
+        # dirname(sys.executable).
         appstore_dir = Path(os.environ['USERPROFILE']) / 'AppData' / 'Local' / 'Microsoft' / 'WindowsApps'
         paths = []
         for each in path.split(os.pathsep):
             if Path(each) != appstore_dir:
                 paths.append(each)
+            elif 'WindowsApps' in sys.executable:
+                paths.append(os.path.dirname(sys.executable))
         return os.pathsep.join(paths)
 
     @staticmethod
@@ -1843,7 +1923,7 @@
         return ExternalProgram(command, silent=True)
 
     @staticmethod
-    def _shebang_to_cmd(script):
+    def _shebang_to_cmd(script: str) -> T.Optional[list]:
         """
         Check if the file has a shebang and manually parse it to figure out
         the interpreter to use. This is useful if the script is not executable
@@ -1887,7 +1967,7 @@
         except Exception as e:
             mlog.debug(e)
         mlog.debug('Unusable script {!r}'.format(script))
-        return False
+        return None
 
     def _is_executable(self, path):
         suffix = os.path.splitext(path)[-1].lower()[1:]
@@ -1899,9 +1979,9 @@
             return not os.path.isdir(path)
         return False
 
-    def _search_dir(self, name, search_dir):
+    def _search_dir(self, name: str, search_dir: T.Optional[str]) -> T.Optional[list]:
         if search_dir is None:
-            return False
+            return None
         trial = os.path.join(search_dir, name)
         if os.path.exists(trial):
             if self._is_executable(trial):
@@ -1916,9 +1996,9 @@
                     trial_ext = '{}.{}'.format(trial, ext)
                     if os.path.exists(trial_ext):
                         return [trial_ext]
-        return False
+        return None
 
-    def _search_windows_special_cases(self, name, command):
+    def _search_windows_special_cases(self, name: str, command: str) -> list:
         '''
         Lots of weird Windows quirks:
         1. PATH search for @name returns files with extensions from PATHEXT,
@@ -1961,7 +2041,7 @@
                 return commands
         return [None]
 
-    def _search(self, name, search_dir):
+    def _search(self, name: str, search_dir: T.Optional[str]) -> list:
         '''
         Search in the specified dir for the specified executable by name
         and if not found search in PATH
@@ -1983,29 +2063,29 @@
     def found(self) -> bool:
         return self.command[0] is not None
 
-    def get_command(self):
+    def get_command(self) -> T.List[str]:
         return self.command[:]
 
-    def get_path(self):
+    def get_path(self) -> str:
         return self.path
 
-    def get_name(self):
+    def get_name(self) -> str:
         return self.name
 
 
 class NonExistingExternalProgram(ExternalProgram):  # lgtm [py/missing-call-to-init]
     "A program that will never exist"
 
-    def __init__(self, name='nonexistingprogram'):
+    def __init__(self, name: str = 'nonexistingprogram') -> None:
         self.name = name
         self.command = [None]
         self.path = None
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         r = '<{} {!r} -> {!r}>'
         return r.format(self.__class__.__name__, self.name, self.command)
 
-    def found(self):
+    def found(self) -> bool:
         return False
 
 
@@ -2030,7 +2110,7 @@
 
 class ExternalLibrary(ExternalDependency):
     def __init__(self, name, link_args, environment, language, silent=False):
-        super().__init__('library', environment, language, {})
+        super().__init__('library', environment, {}, language=language)
         self.name = name
         self.language = language
         self.is_found = False
@@ -2072,10 +2152,10 @@
 class ExtraFrameworkDependency(ExternalDependency):
     system_framework_paths = None
 
-    def __init__(self, name, required, paths, env, lang, kwargs):
-        super().__init__('extraframeworks', env, lang, kwargs)
+    def __init__(self, name, env, kwargs, language: T.Optional[str] = None):
+        paths = kwargs.get('paths', [])
+        super().__init__('extraframeworks', env, kwargs, language=language)
         self.name = name
-        self.required = required
         # Full path to framework directory
         self.framework_path = None
         if not self.clib_compiler:
@@ -2171,15 +2251,94 @@
         return 'framework'
 
 
+class DependencyFactory:
+
+    """Factory to get dependencies from multiple sources.
+
+    This class provides an initializer that takes a set of names and classes
+    for various kinds of dependencies. When the initialized object is called
+    it returns a list of callables that return Dependency objects to try in order.
+
+    :name: The name of the dependency. This will be passed as the name
+        parameter of each dependency unless it is overridden on a
+        per-type basis.
+    :methods: An ordered list of DependencyMethods. This is the order
+        dependencies will be returned in unless they are removed by the
+        _process_method function
+    :*_name: This will overwrite the name passed to the corresponding class.
+        For example, if the name is 'zlib', but cmake calls the dependency
+        'Z', then using `cmake_name='Z'` will pass the name as 'Z' to cmake.
+    :*_class: A *type* or callable that creates a class, and has the
+        signature of an ExternalDependency
+    :system_class: If you pass DependencyMethods.SYSTEM in methods, you must
+        set this argument.
+    """
+
+    def __init__(self, name: str, methods: T.List[DependencyMethods], *,
+                 extra_kwargs: T.Optional[T.Dict[str, T.Any]] = None,
+                 pkgconfig_name: T.Optional[str] = None,
+                 pkgconfig_class: 'T.Type[PkgConfigDependency]' = PkgConfigDependency,
+                 cmake_name: T.Optional[str] = None,
+                 cmake_class: 'T.Type[CMakeDependency]' = CMakeDependency,
+                 configtool_class: 'T.Optional[T.Type[ConfigToolDependency]]' = None,
+                 framework_name: T.Optional[str] = None,
+                 framework_class: 'T.Type[ExtraFrameworkDependency]' = ExtraFrameworkDependency,
+                 system_class: 'T.Type[ExternalDependency]' = ExternalDependency):
+
+        if DependencyMethods.CONFIG_TOOL in methods and not configtool_class:
+            raise DependencyException('A configtool must have a custom class')
+
+        self.extra_kwargs = extra_kwargs or {}
+        self.methods = methods
+        self.classes = {
+            # Just attach the correct name right now, either the generic name
+            # or the method specific name.
+            DependencyMethods.EXTRAFRAMEWORK: functools.partial(framework_class, framework_name or name),
+            DependencyMethods.PKGCONFIG: functools.partial(pkgconfig_class, pkgconfig_name or name),
+            DependencyMethods.CMAKE: functools.partial(cmake_class, cmake_name or name),
+            DependencyMethods.SYSTEM: functools.partial(system_class, name),
+            DependencyMethods.CONFIG_TOOL: None,
+        }
+        if configtool_class is not None:
+            self.classes[DependencyMethods.CONFIG_TOOL] = functools.partial(configtool_class, name)
+
+    @staticmethod
+    def _process_method(method: DependencyMethods, env: Environment, for_machine: MachineChoice) -> bool:
+        """Report whether a method is valid or not.
+
+        If the method is valid, return true, otherwise return false. This is
+        used in a list comprehension to filter methods that are not possible.
+
+        By default this only removes EXTRAFRAMEWORK dependencies for non-macOS platforms.
+        """
+        # Extra frameworks are only valid for macOS and other apple products
+        if (method is DependencyMethods.EXTRAFRAMEWORK and
+                not env.machines[for_machine].is_darwin()):
+            return False
+        return True
+
+    def __call__(self, env: Environment, for_machine: MachineChoice,
+                 kwargs: T.Dict[str, T.Any]) -> T.List['DependencyType']:
+        """Return a list of Dependencies with the arguments already attached."""
+        methods = process_method_kw(self.methods, kwargs)
+        nwargs = self.extra_kwargs.copy()
+        nwargs.update(kwargs)
+
+        return [functools.partial(self.classes[m], env, nwargs) for m in methods
+                if self._process_method(m, env, for_machine)]
+
+
 def get_dep_identifier(name, kwargs) -> T.Tuple:
     identifier = (name, )
     for key, value in kwargs.items():
         # 'version' is irrelevant for caching; the caller must check version matches
         # 'native' is handled above with `for_machine`
         # 'required' is irrelevant for caching; the caller handles it separately
-        # 'fallback' subprojects cannot be cached -- they must be initialized
+        # 'fallback' and 'allow_fallback' are not part of the cache because,
+        #     once a dependency has been found through a fallback, it should
+        #     be used for the rest of the Meson run.
         # 'default_options' is only used in fallback case
-        if key in ('version', 'native', 'required', 'fallback', 'default_options'):
+        if key in ('version', 'native', 'required', 'fallback', 'allow_fallback', 'default_options'):
             continue
         # All keyword arguments are strings, ints, or lists (or lists of lists)
         if isinstance(value, list):
@@ -2222,7 +2381,7 @@
     type_text = PerMachine('Build-time', 'Run-time')[for_machine] + ' dependency'
 
     # build a list of dependency methods to try
-    candidates = _build_external_dependency_list(name, env, kwargs)
+    candidates = _build_external_dependency_list(name, env, for_machine, kwargs)
 
     pkg_exc = []
     pkgdep = []
@@ -2285,7 +2444,8 @@
     return NotFoundDependency(env)
 
 
-def _build_external_dependency_list(name, env: Environment, kwargs: T.Dict[str, T.Any]) -> list:
+def _build_external_dependency_list(name: str, env: Environment, for_machine: MachineChoice,
+                                    kwargs: T.Dict[str, T.Any]) -> T.List['DependencyType']:
     # First check if the method is valid
     if 'method' in kwargs and kwargs['method'] not in [e.value for e in DependencyMethods]:
         raise DependencyException('method {!r} is invalid'.format(kwargs['method']))
@@ -2296,10 +2456,10 @@
         # Create the list of dependency object constructors using a factory
         # class method, if one exists, otherwise the list just consists of the
         # constructor
-        if getattr(packages[lname], '_factory', None):
-            dep = packages[lname]._factory(env, kwargs)
-        else:
+        if isinstance(packages[lname], type) and issubclass(packages[lname], Dependency):
             dep = [functools.partial(packages[lname], env, kwargs)]
+        else:
+            dep = packages[lname](env, for_machine, kwargs)
         return dep
 
     candidates = []
@@ -2323,8 +2483,7 @@
     if 'extraframework' == kwargs.get('method', ''):
         # On OSX, also try framework dependency detector
         if mesonlib.is_osx():
-            candidates.append(functools.partial(ExtraFrameworkDependency, name,
-                                                False, None, env, None, kwargs))
+            candidates.append(functools.partial(ExtraFrameworkDependency, name, env, kwargs))
         return candidates
 
     # Otherwise, just use the pkgconfig and cmake dependency detector
@@ -2333,8 +2492,7 @@
 
         # On OSX, also try framework dependency detector
         if mesonlib.is_osx():
-            candidates.append(functools.partial(ExtraFrameworkDependency, name,
-                                                False, None, env, None, kwargs))
+            candidates.append(functools.partial(ExtraFrameworkDependency, name, env, kwargs))
 
         # Only use CMake as a last resort, since it might not work 100% (see #6113)
         candidates.append(functools.partial(CMakeDependency, name, env, kwargs))
@@ -2375,3 +2533,83 @@
     """
     exclude = {'-L{}'.format(p) for p in environment.get_compiler_system_dirs(for_machine)}
     return [l for l in link_args if l not in exclude]
+
+
+def process_method_kw(possible: T.Iterable[DependencyMethods], kwargs) -> T.List[DependencyMethods]:
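+    # e.g. (illustrative) kwargs {'method': 'cmake'} narrows [PKGCONFIG, CMAKE] down
+    # to [CMAKE], while the default 'auto' keeps the full candidate list.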
+    method = kwargs.get('method', 'auto')  # type: T.Union[DependencyMethods, str]
+    if isinstance(method, DependencyMethods):
+        return [method]
+    # TODO: try/except?
+    if method not in [e.value for e in DependencyMethods]:
+        raise DependencyException('method {!r} is invalid'.format(method))
+    method = DependencyMethods(method)
+
+    # This sets per-tool config methods which are deprecated to the new
+    # generic CONFIG_TOOL value.
+    if method in [DependencyMethods.SDLCONFIG, DependencyMethods.CUPSCONFIG,
+                  DependencyMethods.PCAPCONFIG, DependencyMethods.LIBWMFCONFIG]:
+        mlog.warning(textwrap.dedent("""\
+            Configuration method {} has been deprecated in favor of
+            'config-tool'. This will be removed in a future version of
+            meson.""".format(method)))
+        method = DependencyMethods.CONFIG_TOOL
+
+    # Set the detection method. If the method is set to auto, use any available method.
+    # If method is set to a specific string, allow only that detection method.
+    if method == DependencyMethods.AUTO:
+        methods = list(possible)
+    elif method in possible:
+        methods = [method]
+    else:
+        raise DependencyException(
+            'Unsupported detection method: {}, allowed methods are {}'.format(
+                method.value,
+                mlog.format_list([x.value for x in [DependencyMethods.AUTO] + list(possible)])))
+
+    return methods
+
+
+if T.TYPE_CHECKING:
+    FactoryType = T.TypeVar('FactoryType', bound=T.Callable[..., T.List[T.Callable[[], 'Dependency']]])
+
+
+def factory_methods(methods: T.Set[DependencyMethods]) -> T.Callable[['FactoryType'], 'FactoryType']:
+    """Decorator for handling methods for dependency factory functions.
+
+    This helps to make factory functions self-documenting
+    >>> @factory_methods([DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE])
+    >>> def factory(env: Environment, for_machine: MachineChoice, kwargs: T.Dict[str, T.Any], methods: T.List[DependencyMethods]) -> T.List[T.Callable[[], 'Dependency']]:
+    >>>     pass
+    """
+
+    def inner(func: 'FactoryType') -> 'FactoryType':
+
+        @functools.wraps(func)
+        def wrapped(env: Environment, for_machine: MachineChoice, kwargs: T.Dict[str, T.Any]) -> T.List[T.Callable[[], 'Dependency']]:
+            return func(env, for_machine, kwargs, process_method_kw(methods, kwargs))
+
+        return T.cast('FactoryType', wrapped)
+
+    return inner
+
+
+def detect_compiler(name: str, env: Environment, for_machine: MachineChoice,
+                    language: T.Optional[str]) -> T.Optional['CompilerType']:
+    """Given a language and environment find the compiler used."""
+    compilers = env.coredata.compilers[for_machine]
+
+    # Set the compiler for this dependency if a language is specified,
+    # else try to pick something that looks usable.
+    if language:
+        if language not in compilers:
+            m = name.capitalize() + ' requires a {0} compiler, but ' \
+                '{0} is not in the list of project languages'
+            raise DependencyException(m.format(language.capitalize()))
+        return compilers[language]
+    else:
+        for lang in clib_langs:
+            try:
+                return compilers[lang]
+            except KeyError:
+                continue
+    return None
diff -Nru meson-0.53.2/mesonbuild/dependencies/boost.py meson-0.57.0+really0.56.2/mesonbuild/dependencies/boost.py
--- meson-0.53.2/mesonbuild/dependencies/boost.py	2019-08-28 17:15:39.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/dependencies/boost.py	2021-01-06 10:39:48.000000000 +0000
@@ -1,4 +1,4 @@
-# Copyright 2013-2017 The Meson development team
+# Copyright 2013-2020 The Meson development team
 
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,17 +12,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-# This file contains the detection logic for miscellaneous external dependencies.
-
-import glob
 import os
+import re
+import functools
+import typing as T
+from .._pathlib import Path
 
 from .. import mlog
 from .. import mesonlib
-from ..environment import detect_cpu_family
+from ..envconfig import get_env_var
+from ..environment import Environment
 
-from .base import (DependencyException, ExternalDependency)
-from .misc import ThreadDependency
+from .base import DependencyException, ExternalDependency, PkgConfigDependency
+from .misc import threads_factory
 
 # On windows 3 directory layouts are supported:
 # * The default layout (versioned) installed:
@@ -39,16 +41,6 @@
 # mingw-w64 / Windows : libboost_<module>-mt.a            (location = /mingw64/lib/)
 #                       libboost_<module>-mt.dll.a
 #
-# Library names supported:
-#   - libboost_<module>-<compiler>-mt-gd-x_x.lib (static)
-#   - boost_<module>-<compiler>-mt-gd-x_x.lib|.dll (shared)
-#   - libboost_<module>.lib (static)
-#   - boost_<module>.lib|.dll (shared)
-#   where compiler is vc141 for example.
-#
-# NOTE: -gd means runtime and build time debugging is on
-#       -mt means threading=multi
-#
 # The `modules` argument accepts library names. This is because every module that
 # has libraries to link against also has multiple options regarding how to
 # link. See for example:
@@ -78,614 +70,1046 @@
 # Furthermore, the boost documentation for unix above uses examples from windows like
 #   "libboost_regex-vc71-mt-d-x86-1_34.lib", so apparently the abi tags may be more aimed at windows.
 #
-# Probably we should use the linker search path to decide which libraries to use.  This will
-# make it possible to find the macports boost libraries without setting BOOST_ROOT, and will
-# also mean that it would be possible to use user-installed boost libraries when official
-# packages are installed.
-#
-# We thus follow the following strategy:
-# 1. Look for libraries using compiler.find_library( )
-#   1.1 On Linux, just look for boost_<module>
-#   1.2 On other systems (e.g. Mac) look for boost_<module>-mt if multithreading.
-#   1.3 Otherwise look for boost_<module>
-# 2. Fall back to previous approach
-#   2.1. Search particular directories.
-#   2.2. Find boost libraries with unknown suffixes using file-name globbing.
-
-# TODO: Unix: Don't assume we know where the boost dir is, rely on -Idir and -Ldir being set.
-# TODO: Allow user to specify suffix in BOOST_SUFFIX, or add specific options like BOOST_DEBUG for 'd' for debug.
+# We use the following strategy for finding modules:
+# A) Detect potential boost root directories (uses also BOOST_ROOT env var)
+# B) Foreach candidate
+#   1. Look for the boost headers (boost/version.hpp)
+#   2. Find all boost libraries
+#     2.1 Add all libraries in lib*
+#     2.2 Filter out non boost libraries
+#     2.3 Filter the remaining libraries based on the meson requirements (static/shared, etc.)
+#     2.4 Ensure that all libraries have the same boost tag (and are thus compatible)
+#   3. Select the libraries matching the requested modules
+
+@functools.total_ordering
+class BoostIncludeDir():
+    def __init__(self, path: Path, version_int: int):
+        self.path = path
+        self.version_int = version_int
+        major = int(self.version_int / 100000)
+        minor = int((self.version_int / 100) % 1000)
+        patch = int(self.version_int % 100)
+        self.version = '{}.{}.{}'.format(major, minor, patch)
+        self.version_lib = '{}_{}'.format(major, minor)
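+        # e.g. a version_int of 107400 yields version '1.74.0' and version_lib '1_74'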
+
+    def __repr__(self) -> str:
+        return '<BoostIncludeDir: {} -- {}>'.format(self.version, self.path)
+
+    def __lt__(self, other: object) -> bool:
+        if isinstance(other, BoostIncludeDir):
+            return (self.version_int, self.path) < (other.version_int, other.path)
+        return NotImplemented
+
+@functools.total_ordering
+class BoostLibraryFile():
+    # Python libraries are special because of the included
+    # minor version in the module name.
+    boost_python_libs = ['boost_python', 'boost_numpy']
+    reg_python_mod_split = re.compile(r'(boost_[a-zA-Z]+)([0-9]*)')
+
+    reg_abi_tag = re.compile(r'^s?g?y?d?p?n?$')
+    reg_ver_tag = re.compile(r'^[0-9_]+$')
+
+    def __init__(self, path: Path):
+        self.path = path
+        self.name = self.path.name
+
+        # Initialize default properties
+        self.static = False
+        self.toolset = ''
+        self.arch = ''
+        self.version_lib = ''
+        self.mt = True
+
+        self.runtime_static = False
+        self.runtime_debug = False
+        self.python_debug = False
+        self.debug = False
+        self.stlport = False
+        self.deprecated_iostreams = False
+
+        # Post process the library name
+        name_parts = self.name.split('.')
+        self.basename = name_parts[0]
+        self.suffixes = name_parts[1:]
+        self.vers_raw = [x for x in self.suffixes if x.isdigit()]
+        self.suffixes = [x for x in self.suffixes if not x.isdigit()]
+        self.nvsuffix = '.'.join(self.suffixes)  # Used for detecting the library type
+        self.nametags = self.basename.split('-')
+        self.mod_name = self.nametags[0]
+        if self.mod_name.startswith('lib'):
+            self.mod_name = self.mod_name[3:]
+
+        # Set library version if possible
+        if len(self.vers_raw) >= 2:
+            self.version_lib = '{}_{}'.format(self.vers_raw[0], self.vers_raw[1])
+
+        # Detecting library type
+        if self.nvsuffix in ['so', 'dll', 'dll.a', 'dll.lib', 'dylib']:
+            self.static = False
+        elif self.nvsuffix in ['a', 'lib']:
+            self.static = True
+        else:
+            raise DependencyException('Unable to process library extension "{}" ({})'.format(self.nvsuffix, self.path))
 
-class BoostDependency(ExternalDependency):
-    def __init__(self, environment, kwargs):
-        super().__init__('boost', environment, 'cpp', kwargs)
-        self.need_static_link = ['boost_exception', 'boost_test_exec_monitor']
-        self.is_debug = environment.coredata.get_builtin_option('buildtype').startswith('debug')
-        threading = kwargs.get("threading", "multi")
-        self.is_multithreading = threading == "multi"
-
-        self.requested_modules = self.get_requested(kwargs)
-        if 'thread' in self.requested_modules:
-            self._add_sub_dependency(ThreadDependency, environment, kwargs)
-
-        self.boost_root = None
-        self.boost_roots = []
-        self.incdir = None
-        self.libdir = None
-
-        if 'BOOST_ROOT' in os.environ:
-            self.boost_root = os.environ['BOOST_ROOT']
-            self.boost_roots = [self.boost_root]
-            if not os.path.isabs(self.boost_root):
-                raise DependencyException('BOOST_ROOT must be an absolute path.')
-        if 'BOOST_INCLUDEDIR' in os.environ:
-            self.incdir = os.environ['BOOST_INCLUDEDIR']
-        if 'BOOST_LIBRARYDIR' in os.environ:
-            self.libdir = os.environ['BOOST_LIBRARYDIR']
-
-        if self.boost_root is None:
-            if self.env.machines[self.for_machine].is_windows():
-                self.boost_roots = self.detect_win_roots()
-            else:
-                self.boost_roots = self.detect_nix_roots()
+        # boost_.lib is the dll import library
+        if self.basename.startswith('boost_') and self.nvsuffix == 'lib':
+            self.static = False
+
+        # Process tags
+        tags = self.nametags[1:]
+        # Filter out the python version tag and fix modname
+        if self.is_python_lib():
+            tags = self.fix_python_name(tags)
+        if not tags:
+            return
 
-        if self.incdir is None:
-            if self.env.machines[self.for_machine].is_windows():
-                self.incdir = self.detect_win_incdir()
+        # Without any tags, mt is assumed; however, an absence of mt in the name
+        # when tags are present indicates that the lib was built without mt support
+        self.mt = False
+        for i in tags:
+            if i == 'mt':
+                self.mt = True
+            elif len(i) == 3 and i[1:] in ['32', '64']:
+                self.arch = i
+            elif BoostLibraryFile.reg_abi_tag.match(i):
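+                # e.g. the abi tag 'gd' marks a debug build against the debug runtime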
+                self.runtime_static = 's' in i
+                self.runtime_debug = 'g' in i
+                self.python_debug = 'y' in i
+                self.debug = 'd' in i
+                self.stlport = 'p' in i
+                self.deprecated_iostreams = 'n' in i
+            elif BoostLibraryFile.reg_ver_tag.match(i):
+                self.version_lib = i
             else:
-                self.incdir = self.detect_nix_incdir()
-
-        mlog.debug('Boost library root dir is', mlog.bold(self.boost_root))
-        mlog.debug('Boost include directory is', mlog.bold(self.incdir))
+                self.toolset = i
 
-        # 1. check if we can find BOOST headers.
-        self.detect_headers_and_version()
+    def __repr__(self) -> str:
+        return '<LIB: {} {:<32} {}>'.format(self.abitag, self.mod_name, self.path)
 
-        if not self.is_found:
-            return # if we can not find 'boost/version.hpp'
-
-        # 2. check if we can find BOOST libraries.
-        self.detect_lib_modules()
-        mlog.debug('Boost library directory is', mlog.bold(self.libdir))
-
-        mlog.debug('Installed Boost libraries: ')
-        for key in sorted(self.lib_modules.keys()):
-            mlog.debug(key, self.lib_modules[key])
-
-        # 3. check if requested modules are valid, that is, either found or in the list of known boost libraries
-        self.check_invalid_modules()
-
-        # 4. final check whether or not we find all requested and valid modules
-        self.check_find_requested_modules()
-
-    def check_invalid_modules(self):
-        invalid_modules = [c for c in self.requested_modules if 'boost_' + c not in self.lib_modules and 'boost_' + c not in BOOST_LIBS]
-
-        # previous versions of meson allowed include dirs as modules
-        remove = []
-        for m in invalid_modules:
-            if m in BOOST_DIRS:
-                mlog.warning('Requested boost library', mlog.bold(m), 'that doesn\'t exist. '
-                             'This will be an error in the future')
-                remove.append(m)
+    def __lt__(self, other: object) -> bool:
+        if isinstance(other, BoostLibraryFile):
+            return (
+                self.mod_name, self.static, self.version_lib, self.arch,
+                not self.mt, not self.runtime_static,
+                not self.debug, self.runtime_debug, self.python_debug,
+                self.stlport, self.deprecated_iostreams,
+                self.name,
+            ) < (
+                other.mod_name, other.static, other.version_lib, other.arch,
+                not other.mt, not other.runtime_static,
+                not other.debug, other.runtime_debug, other.python_debug,
+                other.stlport, other.deprecated_iostreams,
+                other.name,
+            )
+        return NotImplemented
+
+    def __eq__(self, other: object) -> bool:
+        if isinstance(other, BoostLibraryFile):
+            return self.name == other.name
+        return NotImplemented
+
+    def __hash__(self) -> int:
+        return hash(self.name)
+
+    @property
+    def abitag(self) -> str:
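+        # Illustrative: a static, multithreaded MSVC release library might yield
+        # the tag 'SM ------ x64 vc142 1_67'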
+        abitag = ''
+        abitag += 'S' if self.static else '-'
+        abitag += 'M' if self.mt else '-'
+        abitag += ' '
+        abitag += 's' if self.runtime_static else '-'
+        abitag += 'g' if self.runtime_debug else '-'
+        abitag += 'y' if self.python_debug else '-'
+        abitag += 'd' if self.debug else '-'
+        abitag += 'p' if self.stlport else '-'
+        abitag += 'n' if self.deprecated_iostreams else '-'
+        abitag += ' ' + (self.arch or '???')
+        abitag += ' ' + (self.toolset or '?')
+        abitag += ' ' + (self.version_lib or 'x_xx')
+        return abitag
+
+    def is_boost(self) -> bool:
+        return any([self.name.startswith(x) for x in ['libboost_', 'boost_']])
+
+    def is_python_lib(self) -> bool:
+        return any([self.mod_name.startswith(x) for x in BoostLibraryFile.boost_python_libs])
+
+    def fix_python_name(self, tags: T.List[str]) -> T.List[str]:
+        # Handle the boost_python naming madness.
+        # See https://github.com/mesonbuild/meson/issues/4788 for some distro
+        # specific naming variations.
+        other_tags = []  # type: T.List[str]
+
+        # Split the current modname into the base name and the version
+        m_cur = BoostLibraryFile.reg_python_mod_split.match(self.mod_name)
+        cur_name = m_cur.group(1)
+        cur_vers = m_cur.group(2)
+
+        # Update the current version string if the new version string is longer
+        def update_vers(new_vers: str) -> None:
+            nonlocal cur_vers
+            new_vers = new_vers.replace('_', '')
+            new_vers = new_vers.replace('.', '')
+            if not new_vers.isdigit():
+                return
+            if len(new_vers) > len(cur_vers):
+                cur_vers = new_vers
+
+        for i in tags:
+            if i.startswith('py'):
+                update_vers(i[2:])
+            elif i.isdigit():
+                update_vers(i)
+            elif len(i) >= 3 and i[0].isdigit() and i[2].isdigit() and i[1] == '.':
+                update_vers(i)
+            else:
+                other_tags += [i]
 
-        self.requested_modules = [x for x in self.requested_modules if x not in remove]
-        invalid_modules = [x for x in invalid_modules if x not in remove]
+        self.mod_name = cur_name + cur_vers
+        return other_tags
 
-        if invalid_modules:
-            mlog.error('Invalid Boost modules: ' + ', '.join(invalid_modules))
+    def mod_name_matches(self, mod_name: str) -> bool:
+        if self.mod_name == mod_name:
             return True
-        else:
+        if not self.is_python_lib():
             return False
 
-    def log_details(self):
-        module_str = ', '.join(self.requested_modules)
-        return module_str
+        m_cur = BoostLibraryFile.reg_python_mod_split.match(self.mod_name)
+        m_arg = BoostLibraryFile.reg_python_mod_split.match(mod_name)
 
-    def log_info(self):
-        if self.boost_root:
-            return self.boost_root
-        return ''
+        if not m_cur or not m_arg:
+            return False
 
-    def detect_nix_roots(self):
-        return [os.path.abspath(os.path.join(x, '..'))
-                for x in self.clib_compiler.get_default_include_dirs()]
-
-    def detect_win_roots(self):
-        res = []
-        # Where boost documentation says it should be
-        globtext = 'C:\\Program Files\\boost\\boost_*'
-        files = glob.glob(globtext)
-        res.extend(files)
-
-        # Where boost built from source actually installs it
-        if os.path.isdir('C:\\Boost'):
-            res.append('C:\\Boost')
-
-        # Where boost prebuilt binaries are
-        globtext = 'C:\\local\\boost_*'
-        files = glob.glob(globtext)
-        res.extend(files)
-        return res
+        if m_cur.group(1) != m_arg.group(1):
+            return False
 
-    def detect_nix_incdir(self):
-        if self.boost_root:
-            return os.path.join(self.boost_root, 'include')
-        return None
+        cur_vers = m_cur.group(2)
+        arg_vers = m_arg.group(2)
 
-    # FIXME: Should pick a version that matches the requested version
-    # Returns the folder that contains the boost folder.
-    def detect_win_incdir(self):
-        for root in self.boost_roots:
-            globtext = os.path.join(root, 'include', 'boost-*')
-            incdirs = glob.glob(globtext)
-            if incdirs:
-                return incdirs[0]
-            incboostdir = os.path.join(root, 'include', 'boost')
-            if os.path.isdir(incboostdir):
-                return os.path.join(root, 'include')
-            incboostdir = os.path.join(root, 'boost')
-            if os.path.isdir(incboostdir):
-                return root
-        return None
-
-    def get_compile_args(self):
-        args = []
-        include_dir = self.incdir
-
-        # Use "-isystem" when including boost headers instead of "-I"
-        # to avoid compiler warnings/failures when "-Werror" is used
-
-        # Careful not to use "-isystem" on default include dirs as it
-        # breaks some of the headers for certain gcc versions
-
-        # For example, doing g++ -isystem /usr/include on a simple
-        # "int main()" source results in the error:
-        # "/usr/include/c++/6.3.1/cstdlib:75:25: fatal error: stdlib.h: No such file or directory"
-
-        # See https://gcc.gnu.org/bugzilla/show_bug.cgi?id=70129
-        # and http://stackoverflow.com/questions/37218953/isystem-on-a-system-include-directory-causes-errors
-        # for more details
+        # Always assume python 2 if nothing is specified
+        if not arg_vers:
+            arg_vers = '2'
+
+        return cur_vers.startswith(arg_vers)
+
+    def version_matches(self, version_lib: str) -> bool:
+        # If no version tag is present, assume that it fits
+        if not self.version_lib or not version_lib:
+            return True
+        return self.version_lib == version_lib
+
+    def arch_matches(self, arch: str) -> bool:
+        # If no arch tag is present, assume that it fits
+        if not self.arch or not arch:
+            return True
+        return self.arch == arch
 
-        if include_dir and include_dir not in self.clib_compiler.get_default_include_dirs():
-            args.append("".join(self.clib_compiler.get_include_args(include_dir, True)))
+    def vscrt_matches(self, vscrt: str) -> bool:
+        # If no vscrt tag present, assume that it fits  ['/MD', '/MDd', '/MT', '/MTd']
+        if not vscrt:
+            return True
+        if vscrt in ['/MD', '-MD']:
+            return not self.runtime_static and not self.runtime_debug
+        elif vscrt in ['/MDd', '-MDd']:
+            return not self.runtime_static and self.runtime_debug
+        elif vscrt in ['/MT', '-MT']:
+            return (self.runtime_static or not self.static) and not self.runtime_debug
+        elif vscrt in ['/MTd', '-MTd']:
+            return (self.runtime_static or not self.static) and self.runtime_debug
+
+        mlog.warning('Boost: unknown vscrt tag {}. This may cause the compilation to fail. Please consider reporting this as a bug.'.format(vscrt), once=True)
+        return True
+
+    def get_compiler_args(self) -> T.List[str]:
+        args = []  # type: T.List[str]
+        if self.mod_name in boost_libraries:
+            libdef = boost_libraries[self.mod_name]  # type: BoostLibrary
+            if self.static:
+                args += libdef.static
+            else:
+                args += libdef.shared
+            if self.mt:
+                args += libdef.multi
+            else:
+                args += libdef.single
         return args
 
-    def get_requested(self, kwargs):
-        candidates = mesonlib.extract_as_list(kwargs, 'modules')
-        for c in candidates:
-            if not isinstance(c, str):
+    def get_link_args(self) -> T.List[str]:
+        return [self.path.as_posix()]
+
+class BoostDependency(ExternalDependency):
+    def __init__(self, environment: Environment, kwargs: T.Dict[str, T.Any]) -> None:
+        super().__init__('boost', environment, kwargs, language='cpp')
+        buildtype = environment.coredata.get_builtin_option('buildtype')
+        assert isinstance(buildtype, str)
+        self.debug = buildtype.startswith('debug')
+        self.multithreading = kwargs.get('threading', 'multi') == 'multi'
+
+        self.boost_root = None  # type: T.Optional[Path]
+        self.explicit_static = 'static' in kwargs
+
+        # Extract and validate modules
+        self.modules = mesonlib.extract_as_list(kwargs, 'modules')  # type: T.List[str]
+        for i in self.modules:
+            if not isinstance(i, str):
                 raise DependencyException('Boost module argument is not a string.')
-        return candidates
+            if i.startswith('boost_'):
+                raise DependencyException('Boost modules must be passed without the boost_ prefix')
 
-    def detect_headers_and_version(self):
-        try:
-            version = self.clib_compiler.get_define('BOOST_LIB_VERSION', '#include <boost/version.hpp>', self.env, self.get_compile_args(), [], disable_cache=True)[0]
-        except mesonlib.EnvironmentException:
-            return
-        except TypeError:
-            return
-        # Remove quotes
-        version = version[1:-1]
-        # Fix version string
-        self.version = version.replace('_', '.')
-        self.is_found = True
-
-    def detect_lib_modules(self):
-        self.lib_modules = {}
-        # 1. Try to find modules using compiler.find_library( )
-        if self.find_libraries_with_abi_tags(self.abi_tags()):
-            pass
-        # 2. Fall back to the old method
-        else:
-            if self.env.machines[self.for_machine].is_windows():
-                self.detect_lib_modules_win()
-            else:
-                self.detect_lib_modules_nix()
+        self.modules_found = []    # type: T.List[str]
+        self.modules_missing = []  # type: T.List[str]
 
-    def check_find_requested_modules(self):
-        # 3. Check if we can find the modules
-        for m in self.requested_modules:
-            if 'boost_' + m not in self.lib_modules:
-                mlog.debug('Requested Boost library {!r} not found'.format(m))
+        # Do we need threads?
+        if 'thread' in self.modules:
+            if not self._add_sub_dependency(threads_factory(environment, self.for_machine, {})):
                 self.is_found = False
+                return
 
-    def modname_from_filename(self, filename):
-        modname = os.path.basename(filename)
-        modname = modname.split('.', 1)[0]
-        modname = modname.split('-', 1)[0]
-        if modname.startswith('libboost'):
-            modname = modname[3:]
-        return modname
-
-    def compiler_tag(self):
-        tag = None
-        compiler = self.env.detect_cpp_compiler(self.for_machine)
-        if self.env.machines[self.for_machine].is_windows():
-            if compiler.get_id() in ['msvc', 'clang-cl']:
-                comp_ts_version = compiler.get_toolset_version()
-                compiler_ts = comp_ts_version.split('.')
-                # FIXME - what about other compilers?
-                tag = '-vc{}{}'.format(compiler_ts[0], compiler_ts[1])
-            else:
-                tag = ''
-        return tag
+        # Try figuring out the architecture tag
+        self.arch = environment.machines[self.for_machine].cpu_family
+        self.arch = boost_arch_map.get(self.arch, None)
+
+        # First, look for paths specified in a machine file
+        props = self.env.properties[self.for_machine]
+        boost_property_env = [props.get('boost_includedir'), props.get('boost_librarydir'), props.get('boost_root')]
+        if any(boost_property_env):
+            self.detect_boost_machine_file(props)
+            return
 
-    def threading_tag(self):
-        if not self.is_multithreading:
-            return ''
-
-        if self.env.machines[self.for_machine].is_darwin():
-            # - Mac:      requires -mt for multithreading, so should not fall back to non-mt libraries.
-            return '-mt'
-        elif self.env.machines[self.for_machine].is_windows():
-            # - Windows:  requires -mt for multithreading, so should not fall back to non-mt libraries.
-            return '-mt'
-        else:
-            # - Linux:    leaves off -mt but libraries are multithreading-aware.
-            # - Cygwin:   leaves off -mt but libraries are multithreading-aware.
-            return ''
-
-    def version_tag(self):
-        return '-' + self.version.replace('.', '_')
-
-    def debug_tag(self):
-        return '-gd' if self.is_debug else ''
-
-    def arch_tag(self):
-        # currently only applies to windows msvc installed binaries
-        if self.env.detect_cpp_compiler(self.for_machine).get_id() not in ['msvc', 'clang-cl']:
-            return ''
-        # pre-compiled binaries only added arch tag for versions > 1.64
-        if float(self.version) < 1.65:
-            return ''
-        arch = detect_cpu_family(self.env.coredata.compilers.host)
-        if arch == 'x86':
-            return '-x32'
-        elif arch == 'x86_64':
-            return '-x64'
-        return ''
+        # Next, look for paths in the environment
+        boost_manual_env_list = ['BOOST_INCLUDEDIR', 'BOOST_LIBRARYDIR', 'BOOST_ROOT', 'BOOSTROOT']
+        boost_manual_env = [get_env_var(self.for_machine, self.env.is_cross_build, x) for x in boost_manual_env_list]
+        if any(boost_manual_env):
+            self.detect_boost_env()
+            return
 
-    def versioned_abi_tag(self):
-        return self.compiler_tag() + self.threading_tag() + self.debug_tag() + self.arch_tag() + self.version_tag()
+        # Finally, look for paths from .pc files and from searching the filesystem
+        self.detect_roots()
 
-    # FIXME - how to handle different distributions, e.g. for Mac? Currently we handle homebrew and macports, but not fink.
-    def abi_tags(self):
-        if self.env.machines[self.for_machine].is_windows():
-            return [self.versioned_abi_tag(), self.threading_tag()]
-        else:
-            return [self.threading_tag()]
+    def check_and_set_roots(self, roots: T.List[Path]) -> None:
+        roots = list(mesonlib.OrderedSet(roots))
+        for j in roots:
+            #   1. Look for the boost headers (boost/version.hpp)
+            mlog.debug('Checking potential boost root {}'.format(j.as_posix()))
+            inc_dirs = self.detect_inc_dirs(j)
+            inc_dirs = sorted(inc_dirs, reverse=True)  # Prefer the newer versions
+
+            # Early abort when boost is not found
+            if not inc_dirs:
+                continue
+
+            lib_dirs = self.detect_lib_dirs(j)
+            self.is_found = self.run_check(inc_dirs, lib_dirs)
+            if self.is_found:
+                self.boost_root = j
+                break
+
+    def detect_boost_machine_file(self, props: T.Dict[str, str]) -> None:
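+        # Sketch of the machine file entries this reads (paths are hypothetical):
+        #   [properties]
+        #   boost_root = '/opt/boost_1_73_0'
+        # or, alternatively:
+        #   boost_includedir = '/opt/boost_1_73_0/include'
+        #   boost_librarydir = '/opt/boost_1_73_0/lib'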
+        incdir = props.get('boost_includedir')
+        libdir = props.get('boost_librarydir')
+
+        if incdir and libdir:
+            inc_dir = Path(props['boost_includedir'])
+            lib_dir = Path(props['boost_librarydir'])
+
+            if not inc_dir.is_absolute() or not lib_dir.is_absolute():
+                raise DependencyException('Paths given for boost_includedir and boost_librarydir in machine file must be absolute')
+
+            mlog.debug('Trying to find boost with:')
+            mlog.debug('  - boost_includedir = {}'.format(inc_dir))
+            mlog.debug('  - boost_librarydir = {}'.format(lib_dir))
+
+            return self.detect_split_root(inc_dir, lib_dir)
+
+        elif incdir or libdir:
+            raise DependencyException('Both boost_includedir *and* boost_librarydir have to be set in your machine file (one is not enough)')
+
+        rootdir = props.get('boost_root')
+        # It shouldn't be possible to get here without something in boost_root
+        assert(rootdir)
+
+        raw_paths = mesonlib.stringlistify(rootdir)
+        paths = [Path(x) for x in raw_paths]
+        if paths and any([not x.is_absolute() for x in paths]):
+            raise DependencyException('boost_root path given in machine file must be absolute')
+
+        self.check_and_set_roots(paths)
+
+    def detect_boost_env(self) -> None:
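+        # Environment variable equivalent of the machine file properties above
+        # (hypothetical paths): either BOOST_ROOT=/opt/boost_1_73_0, or both
+        # BOOST_INCLUDEDIR and BOOST_LIBRARYDIR pointing at the include/lib dirs.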
+        boost_includedir = get_env_var(self.for_machine, self.env.is_cross_build, 'BOOST_INCLUDEDIR')
+        boost_librarydir = get_env_var(self.for_machine, self.env.is_cross_build, 'BOOST_LIBRARYDIR')
+
+        boost_manual_env = [boost_includedir, boost_librarydir]
+        if all(boost_manual_env):
+            inc_dir = Path(boost_includedir)
+            lib_dir = Path(boost_librarydir)
+
+            if not inc_dir.is_absolute() or not lib_dir.is_absolute():
+                raise DependencyException('Paths given in BOOST_INCLUDEDIR and BOOST_LIBRARYDIR must be absolute')
+
+            mlog.debug('Trying to find boost with:')
+            mlog.debug('  - BOOST_INCLUDEDIR = {}'.format(inc_dir))
+            mlog.debug('  - BOOST_LIBRARYDIR = {}'.format(lib_dir))
+
+            return self.detect_split_root(inc_dir, lib_dir)
+
+        elif any(boost_manual_env):
+            raise DependencyException('Both BOOST_INCLUDEDIR *and* BOOST_LIBRARYDIR have to be set (one is not enough). Ignoring.')
+
+        boost_root = get_env_var(self.for_machine, self.env.is_cross_build, 'BOOST_ROOT')
+        boostroot = get_env_var(self.for_machine, self.env.is_cross_build, 'BOOSTROOT')
+
+        # It shouldn't be possible to get here without something in BOOST_ROOT or BOOSTROOT
+        assert(boost_root or boostroot)
+
+        for path, name in [(boost_root, 'BOOST_ROOT'), (boostroot, 'BOOSTROOT')]:
+            if path:
+                raw_paths = path.split(os.pathsep)
+                paths = [Path(x) for x in raw_paths]
+                if paths and any([not x.is_absolute() for x in paths]):
+                    raise DependencyException('Paths in {} must be absolute'.format(name))
+                break
+
+        self.check_and_set_roots(paths)
+
+    def run_check(self, inc_dirs: T.List[BoostIncludeDir], lib_dirs: T.List[Path]) -> bool:
+        mlog.debug('  - potential library dirs: {}'.format([x.as_posix() for x in lib_dirs]))
+        mlog.debug('  - potential include dirs: {}'.format([x.path.as_posix() for x in inc_dirs]))
+
+        #   2. Find all boost libraries
+        libs = []  # type: T.List[BoostLibraryFile]
+        for i in lib_dirs:
+            libs = self.detect_libraries(i)
+            if libs:
+                mlog.debug('  - found boost library dir: {}'.format(i))
+                # mlog.debug('  - raw library list:')
+                # for j in libs:
+                #     mlog.debug('    - {}'.format(j))
+                break
+        libs = sorted(set(libs))
+
+        modules = ['boost_' + x for x in self.modules]
+        for inc in inc_dirs:
+            mlog.debug('  - found boost {} include dir: {}'.format(inc.version, inc.path))
+            f_libs = self.filter_libraries(libs, inc.version_lib)
+
+            mlog.debug('  - filtered library list:')
+            for j in f_libs:
+                mlog.debug('    - {}'.format(j))
+
+            #   3. Select the libraries matching the requested modules
+            not_found = []  # type: T.List[str]
+            selected_modules = []  # type: T.List[BoostLibraryFile]
+            for mod in modules:
+                found = False
+                for l in f_libs:
+                    if l.mod_name_matches(mod):
+                        selected_modules += [l]
+                        found = True
+                        break
+                if not found:
+                    not_found += [mod]
 
-    def sourceforge_dir(self):
-        if self.env.detect_cpp_compiler(self.for_machine).get_id() != 'msvc':
-            return None
-        comp_ts_version = self.env.detect_cpp_compiler(self.for_machine).get_toolset_version()
-        arch = detect_cpu_family(self.env.coredata.compilers.host)
-        if arch == 'x86':
-            return 'lib32-msvc-{}'.format(comp_ts_version)
-        elif arch == 'x86_64':
-            return 'lib64-msvc-{}'.format(comp_ts_version)
-        else:
-            # Does anyone do Boost cross-compiling to other archs on Windows?
-            return None
+            # log the result
+            mlog.debug('  - found:')
+            comp_args = []  # type: T.List[str]
+            link_args = []  # type: T.List[str]
+            for j in selected_modules:
+                c_args = j.get_compiler_args()
+                l_args = j.get_link_args()
+                mlog.debug('    - {:<24} link={} comp={}'.format(j.mod_name, str(l_args), str(c_args)))
+                comp_args += c_args
+                link_args += l_args
+
+            comp_args = list(set(comp_args))
+            link_args = list(set(link_args))
+
+            self.modules_found = [x.mod_name for x in selected_modules]
+            self.modules_found = [x[6:] for x in self.modules_found]
+            self.modules_found = sorted(set(self.modules_found))
+            self.modules_missing = not_found
+            self.modules_missing = [x[6:] for x in self.modules_missing]
+            self.modules_missing = sorted(set(self.modules_missing))
+
+            # if we found all modules we are done
+            if not not_found:
+                self.version = inc.version
+                self.compile_args = ['-I' + inc.path.as_posix()]
+                self.compile_args += comp_args
+                self.compile_args += self._extra_compile_args()
+                self.compile_args = list(mesonlib.OrderedSet(self.compile_args))
+                self.link_args = link_args
+                mlog.debug('  - final compile args: {}'.format(self.compile_args))
+                mlog.debug('  - final link args:    {}'.format(self.link_args))
+                return True
 
-    def find_libraries_with_abi_tag(self, tag):
+            # in case we missed something log it and try again
+            mlog.debug('  - NOT found:')
+            for mod in not_found:
+                mlog.debug('    - {}'.format(mod))
 
-        # All modules should have the same tag
-        self.lib_modules = {}
+        return False
 
-        all_found = True
+    def detect_inc_dirs(self, root: Path) -> T.List[BoostIncludeDir]:
+        candidates = []  # type: T.List[Path]
+        inc_root = root / 'include'
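+        # Headers may live directly in <root>/boost, in the default layout
+        # <root>/include/boost, or in a versioned layout such as
+        # <root>/include/boost-1_73/boost (the version here is only an example).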
+
+        candidates += [root / 'boost']
+        candidates += [inc_root / 'boost']
+        if inc_root.is_dir():
+            for i in inc_root.iterdir():
+                if not i.is_dir() or not i.name.startswith('boost-'):
+                    continue
+                candidates += [i / 'boost']
+        candidates = [x for x in candidates if x.is_dir()]
+        candidates = [x / 'version.hpp' for x in candidates]
+        candidates = [x for x in candidates if x.exists()]
+        return [self._include_dir_from_version_header(x) for x in candidates]
+
+    def detect_lib_dirs(self, root: Path) -> T.List[Path]:
+        # First check the system library paths. Only consider those within the
+        # given root path
+        system_dirs_t = self.clib_compiler.get_library_dirs(self.env)
+        system_dirs = [Path(x) for x in system_dirs_t]
+        system_dirs = [x.resolve() for x in system_dirs if x.exists()]
+        system_dirs = [x for x in system_dirs if mesonlib.path_is_in_root(x, root)]
+        system_dirs = list(mesonlib.OrderedSet(system_dirs))
+
+        if system_dirs:
+            return system_dirs
+
+        # No system library paths were found --> fall back to manually looking
+        # for library dirs in root
+        dirs = []     # type: T.List[Path]
+        subdirs = []  # type: T.List[Path]
+        for i in root.iterdir():
+            if i.is_dir() and i.name.startswith('lib'):
+                dirs += [i]
+
+        # Some distros put libraries not directly inside /usr/lib but in /usr/lib/x86_64-linux-gnu
+        for i in dirs:
+            for j in i.iterdir():
+                if j.is_dir() and j.name.endswith('-linux-gnu'):
+                    subdirs += [j]
+
+        # Filter out paths that don't match the target arch to avoid finding
+        # the wrong libraries. See https://github.com/mesonbuild/meson/issues/7110
+        if not self.arch:
+            return dirs + subdirs
+
+        arch_list_32 = ['32', 'i386']
+        arch_list_64 = ['64']
+
+        raw_list = dirs + subdirs
+        no_arch = [x for x in raw_list if not any([y in x.name for y in arch_list_32 + arch_list_64])]
+
+        matching_arch = []  # type: T.List[Path]
+        if '32' in self.arch:
+            matching_arch = [x for x in raw_list if any([y in x.name for y in arch_list_32])]
+        elif '64' in self.arch:
+            matching_arch = [x for x in raw_list if any([y in x.name for y in arch_list_64])]
+
+        return sorted(matching_arch) + sorted(no_arch)
+
+    def filter_libraries(self, libs: T.List[BoostLibraryFile], lib_vers: str) -> T.List[BoostLibraryFile]:
+        # MSVC is very picky with the library tags
+        vscrt = ''
+        try:
+            crt_val = self.env.coredata.base_options['b_vscrt'].value
+            buildtype = self.env.coredata.builtins['buildtype'].value
+            vscrt = self.clib_compiler.get_crt_compile_args(crt_val, buildtype)[0]
+        except (KeyError, IndexError, AttributeError):
+            pass
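+        # When detected, vscrt is expected to be an MSVC CRT flag such as
+        # '/MD', '/MDd', '/MT' or '/MTd'; it stays empty for other compilers.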
 
-        for module in self.requested_modules:
-            libname = 'boost_' + module + tag
+        # mlog.debug('    - static: {}'.format(self.static))
+        # mlog.debug('    - not explicit static: {}'.format(not self.explicit_static))
+        # mlog.debug('    - mt: {}'.format(self.multithreading))
+        # mlog.debug('    - version: {}'.format(lib_vers))
+        # mlog.debug('    - arch: {}'.format(self.arch))
+        # mlog.debug('    - vscrt: {}'.format(vscrt))
+        libs = [x for x in libs if x.static == self.static or not self.explicit_static]
+        libs = [x for x in libs if x.mt == self.multithreading]
+        libs = [x for x in libs if x.version_matches(lib_vers)]
+        libs = [x for x in libs if x.arch_matches(self.arch)]
+        libs = [x for x in libs if x.vscrt_matches(vscrt)]
+        libs = [x for x in libs if x.nvsuffix != 'dll']  # Only link to import libraries
+
+        # Only filter by debug when we are building in release mode. Debug
+        # libraries are automatically preferred through sorting otherwise.
+        if not self.debug:
+            libs = [x for x in libs if not x.debug]
+
+        # Take the abitag from the first library and filter by it. This
+        # ensures that we have a set of libraries that are always compatible.
+        if not libs:
+            return []
+        abitag = libs[0].abitag
+        libs = [x for x in libs if x.abitag == abitag]
+
+        return libs
+
+    def detect_libraries(self, libdir: Path) -> T.List[BoostLibraryFile]:
+        libs = []  # type: T.List[BoostLibraryFile]
+        for i in libdir.iterdir():
+            if not i.is_file() or i.is_symlink():
+                continue
+            if not any([i.name.startswith(x) for x in ['libboost_', 'boost_']]):
+                continue
+
+            libs += [BoostLibraryFile(i)]
+        return [x for x in libs if x.is_boost()]  # Filter out non-boost libraries
+
+    def detect_split_root(self, inc_dir: Path, lib_dir: Path) -> None:
+        boost_inc_dir = None
+        for j in [inc_dir / 'version.hpp', inc_dir / 'boost' / 'version.hpp']:
+            if j.is_file():
+                boost_inc_dir = self._include_dir_from_version_header(j)
+                break
+        if not boost_inc_dir:
+            self.is_found = False
+            return
 
-            args = self.clib_compiler.find_library(libname, self.env, self.extra_lib_dirs())
-            if args is None:
-                mlog.debug("Couldn\'t find library '{}' for boost module '{}'  (ABI tag = '{}')".format(libname, module, tag))
-                all_found = False
-            else:
-                mlog.debug('Link args for boost module "{}" are {}'.format(module, args))
-                self.lib_modules['boost_' + module] = args
+        self.is_found = self.run_check([boost_inc_dir], [lib_dir])
 
-        return all_found
+    def detect_roots(self) -> None:
+        roots = []  # type: T.List[Path]
 
-    def find_libraries_with_abi_tags(self, tags):
-        for tag in tags:
-            if self.find_libraries_with_abi_tag(tag):
-                return True
-        return False
+        # Try getting the BOOST_ROOT from a boost.pc if it exists. This primarily
+        # allows BoostDependency to find boost from Conan. See #5438
+        try:
+            boost_pc = PkgConfigDependency('boost', self.env, {'required': False})
+            if boost_pc.found():
+                boost_root = boost_pc.get_pkgconfig_variable('prefix', {'default': None})
+                if boost_root:
+                    roots += [Path(boost_root)]
+        except DependencyException:
+            pass
 
-    def detect_lib_modules_win(self):
-        if not self.libdir:
-            # The libdirs in the distributed binaries (from sf)
-            lib_sf = self.sourceforge_dir()
+        # Add roots from system paths
+        inc_paths = [Path(x) for x in self.clib_compiler.get_default_include_dirs()]
+        inc_paths = [x.parent for x in inc_paths if x.exists()]
+        inc_paths = [x.resolve() for x in inc_paths]
+        roots += inc_paths
 
-            if self.boost_root:
-                roots = [self.boost_root]
-            else:
-                roots = self.boost_roots
-            for root in roots:
-                # The default libdir when building
-                libdir = os.path.join(root, 'lib')
-                if os.path.isdir(libdir):
-                    self.libdir = libdir
-                    break
-                if lib_sf:
-                    full_path = os.path.join(root, lib_sf)
-                    if os.path.isdir(full_path):
-                        self.libdir = full_path
-                        break
-
-        if not self.libdir:
-            return
+        # Add system paths
+        if self.env.machines[self.for_machine].is_windows():
+            # Where boost built from source actually installs it
+            c_root = Path('C:/Boost')
+            if c_root.is_dir():
+                roots += [c_root]
+
+            # Where boost documentation says it should be
+            prog_files = Path('C:/Program Files/boost')
+            # Where boost prebuilt binaries are
+            local_boost = Path('C:/local')
+
+            candidates = []  # type: T.List[Path]
+            if prog_files.is_dir():
+                candidates += [*prog_files.iterdir()]
+            if local_boost.is_dir():
+                candidates += [*local_boost.iterdir()]
 
-        for name in self.need_static_link:
-            # FIXME - why are we only looking for *.lib? Mingw provides *.dll.a and *.a
-            libname = 'lib' + name + self.versioned_abi_tag() + '.lib'
-            if os.path.isfile(os.path.join(self.libdir, libname)):
-                self.lib_modules[self.modname_from_filename(libname)] = [libname]
-            else:
-                libname = "lib{}.lib".format(name)
-                if os.path.isfile(os.path.join(self.libdir, libname)):
-                    self.lib_modules[name[3:]] = [libname]
-
-        # globber1 applies to a layout=system installation
-        # globber2 applies to a layout=versioned installation
-        globber1 = 'libboost_*' if self.static else 'boost_*'
-        globber2 = globber1 + self.versioned_abi_tag()
-        # FIXME - why are we only looking for *.lib? Mingw provides *.dll.a and *.a
-        globber2_matches = glob.glob(os.path.join(self.libdir, globber2 + '.lib'))
-        for entry in globber2_matches:
-            fname = os.path.basename(entry)
-            self.lib_modules[self.modname_from_filename(fname)] = [fname]
-        if not globber2_matches:
-            # FIXME - why are we only looking for *.lib? Mingw provides *.dll.a and *.a
-            for entry in glob.glob(os.path.join(self.libdir, globber1 + '.lib')):
-                if self.static:
-                    fname = os.path.basename(entry)
-                    self.lib_modules[self.modname_from_filename(fname)] = [fname]
-
-    def detect_lib_modules_nix(self):
-        if self.static:
-            libsuffix = 'a'
-        elif self.env.machines[self.for_machine].is_darwin():
-            libsuffix = 'dylib'
+            roots += [x for x in candidates if x.name.lower().startswith('boost') and x.is_dir()]
         else:
-            libsuffix = 'so'
+            tmp = []  # type: T.List[Path]
 
-        globber = 'libboost_*.{}'.format(libsuffix)
-        if self.libdir:
-            libdirs = [self.libdir]
-        elif self.boost_root is None:
-            libdirs = mesonlib.get_library_dirs()
-        else:
-            libdirs = [os.path.join(self.boost_root, 'lib')]
-        for libdir in libdirs:
-            for name in self.need_static_link:
-                libname = 'lib{}.a'.format(name)
-                if os.path.isfile(os.path.join(libdir, libname)):
-                    self.lib_modules[name] = [libname]
-            for entry in glob.glob(os.path.join(libdir, globber)):
-                # I'm not 100% sure what to do here. Some distros
-                # have modules such as thread only as -mt versions.
-                # On debian all packages are built threading=multi
-                # but not suffixed with -mt.
-                # FIXME: implement detect_lib_modules_{debian, redhat, ...}
-                # FIXME: this wouldn't work with -mt-gd either. -BDR
-                if self.is_multithreading and mesonlib.is_debianlike():
-                    pass
-                elif self.is_multithreading and entry.endswith('-mt.{}'.format(libsuffix)):
-                    pass
-                elif not entry.endswith('-mt.{}'.format(libsuffix)):
-                    pass
-                else:
-                    continue
-                modname = self.modname_from_filename(entry)
-                if modname not in self.lib_modules:
-                    self.lib_modules[modname] = [entry]
-
-    def extra_lib_dirs(self):
-        if self.libdir:
-            return [self.libdir]
-        elif self.boost_root:
-            return [os.path.join(self.boost_root, 'lib')]
-        return []
-
-    def get_link_args(self, **kwargs):
-        args = []
-        for d in self.extra_lib_dirs():
-            args += self.clib_compiler.get_linker_search_args(d)
-        for lib in self.requested_modules:
-            args += self.lib_modules['boost_' + lib]
-        return args
+            # Homebrew
+            brew_boost = Path('/usr/local/Cellar/boost')
+            if brew_boost.is_dir():
+                tmp += [x for x in brew_boost.iterdir()]
+
+            # Add some default system paths
+            tmp += [Path('/opt/local')]
+            tmp += [Path('/usr/local/opt/boost')]
+            tmp += [Path('/usr/local')]
+            tmp += [Path('/usr')]
+
+            # Cleanup paths
+            tmp = [x for x in tmp if x.is_dir()]
+            tmp = [x.resolve() for x in tmp]
+            roots += tmp
+
+        self.check_and_set_roots(roots)
+
+    def log_details(self) -> str:
+        res = ''
+        if self.modules_found:
+            res += 'found: ' + ', '.join(self.modules_found)
+        if self.modules_missing:
+            if res:
+                res += ' | '
+            res += 'missing: ' + ', '.join(self.modules_missing)
+        return res
+
+    def log_info(self) -> str:
+        if self.boost_root:
+            return self.boost_root.as_posix()
+        return ''
+
+    def _include_dir_from_version_header(self, hfile: Path) -> BoostIncludeDir:
+        # Extract the version with a regex. Using clib_compiler.get_define would
+        # also work, however, this is slower (since the compiler has to be
+        # invoked) and overkill since the layout of the header is always the same.
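+        # BOOST_VERSION is a single integer, e.g. 107300 for Boost 1.73.0
+        # (major * 100000 + minor * 100 + patch); BoostIncludeDir is expected to
+        # turn this back into a dotted version string.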
+        assert hfile.exists()
+        raw = hfile.read_text()
+        m = re.search(r'#define\s+BOOST_VERSION\s+([0-9]+)', raw)
+        if not m:
+            mlog.debug('Failed to extract version information from {}'.format(hfile))
+            return BoostIncludeDir(hfile.parents[1], 0)
+        return BoostIncludeDir(hfile.parents[1], int(m.group(1)))
+
+    def _extra_compile_args(self) -> T.List[str]:
+        # BOOST_ALL_DYN_LINK should not be required with the known defines below
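+        # (BOOST_ALL_NO_LIB turns off the MSVC auto-linking done via
+        # '#pragma comment(lib, ...)', so only the explicitly selected libraries
+        # are linked.)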
+        return ['-DBOOST_ALL_NO_LIB']  # Disable automatic linking
+
+
+# See https://www.boost.org/doc/libs/1_72_0/more/getting_started/unix-variants.html#library-naming
+# See https://mesonbuild.com/Reference-tables.html#cpu-families
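+# The mapped tag is what shows up in versioned library file names, e.g. the
+# '-x64' in a hypothetical 'boost_regex-vc142-mt-x64-1_73.lib'.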
+boost_arch_map = {
+    'aarch64': 'a64',
+    'arc': 'a32',
+    'arm': 'a32',
+    'ia64': 'i64',
+    'mips': 'm32',
+    'mips64': 'm64',
+    'ppc': 'p32',
+    'ppc64': 'p64',
+    'sparc': 's32',
+    'sparc64': 's64',
+    'x86': 'x32',
+    'x86_64': 'x64',
+}
+
+
+####      ---- BEGIN GENERATED ----      ####
+#                                           #
+# Generated with tools/boost_names.py:
+#  - boost version:   1.73.0
+#  - modules found:   159
+#  - libraries found: 43
+#
 
-    def get_sources(self):
-        return []
+class BoostLibrary():
+    def __init__(self, name: str, shared: T.List[str], static: T.List[str], single: T.List[str], multi: T.List[str]):
+        self.name = name
+        self.shared = shared
+        self.static = static
+        self.single = single
+        self.multi = multi
+
+class BoostModule():
+    def __init__(self, name: str, key: str, desc: str, libs: T.List[str]):
+        self.name = name
+        self.key = key
+        self.desc = desc
+        self.libs = libs
+
+
+# dict of all known libraries with additional compile options
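+# e.g. get_compiler_args() for a shared, multi-threaded boost_thread picks up
+# ['-DBOOST_THREAD_BUILD_DLL=1', '-DBOOST_THREAD_USE_DLL=1'] from this table.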
+boost_libraries = {
+    'boost_atomic': BoostLibrary(
+        name='boost_atomic',
+        shared=['-DBOOST_ATOMIC_DYN_LINK=1'],
+        static=['-DBOOST_ATOMIC_STATIC_LINK=1'],
+        single=[],
+        multi=[],
+    ),
+    'boost_chrono': BoostLibrary(
+        name='boost_chrono',
+        shared=['-DBOOST_CHRONO_DYN_LINK=1'],
+        static=['-DBOOST_CHRONO_STATIC_LINK=1'],
+        single=['-DBOOST_CHRONO_THREAD_DISABLED'],
+        multi=[],
+    ),
+    'boost_container': BoostLibrary(
+        name='boost_container',
+        shared=['-DBOOST_CONTAINER_DYN_LINK=1'],
+        static=['-DBOOST_CONTAINER_STATIC_LINK=1'],
+        single=[],
+        multi=[],
+    ),
+    'boost_context': BoostLibrary(
+        name='boost_context',
+        shared=['-DBOOST_CONTEXT_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_contract': BoostLibrary(
+        name='boost_contract',
+        shared=['-DBOOST_CONTRACT_DYN_LINK'],
+        static=['-DBOOST_CONTRACT_STATIC_LINK'],
+        single=['-DBOOST_CONTRACT_DISABLE_THREADS'],
+        multi=[],
+    ),
+    'boost_coroutine': BoostLibrary(
+        name='boost_coroutine',
+        shared=['-DBOOST_COROUTINES_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_date_time': BoostLibrary(
+        name='boost_date_time',
+        shared=['-DBOOST_DATE_TIME_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_exception': BoostLibrary(
+        name='boost_exception',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_fiber': BoostLibrary(
+        name='boost_fiber',
+        shared=['-DBOOST_FIBERS_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_fiber_numa': BoostLibrary(
+        name='boost_fiber_numa',
+        shared=['-DBOOST_FIBERS_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_filesystem': BoostLibrary(
+        name='boost_filesystem',
+        shared=['-DBOOST_FILESYSTEM_DYN_LINK=1'],
+        static=['-DBOOST_FILESYSTEM_STATIC_LINK=1'],
+        single=[],
+        multi=[],
+    ),
+    'boost_graph': BoostLibrary(
+        name='boost_graph',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_iostreams': BoostLibrary(
+        name='boost_iostreams',
+        shared=['-DBOOST_IOSTREAMS_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_locale': BoostLibrary(
+        name='boost_locale',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_log': BoostLibrary(
+        name='boost_log',
+        shared=['-DBOOST_LOG_DYN_LINK=1'],
+        static=[],
+        single=['-DBOOST_LOG_NO_THREADS'],
+        multi=[],
+    ),
+    'boost_log_setup': BoostLibrary(
+        name='boost_log_setup',
+        shared=['-DBOOST_LOG_SETUP_DYN_LINK=1'],
+        static=[],
+        single=['-DBOOST_LOG_NO_THREADS'],
+        multi=[],
+    ),
+    'boost_math_c99': BoostLibrary(
+        name='boost_math_c99',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_math_c99f': BoostLibrary(
+        name='boost_math_c99f',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_math_c99l': BoostLibrary(
+        name='boost_math_c99l',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_math_tr1': BoostLibrary(
+        name='boost_math_tr1',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_math_tr1f': BoostLibrary(
+        name='boost_math_tr1f',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_math_tr1l': BoostLibrary(
+        name='boost_math_tr1l',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_mpi': BoostLibrary(
+        name='boost_mpi',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_nowide': BoostLibrary(
+        name='boost_nowide',
+        shared=['-DBOOST_NOWIDE_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_prg_exec_monitor': BoostLibrary(
+        name='boost_prg_exec_monitor',
+        shared=['-DBOOST_TEST_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_program_options': BoostLibrary(
+        name='boost_program_options',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_random': BoostLibrary(
+        name='boost_random',
+        shared=['-DBOOST_RANDOM_DYN_LINK'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_regex': BoostLibrary(
+        name='boost_regex',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_serialization': BoostLibrary(
+        name='boost_serialization',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_stacktrace_addr2line': BoostLibrary(
+        name='boost_stacktrace_addr2line',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_stacktrace_backtrace': BoostLibrary(
+        name='boost_stacktrace_backtrace',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_stacktrace_basic': BoostLibrary(
+        name='boost_stacktrace_basic',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_stacktrace_noop': BoostLibrary(
+        name='boost_stacktrace_noop',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_stacktrace_windbg': BoostLibrary(
+        name='boost_stacktrace_windbg',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_stacktrace_windbg_cached': BoostLibrary(
+        name='boost_stacktrace_windbg_cached',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_system': BoostLibrary(
+        name='boost_system',
+        shared=['-DBOOST_SYSTEM_DYN_LINK=1'],
+        static=['-DBOOST_SYSTEM_STATIC_LINK=1'],
+        single=[],
+        multi=[],
+    ),
+    'boost_test_exec_monitor': BoostLibrary(
+        name='boost_test_exec_monitor',
+        shared=['-DBOOST_TEST_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_thread': BoostLibrary(
+        name='boost_thread',
+        shared=['-DBOOST_THREAD_BUILD_DLL=1', '-DBOOST_THREAD_USE_DLL=1'],
+        static=['-DBOOST_THREAD_BUILD_LIB=1', '-DBOOST_THREAD_USE_LIB=1'],
+        single=[],
+        multi=[],
+    ),
+    'boost_timer': BoostLibrary(
+        name='boost_timer',
+        shared=['-DBOOST_TIMER_DYN_LINK=1'],
+        static=['-DBOOST_TIMER_STATIC_LINK=1'],
+        single=[],
+        multi=[],
+    ),
+    'boost_type_erasure': BoostLibrary(
+        name='boost_type_erasure',
+        shared=['-DBOOST_TYPE_ERASURE_DYN_LINK'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_unit_test_framework': BoostLibrary(
+        name='boost_unit_test_framework',
+        shared=['-DBOOST_TEST_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_wave': BoostLibrary(
+        name='boost_wave',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_wserialization': BoostLibrary(
+        name='boost_wserialization',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+}
 
-# Generated with boost_names.py
-BOOST_LIBS = [
-    'boost_atomic',
-    'boost_chrono',
-    'boost_chrono',
-    'boost_container',
-    'boost_context',
-    'boost_coroutine',
-    'boost_date_time',
-    'boost_exception',
-    'boost_fiber',
-    'boost_filesystem',
-    'boost_graph',
-    'boost_iostreams',
-    'boost_locale',
-    'boost_log',
-    'boost_log_setup',
-    'boost_math_tr1',
-    'boost_math_tr1f',
-    'boost_math_tr1l',
-    'boost_math_c99',
-    'boost_math_c99f',
-    'boost_math_c99l',
-    'boost_math_tr1',
-    'boost_math_tr1f',
-    'boost_math_tr1l',
-    'boost_math_c99',
-    'boost_math_c99f',
-    'boost_math_c99l',
-    'boost_math_tr1',
-    'boost_math_tr1f',
-    'boost_math_tr1l',
-    'boost_math_c99',
-    'boost_math_c99f',
-    'boost_math_c99l',
-    'boost_math_tr1',
-    'boost_math_tr1f',
-    'boost_math_tr1l',
-    'boost_math_c99',
-    'boost_math_c99f',
-    'boost_math_c99l',
-    'boost_math_tr1',
-    'boost_math_tr1f',
-    'boost_math_tr1l',
-    'boost_math_c99',
-    'boost_math_c99f',
-    'boost_math_c99l',
-    'boost_math_tr1',
-    'boost_math_tr1f',
-    'boost_math_tr1l',
-    'boost_math_c99',
-    'boost_math_c99f',
-    'boost_math_c99l',
-    'boost_mpi',
-    'boost_program_options',
-    'boost_random',
-    'boost_regex',
-    'boost_serialization',
-    'boost_wserialization',
-    'boost_signals',
-    'boost_stacktrace_noop',
-    'boost_stacktrace_backtrace',
-    'boost_stacktrace_addr2line',
-    'boost_stacktrace_basic',
-    'boost_stacktrace_windbg',
-    'boost_stacktrace_windbg_cached',
-    'boost_system',
-    'boost_prg_exec_monitor',
-    'boost_test_exec_monitor',
-    'boost_unit_test_framework',
-    'boost_thread',
-    'boost_timer',
-    'boost_type_erasure',
-    'boost_wave'
-]
-
-BOOST_DIRS = [
-    'lambda',
-    'optional',
-    'convert',
-    'system',
-    'uuid',
-    'archive',
-    'align',
-    'timer',
-    'chrono',
-    'gil',
-    'logic',
-    'signals',
-    'predef',
-    'tr1',
-    'multi_index',
-    'property_map',
-    'multi_array',
-    'context',
-    'random',
-    'endian',
-    'circular_buffer',
-    'proto',
-    'assign',
-    'format',
-    'math',
-    'phoenix',
-    'graph',
-    'locale',
-    'mpl',
-    'pool',
-    'unordered',
-    'core',
-    'exception',
-    'ptr_container',
-    'flyweight',
-    'range',
-    'typeof',
-    'thread',
-    'move',
-    'spirit',
-    'dll',
-    'compute',
-    'serialization',
-    'ratio',
-    'msm',
-    'config',
-    'metaparse',
-    'coroutine2',
-    'qvm',
-    'program_options',
-    'concept',
-    'detail',
-    'hana',
-    'concept_check',
-    'compatibility',
-    'variant',
-    'type_erasure',
-    'mpi',
-    'test',
-    'fusion',
-    'log',
-    'sort',
-    'local_function',
-    'units',
-    'functional',
-    'preprocessor',
-    'integer',
-    'container',
-    'polygon',
-    'interprocess',
-    'numeric',
-    'iterator',
-    'wave',
-    'lexical_cast',
-    'multiprecision',
-    'utility',
-    'tti',
-    'asio',
-    'dynamic_bitset',
-    'algorithm',
-    'xpressive',
-    'bimap',
-    'signals2',
-    'type_traits',
-    'regex',
-    'statechart',
-    'parameter',
-    'icl',
-    'python',
-    'lockfree',
-    'intrusive',
-    'io',
-    'pending',
-    'geometry',
-    'tuple',
-    'iostreams',
-    'heap',
-    'atomic',
-    'filesystem',
-    'smart_ptr',
-    'function',
-    'fiber',
-    'type_index',
-    'accumulators',
-    'function_types',
-    'coroutine',
-    'vmd',
-    'date_time',
-    'property_tree',
-    'bind'
-]
+#                                           #
+####       ---- END GENERATED ----       ####
diff -Nru meson-0.53.2/mesonbuild/dependencies/coarrays.py meson-0.57.0+really0.56.2/mesonbuild/dependencies/coarrays.py
--- meson-0.53.2/mesonbuild/dependencies/coarrays.py	2019-12-29 22:47:27.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/dependencies/coarrays.py	2020-08-15 16:27:05.000000000 +0000
@@ -12,8 +12,39 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from ..mesonlib import listify
-from .base import CMakeDependency, DependencyMethods, ExternalDependency, PkgConfigDependency
+import functools
+import typing as T
+
+from .base import CMakeDependency, DependencyMethods, ExternalDependency, PkgConfigDependency, detect_compiler, factory_methods
+
+if T.TYPE_CHECKING:
+    from . base import DependencyType
+    from ..environment import Environment, MachineChoice
+
+
+@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE, DependencyMethods.SYSTEM})
+def coarray_factory(env: 'Environment', for_machine: 'MachineChoice',
+                    kwargs: T.Dict[str, T.Any], methods: T.List[DependencyMethods]) -> T.List['DependencyType']:
+    fcid = detect_compiler('coarray', env, for_machine, 'fortran').get_id()
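+    # This factory is what a Fortran project's dependency('coarray') call in
+    # meson.build is expected to reach (illustrative usage, not part of this diff).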
+    candidates = []  # type: T.List[DependencyType]
+
+    if fcid == 'gcc':
+        # OpenCoarrays is the most commonly used method for Fortran Coarray with GCC
+        if DependencyMethods.PKGCONFIG in methods:
+            for pkg in ['caf-openmpi', 'caf']:
+                candidates.append(functools.partial(
+                    PkgConfigDependency, pkg, env, kwargs, language='fortran'))
+
+        if DependencyMethods.CMAKE in methods:
+            if 'modules' not in kwargs:
+                kwargs['modules'] = 'OpenCoarrays::caf_mpi'
+            candidates.append(functools.partial(
+                CMakeDependency, 'OpenCoarrays', env, kwargs, language='fortran'))
+
+    if DependencyMethods.SYSTEM in methods:
+        candidates.append(functools.partial(CoarrayDependency, env, kwargs))
+
+    return candidates
 
 
 class CoarrayDependency(ExternalDependency):
@@ -26,56 +57,27 @@
     low-level MPI calls.
     """
     def __init__(self, environment, kwargs: dict):
-        super().__init__('coarray', environment, 'fortran', kwargs)
+        super().__init__('coarray', environment, kwargs, language='fortran')
         kwargs['required'] = False
         kwargs['silent'] = True
-        self.is_found = False
-        methods = listify(self.methods)
 
         cid = self.get_compiler().get_id()
         if cid == 'gcc':
-            """ OpenCoarrays is the most commonly used method for Fortran Coarray with GCC """
-
-            if set([DependencyMethods.AUTO, DependencyMethods.PKGCONFIG]).intersection(methods):
-                for pkg in ['caf-openmpi', 'caf']:
-                    pkgdep = PkgConfigDependency(pkg, environment, kwargs, language=self.language)
-                    if pkgdep.found():
-                        self.compile_args = pkgdep.get_compile_args()
-                        self.link_args = pkgdep.get_link_args()
-                        self.version = pkgdep.get_version()
-                        self.is_found = True
-                        self.pcdep = pkgdep
-                        return
-
-            if set([DependencyMethods.AUTO, DependencyMethods.CMAKE]).intersection(methods):
-                if not kwargs.get('modules'):
-                    kwargs['modules'] = 'OpenCoarrays::caf_mpi'
-                cmakedep = CMakeDependency('OpenCoarrays', environment, kwargs, language=self.language)
-                if cmakedep.found():
-                    self.compile_args = cmakedep.get_compile_args()
-                    self.link_args = cmakedep.get_link_args()
-                    self.version = cmakedep.get_version()
-                    self.is_found = True
-                    return
-
-            if DependencyMethods.AUTO in methods:
-                # fallback to single image
-                self.compile_args = ['-fcoarray=single']
-                self.version = 'single image (fallback)'
-                self.is_found = True
-                return
-
+            # Fallback to single image
+            self.compile_args = ['-fcoarray=single']
+            self.version = 'single image (fallback)'
+            self.is_found = True
         elif cid == 'intel':
-            """ Coarrays are built into Intel compilers, no external library needed """
+            # Coarrays are built into Intel compilers, no external library needed
             self.is_found = True
             self.link_args = ['-coarray=shared']
             self.compile_args = self.link_args
         elif cid == 'intel-cl':
-            """ Coarrays are built into Intel compilers, no external library needed """
+            # Coarrays are built into Intel compilers, no external library needed
             self.is_found = True
             self.compile_args = ['/Qcoarray:shared']
         elif cid == 'nagfor':
-            """ NAG doesn't require any special arguments for Coarray """
+            # NAG doesn't require any special arguments for Coarray
             self.is_found = True
 
     @staticmethod
diff -Nru meson-0.53.2/mesonbuild/dependencies/cuda.py meson-0.57.0+really0.56.2/mesonbuild/dependencies/cuda.py
--- meson-0.53.2/mesonbuild/dependencies/cuda.py	2019-12-29 22:47:27.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/dependencies/cuda.py	2021-01-06 10:39:48.000000000 +0000
@@ -15,6 +15,8 @@
 import glob
 import re
 import os
+import typing as T
+from .._pathlib import Path
 
 from .. import mlog
 from .. import mesonlib
@@ -33,7 +35,7 @@
         if language not in self.supported_languages:
             raise DependencyException('Language \'{}\' is not supported by the CUDA Toolkit. Supported languages are {}.'.format(language, self.supported_languages))
 
-        super().__init__('cuda', environment, language, kwargs)
+        super().__init__('cuda', environment, kwargs, language=language)
         self.requested_modules = self.get_requested(kwargs)
         if 'cudart' not in self.requested_modules:
             self.requested_modules = ['cudart'] + self.requested_modules
@@ -149,22 +151,47 @@
     toolkit_version_regex = re.compile(r'^CUDA Version\s+(.*)$')
     path_version_win_regex = re.compile(r'^v(.*)$')
     path_version_nix_regex = re.compile(r'^cuda-(.*)$')
+    cudart_version_regex = re.compile(r'#define\s+CUDART_VERSION\s+([0-9]+)')
 
-    def _cuda_toolkit_version(self, path):
+    def _cuda_toolkit_version(self, path: str) -> str:
         version = self._read_toolkit_version_txt(path)
         if version:
             return version
+        version = self._read_cuda_runtime_api_version(path)
+        if version:
+            return version
 
         mlog.debug('Falling back to extracting version from path')
         path_version_regex = self.path_version_win_regex if self._is_windows() else self.path_version_nix_regex
-        m = path_version_regex.match(os.path.basename(path))
-        if m:
-            return m[1]
+        try:
+            m = path_version_regex.match(os.path.basename(path))
+            if m:
+                return m.group(1)
+            else:
+                mlog.warning('Could not detect CUDA Toolkit version for {}'.format(path))
+        except Exception as e:
+            mlog.warning('Could not detect CUDA Toolkit version for {}: {}'.format(path, str(e)))
 
-        mlog.warning('Could not detect CUDA Toolkit version for {}'.format(path))
         return '0.0'
 
-    def _read_toolkit_version_txt(self, path):
+    def _read_cuda_runtime_api_version(self, path_str: str) -> T.Optional[str]:
+        path = Path(path_str)
+        for i in path.rglob('cuda_runtime_api.h'):
+            raw = i.read_text()
+            m = self.cudart_version_regex.search(raw)
+            if not m:
+                continue
+            try:
+                vers_int = int(m.group(1))
+            except ValueError:
+                continue
+            # use // for floor instead of / which produces a float
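+            # e.g. CUDART_VERSION 10010 -> major 10, minor 1 -> '10.1'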
+            major = vers_int // 1000                  # type: int
+            minor = (vers_int - major * 1000) // 10   # type: int
+            return '{}.{}'.format(major, minor)
+        return None
+
+    def _read_toolkit_version_txt(self, path: str) -> T.Optional[str]:
         # Read 'version.txt' at the root of the CUDA Toolkit directory to determine the toolkit version
         version_file_path = os.path.join(path, 'version.txt')
         try:
@@ -172,7 +199,7 @@
                 version_str = version_file.readline() # e.g. 'CUDA Version 10.1.168'
                 m = self.toolkit_version_regex.match(version_str)
                 if m:
-                    return self._strip_patch_version(m[1])
+                    return self._strip_patch_version(m.group(1))
         except Exception as e:
             mlog.debug('Could not read CUDA Toolkit\'s version file {}: {}'.format(version_file_path, str(e)))
 
@@ -192,7 +219,7 @@
                 raise DependencyException(msg.format(arch, 'Windows'))
             return os.path.join('lib', libdirs[arch])
         elif machine.is_linux():
-            libdirs = {'x86_64': 'lib64', 'ppc64': 'lib'}
+            libdirs = {'x86_64': 'lib64', 'ppc64': 'lib', 'aarch64': 'lib64'}
             if arch not in libdirs:
                 raise DependencyException(msg.format(arch, 'Linux'))
             return libdirs[arch]
diff -Nru meson-0.53.2/mesonbuild/dependencies/data/CMakeLists.txt meson-0.57.0+really0.56.2/mesonbuild/dependencies/data/CMakeLists.txt
--- meson-0.53.2/mesonbuild/dependencies/data/CMakeLists.txt	2019-12-29 22:47:27.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/dependencies/data/CMakeLists.txt	2021-01-06 10:39:48.000000000 +0000
@@ -9,7 +9,7 @@
 string(TOUPPER "${_packageName}" PACKAGE_NAME)
 
 while(TRUE)
-  find_package("${NAME}" QUIET)
+  find_package("${NAME}" QUIET COMPONENTS ${COMPS})
 
   # ARCHS has to be set via the CMD interface
   if(${_packageName}_FOUND OR ${PACKAGE_NAME}_FOUND OR "${ARCHS}" STREQUAL "")
diff -Nru meson-0.53.2/mesonbuild/dependencies/dev.py meson-0.57.0+really0.56.2/mesonbuild/dependencies/dev.py
--- meson-0.53.2/mesonbuild/dependencies/dev.py	2020-01-23 12:51:19.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/dependencies/dev.py	2020-10-26 11:18:42.000000000 +0000
@@ -15,21 +15,24 @@
 # This file contains the detection logic for external dependencies useful for
 # development purposes, such as testing, debugging, etc..
 
-import functools
 import glob
 import os
 import re
+import typing as T
 
 from .. import mesonlib, mlog
 from ..mesonlib import version_compare, stringlistify, extract_as_list, MachineChoice
 from ..environment import get_llvm_tool_names
 from .base import (
     DependencyException, DependencyMethods, ExternalDependency, PkgConfigDependency,
-    strip_system_libdirs, ConfigToolDependency, CMakeDependency
+    strip_system_libdirs, ConfigToolDependency, CMakeDependency, DependencyFactory,
 )
-from .misc import ThreadDependency
+from .misc import threads_factory
+from ..compilers.c import AppleClangCCompiler
+from ..compilers.cpp import AppleClangCPPCompiler
 
-import typing as T
+if T.TYPE_CHECKING:
+    from .. environment import Environment
 
 
 def get_shared_library_suffix(environment, for_machine: MachineChoice):
@@ -44,13 +47,15 @@
     return '.so'
 
 
-class GTestDependency(ExternalDependency):
-    def __init__(self, environment, kwargs):
-        super().__init__('gtest', environment, 'cpp', kwargs)
+class GTestDependencySystem(ExternalDependency):
+    def __init__(self, name: str, environment, kwargs):
+        super().__init__(name, environment, kwargs, language='cpp')
         self.main = kwargs.get('main', False)
         self.src_dirs = ['/usr/src/gtest/src', '/usr/src/googletest/googletest/src']
+        if not self._add_sub_dependency(threads_factory(environment, self.for_machine, {})):
+            self.is_found = False
+            return
         self.detect()
-        self._add_sub_dependency(ThreadDependency, environment, kwargs)
 
     def detect(self):
         gtest_detect = self.clib_compiler.find_library("gtest", self.env, [])
@@ -98,30 +103,27 @@
     def log_tried(self):
         return 'system'
 
-    @classmethod
-    def _factory(cls, environment, kwargs):
-        methods = cls._process_method_kw(kwargs)
-        candidates = []
-
-        if DependencyMethods.PKGCONFIG in methods:
-            pcname = 'gtest_main' if kwargs.get('main', False) else 'gtest'
-            candidates.append(functools.partial(PkgConfigDependency, pcname, environment, kwargs))
-
-        if DependencyMethods.SYSTEM in methods:
-            candidates.append(functools.partial(GTestDependency, environment, kwargs))
-
-        return candidates
-
     @staticmethod
     def get_methods():
         return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM]
 
 
-class GMockDependency(ExternalDependency):
-    def __init__(self, environment, kwargs):
-        super().__init__('gmock', environment, 'cpp', kwargs)
+class GTestDependencyPC(PkgConfigDependency):
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        assert name == 'gtest'
+        if kwargs.get('main'):
+            name = 'gtest_main'
+        super().__init__(name, environment, kwargs)
+
+
+class GMockDependencySystem(ExternalDependency):
+    def __init__(self, name: str, environment, kwargs):
+        super().__init__(name, environment, kwargs, language='cpp')
         self.main = kwargs.get('main', False)
-        self._add_sub_dependency(ThreadDependency, environment, kwargs)
+        if not self._add_sub_dependency(threads_factory(environment, self.for_machine, {})):
+            self.is_found = False
+            return
 
         # If we are getting main() from GMock, we definitely
         # want to avoid linking in main() from GTest
@@ -132,11 +134,10 @@
         # GMock without GTest is pretty much useless
         # this also mimics the structure given in WrapDB,
         # where GMock always pulls in GTest
-        gtest_dep = GTestDependency(environment, gtest_kwargs)
-        if not gtest_dep.is_found:
+        found = self._add_sub_dependency(gtest_factory(environment, self.for_machine, gtest_kwargs))
+        if not found:
             self.is_found = False
             return
-        self.ext_deps.append(gtest_dep)
 
         # GMock may be a library or just source.
         # Work with both.
@@ -177,25 +178,20 @@
     def log_tried(self):
         return 'system'
 
-    @classmethod
-    def _factory(cls, environment, kwargs):
-        methods = cls._process_method_kw(kwargs)
-        candidates = []
-
-        if DependencyMethods.PKGCONFIG in methods:
-            pcname = 'gmock_main' if kwargs.get('main', False) else 'gmock'
-            candidates.append(functools.partial(PkgConfigDependency, pcname, environment, kwargs))
-
-        if DependencyMethods.SYSTEM in methods:
-            candidates.append(functools.partial(GMockDependency, environment, kwargs))
-
-        return candidates
-
     @staticmethod
     def get_methods():
         return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM]
 
 
+class GMockDependencyPC(PkgConfigDependency):
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        assert name == 'gmock'
+        if kwargs.get('main'):
+            name = 'gmock_main'
+        super().__init__(name, environment, kwargs)
+
+
 class LLVMDependencyConfigTool(ConfigToolDependency):
     """
     LLVM uses a special tool, llvm-config, which has arguments for getting
@@ -204,7 +200,7 @@
     tool_name = 'llvm-config'
     __cpp_blacklist = {'-DNDEBUG'}
 
-    def __init__(self, environment, kwargs):
+    def __init__(self, name: str, environment, kwargs):
         self.tools = get_llvm_tool_names('llvm-config')
 
         # Fedora starting with Fedora 30 adds a suffix of the number
@@ -218,13 +214,12 @@
 
         # It's necessary for LLVM <= 3.8 to use the C++ linker. For 3.9 and 4.0
         # the C linker works fine if only using the C API.
-        super().__init__('LLVM', environment, 'cpp', kwargs)
+        super().__init__(name, environment, kwargs, language='cpp')
         self.provided_modules = []
         self.required_modules = set()
         self.module_details = []
         if not self.is_found:
             return
-        self.static = kwargs.get('static', False)
 
         self.provided_modules = self.get_config_value(['--components'], 'modules')
         modules = stringlistify(extract_as_list(kwargs, 'modules'))
@@ -241,7 +236,9 @@
             self._set_old_link_args()
         self.link_args = strip_system_libdirs(environment, self.for_machine, self.link_args)
         self.link_args = self.__fix_bogus_link_args(self.link_args)
-        self._add_sub_dependency(ThreadDependency, environment, kwargs)
+        if not self._add_sub_dependency(threads_factory(environment, self.for_machine, {})):
+            self.is_found = False
+            return
 
     def __fix_bogus_link_args(self, args):
         """This function attempts to fix bogus link arguments that llvm-config
@@ -284,7 +281,13 @@
 
     def _set_new_link_args(self, environment):
         """How to set linker args for LLVM versions >= 3.9"""
-        mode = self.get_config_value(['--shared-mode'], 'link_args')[0]
+        try:
+            mode = self.get_config_value(['--shared-mode'], 'link_args')[0]
+        except IndexError:
+            mlog.debug('llvm-config --shared-mode returned an error')
+            self.is_found = False
+            return
+
         if not self.static and mode == 'static':
             # If llvm is configured with LLVM_BUILD_LLVM_DYLIB but not with
             # LLVM_LINK_LLVM_DYLIB and not LLVM_BUILD_SHARED_LIBS (which
@@ -391,17 +394,32 @@
         return ''
 
 class LLVMDependencyCMake(CMakeDependency):
-    def __init__(self, env, kwargs):
+    def __init__(self, name: str, env, kwargs):
         self.llvm_modules = stringlistify(extract_as_list(kwargs, 'modules'))
         self.llvm_opt_modules = stringlistify(extract_as_list(kwargs, 'optional_modules'))
-        super().__init__(name='LLVM', environment=env, language='cpp', kwargs=kwargs)
+        super().__init__(name, env, kwargs, language='cpp')
+
+        # CMake will always create a statically linked binary, so don't use
+        # CMake if dynamic linking is required
+        if not self.static:
+            self.is_found = False
+            mlog.warning('Ignoring LLVM CMake dependency because dynamic was requested')
+            return
+
+        if self.traceparser is None:
+            return
 
         # Extract extra include directories and definitions
         inc_dirs = self.traceparser.get_cmake_var('PACKAGE_INCLUDE_DIRS')
         defs = self.traceparser.get_cmake_var('PACKAGE_DEFINITIONS')
+        # LLVM explicitly uses space-separated variables rather than semicolon lists
+        if len(defs) == 1:
+            defs = defs[0].split(' ')
         temp = ['-I' + x for x in inc_dirs] + defs
         self.compile_args += [x for x in temp if x not in self.compile_args]
-        self._add_sub_dependency(ThreadDependency, env, kwargs)
+        if not self._add_sub_dependency(threads_factory(env, self.for_machine, {})):
+            self.is_found = False
+            return
 
     def _main_cmake_file(self) -> str:
         # Use a custom CMakeLists.txt for LLVM
@@ -410,7 +428,7 @@
     def _extra_cmake_opts(self) -> T.List[str]:
         return ['-DLLVM_MESON_MODULES={}'.format(';'.join(self.llvm_modules + self.llvm_opt_modules))]
 
-    def _map_module_list(self, modules: T.List[T.Tuple[str, bool]]) -> T.List[T.Tuple[str, bool]]:
+    def _map_module_list(self, modules: T.List[T.Tuple[str, bool]], components: T.List[T.Tuple[str, bool]]) -> T.List[T.Tuple[str, bool]]:
         res = []
         for mod, required in modules:
             cm_targets = self.traceparser.get_cmake_var('MESON_LLVM_TARGETS_{}'.format(mod))
@@ -430,26 +448,6 @@
             return orig_name[0]
         return module
 
-class LLVMDependency(ExternalDependency):
-    def __init__(self, env, kwargs):
-        super().__init__('LLVM', env, 'cpp', kwargs)
-
-    @classmethod
-    def _factory(cls, env, kwargs):
-        methods = cls._process_method_kw(kwargs)
-        candidates = []
-
-        if DependencyMethods.CONFIG_TOOL in methods:
-            candidates.append(functools.partial(LLVMDependencyConfigTool, env, kwargs))
-
-        if DependencyMethods.CMAKE in methods:
-            candidates.append(functools.partial(LLVMDependencyCMake, env, kwargs))
-
-        return candidates
-
-    @staticmethod
-    def get_methods():
-        return [DependencyMethods.CMAKE, DependencyMethods.CONFIG_TOOL]
 
 class ValgrindDependency(PkgConfigDependency):
     '''
@@ -461,3 +459,76 @@
 
     def get_link_args(self, **kwargs):
         return []
+
+
+class ZlibSystemDependency(ExternalDependency):
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, environment, kwargs)
+
+        m = self.env.machines[self.for_machine]
+
+        # I'm not sure this is entirely correct. What if we're cross compiling
+        # from something to macOS?
+        if ((m.is_darwin() and isinstance(self.clib_compiler, (AppleClangCCompiler, AppleClangCPPCompiler))) or
+                m.is_freebsd() or m.is_dragonflybsd()):
+            self.is_found = True
+            self.link_args = ['-lz']
+
+            # No need to set includes,
+            # on macos xcode/clang will do that for us.
+            # on freebsd zlib.h is in /usr/include
+        elif m.is_windows():
+            if self.clib_compiler.get_argument_syntax() == 'msvc':
+                libs = ['zlib1', 'zlib']
+            else:
+                libs = ['z']
+            for lib in libs:
+                l = self.clib_compiler.find_library(lib, environment, [])
+                h = self.clib_compiler.has_header('zlib.h', '', environment, dependencies=[self])
+                if l and h[0]:
+                    self.is_found = True
+                    self.link_args = l
+                    break
+            else:
+                return
+        else:
+            mlog.debug('Unsupported OS {}'.format(m.system))
+            return
+
+        v, _ = self.clib_compiler.get_define('ZLIB_VERSION', '#include <zlib.h>', self.env, [], [self])
+        self.version = v.strip('"')
+
+
+    @staticmethod
+    def get_methods():
+        return [DependencyMethods.SYSTEM]
+
+
+llvm_factory = DependencyFactory(
+    'LLVM',
+    [DependencyMethods.CMAKE, DependencyMethods.CONFIG_TOOL],
+    cmake_class=LLVMDependencyCMake,
+    configtool_class=LLVMDependencyConfigTool,
+)
+
+gtest_factory = DependencyFactory(
+    'gtest',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM],
+    pkgconfig_class=GTestDependencyPC,
+    system_class=GTestDependencySystem,
+)
+
+gmock_factory = DependencyFactory(
+    'gmock',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM],
+    pkgconfig_class=GMockDependencyPC,
+    system_class=GMockDependencySystem,
+)
+
+zlib_factory = DependencyFactory(
+    'zlib',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE, DependencyMethods.SYSTEM],
+    cmake_name='ZLIB',
+    system_class=ZlibSystemDependency,
+)
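
The DependencyFactory declarations above replace the per-class _factory classmethods that this diff deletes: each factory is registered under a package name and, when called with the environment, target machine and keyword arguments, returns an ordered list of zero-argument constructors that are tried until one reports is_found. A minimal, self-contained sketch of that mechanism follows; FakeDep, pkgconfig_lookup and system_lookup are toy stand-ins, not Meson's real classes.

    import functools
    import typing as T

    class FakeDep:
        def __init__(self, name: str, found: bool) -> None:
            self.name = name
            self.is_found = found

    def pkgconfig_lookup(name: str) -> FakeDep:
        return FakeDep(name, False)   # pretend the pkg-config file is missing

    def system_lookup(name: str) -> FakeDep:
        return FakeDep(name, True)    # pretend the system fallback succeeds

    def zlib_like_factory(name: str) -> T.List[T.Callable[[], FakeDep]]:
        # Ordered candidates, mirroring the method lists the factories above are built with
        return [functools.partial(pkgconfig_lookup, name),
                functools.partial(system_lookup, name)]

    def find_first(candidates: T.List[T.Callable[[], FakeDep]]) -> T.Optional[FakeDep]:
        for make in candidates:
            dep = make()              # constructing a candidate *is* the lookup
            if dep.is_found:
                return dep
        return None

    print(find_first(zlib_like_factory('zlib')).name)   # prints: zlib

In the real code the candidate order comes from the methods list each factory is constructed with, optionally narrowed by the user's method: keyword.
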
diff -Nru meson-0.53.2/mesonbuild/dependencies/hdf5.py meson-0.57.0+really0.56.2/mesonbuild/dependencies/hdf5.py
--- meson-0.53.2/mesonbuild/dependencies/hdf5.py	2020-01-23 12:51:19.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/dependencies/hdf5.py	2021-01-06 10:39:48.000000000 +0000
@@ -14,117 +14,163 @@
 
 # This file contains the detection logic for miscellaneous external dependencies.
 
-import subprocess
+import functools
+import os
+import re
 import shutil
-from pathlib import Path
+import subprocess
+from .._pathlib import Path
+
+from ..mesonlib import OrderedSet, join_args
+from .base import (
+    DependencyException, DependencyMethods, ConfigToolDependency,
+    PkgConfigDependency, factory_methods
+)
+import typing as T
+
+if T.TYPE_CHECKING:
+    from .base import Dependency
+    from ..envconfig import MachineChoice
+    from ..environment import Environment
 
-from .. import mlog
-from ..mesonlib import split_args, listify
-from .base import (DependencyException, DependencyMethods, ExternalDependency, ExternalProgram,
-                   PkgConfigDependency)
-
-class HDF5Dependency(ExternalDependency):
-
-    def __init__(self, environment, kwargs):
-        language = kwargs.get('language', 'c')
-        super().__init__('hdf5', environment, language, kwargs)
-        kwargs['required'] = False
-        kwargs['silent'] = True
-        self.is_found = False
-        methods = listify(self.methods)
 
-        if language not in ('c', 'cpp', 'fortran'):
+class HDF5PkgConfigDependency(PkgConfigDependency):
+
+    """Handle brokenness in the HDF5 pkg-config files."""
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None:
+        language = language or 'c'
+        if language not in {'c', 'cpp', 'fortran'}:
             raise DependencyException('Language {} is not supported with HDF5.'.format(language))
 
-        if set([DependencyMethods.AUTO, DependencyMethods.PKGCONFIG]).intersection(methods):
-            pkgconfig_files = ['hdf5', 'hdf5-serial']
-            PCEXE = shutil.which('pkg-config')
-            if PCEXE:
-                # some distros put hdf5-1.2.3.pc with version number in .pc filename.
-                ret = subprocess.run([PCEXE, '--list-all'], stdout=subprocess.PIPE, stderr=subprocess.DEVNULL,
-                                     universal_newlines=True)
-                if ret.returncode == 0:
-                    for pkg in ret.stdout.split('\n'):
-                        if pkg.startswith(('hdf5')):
-                            pkgconfig_files.append(pkg.split(' ', 1)[0])
-                    pkgconfig_files = list(set(pkgconfig_files))  # dedupe
-
-            for pkg in pkgconfig_files:
-                pkgdep = PkgConfigDependency(pkg, environment, kwargs, language=self.language)
-                if not pkgdep.found():
-                    continue
-
-                self.compile_args = pkgdep.get_compile_args()
-                # some broken pkgconfig don't actually list the full path to the needed includes
-                newinc = []
-                for arg in self.compile_args:
-                    if arg.startswith('-I'):
-                        stem = 'static' if kwargs.get('static', False) else 'shared'
-                        if (Path(arg[2:]) / stem).is_dir():
-                            newinc.append('-I' + str(Path(arg[2:]) / stem))
-                self.compile_args += newinc
-
-                # derive needed libraries by language
-                pd_link_args = pkgdep.get_link_args()
-                link_args = []
-                for larg in pd_link_args:
-                    lpath = Path(larg)
-                    # some pkg-config hdf5.pc (e.g. Ubuntu) don't include the commonly-used HL HDF5 libraries,
-                    # so let's add them if they exist
-                    # additionally, some pkgconfig HDF5 HL files are malformed so let's be sure to find HL anyway
-                    if lpath.is_file():
-                        hl = []
-                        if language == 'cpp':
-                            hl += ['_hl_cpp', '_cpp']
-                        elif language == 'fortran':
-                            hl += ['_hl_fortran', 'hl_fortran', '_fortran']
-                        hl += ['_hl']  # C HL library, always needed
-
-                        suffix = '.' + lpath.name.split('.', 1)[1]  # in case of .dll.a
-                        for h in hl:
-                            hlfn = lpath.parent / (lpath.name.split('.', 1)[0] + h + suffix)
-                            if hlfn.is_file():
-                                link_args.append(str(hlfn))
-                        # HDF5 C libs are required by other HDF5 languages
-                        link_args.append(larg)
-                    else:
-                        link_args.append(larg)
-
-                self.link_args = link_args
-                self.version = pkgdep.get_version()
-                self.is_found = True
-                self.pcdep = pkgdep
-                return
-
-        if DependencyMethods.AUTO in methods:
-            wrappers = {'c': 'h5cc', 'cpp': 'h5c++', 'fortran': 'h5fc'}
-            comp_args = []
-            link_args = []
-            # have to always do C as well as desired language
-            for lang in set([language, 'c']):
-                prog = ExternalProgram(wrappers[lang], silent=True)
-                if not prog.found():
-                    return
-                cmd = prog.get_command() + ['-show']
-                p = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, timeout=15)
-                if p.returncode != 0:
-                    mlog.debug('Command', mlog.bold(cmd), 'failed to run:')
-                    mlog.debug(mlog.bold('Standard output\n'), p.stdout)
-                    mlog.debug(mlog.bold('Standard error\n'), p.stderr)
-                    return
-                args = split_args(p.stdout)
-                for arg in args[1:]:
-                    if arg.startswith(('-I', '-f', '-D')) or arg == '-pthread':
-                        comp_args.append(arg)
-                    elif arg.startswith(('-L', '-l', '-Wl')):
-                        link_args.append(arg)
-                    elif Path(arg).is_file():
-                        link_args.append(arg)
-            self.compile_args = comp_args
-            self.link_args = link_args
-            self.is_found = True
+        super().__init__(name, environment, kwargs, language)
+        if not self.is_found:
             return
 
-    @staticmethod
-    def get_methods():
-        return [DependencyMethods.AUTO, DependencyMethods.PKGCONFIG]
+        # some broken pkgconfig don't actually list the full path to the needed includes
+        newinc = []  # type: T.List[str]
+        for arg in self.compile_args:
+            if arg.startswith('-I'):
+                stem = 'static' if kwargs.get('static', False) else 'shared'
+                if (Path(arg[2:]) / stem).is_dir():
+                    newinc.append('-I' + str(Path(arg[2:]) / stem))
+        self.compile_args += newinc
+
+        link_args = []  # type: T.List[str]
+        for larg in self.get_link_args():
+            lpath = Path(larg)
+            # some pkg-config hdf5.pc (e.g. Ubuntu) don't include the commonly-used HL HDF5 libraries,
+            # so let's add them if they exist
+            # additionally, some pkgconfig HDF5 HL files are malformed so let's be sure to find HL anyway
+            if lpath.is_file():
+                hl = []
+                if language == 'cpp':
+                    hl += ['_hl_cpp', '_cpp']
+                elif language == 'fortran':
+                    hl += ['_hl_fortran', 'hl_fortran', '_fortran']
+                hl += ['_hl']  # C HL library, always needed
+
+                suffix = '.' + lpath.name.split('.', 1)[1]  # in case of .dll.a
+                for h in hl:
+                    hlfn = lpath.parent / (lpath.name.split('.', 1)[0] + h + suffix)
+                    if hlfn.is_file():
+                        link_args.append(str(hlfn))
+                # HDF5 C libs are required by other HDF5 languages
+                link_args.append(larg)
+            else:
+                link_args.append(larg)
+
+        self.link_args = link_args
+
+
+class HDF5ConfigToolDependency(ConfigToolDependency):
+
+    """Wrapper around hdf5 binary config tools."""
+
+    version_arg = '-showconfig'
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None:
+        language = language or 'c'
+        if language not in {'c', 'cpp', 'fortran'}:
+            raise DependencyException('Language {} is not supported with HDF5.'.format(language))
+
+        if language == 'c':
+            cenv = 'CC'
+            tools = ['h5cc']
+        elif language == 'cpp':
+            cenv = 'CXX'
+            tools = ['h5c++']
+        elif language == 'fortran':
+            cenv = 'FC'
+            tools = ['h5fc']
+        else:
+            raise DependencyException('How did you get here?')
+
+        # We need this before we call super()
+        for_machine = self.get_for_machine_from_kwargs(kwargs)
+
+        nkwargs = kwargs.copy()
+        nkwargs['tools'] = tools
+
+        # Override the compiler that the config tools are going to use by
+        # setting the environment variables that they use for the compiler and
+        # linkers.
+        compiler = environment.coredata.compilers[for_machine][language]
+        try:
+            os.environ['HDF5_{}'.format(cenv)] = join_args(compiler.get_exelist())
+            os.environ['HDF5_{}LINKER'.format(cenv)] = join_args(compiler.get_linker_exelist())
+            super().__init__(name, environment, nkwargs, language)
+        finally:
+            del os.environ['HDF5_{}'.format(cenv)]
+            del os.environ['HDF5_{}LINKER'.format(cenv)]
+        if not self.is_found:
+            return
+
+        args = self.get_config_value(['-show', '-noshlib' if kwargs.get('static', False) else '-shlib'], 'args')
+        for arg in args[1:]:
+            if arg.startswith(('-I', '-f', '-D')) or arg == '-pthread':
+                self.compile_args.append(arg)
+            elif arg.startswith(('-L', '-l', '-Wl')):
+                self.link_args.append(arg)
+            elif Path(arg).is_file():
+                self.link_args.append(arg)
+
+        # If the language is not C we need to add C as a subdependency
+        if language != 'c':
+            nkwargs = kwargs.copy()
+            nkwargs['language'] = 'c'
+            # I'm being too clever for mypy and pylint
+            self.is_found = self._add_sub_dependency(hdf5_factory(environment, for_machine, nkwargs))  # type: ignore  # pylint: disable=no-value-for-parameter
+
+    def _sanitize_version(self, ver: str) -> str:
+        v = re.search(r'\s*HDF5 Version: (\d+\.\d+\.\d+)', ver)
+        return v.group(1)
+
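
HDF5ConfigToolDependency above temporarily points the h5cc/h5c++/h5fc wrappers at Meson's chosen compiler by exporting HDF5_CC/HDF5_CXX/HDF5_FC (and the matching *LINKER variables) only for the duration of the probe. Below is a small, self-contained sketch of that try/finally idiom; run_with_temp_env and the HDF5_CC value are illustrative, and unlike the code above the sketch also restores any pre-existing value.

    import os
    import subprocess
    import sys
    import typing as T

    def run_with_temp_env(cmd: T.List[str], overrides: T.Dict[str, str]) -> int:
        """Run cmd with extra environment variables set, then restore the old state."""
        saved = {key: os.environ.get(key) for key in overrides}
        os.environ.update(overrides)
        try:
            return subprocess.run(cmd).returncode
        finally:
            for key, old in saved.items():
                if old is None:
                    os.environ.pop(key, None)
                else:
                    os.environ[key] = old

    # The child process sees the override; the parent environment is clean afterwards.
    run_with_temp_env([sys.executable, '-c',
                       'import os; print(os.environ["HDF5_CC"])'],
                      {'HDF5_CC': 'cc'})
    print('HDF5_CC' in os.environ)   # False, unless it was already set before
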
+
+@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL})
+def hdf5_factory(env: 'Environment', for_machine: 'MachineChoice',
+                 kwargs: T.Dict[str, T.Any], methods: T.List[DependencyMethods]) -> T.List[T.Callable[[], 'Dependency']]:
+    language = kwargs.get('language')
+    candidates = []  # type: T.List[T.Callable[[], Dependency]]
+
+    if DependencyMethods.PKGCONFIG in methods:
+        # Use an ordered set so that these remain the first tried pkg-config files
+        pkgconfig_files = OrderedSet(['hdf5', 'hdf5-serial'])
+        # FIXME: This won't honor pkg-config paths, and cross-native files
+        PCEXE = shutil.which('pkg-config')
+        if PCEXE:
+            # some distros put hdf5-1.2.3.pc with version number in .pc filename.
+            ret = subprocess.run([PCEXE, '--list-all'], stdout=subprocess.PIPE, stderr=subprocess.DEVNULL,
+                                    universal_newlines=True)
+            if ret.returncode == 0:
+                for pkg in ret.stdout.split('\n'):
+                    if pkg.startswith(('hdf5')):
+                        pkgconfig_files.add(pkg.split(' ', 1)[0])
+
+        for pkg in pkgconfig_files:
+            candidates.append(functools.partial(HDF5PkgConfigDependency, pkg, env, kwargs, language))
+
+    if DependencyMethods.CONFIG_TOOL in methods:
+        candidates.append(functools.partial(HDF5ConfigToolDependency, 'hdf5', env, kwargs, language))
+
+    return candidates
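
The pkg-config branch of hdf5_factory works around distros that ship versioned files such as hdf5-1.2.3.pc by scanning pkg-config --list-all for anything starting with hdf5. The same discovery step in isolation, with no Meson imports (output depends on what is installed locally):

    import shutil
    import subprocess
    import typing as T

    def candidate_hdf5_pc_names() -> T.List[str]:
        """Collect hdf5*.pc names, keeping the preferred spellings first."""
        names = ['hdf5', 'hdf5-serial']
        pcexe = shutil.which('pkg-config')
        if pcexe:
            ret = subprocess.run([pcexe, '--list-all'], stdout=subprocess.PIPE,
                                 stderr=subprocess.DEVNULL, universal_newlines=True)
            if ret.returncode == 0:
                for line in ret.stdout.splitlines():
                    pkg = line.split(' ', 1)[0]
                    if pkg.startswith('hdf5') and pkg not in names:
                        names.append(pkg)
        return names

    print(candidate_hdf5_pc_names())
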
diff -Nru meson-0.53.2/mesonbuild/dependencies/__init__.py meson-0.57.0+really0.56.2/mesonbuild/dependencies/__init__.py
--- meson-0.53.2/mesonbuild/dependencies/__init__.py	2020-01-23 22:29:05.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/dependencies/__init__.py	2020-10-18 21:29:13.000000000 +0000
@@ -14,60 +14,72 @@
 
 from .boost import BoostDependency
 from .cuda import CudaDependency
-from .hdf5 import HDF5Dependency
+from .hdf5 import hdf5_factory
 from .base import (  # noqa: F401
     Dependency, DependencyException, DependencyMethods, ExternalProgram, EmptyExternalProgram, NonExistingExternalProgram,
     ExternalDependency, NotFoundDependency, ExternalLibrary, ExtraFrameworkDependency, InternalDependency,
-    PkgConfigDependency, CMakeDependency, find_external_dependency, get_dep_identifier, packages, _packages_accept_language)
-from .dev import GMockDependency, GTestDependency, LLVMDependency, ValgrindDependency
-from .coarrays import CoarrayDependency
-from .mpi import MPIDependency
-from .scalapack import ScalapackDependency
-from .misc import (BlocksDependency, NetCDFDependency, OpenMPDependency, Python3Dependency, ThreadDependency, PcapDependency, CupsDependency, LibWmfDependency, LibGCryptDependency, GpgmeDependency, ShadercDependency)
+    PkgConfigDependency, CMakeDependency, find_external_dependency, get_dep_identifier, packages, _packages_accept_language,
+    DependencyFactory)
+from .dev import ValgrindDependency, gmock_factory, gtest_factory, llvm_factory, zlib_factory
+from .coarrays import coarray_factory
+from .mpi import mpi_factory
+from .scalapack import scalapack_factory
+from .misc import (
+    BlocksDependency, OpenMPDependency, cups_factory, curses_factory, gpgme_factory,
+    libgcrypt_factory, libwmf_factory, netcdf_factory, pcap_factory, python3_factory,
+    shaderc_factory, threads_factory,
+)
 from .platform import AppleFrameworks
-from .ui import GLDependency, GnuStepDependency, Qt4Dependency, Qt5Dependency, SDL2Dependency, WxDependency, VulkanDependency
+from .ui import GnuStepDependency, Qt4Dependency, Qt5Dependency, WxDependency, gl_factory, sdl2_factory, vulkan_factory
 
 
+# This is a dict where the keys should be strings, and the values must be one
+# of:
+# - An ExternalDependency subclass
+# - A DependencyFactory object
+# - A callable with a signature of (Environment, MachineChoice, Dict[str, Any]) -> List[Callable[[], DependencyType]]
 packages.update({
     # From dev:
-    'gtest': GTestDependency,
-    'gmock': GMockDependency,
-    'llvm': LLVMDependency,
+    'gtest': gtest_factory,
+    'gmock': gmock_factory,
+    'llvm': llvm_factory,
     'valgrind': ValgrindDependency,
+    'zlib': zlib_factory,
 
     'boost': BoostDependency,
     'cuda': CudaDependency,
 
     # per-file
-    'coarray': CoarrayDependency,
-    'hdf5': HDF5Dependency,
-    'mpi': MPIDependency,
-    'scalapack': ScalapackDependency,
+    'coarray': coarray_factory,
+    'hdf5': hdf5_factory,
+    'mpi': mpi_factory,
+    'scalapack': scalapack_factory,
 
     # From misc:
     'blocks': BlocksDependency,
-    'netcdf': NetCDFDependency,
+    'curses': curses_factory,
+    'netcdf': netcdf_factory,
     'openmp': OpenMPDependency,
-    'python3': Python3Dependency,
-    'threads': ThreadDependency,
-    'pcap': PcapDependency,
-    'cups': CupsDependency,
-    'libwmf': LibWmfDependency,
-    'libgcrypt': LibGCryptDependency,
-    'gpgme': GpgmeDependency,
-    'shaderc': ShadercDependency,
+    'python3': python3_factory,
+    'threads': threads_factory,
+    'pcap': pcap_factory,
+    'cups': cups_factory,
+    'libwmf': libwmf_factory,
+    'libgcrypt': libgcrypt_factory,
+    'gpgme': gpgme_factory,
+    'shaderc': shaderc_factory,
 
     # From platform:
     'appleframeworks': AppleFrameworks,
 
     # From ui:
-    'gl': GLDependency,
+    'gl': gl_factory,
     'gnustep': GnuStepDependency,
     'qt4': Qt4Dependency,
     'qt5': Qt5Dependency,
-    'sdl2': SDL2Dependency,
+    'sdl2': sdl2_factory,
     'wxwidgets': WxDependency,
-    'vulkan': VulkanDependency,
+    'vulkan': vulkan_factory,
 })
 _packages_accept_language.update({
     'hdf5',
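
The comment above the packages dict allows three kinds of values: an ExternalDependency subclass, a DependencyFactory object, or a bare factory callable. Here is a hypothetical, stripped-down dispatcher showing how a lookup routine can treat all three uniformly; every name in it is a stand-in for Meson's internals, not the actual API.

    import typing as T

    class Dep:                      # stand-in for an ExternalDependency subclass
        def __init__(self, name: str) -> None:
            self.name = name

    class FactoryObject:            # stand-in for a DependencyFactory instance
        def __call__(self, env: object, machine: object,
                     kwargs: T.Dict[str, T.Any]) -> T.List[T.Callable[[], Dep]]:
            return [lambda: Dep('from-factory-object')]

    def factory_function(env: object, machine: object,
                         kwargs: T.Dict[str, T.Any]) -> T.List[T.Callable[[], Dep]]:
        return [lambda: Dep('from-factory-function')]

    packages = {'plain': Dep, 'factory_obj': FactoryObject(), 'factory_fn': factory_function}

    def candidates_for(name: str) -> T.List[T.Callable[[], Dep]]:
        entry = packages[name]
        if isinstance(entry, type):         # bare dependency class
            return [lambda: entry(name)]
        return entry(None, None, {})        # factory object or factory function

    for key in packages:
        print(key, '->', candidates_for(key)[0]().name)
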
diff -Nru meson-0.53.2/mesonbuild/dependencies/misc.py meson-0.57.0+really0.56.2/mesonbuild/dependencies/misc.py
--- meson-0.53.2/mesonbuild/dependencies/misc.py	2020-01-23 22:29:05.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/dependencies/misc.py	2021-01-06 10:39:48.000000000 +0000
@@ -14,68 +14,48 @@
 
 # This file contains the detection logic for miscellaneous external dependencies.
 
-from pathlib import Path
+from .._pathlib import Path
 import functools
 import re
 import sysconfig
+import typing as T
 
 from .. import mlog
 from .. import mesonlib
 from ..environment import detect_cpu_family
-from ..mesonlib import listify
 
 from .base import (
     DependencyException, DependencyMethods, ExternalDependency,
-    ExtraFrameworkDependency, PkgConfigDependency,
-    CMakeDependency, ConfigToolDependency,
+    PkgConfigDependency, CMakeDependency, ConfigToolDependency,
+    factory_methods, DependencyFactory,
 )
 
+if T.TYPE_CHECKING:
+    from ..environment import Environment, MachineChoice
+    from .base import DependencyType, Dependency  # noqa: F401
+
+
+@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE})
+def netcdf_factory(env: 'Environment', for_machine: 'MachineChoice',
+                   kwargs: T.Dict[str, T.Any], methods: T.List[DependencyMethods]) -> T.List['DependencyType']:
+    language = kwargs.get('language', 'c')
+    if language not in ('c', 'cpp', 'fortran'):
+        raise DependencyException('Language {} is not supported with NetCDF.'.format(language))
+
+    candidates = []  # type: T.List['DependencyType']
+
+    if DependencyMethods.PKGCONFIG in methods:
+        if language == 'fortran':
+            pkg = 'netcdf-fortran'
+        else:
+            pkg = 'netcdf'
 
-class NetCDFDependency(ExternalDependency):
-
-    def __init__(self, environment, kwargs):
-        language = kwargs.get('language', 'c')
-        super().__init__('netcdf', environment, language, kwargs)
-        kwargs['required'] = False
-        kwargs['silent'] = True
-        self.is_found = False
-        methods = listify(self.methods)
+        candidates.append(functools.partial(PkgConfigDependency, pkg, env, kwargs, language=language))
 
-        if language not in ('c', 'cpp', 'fortran'):
-            raise DependencyException('Language {} is not supported with NetCDF.'.format(language))
+    if DependencyMethods.CMAKE in methods:
+        candidates.append(functools.partial(CMakeDependency, 'NetCDF', env, kwargs, language=language))
 
-        if set([DependencyMethods.AUTO, DependencyMethods.PKGCONFIG]).intersection(methods):
-            pkgconfig_files = ['netcdf']
-
-            if language == 'fortran':
-                pkgconfig_files.append('netcdf-fortran')
-
-            self.compile_args = []
-            self.link_args = []
-            self.pcdep = []
-            for pkg in pkgconfig_files:
-                pkgdep = PkgConfigDependency(pkg, environment, kwargs, language=self.language)
-                if pkgdep.found():
-                    self.compile_args.extend(pkgdep.get_compile_args())
-                    self.link_args.extend(pkgdep.get_link_args())
-                    self.version = pkgdep.get_version()
-                    self.is_found = True
-                    self.pcdep.append(pkgdep)
-            if self.is_found:
-                return
-
-        if set([DependencyMethods.AUTO, DependencyMethods.CMAKE]).intersection(methods):
-            cmakedep = CMakeDependency('NetCDF', environment, kwargs, language=self.language)
-            if cmakedep.found():
-                self.compile_args = cmakedep.get_compile_args()
-                self.link_args = cmakedep.get_link_args()
-                self.version = cmakedep.get_version()
-                self.is_found = True
-                return
-
-    @staticmethod
-    def get_methods():
-        return [DependencyMethods.AUTO, DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE]
+    return candidates
 
 
 class OpenMPDependency(ExternalDependency):
@@ -94,7 +74,7 @@
 
     def __init__(self, environment, kwargs):
         language = kwargs.get('language')
-        super().__init__('openmp', environment, language, kwargs)
+        super().__init__('openmp', environment, kwargs, language=language)
         self.is_found = False
         if self.clib_compiler.get_id() == 'pgi':
             # through at least PGI 19.4, there is no macro defined for OpenMP, but OpenMP 3.1 is supported.
@@ -117,40 +97,25 @@
             for name in header_names:
                 if self.clib_compiler.has_header(name, '', self.env, dependencies=[self], disable_cache=True)[0]:
                     self.is_found = True
-                    self.compile_args = self.link_args = self.clib_compiler.openmp_flags()
+                    self.compile_args = self.clib_compiler.openmp_flags()
+                    self.link_args = self.clib_compiler.openmp_link_flags()
                     break
             if not self.is_found:
                 mlog.log(mlog.yellow('WARNING:'), 'OpenMP found but omp.h missing.')
 
 
 class ThreadDependency(ExternalDependency):
-    def __init__(self, environment, kwargs):
-        super().__init__('threads', environment, None, kwargs)
-        self.name = 'threads'
-        self.is_found = False
-        methods = listify(self.methods)
-        if DependencyMethods.AUTO in methods:
-            self.is_found = True
-            # Happens if you are using a language with threads
-            # concept without C, such as plain Cuda.
-            if self.clib_compiler is None:
-                self.compile_args = []
-                self.link_args = []
-            else:
-                self.compile_args = self.clib_compiler.thread_flags(environment)
-                self.link_args = self.clib_compiler.thread_link_flags(environment)
-            return
-
-        if DependencyMethods.CMAKE in methods:
-            # for unit tests and for those who simply want
-            # dependency('threads', method: 'cmake')
-            cmakedep = CMakeDependency('Threads', environment, kwargs)
-            if cmakedep.found():
-                self.compile_args = cmakedep.get_compile_args()
-                self.link_args = cmakedep.get_link_args()
-                self.version = cmakedep.get_version()
-                self.is_found = True
-                return
+    def __init__(self, name: str, environment, kwargs):
+        super().__init__(name, environment, kwargs)
+        self.is_found = True
+        # Happens if you are using a language with threads
+        # concept without C, such as plain Cuda.
+        if self.clib_compiler is None:
+            self.compile_args = []
+            self.link_args = []
+        else:
+            self.compile_args = self.clib_compiler.thread_flags(environment)
+            self.link_args = self.clib_compiler.thread_link_flags(environment)
 
     @staticmethod
     def get_methods():
@@ -159,7 +124,7 @@
 
 class BlocksDependency(ExternalDependency):
     def __init__(self, environment, kwargs):
-        super().__init__('blocks', environment, None, kwargs)
+        super().__init__('blocks', environment, kwargs)
         self.name = 'blocks'
         self.is_found = False
 
@@ -190,12 +155,14 @@
             self.is_found = True
 
 
-class Python3Dependency(ExternalDependency):
-    def __init__(self, environment, kwargs):
-        super().__init__('python3', environment, None, kwargs)
+class Python3DependencySystem(ExternalDependency):
+    def __init__(self, name, environment, kwargs):
+        super().__init__(name, environment, kwargs)
 
         if not environment.machines.matches_build_machine(self.for_machine):
             return
+        if not environment.machines[self.for_machine].is_windows():
+            return
 
         self.name = 'python3'
         self.static = kwargs.get('static', False)
@@ -203,28 +170,6 @@
         self.version = '3'
         self._find_libpy3_windows(environment)
 
-    @classmethod
-    def _factory(cls, environment, kwargs):
-        methods = cls._process_method_kw(kwargs)
-        candidates = []
-
-        if DependencyMethods.PKGCONFIG in methods:
-            candidates.append(functools.partial(PkgConfigDependency, 'python3', environment, kwargs))
-
-        if DependencyMethods.SYSCONFIG in methods:
-            candidates.append(functools.partial(Python3Dependency, environment, kwargs))
-
-        if DependencyMethods.EXTRAFRAMEWORK in methods:
-            # In OSX the Python 3 framework does not have a version
-            # number in its name.
-            # There is a python in /System/Library/Frameworks, but that's
-            # python 2, Python 3 will always be in /Library
-            candidates.append(functools.partial(
-                ExtraFrameworkDependency, 'Python', False, ['/Library/Frameworks'],
-                environment, kwargs.get('language', None), kwargs))
-
-        return candidates
-
     @staticmethod
     def get_windows_python_arch():
         pyplat = sysconfig.get_platform()
@@ -322,85 +267,48 @@
     def log_tried(self):
         return 'sysconfig'
 
-class PcapDependency(ExternalDependency):
-
-    def __init__(self, environment, kwargs):
-        super().__init__('pcap', environment, None, kwargs)
-
-    @classmethod
-    def _factory(cls, environment, kwargs):
-        methods = cls._process_method_kw(kwargs)
-        candidates = []
+class PcapDependencyConfigTool(ConfigToolDependency):
 
-        if DependencyMethods.PKGCONFIG in methods:
-            candidates.append(functools.partial(PkgConfigDependency, 'pcap', environment, kwargs))
+    tools = ['pcap-config']
+    tool_name = 'pcap-config'
 
-        if DependencyMethods.CONFIG_TOOL in methods:
-            candidates.append(functools.partial(ConfigToolDependency.factory,
-                                                'pcap', environment, None,
-                                                kwargs, ['pcap-config'],
-                                                'pcap-config',
-                                                PcapDependency.tool_finish_init))
-
-        return candidates
-
-    @staticmethod
-    def tool_finish_init(ctdep):
-        ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args')
-        ctdep.link_args = ctdep.get_config_value(['--libs'], 'link_args')
-        ctdep.version = PcapDependency.get_pcap_lib_version(ctdep)
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, environment, kwargs)
+        if not self.is_found:
+            return
+        self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+        self.link_args = self.get_config_value(['--libs'], 'link_args')
+        self.version = self.get_pcap_lib_version()
 
     @staticmethod
     def get_methods():
         return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL]
 
-    @staticmethod
-    def get_pcap_lib_version(ctdep):
+    def get_pcap_lib_version(self):
         # Since we seem to need to run a program to discover the pcap version,
         # we can't do that when cross-compiling
-        if not ctdep.env.machines.matches_build_machine(ctdep.for_machine):
+        # FIXME: this should be handled if we have an exe_wrapper
+        if not self.env.machines.matches_build_machine(self.for_machine):
             return None
 
-        v = ctdep.clib_compiler.get_return_value('pcap_lib_version', 'string',
-                                                 '#include <pcap.h>', ctdep.env, [], [ctdep])
+        v = self.clib_compiler.get_return_value('pcap_lib_version', 'string',
+                                                '#include <pcap.h>', self.env, [], [self])
         v = re.sub(r'libpcap version ', '', v)
         v = re.sub(r' -- Apple version.*$', '', v)
         return v
 
 
-class CupsDependency(ExternalDependency):
-    def __init__(self, environment, kwargs):
-        super().__init__('cups', environment, None, kwargs)
-
-    @classmethod
-    def _factory(cls, environment, kwargs):
-        methods = cls._process_method_kw(kwargs)
-        candidates = []
-
-        if DependencyMethods.PKGCONFIG in methods:
-            candidates.append(functools.partial(PkgConfigDependency, 'cups', environment, kwargs))
-
-        if DependencyMethods.CONFIG_TOOL in methods:
-            candidates.append(functools.partial(ConfigToolDependency.factory,
-                                                'cups', environment, None,
-                                                kwargs, ['cups-config'],
-                                                'cups-config', CupsDependency.tool_finish_init))
-
-        if DependencyMethods.EXTRAFRAMEWORK in methods:
-            if mesonlib.is_osx():
-                candidates.append(functools.partial(
-                    ExtraFrameworkDependency, 'cups', False, None, environment,
-                    kwargs.get('language', None), kwargs))
+class CupsDependencyConfigTool(ConfigToolDependency):
 
-        if DependencyMethods.CMAKE in methods:
-            candidates.append(functools.partial(CMakeDependency, 'Cups', environment, kwargs))
+    tools = ['cups-config']
+    tool_name = 'cups-config'
 
-        return candidates
-
-    @staticmethod
-    def tool_finish_init(ctdep):
-        ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args')
-        ctdep.link_args = ctdep.get_config_value(['--ldflags', '--libs'], 'link_args')
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, environment, kwargs)
+        if not self.is_found:
+            return
+        self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+        self.link_args = self.get_config_value(['--ldflags', '--libs'], 'link_args')
 
     @staticmethod
     def get_methods():
@@ -410,90 +318,53 @@
             return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.CMAKE]
 
 
-class LibWmfDependency(ExternalDependency):
-    def __init__(self, environment, kwargs):
-        super().__init__('libwmf', environment, None, kwargs)
-
-    @classmethod
-    def _factory(cls, environment, kwargs):
-        methods = cls._process_method_kw(kwargs)
-        candidates = []
+class LibWmfDependencyConfigTool(ConfigToolDependency):
 
-        if DependencyMethods.PKGCONFIG in methods:
-            candidates.append(functools.partial(PkgConfigDependency, 'libwmf', environment, kwargs))
-
-        if DependencyMethods.CONFIG_TOOL in methods:
-            candidates.append(functools.partial(ConfigToolDependency.factory,
-                                                'libwmf', environment, None, kwargs, ['libwmf-config'], 'libwmf-config', LibWmfDependency.tool_finish_init))
+    tools = ['libwmf-config']
+    tool_name = 'libwmf-config'
 
-        return candidates
-
-    @staticmethod
-    def tool_finish_init(ctdep):
-        ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args')
-        ctdep.link_args = ctdep.get_config_value(['--libs'], 'link_args')
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, environment, kwargs)
+        if not self.is_found:
+            return
+        self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+        self.link_args = self.get_config_value(['--libs'], 'link_args')
 
     @staticmethod
     def get_methods():
         return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL]
 
 
-class LibGCryptDependency(ExternalDependency):
-    def __init__(self, environment, kwargs):
-        super().__init__('libgcrypt', environment, None, kwargs)
-
-    @classmethod
-    def _factory(cls, environment, kwargs):
-        methods = cls._process_method_kw(kwargs)
-        candidates = []
-
-        if DependencyMethods.PKGCONFIG in methods:
-            candidates.append(functools.partial(PkgConfigDependency, 'libgcrypt', environment, kwargs))
-
-        if DependencyMethods.CONFIG_TOOL in methods:
-            candidates.append(functools.partial(ConfigToolDependency.factory,
-                                                'libgcrypt', environment, None, kwargs, ['libgcrypt-config'],
-                                                'libgcrypt-config',
-                                                LibGCryptDependency.tool_finish_init))
+class LibGCryptDependencyConfigTool(ConfigToolDependency):
 
-        return candidates
+    tools = ['libgcrypt-config']
+    tool_name = 'libgcrypt-config'
 
-    @staticmethod
-    def tool_finish_init(ctdep):
-        ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args')
-        ctdep.link_args = ctdep.get_config_value(['--libs'], 'link_args')
-        ctdep.version = ctdep.get_config_value(['--version'], 'version')[0]
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, environment, kwargs)
+        if not self.is_found:
+            return
+        self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+        self.link_args = self.get_config_value(['--libs'], 'link_args')
+        self.version = self.get_config_value(['--version'], 'version')[0]
 
     @staticmethod
     def get_methods():
         return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL]
 
 
-class GpgmeDependency(ExternalDependency):
-    def __init__(self, environment, kwargs):
-        super().__init__('gpgme', environment, None, kwargs)
+class GpgmeDependencyConfigTool(ConfigToolDependency):
 
-    @classmethod
-    def _factory(cls, environment, kwargs):
-        methods = cls._process_method_kw(kwargs)
-        candidates = []
+    tools = ['gpgme-config']
+    tool_name = 'gpgme-config'
 
-        if DependencyMethods.PKGCONFIG in methods:
-            candidates.append(functools.partial(PkgConfigDependency, 'gpgme', environment, kwargs))
-
-        if DependencyMethods.CONFIG_TOOL in methods:
-            candidates.append(functools.partial(ConfigToolDependency.factory,
-                                                'gpgme', environment, None, kwargs, ['gpgme-config'],
-                                                'gpgme-config',
-                                                GpgmeDependency.tool_finish_init))
-
-        return candidates
-
-    @staticmethod
-    def tool_finish_init(ctdep):
-        ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args')
-        ctdep.link_args = ctdep.get_config_value(['--libs'], 'link_args')
-        ctdep.version = ctdep.get_config_value(['--version'], 'version')[0]
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, environment, kwargs)
+        if not self.is_found:
+            return
+        self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+        self.link_args = self.get_config_value(['--libs'], 'link_args')
+        self.version = self.get_config_value(['--version'], 'version')[0]
 
     @staticmethod
     def get_methods():
@@ -503,7 +374,7 @@
 class ShadercDependency(ExternalDependency):
 
     def __init__(self, environment, kwargs):
-        super().__init__('shaderc', environment, None, kwargs)
+        super().__init__('shaderc', environment, kwargs)
 
         static_lib = 'shaderc_combined'
         shared_lib = 'shaderc_shared'
@@ -528,32 +399,188 @@
     def log_tried(self):
         return 'system'
 
-    @classmethod
-    def _factory(cls, environment, kwargs):
-        methods = cls._process_method_kw(kwargs)
-        candidates = []
-
-        if DependencyMethods.PKGCONFIG in methods:
-            # ShaderC packages their shared and static libs together
-            # and provides different pkg-config files for each one. We
-            # smooth over this difference by handling the static
-            # keyword before handing off to the pkg-config handler.
-            shared_libs = ['shaderc']
-            static_libs = ['shaderc_combined', 'shaderc_static']
-
-            if kwargs.get('static', False):
-                c = [functools.partial(PkgConfigDependency, name, environment, kwargs)
-                     for name in static_libs + shared_libs]
-            else:
-                c = [functools.partial(PkgConfigDependency, name, environment, kwargs)
-                     for name in shared_libs + static_libs]
-            candidates.extend(c)
+    @staticmethod
+    def get_methods():
+        return [DependencyMethods.SYSTEM, DependencyMethods.PKGCONFIG]
 
-        if DependencyMethods.SYSTEM in methods:
-            candidates.append(functools.partial(ShadercDependency, environment, kwargs))
 
-        return candidates
+class CursesConfigToolDependency(ConfigToolDependency):
+
+    """Use the curses config tools."""
+
+    tool = 'curses-config'
+    # ncurses5.4-config is for macOS Catalina
+    tools = ['ncursesw6-config', 'ncursesw5-config', 'ncurses6-config', 'ncurses5-config', 'ncurses5.4-config']
+
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None):
+        super().__init__(name, env, kwargs, language)
+        if not self.is_found:
+            return
+        self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+        self.link_args = self.get_config_value(['--libs'], 'link_args')
+
+
+class CursesSystemDependency(ExternalDependency):
+
+    """Curses dependency the hard way.
+
+    This replaces hand-rolled find_library() and has_header() calls. We
+    provide this for portability reasons: there are a large number of curses
+    implementations, and the differences between them can be very annoying.
+    """
+
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, env, kwargs)
+
+        candidates = [
+            ('pdcurses', ['pdcurses/curses.h']),
+            ('ncursesw',  ['ncursesw/ncurses.h', 'ncurses.h']),
+            ('ncurses',  ['ncurses/ncurses.h', 'ncurses/curses.h', 'ncurses.h']),
+            ('curses',  ['curses.h']),
+        ]
+
+        # Not sure how else to elegantly break out of both loops
+        for lib, headers in candidates:
+            l = self.clib_compiler.find_library(lib, env, [])
+            if l:
+                for header in headers:
+                    h = self.clib_compiler.has_header(header, '', env)
+                    if h[0]:
+                        self.is_found = True
+                        self.link_args = l
+                        # Not sure how to find version for non-ncurses curses
+                        # implementations. The one in illumos/OpenIndiana
+                        # doesn't seem to have a version defined in the header.
+                        if lib.startswith('ncurses'):
+                            v, _ = self.clib_compiler.get_define('NCURSES_VERSION', '#include <{}>'.format(header), env, [], [self])
+                            self.version = v.strip('"')
+                        if lib.startswith('pdcurses'):
+                            v_major, _ = self.clib_compiler.get_define('PDC_VER_MAJOR', '#include <{}>'.format(header), env, [], [self])
+                            v_minor, _ = self.clib_compiler.get_define('PDC_VER_MINOR', '#include <{}>'.format(header), env, [], [self])
+                            self.version = '{}.{}'.format(v_major, v_minor)
+
+                        # Check the version if possible, emit a warning if we can't
+                        req = kwargs.get('version')
+                        if req:
+                            if self.version:
+                                self.is_found = mesonlib.version_compare(self.version, req)
+                            else:
+                                mlog.warning('Cannot determine version of curses to compare against.')
+
+                        if self.is_found:
+                            mlog.debug('Curses library:', l)
+                            mlog.debug('Curses header:', header)
+                            break
+            if self.is_found:
+                break
 
     @staticmethod
-    def get_methods():
-        return [DependencyMethods.SYSTEM, DependencyMethods.PKGCONFIG]
+    def get_methods() -> T.List[DependencyMethods]:
+        return [DependencyMethods.SYSTEM]
+
+
+@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.SYSTEM})
+def curses_factory(env: 'Environment', for_machine: 'MachineChoice',
+                   kwargs: T.Dict[str, T.Any], methods: T.List[DependencyMethods]) -> T.List[T.Callable[[], 'Dependency']]:
+    candidates = []  # type: T.List[T.Callable[[], Dependency]]
+
+    if DependencyMethods.PKGCONFIG in methods:
+        pkgconfig_files = ['pdcurses', 'ncursesw', 'ncurses', 'curses']
+        for pkg in pkgconfig_files:
+            candidates.append(functools.partial(PkgConfigDependency, pkg, env, kwargs))
+
+    # There are path handling problems with these methods on msys, and they
+    # don't apply to windows otherwise (cygwin is handled separately from
+    # windows)
+    if not env.machines[for_machine].is_windows():
+        if DependencyMethods.CONFIG_TOOL in methods:
+            candidates.append(functools.partial(CursesConfigToolDependency, 'curses', env, kwargs))
+
+        if DependencyMethods.SYSTEM in methods:
+            candidates.append(functools.partial(CursesSystemDependency, 'curses', env, kwargs))
+
+    return candidates
+
+
+@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM})
+def shaderc_factory(env: 'Environment', for_machine: 'MachineChoice',
+                    kwargs: T.Dict[str, T.Any], methods: T.List[DependencyMethods]) -> T.List['DependencyType']:
+    """Custom DependencyFactory for ShaderC.
+
+    ShaderC oddly produces three different libraries from the same build;
+    that is easier to represent as a separate factory function than by
+    twisting DependencyFactory even more.
+    """
+    candidates = []  # type: T.List['DependencyType']
+
+    if DependencyMethods.PKGCONFIG in methods:
+        # ShaderC packages their shared and static libs together
+        # and provides different pkg-config files for each one. We
+        # smooth over this difference by handling the static
+        # keyword before handing off to the pkg-config handler.
+        shared_libs = ['shaderc']
+        static_libs = ['shaderc_combined', 'shaderc_static']
+
+        if kwargs.get('static', False):
+            c = [functools.partial(PkgConfigDependency, name, env, kwargs)
+                 for name in static_libs + shared_libs]
+        else:
+            c = [functools.partial(PkgConfigDependency, name, env, kwargs)
+                 for name in shared_libs + static_libs]
+        candidates.extend(c)
+
+    if DependencyMethods.SYSTEM in methods:
+        candidates.append(functools.partial(ShadercDependency, env, kwargs))
+
+    return candidates
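
As the docstring notes, the static keyword only changes which pkg-config names ShaderC tries first. A toy version of that ordering decision, where print stands in for the real PkgConfigDependency constructor:

    import functools
    import typing as T

    def shaderc_like_candidates(static: bool) -> T.List[T.Callable[[], None]]:
        shared_libs = ['shaderc']
        static_libs = ['shaderc_combined', 'shaderc_static']
        order = static_libs + shared_libs if static else shared_libs + static_libs
        return [functools.partial(print, 'would try pkg-config name:', name)
                for name in order]

    for candidate in shaderc_like_candidates(static=True):
        candidate()   # shaderc_combined, shaderc_static, shaderc -- in that order
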
+
+
+cups_factory = DependencyFactory(
+    'cups',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.EXTRAFRAMEWORK, DependencyMethods.CMAKE],
+    configtool_class=CupsDependencyConfigTool,
+    cmake_name='Cups',
+)
+
+gpgme_factory = DependencyFactory(
+    'gpgme',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
+    configtool_class=GpgmeDependencyConfigTool,
+)
+
+libgcrypt_factory = DependencyFactory(
+    'libgcrypt',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
+    configtool_class=LibGCryptDependencyConfigTool,
+)
+
+libwmf_factory = DependencyFactory(
+    'libwmf',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
+    configtool_class=LibWmfDependencyConfigTool,
+)
+
+pcap_factory = DependencyFactory(
+    'pcap',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
+    configtool_class=PcapDependencyConfigTool,
+    pkgconfig_name='libpcap',
+)
+
+python3_factory = DependencyFactory(
+    'python3',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM, DependencyMethods.EXTRAFRAMEWORK],
+    system_class=Python3DependencySystem,
+    # The Python framework on macOS has no version number in its name
+    framework_name='Python',
+    # There is a python in /System/Library/Frameworks, but that's Python 2.x,
+    # Python 3 will always be in /Library
+    extra_kwargs={'paths': ['/Library/Frameworks']},
+)
+
+threads_factory = DependencyFactory(
+    'threads',
+    [DependencyMethods.SYSTEM, DependencyMethods.CMAKE],
+    cmake_name='Threads',
+    system_class=ThreadDependency,
+)
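
threads_factory is what the LLVM and GTest/GMock hunks earlier in this diff hand to _add_sub_dependency. A self-contained sketch of that composition pattern with toy classes (ToyDep, toy_threads_factory and ToyLLVM are illustrations, not Meson's ExternalDependency API):

    import typing as T

    class ToyDep:
        def __init__(self, found: bool, link_args: T.List[str]) -> None:
            self.is_found = found
            self.link_args = link_args

    def toy_threads_factory() -> T.List[T.Callable[[], ToyDep]]:
        # A single candidate that always succeeds, standing in for the SYSTEM method
        return [lambda: ToyDep(True, ['-pthread'])]

    class ToyLLVM:
        def __init__(self) -> None:
            self.is_found = True
            self.link_args = ['-lLLVM']
            # Hard-fail when the sub-dependency is missing, as the diff above does
            if not self._add_sub_dependency(toy_threads_factory()):
                self.is_found = False

        def _add_sub_dependency(self, candidates: T.List[T.Callable[[], ToyDep]]) -> bool:
            for make in candidates:
                dep = make()
                if dep.is_found:
                    self.link_args += dep.link_args
                    return True
            return False

    print(ToyLLVM().link_args)   # ['-lLLVM', '-pthread']
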
diff -Nru meson-0.53.2/mesonbuild/dependencies/mpi.py meson-0.57.0+really0.56.2/mesonbuild/dependencies/mpi.py
--- meson-0.53.2/mesonbuild/dependencies/mpi.py	2020-01-23 12:51:19.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/dependencies/mpi.py	2020-09-17 22:00:44.000000000 +0000
@@ -12,111 +12,93 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import functools
 import typing as T
 import os
 import re
-import subprocess
 
-from .. import mlog
-from .. import mesonlib
-from ..mesonlib import split_args, listify
+from .base import (DependencyMethods, PkgConfigDependency, factory_methods,
+                   ConfigToolDependency, detect_compiler, ExternalDependency)
 from ..environment import detect_cpu_family
-from .base import (DependencyException, DependencyMethods, ExternalDependency, ExternalProgram,
-                   PkgConfigDependency)
 
-
-class MPIDependency(ExternalDependency):
-
-    def __init__(self, environment, kwargs: dict):
-        language = kwargs.get('language', 'c')
-        super().__init__('mpi', environment, language, kwargs)
-        kwargs['required'] = False
-        kwargs['silent'] = True
-        self.is_found = False
-        methods = listify(self.methods)
-
-        env_vars = []
-        default_wrappers = []
-        pkgconfig_files = []
+if T.TYPE_CHECKING:
+    from .base import Dependency
+    from ..compilers import Compiler
+    from ..compilers.compiler import CompilerType
+    from ..environment import Environment, MachineChoice
+
+
+@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.SYSTEM})
+def mpi_factory(env: 'Environment', for_machine: 'MachineChoice',
+                kwargs: T.Dict[str, T.Any], methods: T.List[DependencyMethods]) -> T.List[T.Callable[[], 'Dependency']]:
+    language = kwargs.get('language', 'c')
+    if language not in {'c', 'cpp', 'fortran'}:
+        # OpenMPI doesn't work without any other languages
+        return []
+
+    candidates = []  # type: T.List[T.Callable[[], Dependency]]
+    compiler = detect_compiler('mpi', env, for_machine, language)  # type: T.Optional['CompilerType']
+    if compiler is None:
+        return []
+    compiler_is_intel = compiler.get_id() in {'intel', 'intel-cl'}
+
+    # Only OpenMPI has pkg-config, and it doesn't work with the intel compilers
+    if DependencyMethods.PKGCONFIG in methods and not compiler_is_intel:
+        pkg_name = None
         if language == 'c':
-            cid = environment.detect_c_compiler(self.for_machine).get_id()
-            if cid in ('intel', 'intel-cl'):
-                env_vars.append('I_MPI_CC')
-                # IntelMPI doesn't have .pc files
-                default_wrappers.append('mpiicc')
-            else:
-                env_vars.append('MPICC')
-                pkgconfig_files.append('ompi-c')
-            default_wrappers.append('mpicc')
+            pkg_name = 'ompi-c'
         elif language == 'cpp':
-            cid = environment.detect_cpp_compiler(self.for_machine).get_id()
-            if cid in ('intel', 'intel-cl'):
-                env_vars.append('I_MPI_CXX')
-                # IntelMPI doesn't have .pc files
-                default_wrappers.append('mpiicpc')
-            else:
-                env_vars.append('MPICXX')
-                pkgconfig_files.append('ompi-cxx')
-                default_wrappers += ['mpic++', 'mpicxx', 'mpiCC']  # these are not for intelmpi
+            pkg_name = 'ompi-cxx'
         elif language == 'fortran':
-            cid = environment.detect_fortran_compiler(self.for_machine).get_id()
-            if cid in ('intel', 'intel-cl'):
-                env_vars.append('I_MPI_F90')
-                # IntelMPI doesn't have .pc files
-                default_wrappers.append('mpiifort')
-            else:
-                env_vars += ['MPIFC', 'MPIF90', 'MPIF77']
-                pkgconfig_files.append('ompi-fort')
-            default_wrappers += ['mpifort', 'mpif90', 'mpif77']
-        else:
-            raise DependencyException('Language {} is not supported with MPI.'.format(language))
+            pkg_name = 'ompi-fort'
+        candidates.append(functools.partial(
+            PkgConfigDependency, pkg_name, env, kwargs, language=language))
+
+    if DependencyMethods.CONFIG_TOOL in methods:
+        nwargs = kwargs.copy()
+
+        if compiler_is_intel:
+            if env.machines[for_machine].is_windows():
+                nwargs['version_arg'] = '-v'
+                nwargs['returncode_value'] = 3
+
+            if language == 'c':
+                tool_names = [os.environ.get('I_MPI_CC'), 'mpiicc']
+            elif language == 'cpp':
+                tool_names = [os.environ.get('I_MPI_CXX'), 'mpiicpc']
+            elif language == 'fortran':
+                tool_names = [os.environ.get('I_MPI_F90'), 'mpiifort']
+
+            cls = IntelMPIConfigToolDependency  # type: T.Type[ConfigToolDependency]
+        else: # OpenMPI, which doesn't work with intel
+            #
+            # We try the environment variables for the tools first, but then
+            # fall back to the hardcoded names
+            if language == 'c':
+                tool_names = [os.environ.get('MPICC'), 'mpicc']
+            elif language == 'cpp':
+                tool_names = [os.environ.get('MPICXX'), 'mpic++', 'mpicxx', 'mpiCC']
+            elif language == 'fortran':
+                tool_names = [os.environ.get(e) for e in ['MPIFC', 'MPIF90', 'MPIF77']]
+                tool_names.extend(['mpifort', 'mpif90', 'mpif77'])
+
+            cls = OpenMPIConfigToolDependency
+
+        tool_names = [t for t in tool_names if t]  # remove empty environment variables
+        assert tool_names
+
+        nwargs['tools'] = tool_names
+        candidates.append(functools.partial(
+            cls, tool_names[0], env, nwargs, language=language))
+
+    if DependencyMethods.SYSTEM in methods:
+        candidates.append(functools.partial(
+            MSMPIDependency, 'msmpi', env, kwargs, language=language))
+
+    return candidates
 
-        if set([DependencyMethods.AUTO, DependencyMethods.PKGCONFIG]).intersection(methods):
-            for pkg in pkgconfig_files:
-                pkgdep = PkgConfigDependency(pkg, environment, kwargs, language=self.language)
-                if pkgdep.found():
-                    self.compile_args = pkgdep.get_compile_args()
-                    self.link_args = pkgdep.get_link_args()
-                    self.version = pkgdep.get_version()
-                    self.is_found = True
-                    self.pcdep = pkgdep
-                    return
-
-        if DependencyMethods.AUTO in methods:
-            for var in env_vars:
-                if var in os.environ:
-                    wrappers = [os.environ[var]]
-                    break
-            else:
-                # Or search for default wrappers.
-                wrappers = default_wrappers
-
-            for prog in wrappers:
-                # Note: Some use OpenMPI with Intel compilers on Linux
-                result = self._try_openmpi_wrapper(prog, cid)
-                if result is not None:
-                    self.is_found = True
-                    self.version = result[0]
-                    self.compile_args = self._filter_compile_args(result[1])
-                    self.link_args = self._filter_link_args(result[2], cid)
-                    break
-                result = self._try_other_wrapper(prog, cid)
-                if result is not None:
-                    self.is_found = True
-                    self.version = result[0]
-                    self.compile_args = self._filter_compile_args(result[1])
-                    self.link_args = self._filter_link_args(result[2], cid)
-                    break
-
-            if not self.is_found and mesonlib.is_windows():
-                # only Intel Fortran compiler is compatible with Microsoft MPI at this time.
-                if language == 'fortran' and cid != 'intel-cl':
-                    return
-                result = self._try_msmpi()
-                if result is not None:
-                    self.is_found = True
-                    self.version, self.compile_args, self.link_args = result
-            return
+
+class _MPIConfigToolDependency(ConfigToolDependency):
 
     def _filter_compile_args(self, args: T.Sequence[str]) -> T.List[str]:
         """
@@ -142,7 +124,7 @@
                 result.append(f)
         return result
 
-    def _filter_link_args(self, args: T.Sequence[str], cid: str) -> T.List[str]:
+    def _filter_link_args(self, args: T.Sequence[str]) -> T.List[str]:
         """
         MPI wrappers return a bunch of garbage args.
         Drop -O2 and everything that is not needed.
@@ -150,7 +132,7 @@
         result = []
         include_next = False
         for f in args:
-            if self._is_link_arg(f, cid):
+            if self._is_link_arg(f):
                 result.append(f)
                 if f in ('-L', '-Xlinker'):
                     include_next = True
@@ -159,121 +141,94 @@
                 result.append(f)
         return result
 
-    @staticmethod
-    def _is_link_arg(f: str, cid: str) -> bool:
-        if cid == 'intel-cl':
+    def _is_link_arg(self, f: str) -> bool:
+        if self.clib_compiler.id == 'intel-cl':
             return f == '/link' or f.startswith('/LIBPATH') or f.endswith('.lib')   # always .lib whether static or dynamic
         else:
             return (f.startswith(('-L', '-l', '-Xlinker')) or
                     f == '-pthread' or
                     (f.startswith('-W') and f != '-Wall' and not f.startswith('-Werror')))
 
-    def _try_openmpi_wrapper(self, prog, cid: str):
-        # https://www.open-mpi.org/doc/v4.0/man1/mpifort.1.php
-        if cid == 'intel-cl':  # IntelCl doesn't support OpenMPI
-            return None
-        prog = ExternalProgram(prog, silent=True)
-        if not prog.found():
-            return None
-
-        # compiler args
-        cmd = prog.get_command() + ['--showme:compile']
-        p = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, timeout=15)
-        if p.returncode != 0:
-            mlog.debug('Command', mlog.bold(cmd), 'failed to run:')
-            mlog.debug(mlog.bold('Standard output\n'), p.stdout)
-            mlog.debug(mlog.bold('Standard error\n'), p.stderr)
-            return None
-        cargs = split_args(p.stdout)
-        # link args
-        cmd = prog.get_command() + ['--showme:link']
-        p = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, timeout=15)
-        if p.returncode != 0:
-            mlog.debug('Command', mlog.bold(cmd), 'failed to run:')
-            mlog.debug(mlog.bold('Standard output\n'), p.stdout)
-            mlog.debug(mlog.bold('Standard error\n'), p.stderr)
-            return None
-        libs = split_args(p.stdout)
-        # version
-        cmd = prog.get_command() + ['--showme:version']
-        p = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, timeout=15)
-        if p.returncode != 0:
-            mlog.debug('Command', mlog.bold(cmd), 'failed to run:')
-            mlog.debug(mlog.bold('Standard output\n'), p.stdout)
-            mlog.debug(mlog.bold('Standard error\n'), p.stderr)
-            return None
-        v = re.search(r'\d+.\d+.\d+', p.stdout)
+
+class IntelMPIConfigToolDependency(_MPIConfigToolDependency):
+
+    """Wrapper around Intel's mpiicc and friends."""
+
+    version_arg = '-v'  # --version is not the same as -v
+
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
+                 language: T.Optional[str] = None):
+        super().__init__(name, env, kwargs, language=language)
+        if not self.is_found:
+            return
+
+        args = self.get_config_value(['-show'], 'link and compile args')
+        self.compile_args = self._filter_compile_args(args)
+        self.link_args = self._filter_link_args(args)
+
+    def _sanitize_version(self, out: str) -> str:
+        v = re.search(r'(\d{4}) Update (\d)', out)
         if v:
-            version = v.group(0)
-        else:
-            version = None
+            return '{}.{}'.format(v.group(1), v.group(2))
+        return out
 
-        return version, cargs, libs
 
-    def _try_other_wrapper(self, prog, cid: str) -> T.Tuple[str, T.List[str], T.List[str]]:
-        prog = ExternalProgram(prog, silent=True)
-        if not prog.found():
-            return None
-
-        cmd = prog.get_command()
-        if cid == 'intel-cl':
-            cmd.append('/show')
-        else:
-            cmd.append('-show')
-        p = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, timeout=15)
-        if p.returncode != 0:
-            mlog.debug('Command', mlog.bold(cmd), 'failed to run:')
-            mlog.debug(mlog.bold('Standard output\n'), p.stdout)
-            mlog.debug(mlog.bold('Standard error\n'), p.stderr)
-            return None
-
-        version = None
-        stdout = p.stdout
-        if 'Intel(R) MPI Library' in p.stdout:  # intel-cl: remove messy compiler logo
-            out = stdout.split('\n', 2)
-            version = out[0]
-            stdout = out[2]
-
-        if version is None:
-            p = subprocess.run(cmd + ['-v'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, timeout=15)
-            if p.returncode == 0:
-                version = p.stdout.split('\n', 1)[0]
-
-        args = split_args(stdout)
-
-        return version, args, args
-
-    def _try_msmpi(self) -> T.Tuple[str, T.List[str], T.List[str]]:
-        if self.language == 'cpp':
-            # MS-MPI does not support the C++ version of MPI, only the standard C API.
-            return None
-        if 'MSMPI_INC' not in os.environ:
-            return None
+class OpenMPIConfigToolDependency(_MPIConfigToolDependency):
+
+    """Wrapper around OpenMPI mpicc and friends."""
+
+    version_arg = '--showme:version'
 
-        incdir = os.environ['MSMPI_INC']
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
+                 language: T.Optional[str] = None):
+        super().__init__(name, env, kwargs, language=language)
+        if not self.is_found:
+            return
+
+        c_args = self.get_config_value(['--showme:compile'], 'compile_args')
+        self.compile_args = self._filter_compile_args(c_args)
+
+        l_args = self.get_config_value(['--showme:link'], 'link_args')
+        self.link_args = self._filter_link_args(l_args)
+
+    def _sanitize_version(self, out: str) -> str:
+        v = re.search(r'\d+\.\d+\.\d+', out)
+        if v:
+            return v.group(0)
+        return out
+
+
+class MSMPIDependency(ExternalDependency):
+
+    """The Microsoft MPI."""
+
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
+                 language: T.Optional[str] = None):
+        super().__init__(name, env, kwargs, language=language)
+        # MSMPI only supports the C and Fortran APIs, not C++
+        if language not in {'c', 'fortran', None}:
+            self.is_found = False
+            return
+        # MSMPI is only for windows, obviously
+        if not self.env.machines[self.for_machine].is_windows():
+            return
+
+        incdir = os.environ.get('MSMPI_INC')
         arch = detect_cpu_family(self.env.coredata.compilers.host)
+        libdir = None
         if arch == 'x86':
-            if 'MSMPI_LIB32' not in os.environ:
-                return None
-            libdir = os.environ['MSMPI_LIB32']
+            libdir = os.environ.get('MSMPI_LIB32')
             post = 'x86'
         elif arch == 'x86_64':
-            if 'MSMPI_LIB64' not in os.environ:
-                return None
-            libdir = os.environ['MSMPI_LIB64']
+            libdir = os.environ.get('MSMPI_LIB64')
             post = 'x64'
-        else:
-            return None
 
+        if libdir is None or incdir is None:
+            self.is_found = False
+            return
+
+        self.is_found = True
+        self.link_args = ['-l' + os.path.join(libdir, 'msmpi')]
+        self.compile_args = ['-I' + incdir, '-I' + os.path.join(incdir, post)]
         if self.language == 'fortran':
-            return (None,
-                    ['-I' + incdir, '-I' + os.path.join(incdir, post)],
-                    [os.path.join(libdir, 'msmpi.lib'), os.path.join(libdir, 'msmpifec.lib')])
-        else:
-            return (None,
-                    ['-I' + incdir, '-I' + os.path.join(incdir, post)],
-                    [os.path.join(libdir, 'msmpi.lib')])
-
-    @staticmethod
-    def get_methods():
-        return [DependencyMethods.AUTO, DependencyMethods.PKGCONFIG]
+            self.link_args.append('-l' + os.path.join(libdir, 'msmpifec'))
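The hunk above replaces the old monolithic MPI dependency with a factory that returns a list of lazily constructed candidates. Below is a minimal, self-contained sketch of that candidate-factory pattern; the classes and names are toy stand-ins for illustration only, not Meson's real Dependency API.

import functools
import typing as T

class ToyDependency:
    """Stand-in for a dependency class; a real subclass would probe the system."""

    def __init__(self, name: str, found: bool = False) -> None:
        self.name = name
        self.is_found = found

    def found(self) -> bool:
        return self.is_found

def toy_mpi_factory(methods: T.List[str]) -> T.List[T.Callable[[], ToyDependency]]:
    # Each entry is a zero-argument callable; nothing is probed until the
    # caller actually invokes it, so cheaper methods can be tried first.
    candidates = []  # type: T.List[T.Callable[[], ToyDependency]]
    if 'pkgconfig' in methods:
        candidates.append(functools.partial(ToyDependency, 'ompi-c'))
    if 'config-tool' in methods:
        candidates.append(functools.partial(ToyDependency, 'mpicc', found=True))
    return candidates

for make_candidate in toy_mpi_factory(['pkgconfig', 'config-tool']):
    dep = make_candidate()
    if dep.found():
        print('using', dep.name)
        break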
diff -Nru meson-0.53.2/mesonbuild/dependencies/platform.py meson-0.57.0+really0.56.2/mesonbuild/dependencies/platform.py
--- meson-0.53.2/mesonbuild/dependencies/platform.py	2019-05-02 18:59:50.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/dependencies/platform.py	2020-08-15 16:27:05.000000000 +0000
@@ -20,7 +20,7 @@
 
 class AppleFrameworks(ExternalDependency):
     def __init__(self, env, kwargs):
-        super().__init__('appleframeworks', env, None, kwargs)
+        super().__init__('appleframeworks', env, kwargs)
         modules = kwargs.get('modules', [])
         if isinstance(modules, str):
             modules = [modules]
diff -Nru meson-0.53.2/mesonbuild/dependencies/scalapack.py meson-0.57.0+really0.56.2/mesonbuild/dependencies/scalapack.py
--- meson-0.53.2/mesonbuild/dependencies/scalapack.py	2019-12-29 22:47:27.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/dependencies/scalapack.py	2021-01-06 10:39:48.000000000 +0000
@@ -1,4 +1,4 @@
-# Copyright 2013-2019 The Meson development team
+# Copyright 2013-2020 The Meson development team
 
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,109 +12,138 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from pathlib import Path
+from .._pathlib import Path
+import functools
 import os
+import typing as T
 
-from .. import mesonlib
-from .base import CMakeDependency, DependencyMethods, ExternalDependency, PkgConfigDependency
+from .base import CMakeDependency, DependencyMethods, PkgConfigDependency
+from .base import factory_methods, DependencyException
 
+if T.TYPE_CHECKING:
+    from ..environment import Environment, MachineChoice
+    from .base import DependencyType
 
-class ScalapackDependency(ExternalDependency):
-    def __init__(self, environment, kwargs: dict):
-        super().__init__('scalapack', environment, None, kwargs)
-        kwargs['required'] = False
-        kwargs['silent'] = True
-        self.is_found = False
-        self.static = kwargs.get('static', False)
-        methods = mesonlib.listify(self.methods)
-
-        if set([DependencyMethods.AUTO, DependencyMethods.PKGCONFIG]).intersection(methods):
-            pkgconfig_files = []
-            mklroot = None
-            is_gcc = self.clib_compiler.get_id() == 'gcc'
-            # Intel MKL works with non-Intel compilers too -- but not gcc on windows
-            if 'MKLROOT' in os.environ and not (mesonlib.is_windows() and is_gcc):
+
+@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE})
+def scalapack_factory(env: 'Environment', for_machine: 'MachineChoice',
+                      kwargs: T.Dict[str, T.Any],
+                      methods: T.List[DependencyMethods]) -> T.List['DependencyType']:
+    candidates = []
+
+    if DependencyMethods.PKGCONFIG in methods:
+        mkl = 'mkl-static-lp64-iomp' if kwargs.get('static', False) else 'mkl-dynamic-lp64-iomp'
+        candidates.append(functools.partial(
+            MKLPkgConfigDependency, mkl, env, kwargs))
+
+        for pkg in ['scalapack-openmpi', 'scalapack']:
+            candidates.append(functools.partial(
+                PkgConfigDependency, pkg, env, kwargs))
+
+    if DependencyMethods.CMAKE in methods:
+        candidates.append(functools.partial(
+            CMakeDependency, 'Scalapack', env, kwargs))
+
+    return candidates
+
+
+class MKLPkgConfigDependency(PkgConfigDependency):
+
+    """PkgConfigDependency for Intel MKL.
+
+    MKL's pkg-config is pretty much borked in every way. We need to apply a
+    bunch of fixups to make it work correctly.
+    """
+
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
+                 language: T.Optional[str] = None):
+        _m = os.environ.get('MKLROOT')
+        self.__mklroot = Path(_m).resolve() if _m else None
+
+        # We need to call down into the normal super() method even if we don't
+        # find mklroot, otherwise we won't have all of the instance variables
+        # initialized that meson expects.
+        super().__init__(name, env, kwargs, language=language)
+
+        # Doesn't work with gcc on windows, but does on Linux
+        if (not self.__mklroot or (env.machines[self.for_machine].is_windows()
+                                   and self.clib_compiler.id == 'gcc')):
+            self.is_found = False
+
+        # This can happen either because we're using GCC on Windows, we
+        # couldn't find MKLROOT, or pkg-config couldn't find the MKL package.
+        if not self.is_found:
+            return
+
+        assert self.version != '', 'This should not happen if we didn\'t return above'
+
+        if self.version == 'unknown':
+            # At least by 2020 the version is in the pkg-config, just not with
+            # the correct name
+            v = self.get_variable(pkgconfig='Version', default_value='')
+
+            if not v and self.__mklroot:
                 try:
-                    mklroot = Path(os.environ['MKLROOT']).resolve()
-                except Exception:
+                    v = (
+                        self.__mklroot.as_posix()
+                        .split('compilers_and_libraries_')[1]
+                        .split('/', 1)[0]
+                    )
+                except IndexError:
                     pass
-            if mklroot is not None:
-                # MKL pkg-config is a start, but you have to add / change stuff
-                # https://software.intel.com/en-us/articles/intel-math-kernel-library-intel-mkl-and-pkg-config-tool
-                pkgconfig_files = (
-                    ['mkl-static-lp64-iomp'] if self.static else ['mkl-dynamic-lp64-iomp']
-                )
-                if mesonlib.is_windows():
-                    suffix = '.lib'
-                elif self.static:
-                    suffix = '.a'
-                else:
-                    suffix = ''
-                libdir = mklroot / 'lib/intel64'
-            # Intel compiler might not have Parallel Suite
-            pkgconfig_files += ['scalapack-openmpi', 'scalapack']
-
-            for pkg in pkgconfig_files:
-                pkgdep = PkgConfigDependency(
-                    pkg, environment, kwargs, language=self.language
-                )
-                if pkgdep.found():
-                    self.compile_args = pkgdep.get_compile_args()
-                    if mklroot:
-                        link_args = pkgdep.get_link_args()
-                        if is_gcc:
-                            for i, a in enumerate(link_args):
-                                if 'mkl_intel_lp64' in a:
-                                    link_args[i] = a.replace('intel', 'gf')
-                                    break
-                        # MKL pkg-config omits scalapack
-                        # be sure "-L" and "-Wl" are first if present
-                        i = 0
-                        for j, a in enumerate(link_args):
-                            if a.startswith(('-L', '-Wl')):
-                                i = j + 1
-                            elif j > 3:
-                                break
-                        if mesonlib.is_windows() or self.static:
-                            link_args.insert(
-                                i, str(libdir / ('mkl_scalapack_lp64' + suffix))
-                            )
-                            link_args.insert(
-                                i + 1, str(libdir / ('mkl_blacs_intelmpi_lp64' + suffix))
-                            )
-                        else:
-                            link_args.insert(i, '-lmkl_scalapack_lp64')
-                            link_args.insert(i + 1, '-lmkl_blacs_intelmpi_lp64')
-                    else:
-                        link_args = pkgdep.get_link_args()
-                    self.link_args = link_args
-
-                    self.version = pkgdep.get_version()
-                    if self.version == 'unknown' and mklroot:
-                        try:
-                            v = (
-                                mklroot.as_posix()
-                                .split('compilers_and_libraries_')[1]
-                                .split('/', 1)[0]
-                            )
-                            if v:
-                                self.version = v
-                        except IndexError:
-                            pass
-
-                    self.is_found = True
-                    self.pcdep = pkgdep
-                    return
-
-        if set([DependencyMethods.AUTO, DependencyMethods.CMAKE]).intersection(methods):
-            cmakedep = CMakeDependency('Scalapack', environment, kwargs, language=self.language)
-            if cmakedep.found():
-                self.compile_args = cmakedep.get_compile_args()
-                self.link_args = cmakedep.get_link_args()
-                self.version = cmakedep.get_version()
-                self.is_found = True
-                return
-
-    @staticmethod
-    def get_methods():
-        return [DependencyMethods.AUTO, DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE]
+
+            if v:
+                self.version = v
+
+    def _set_libs(self):
+        super()._set_libs()
+
+        if self.env.machines[self.for_machine].is_windows():
+            suffix = '.lib'
+        elif self.static:
+            suffix = '.a'
+        else:
+            suffix = ''
+        libdir = self.__mklroot / 'lib/intel64'
+
+        if self.clib_compiler.id == 'gcc':
+            for i, a in enumerate(self.link_args):
+                # only replace in filename, not in directory names
+                dirname, basename = os.path.split(a)
+                if 'mkl_intel_lp64' in basename:
+                    basename = basename.replace('intel', 'gf')
+                    self.link_args[i] = '/' + os.path.join(dirname, basename)
+        # MKL pkg-config omits scalapack
+        # be sure "-L" and "-Wl" are first if present
+        i = 0
+        for j, a in enumerate(self.link_args):
+            if a.startswith(('-L', '-Wl')):
+                i = j + 1
+            elif j > 3:
+                break
+        if self.env.machines[self.for_machine].is_windows() or self.static:
+            self.link_args.insert(
+                i, str(libdir / ('mkl_scalapack_lp64' + suffix))
+            )
+            self.link_args.insert(
+                i + 1, str(libdir / ('mkl_blacs_intelmpi_lp64' + suffix))
+            )
+        else:
+            self.link_args.insert(i, '-lmkl_scalapack_lp64')
+            self.link_args.insert(i + 1, '-lmkl_blacs_intelmpi_lp64')
+
+    def _set_cargs(self):
+        env = None
+        if self.language == 'fortran':
+            # gfortran doesn't appear to look in system paths for INCLUDE files,
+            # so don't allow pkg-config to suppress -I flags for system paths
+            env = os.environ.copy()
+            env['PKG_CONFIG_ALLOW_SYSTEM_CFLAGS'] = '1'
+        ret, out, err = self._call_pkgbin([
+            '--cflags', self.name,
+            '--define-variable=prefix=' + self.__mklroot.as_posix()],
+            env=env)
+        if ret != 0:
+            raise DependencyException('Could not generate cargs for %s:\n%s\n' %
+                                      (self.name, err))
+        self.compile_args = self._convert_mingw_paths(self._split_args(out))
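When the MKL pkg-config file reports its version as 'unknown', the class above falls back to parsing MKLROOT. The following standalone sketch shows that fallback, using the standard pathlib instead of Meson's _pathlib wrapper; the install path in the example is hypothetical and used only for illustration.

from pathlib import Path

def mkl_version_from_root(mklroot: Path) -> str:
    # Paths of the form .../compilers_and_libraries_<version>/... encode the
    # suite version right after the 'compilers_and_libraries_' component.
    try:
        return mklroot.as_posix().split('compilers_and_libraries_')[1].split('/', 1)[0]
    except IndexError:
        return 'unknown'

# Hypothetical install location, for illustration only:
print(mkl_version_from_root(Path('/opt/intel/compilers_and_libraries_2020.0.166/linux/mkl')))
# prints: 2020.0.166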
diff -Nru meson-0.53.2/mesonbuild/dependencies/ui.py meson-0.57.0+really0.56.2/mesonbuild/dependencies/ui.py
--- meson-0.53.2/mesonbuild/dependencies/ui.py	2020-01-23 12:51:19.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/dependencies/ui.py	2021-01-06 10:39:48.000000000 +0000
@@ -14,10 +14,10 @@
 
 # This file contains the detection logic for external dependencies that
 # are UI-related.
-import functools
 import os
 import re
 import subprocess
+import typing as T
 from collections import OrderedDict
 
 from .. import mlog
@@ -28,14 +28,19 @@
 from ..environment import detect_cpu_family
 
 from .base import DependencyException, DependencyMethods
-from .base import ExternalDependency, ExternalProgram, NonExistingExternalProgram
+from .base import ExternalDependency, NonExistingExternalProgram
 from .base import ExtraFrameworkDependency, PkgConfigDependency
-from .base import ConfigToolDependency
+from .base import ConfigToolDependency, DependencyFactory
+from .base import find_external_program
 
+if T.TYPE_CHECKING:
+    from ..environment import Environment
+    from .base import ExternalProgram
 
-class GLDependency(ExternalDependency):
-    def __init__(self, environment, kwargs):
-        super().__init__('gl', environment, None, kwargs)
+
+class GLDependencySystem(ExternalDependency):
+    def __init__(self, name: str, environment, kwargs):
+        super().__init__(name, environment, kwargs)
 
         if self.env.machines[self.for_machine].is_darwin():
             self.is_found = True
@@ -50,19 +55,6 @@
             # FIXME: Detect version using self.clib_compiler
             return
 
-    @classmethod
-    def _factory(cls, environment, kwargs):
-        methods = cls._process_method_kw(kwargs)
-        candidates = []
-
-        if DependencyMethods.PKGCONFIG in methods:
-            candidates.append(functools.partial(PkgConfigDependency, 'gl', environment, kwargs))
-
-        if DependencyMethods.SYSTEM in methods:
-            candidates.append(functools.partial(GLDependency, environment, kwargs))
-
-        return candidates
-
     @staticmethod
     def get_methods():
         if mesonlib.is_osx() or mesonlib.is_windows():
@@ -79,7 +71,7 @@
     tool_name = 'gnustep-config'
 
     def __init__(self, environment, kwargs):
-        super().__init__('gnustep', environment, 'objc', kwargs)
+        super().__init__('gnustep', environment, kwargs, language='objc')
         if not self.is_found:
             return
         self.modules = kwargs.get('modules', [])
@@ -89,13 +81,13 @@
             ['--gui-libs' if 'gui' in self.modules else '--base-libs'],
             'link_args'))
 
-    def find_config(self, versions=None):
+    def find_config(self, versions=None, returncode: int = 0):
         tool = [self.tools[0]]
         try:
             p, out = Popen_safe(tool + ['--help'])[:2]
         except (FileNotFoundError, PermissionError):
             return (None, None)
-        if p.returncode != 0:
+        if p.returncode != returncode:
             return (None, None)
         self.config = tool
         found_version = self.detect_version()
@@ -176,8 +168,8 @@
             os.path.join(private_dir, 'Qt' + module))
 
 class QtExtraFrameworkDependency(ExtraFrameworkDependency):
-    def __init__(self, name, required, paths, env, lang, kwargs):
-        super().__init__(name, required, paths, env, lang, kwargs)
+    def __init__(self, name, env, kwargs, language: T.Optional[str] = None):
+        super().__init__(name, env, kwargs, language=language)
         self.mod_name = name[2:]
 
     def get_compile_args(self, with_private_headers=False, qt_version="0"):
@@ -191,7 +183,7 @@
 
 class QtBaseDependency(ExternalDependency):
     def __init__(self, name, env, kwargs):
-        super().__init__(name, env, 'cpp', kwargs)
+        super().__init__(name, env, kwargs, language='cpp')
         self.qtname = name.capitalize()
         self.qtver = name[-1]
         if self.qtver == "4":
@@ -237,24 +229,21 @@
         bins = ['moc', 'uic', 'rcc', 'lrelease']
         found = {b: NonExistingExternalProgram(name='{}-{}'.format(b, self.name))
                  for b in bins}
+        wanted = '== {}'.format(self.version)
 
         def gen_bins():
             for b in bins:
                 if self.bindir:
-                    yield os.path.join(self.bindir, b), b, False
-                yield '{}-{}'.format(b, self.name), b, False
-                yield b, b, self.required if b != 'lrelease' else False
+                    yield os.path.join(self.bindir, b), b
+                # prefer the -qt of the tool to the plain one, as we
+                # don't know what the unsuffixed one points to without calling it.
+                yield '{}-{}'.format(b, self.name), b
+                yield b, b
 
-        for b, name, required in gen_bins():
+        for b, name in gen_bins():
             if found[name].found():
                 continue
 
-            # prefer the -qt of the tool to the plain one, as we
-            # don't know what the unsuffixed one points to without calling it.
-            p = interp_obj.find_program_impl([b], silent=True, required=required).held_object
-            if not p.found():
-                continue
-
             if name == 'lrelease':
                 arg = ['-version']
             elif mesonlib.version_compare(self.version, '>= 5'):
@@ -263,12 +252,18 @@
                 arg = ['-v']
 
             # Ensure that the version of qt and each tool are the same
-            _, out, err = mesonlib.Popen_safe(p.get_command() + arg)
-            if b.startswith('lrelease') or not self.version.startswith('4'):
-                care = out
-            else:
-                care = err
-            if mesonlib.version_compare(self.version, '== {}'.format(care.split(' ')[-1])):
+            def get_version(p):
+                _, out, err = mesonlib.Popen_safe(p.get_command() + arg)
+                if b.startswith('lrelease') or not self.version.startswith('4'):
+                    care = out
+                else:
+                    care = err
+                return care.split(' ')[-1].replace(')', '')
+
+            p = interp_obj.find_program_impl([b], required=False,
+                                             version_func=get_version,
+                                             wanted=wanted).held_object
+            if p.found():
                 found[name] = p
 
         return tuple([found[b] for b in bins])
@@ -334,28 +329,23 @@
             if prefix:
                 self.bindir = os.path.join(prefix, 'bin')
 
-    def _qmake_detect(self, mods, kwargs):
+    def search_qmake(self) -> T.Generator['ExternalProgram', None, None]:
         for qmake in ('qmake-' + self.name, 'qmake'):
-            self.qmake = ExternalProgram.from_bin_list(
-                self.env.binaries.host, qmake)
-            if not self.qmake.found():
-                # Even when cross-compiling, if a cross-info qmake is not
-                # specified, we fallback to using the qmake in PATH because
-                # that's what we used to do
-                self.qmake = ExternalProgram.from_bin_list(
-                    self.env.binaries.build, qmake)
-            if not self.qmake.found():
-                self.qmake = ExternalProgram(qmake, silent=True)
-            if not self.qmake.found():
+            yield from find_external_program(self.env, self.for_machine, qmake, 'QMake', [qmake])
+
+    def _qmake_detect(self, mods, kwargs):
+        for qmake in self.search_qmake():
+            if not qmake.found():
                 continue
             # Check that the qmake is for qt5
-            pc, stdo = Popen_safe(self.qmake.get_command() + ['-v'])[0:2]
+            pc, stdo = Popen_safe(qmake.get_command() + ['-v'])[0:2]
             if pc.returncode != 0:
                 continue
             if not 'Qt version ' + self.qtver in stdo:
                 mlog.log('QMake is not for ' + self.qtname)
                 continue
             # Found qmake for Qt5!
+            self.qmake = qmake
             break
         else:
             # Didn't find qmake :(
@@ -377,7 +367,12 @@
         if self.env.machines.host.is_darwin() and not any(s in xspec for s in ['ios', 'tvos']):
             mlog.debug("Building for macOS, looking for framework")
             self._framework_detect(qvars, mods, kwargs)
-            return qmake
+            # Sometimes Qt is not built as a framework (for instance, when using the Conan package manager);
+            # in that case skip this and fall back to the normal library search.
+            if self.is_found:
+                return self.qmake.name
+            else:
+                mlog.debug("Building for macOS, couldn't find framework, falling back to library search")
         incdir = qvars['QT_INSTALL_HEADERS']
         self.compile_args.append('-I' + incdir)
         libdir = qvars['QT_INSTALL_LIBS']
@@ -385,7 +380,12 @@
         self.bindir = self.get_qmake_host_bins(qvars)
         self.is_found = True
 
+        # Use the buildtype by default, but look at the b_vscrt option if the
+        # compiler supports it.
         is_debug = self.env.coredata.get_builtin_option('buildtype') == 'debug'
+        if 'b_vscrt' in self.env.coredata.base_options:
+            if self.env.coredata.base_options['b_vscrt'].value in ('mdd', 'mtd'):
+                is_debug = True
         modules_lib_suffix = self._get_modules_lib_suffix(is_debug)
 
         for module in mods:
@@ -410,6 +410,9 @@
             if libfile:
                 libfile = libfile[0]
             else:
+                mlog.log("Could not find:", module,
+                         self.qtpkgname + module + modules_lib_suffix,
+                         'in', libdir)
                 self.is_found = False
                 break
             self.link_args.append(libfile)
@@ -418,7 +421,7 @@
             if not self._link_with_qtmain(is_debug, libdir):
                 self.is_found = False
 
-        return qmake
+        return self.qmake.name
 
     def _get_modules_lib_suffix(self, is_debug):
         suffix = ''
@@ -427,6 +430,23 @@
                 suffix += 'd'
             if self.qtver == '4':
                 suffix += '4'
+        if self.env.machines[self.for_machine].is_darwin():
+            if is_debug:
+                suffix += '_debug'
+        if mesonlib.version_compare(self.version, '>= 5.14.0'):
+            if self.env.machines[self.for_machine].is_android():
+                cpu_family = self.env.machines[self.for_machine].cpu_family
+                if cpu_family == 'x86':
+                    suffix += '_x86'
+                elif cpu_family == 'x86_64':
+                    suffix += '_x86_64'
+                elif cpu_family == 'arm':
+                    suffix += '_armeabi-v7a'
+                elif cpu_family == 'aarch64':
+                    suffix += '_arm64-v8a'
+                else:
+                    mlog.warning('Android target arch {!r} for Qt5 is unknown, '
+                                 'module detection may not work'.format(cpu_family))
         return suffix
 
     def _link_with_qtmain(self, is_debug, libdir):
@@ -443,14 +463,14 @@
         # ExtraFrameworkDependency doesn't support any methods
         fw_kwargs = kwargs.copy()
         fw_kwargs.pop('method', None)
+        fw_kwargs['paths'] = [libdir]
 
         for m in modules:
             fname = 'Qt' + m
             mlog.debug('Looking for qt framework ' + fname)
-            fwdep = QtExtraFrameworkDependency(fname, False, [libdir], self.env,
-                                               self.language, fw_kwargs)
-            self.compile_args.append('-F' + libdir)
+            fwdep = QtExtraFrameworkDependency(fname, self.env, fw_kwargs, language=self.language)
             if fwdep.found():
+                self.compile_args.append('-F' + libdir)
                 self.compile_args += fwdep.get_compile_args(with_private_headers=self.private_headers,
                                                             qt_version=self.version)
                 self.link_args += fwdep.get_link_args()
@@ -458,8 +478,8 @@
                 break
         else:
             self.is_found = True
-        # Used by self.compilers_detect()
-        self.bindir = self.get_qmake_host_bins(qvars)
+            # Used by self.compilers_detect()
+            self.bindir = self.get_qmake_host_bins(qvars)
 
     def get_qmake_host_bins(self, qvars):
         # Prefer QT_HOST_BINS (qt5, correct for cross and native compiling)
@@ -524,38 +544,17 @@
         return _qt_get_private_includes(mod_inc_dir, module, self.version)
 
 
-# There are three different ways of depending on SDL2:
-# sdl2-config, pkg-config and OSX framework
-class SDL2Dependency(ExternalDependency):
-    def __init__(self, environment, kwargs):
-        super().__init__('sdl2', environment, None, kwargs)
+class SDL2DependencyConfigTool(ConfigToolDependency):
 
-    @classmethod
-    def _factory(cls, environment, kwargs):
-        methods = cls._process_method_kw(kwargs)
-        candidates = []
-
-        if DependencyMethods.PKGCONFIG in methods:
-            candidates.append(functools.partial(PkgConfigDependency, 'sdl2', environment, kwargs))
-
-        if DependencyMethods.CONFIG_TOOL in methods:
-            candidates.append(functools.partial(ConfigToolDependency.factory,
-                                                'sdl2', environment, None,
-                                                kwargs, ['sdl2-config'],
-                                                'sdl2-config', SDL2Dependency.tool_finish_init))
-
-        if DependencyMethods.EXTRAFRAMEWORK in methods:
-            if mesonlib.is_osx():
-                candidates.append(functools.partial(ExtraFrameworkDependency,
-                                                    'sdl2', False, None, environment,
-                                                    kwargs.get('language', None), kwargs))
-                # fwdep.version = '2'  # FIXME
-        return candidates
+    tools = ['sdl2-config']
+    tool_name = 'sdl2-config'
 
-    @staticmethod
-    def tool_finish_init(ctdep):
-        ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args')
-        ctdep.link_args = ctdep.get_config_value(['--libs'], 'link_args')
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, environment, kwargs)
+        if not self.is_found:
+            return
+        self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+        self.link_args = self.get_config_value(['--libs'], 'link_args')
 
     @staticmethod
     def get_methods():
@@ -570,17 +569,30 @@
     tools = ['wx-config-3.0', 'wx-config', 'wx-config-gtk3']
     tool_name = 'wx-config'
 
-    def __init__(self, environment, kwargs):
-        super().__init__('WxWidgets', environment, None, kwargs)
+    def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__('WxWidgets', environment, kwargs, language='cpp')
         if not self.is_found:
             return
         self.requested_modules = self.get_requested(kwargs)
+
+        extra_args = []
+        if self.static:
+            extra_args.append('--static=yes')
+
+            # Check to make sure static is going to work
+            err = Popen_safe(self.config + extra_args)[2]
+            if 'No config found to match' in err:
+                mlog.debug('WxWidgets is missing static libraries.')
+                self.is_found = False
+                return
+
         # wx-config seems to have a cflags as well but since it requires C++,
         # this should be good, at least for now.
-        self.compile_args = self.get_config_value(['--cxxflags'] + self.requested_modules, 'compile_args')
-        self.link_args = self.get_config_value(['--libs'] + self.requested_modules, 'link_args')
+        self.compile_args = self.get_config_value(['--cxxflags'] + extra_args + self.requested_modules, 'compile_args')
+        self.link_args = self.get_config_value(['--libs'] + extra_args + self.requested_modules, 'link_args')
 
-    def get_requested(self, kwargs):
+    @staticmethod
+    def get_requested(kwargs: T.Dict[str, T.Any]) -> T.List[str]:
         if 'modules' not in kwargs:
             return []
         candidates = extract_as_list(kwargs, 'modules')
@@ -590,10 +602,10 @@
         return candidates
 
 
-class VulkanDependency(ExternalDependency):
+class VulkanDependencySystem(ExternalDependency):
 
-    def __init__(self, environment, kwargs):
-        super().__init__('vulkan', environment, None, kwargs)
+    def __init__(self, name: str, environment, kwargs, language: T.Optional[str] = None):
+        super().__init__(name, environment, kwargs, language=language)
 
         try:
             self.vulkan_sdk = os.environ['VULKAN_SDK']
@@ -646,22 +658,27 @@
                     self.link_args.append(lib)
                 return
 
-    @classmethod
-    def _factory(cls, environment, kwargs):
-        methods = cls._process_method_kw(kwargs)
-        candidates = []
-
-        if DependencyMethods.PKGCONFIG in methods:
-            candidates.append(functools.partial(PkgConfigDependency, 'vulkan', environment, kwargs))
-
-        if DependencyMethods.SYSTEM in methods:
-            candidates.append(functools.partial(VulkanDependency, environment, kwargs))
-
-        return candidates
-
     @staticmethod
     def get_methods():
-        return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM]
+        return [DependencyMethods.SYSTEM]
 
     def log_tried(self):
         return 'system'
+
+gl_factory = DependencyFactory(
+    'gl',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM],
+    system_class=GLDependencySystem,
+)
+
+sdl2_factory = DependencyFactory(
+    'sdl2',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.EXTRAFRAMEWORK],
+    configtool_class=SDL2DependencyConfigTool,
+)
+
+vulkan_factory = DependencyFactory(
+    'vulkan',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM],
+    system_class=VulkanDependencySystem,
+)
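The gl/sdl2/vulkan factories above are declarative: a dependency name, an ordered list of detection methods, and per-method class overrides. Below is a rough, self-contained approximation of what such a factory expands to; the classes are toys and the constructor is not Meson's actual DependencyFactory signature.

import functools
import typing as T

class ToySystemDep:
    def __init__(self, name: str) -> None:
        self.name = name
        self.is_found = False  # a real class would inspect the OS here

class ToyPkgConfigDep(ToySystemDep):
    pass  # a real class would shell out to pkg-config

class ToyDependencyFactory:
    def __init__(self, name: str, methods: T.List[str],
                 system_class: T.Type[ToySystemDep] = ToySystemDep) -> None:
        self.name = name
        self.methods = methods
        self.classes = {'pkgconfig': ToyPkgConfigDep, 'system': system_class}

    def __call__(self) -> T.List[T.Callable[[], ToySystemDep]]:
        # Expand into the same kind of candidate list a hand-written
        # factory function would build.
        return [functools.partial(self.classes[m], self.name) for m in self.methods]

gl_factory = ToyDependencyFactory('gl', ['pkgconfig', 'system'])
candidates = gl_factory()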
diff -Nru meson-0.53.2/mesonbuild/envconfig.py meson-0.57.0+really0.56.2/mesonbuild/envconfig.py
--- meson-0.53.2/mesonbuild/envconfig.py	2020-01-23 22:34:28.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/envconfig.py	2021-01-06 10:39:48.000000000 +0000
@@ -12,12 +12,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import configparser, os, subprocess
+import os, subprocess
 import typing as T
+from enum import Enum
 
 from . import mesonlib
-from .mesonlib import EnvironmentException, split_args
+from .mesonlib import EnvironmentException, MachineChoice, PerMachine, split_args
 from . import mlog
+from ._pathlib import Path
 
 _T = T.TypeVar('_T')
 
@@ -40,6 +42,9 @@
     'alpha',
     'arc',
     'arm',
+    'avr',
+    'c2000',
+    'dspic',
     'e2k',
     'ia64',
     'm68k',
@@ -47,6 +52,7 @@
     'mips',
     'mips64',
     'parisc',
+    'pic24',
     'ppc',
     'ppc64',
     'riscv32',
@@ -55,12 +61,13 @@
     'rx',
     's390',
     's390x',
+    'sh4',
     'sparc',
     'sparc64',
     'wasm32',
     'wasm64',
     'x86',
-    'x86_64'
+    'x86_64',
 )
 
 # It would feel more natural to call this "64_BIT_CPU_FAMILES", but
@@ -78,55 +85,52 @@
     'x86_64',
 ]
 
-class MesonConfigFile:
-    @classmethod
-    def from_config_parser(cls, parser: configparser.ConfigParser) -> T.Dict[str, T.Dict[str, T.Dict[str, str]]]:
-        out = {}
-        # This is a bit hackish at the moment.
-        for s in parser.sections():
-            section = {}
-            for entry in parser[s]:
-                value = parser[s][entry]
-                # Windows paths...
-                value = value.replace('\\', '\\\\')
-                if ' ' in entry or '\t' in entry or "'" in entry or '"' in entry:
-                    raise EnvironmentException('Malformed variable name %s in cross file..' % entry)
-                try:
-                    res = eval(value, {'__builtins__': None}, {'true': True, 'false': False})
-                except Exception:
-                    raise EnvironmentException('Malformed value in cross file variable %s.' % entry)
-
-                for i in (res if isinstance(res, list) else [res]):
-                    if not isinstance(i, (str, int, bool)):
-                        raise EnvironmentException('Malformed value in cross file variable %s.' % entry)
-
-                section[entry] = res
+class CMakeSkipCompilerTest(Enum):
+    ALWAYS = 'always'
+    NEVER = 'never'
+    DEP_ONLY = 'dep_only'
 
-            out[s] = section
-        return out
 
-class HasEnvVarFallback:
+def get_env_var_pair(for_machine: MachineChoice,
+                     is_cross: bool,
+                     var_name: str) -> T.Optional[T.Tuple[str, str]]:
     """
-    A tiny class to indicate that this class contains data that can be
-    initialized from either a config file or environment file. The `fallback`
-    field says whether env vars should be used. Downstream logic (e.g. subclass
-    methods) can check it to decide what to do, since env vars are currently
-    lazily decoded.
-
-    Frankly, this is a pretty silly class at the moment. The hope is the way
-    that we deal with environment variables will become more structured, and
-    this can be starting point.
+    Returns the exact env var and the value.
     """
-    def __init__(self, fallback: bool = True):
-        self.fallback = fallback
+    candidates = PerMachine(
+        # The prefixed build version takes priority, but if we are native
+        # compiling we fall back on the unprefixed host version. This
+        # allows native builds to never need to worry about the 'BUILD_*'
+        # ones.
+        ([var_name + '_FOR_BUILD'] if is_cross else [var_name]),
+        # Always just the unprefixed host versions
+        [var_name]
+    )[for_machine]
+    for var in candidates:
+        value = os.environ.get(var)
+        if value is not None:
+            break
+    else:
+        formatted = ', '.join(['{!r}'.format(var) for var in candidates])
+        mlog.debug('None of {} are defined in the environment, not changing global flags.'.format(formatted))
+        return None
+    mlog.log('Using {!r} from environment with value: {!r}'.format(var, value))
+    return var, value
+
+def get_env_var(for_machine: MachineChoice,
+                is_cross: bool,
+                var_name: str) -> T.Optional[str]:
+    ret = get_env_var_pair(for_machine, is_cross, var_name)
+    if ret is None:
+        return None
+    return ret[1]
 
-class Properties(HasEnvVarFallback):
+class Properties:
     def __init__(
             self,
-            properties: T.Optional[T.Dict[str, T.Union[str, T.List[str]]]] = None,
-            fallback: bool = True):
-        super().__init__(fallback)
-        self.properties = properties or {}  # type: T.Dict[str, T.Union[str, T.List[str]]]
+            properties: T.Optional[T.Dict[str, T.Union[str, bool, int, T.List[str]]]] = None,
+    ):
+        self.properties = properties or {}  # type: T.Dict[str, T.Union[str, bool, int, T.List[str]]]
 
     def has_stdlib(self, language: str) -> bool:
         return language + '_stdlib' in self.properties
@@ -135,29 +139,84 @@
     # true, but without heterogenious dict annotations it's not practical to
     # narrow them
     def get_stdlib(self, language: str) -> T.Union[str, T.List[str]]:
-        return self.properties[language + '_stdlib']
+        stdlib = self.properties[language + '_stdlib']
+        if isinstance(stdlib, str):
+            return stdlib
+        assert isinstance(stdlib, list)
+        for i in stdlib:
+            assert isinstance(i, str)
+        return stdlib
+
+    def get_root(self) -> T.Optional[str]:
+        root = self.properties.get('root', None)
+        assert root is None or isinstance(root, str)
+        return root
+
+    def get_sys_root(self) -> T.Optional[str]:
+        sys_root = self.properties.get('sys_root', None)
+        assert sys_root is None or isinstance(sys_root, str)
+        return sys_root
+
+    def get_pkg_config_libdir(self) -> T.Optional[T.List[str]]:
+        p = self.properties.get('pkg_config_libdir', None)
+        if p is None:
+            return p
+        res = mesonlib.listify(p)
+        for i in res:
+            assert isinstance(i, str)
+        return res
+
+    def get_cmake_defaults(self) -> bool:
+        if 'cmake_defaults' not in self.properties:
+            return True
+        res = self.properties['cmake_defaults']
+        assert isinstance(res, bool)
+        return res
 
-    def get_root(self) -> T.Optional[T.Union[str, T.List[str]]]:
-        return self.properties.get('root', None)
+    def get_cmake_toolchain_file(self) -> T.Optional[Path]:
+        if 'cmake_toolchain_file' not in self.properties:
+            return None
+        raw = self.properties['cmake_toolchain_file']
+        assert isinstance(raw, str)
+        cmake_toolchain_file = Path(raw)
+        if not cmake_toolchain_file.is_absolute():
+            raise EnvironmentException('cmake_toolchain_file ({}) is not absolute'.format(raw))
+        return cmake_toolchain_file
+
+    def get_cmake_skip_compiler_test(self) -> CMakeSkipCompilerTest:
+        if 'cmake_skip_compiler_test' not in self.properties:
+            return CMakeSkipCompilerTest.DEP_ONLY
+        raw = self.properties['cmake_skip_compiler_test']
+        assert isinstance(raw, str)
+        try:
+            return CMakeSkipCompilerTest(raw)
+        except ValueError:
+            raise EnvironmentException(
+                '"{}" is not a valid value for cmake_skip_compiler_test. Supported values are {}'
+                .format(raw, [e.value for e in CMakeSkipCompilerTest]))
 
-    def get_sys_root(self) -> T.Optional[T.Union[str, T.List[str]]]:
-        return self.properties.get('sys_root', None)
+    def get_cmake_use_exe_wrapper(self) -> bool:
+        if 'cmake_use_exe_wrapper' not in self.properties:
+            return True
+        res = self.properties['cmake_use_exe_wrapper']
+        assert isinstance(res, bool)
+        return res
 
-    def __eq__(self, other: T.Any) -> 'T.Union[bool, NotImplemented]':
+    def __eq__(self, other: object) -> bool:
         if isinstance(other, type(self)):
             return self.properties == other.properties
         return NotImplemented
 
     # TODO consider removing so Properties is less freeform
-    def __getitem__(self, key: str) -> T.Any:
+    def __getitem__(self, key: str) -> T.Union[str, bool, int, T.List[str]]:
         return self.properties[key]
 
     # TODO consider removing so Properties is less freeform
-    def __contains__(self, item: T.Any) -> bool:
+    def __contains__(self, item: T.Union[str, bool, int, T.List[str]]) -> bool:
         return item in self.properties
 
     # TODO consider removing, for same reasons as above
-    def get(self, key: str, default: T.Any = None) -> T.Any:
+    def get(self, key: str, default: T.Union[str, bool, int, T.List[str]] = None) -> T.Union[str, bool, int, T.List[str]]:
         return self.properties.get(key, default)
 
 class MachineInfo:
@@ -168,8 +227,8 @@
         self.endian = endian
         self.is_64_bit = cpu_family in CPU_FAMILES_64_BIT  # type: bool
 
-    def __eq__(self, other: T.Any) -> 'T.Union[bool, NotImplemented]':
-        if self.__class__ is not other.__class__:
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, MachineInfo):
             return NotImplemented
         return \
             self.system == other.system and \
@@ -177,8 +236,8 @@
             self.cpu == other.cpu and \
             self.endian == other.endian
 
-    def __ne__(self, other: T.Any) -> 'T.Union[bool, NotImplemented]':
-        if self.__class__ is not other.__class__:
+    def __ne__(self, other: object) -> bool:
+        if not isinstance(other, MachineInfo):
             return NotImplemented
         return not self.__eq__(other)
 
@@ -195,11 +254,11 @@
 
         cpu_family = literal['cpu_family']
         if cpu_family not in known_cpu_families:
-            mlog.warning('Unknown CPU family %s, please report this at https://github.com/mesonbuild/meson/issues/new' % cpu_family)
+            mlog.warning('Unknown CPU family {}, please report this at https://github.com/mesonbuild/meson/issues/new'.format(cpu_family))
 
         endian = literal['endian']
         if endian not in ('little', 'big'):
-            mlog.warning('Unknown endian %s' % endian)
+            mlog.warning('Unknown endian {}'.format(endian))
 
         return cls(literal['system'], cpu_family, literal['cpu'], endian)
 
@@ -207,13 +266,13 @@
         """
         Machine is windows?
         """
-        return self.system == 'windows' or 'mingw' in self.system
+        return self.system == 'windows'
 
     def is_cygwin(self) -> bool:
         """
         Machine is cygwin?
         """
-        return self.system.startswith('cygwin')
+        return self.system == 'cygwin'
 
     def is_linux(self) -> bool:
         """
@@ -263,6 +322,16 @@
         """Machine is illumos or Solaris?"""
         return self.system == 'sunos'
 
+    def is_hurd(self) -> bool:
+        """
+        Machine is GNU/Hurd?
+        """
+        return self.system == 'gnu'
+
+    def is_irix(self) -> bool:
+        """Machine is IRIX?"""
+        return self.system.startswith('irix')
+
     # Various prefixes and suffixes for import libraries, shared libraries,
     # static libraries, and executables.
     # Versioning is added to these names in the backends as-needed.
@@ -281,12 +350,11 @@
     def libdir_layout_is_win(self) -> bool:
         return self.is_windows() or self.is_cygwin()
 
-class BinaryTable(HasEnvVarFallback):
+class BinaryTable:
     def __init__(
             self,
             binaries: T.Optional[T.Dict[str, T.Union[str, T.List[str]]]] = None,
-            fallback: bool = True):
-        super().__init__(fallback)
+    ):
         self.binaries = binaries or {}  # type: T.Dict[str, T.Union[str, T.List[str]]]
         for name, command in self.binaries.items():
             if not isinstance(command, (list, str)):
@@ -311,11 +379,11 @@
         # Linkers
         'c_ld': 'CC_LD',
         'cpp_ld': 'CXX_LD',
-        'd_ld': 'D_LD',
-        'fortran_ld': 'F_LD',
+        'd_ld': 'DC_LD',
+        'fortran_ld': 'FC_LD',
         'objc_ld': 'OBJC_LD',
-        'objcpp_ld': 'OBJCPP_LD',
-        'rust_ld': 'RUST_LD',
+        'objcpp_ld': 'OBJCXX_LD',
+        'rust_ld': 'RUSTC_LD',
 
         # Binutils
         'strip': 'STRIP',
@@ -326,6 +394,16 @@
         'cmake': 'CMAKE',
         'qmake': 'QMAKE',
         'pkgconfig': 'PKG_CONFIG',
+        'make': 'MAKE',
+    }  # type: T.Dict[str, str]
+
+    # Deprecated environment variables mapped from the new variable to the old one
+    # Deprecated in 0.54.0
+    DEPRECATION_MAP = {
+        'DC_LD': 'D_LD',
+        'FC_LD': 'F_LD',
+        'RUSTC_LD': 'RUST_LD',
+        'OBJCXX_LD': 'OBJCPP_LD',
     }  # type: T.Dict[str, str]
 
     @staticmethod
@@ -337,13 +415,6 @@
         return ['ccache']
 
     @classmethod
-    def _warn_about_lang_pointing_to_cross(cls, compiler_exe: str, evar: str) -> None:
-        evar_str = os.environ.get(evar, 'WHO_WOULD_CALL_THEIR_COMPILER_WITH_THIS_NAME')
-        if evar_str == compiler_exe:
-            mlog.warning('''Env var %s seems to point to the cross compiler.
-This is probably wrong, it should always point to the native compiler.''' % evar)
-
-    @classmethod
     def parse_entry(cls, entry: T.Union[str, T.List[str]]) -> T.Tuple[T.List[str], T.List[str]]:
         compiler = mesonlib.stringlistify(entry)
         # Ensure ccache exists and remove it if it doesn't
@@ -355,69 +426,54 @@
         # Return value has to be a list of compiler 'choices'
         return compiler, ccache
 
-    def lookup_entry(self, name: str) -> T.Optional[T.List[str]]:
-        """Lookup binaryk
+    def lookup_entry(self,
+                     for_machine: MachineChoice,
+                     is_cross: bool,
+                     name: str) -> T.Optional[T.List[str]]:
+        """Lookup binary in cross/native file and fallback to environment.
 
         Returns command with args as list if found, Returns `None` if nothing is
         found.
-
-        First tries looking in explicit map, then tries environment variable.
         """
         # Try explicit map, don't fall back on env var
-        command = self.binaries.get(name)
-        if command is not None:
-            command = mesonlib.stringlistify(command)
-            # Relies on there being no "" env var
-            evar = self.evarMap.get(name, "")
-            self._warn_about_lang_pointing_to_cross(command[0], evar)
-        elif self.fallback:
-            # Relies on there being no "" env var
-            evar = self.evarMap.get(name, "")
-            command = os.environ.get(evar)
-            if command is not None:
-                command = split_args(command)
+        # Try explicit map, then env vars
+        for _ in [()]: # a trick to get `break`
+            raw_command = self.binaries.get(name)
+            if raw_command is not None:
+                command = mesonlib.stringlistify(raw_command)
+                break # found
+            evar = self.evarMap.get(name)
+            if evar is not None:
+                raw_command = get_env_var(for_machine, is_cross, evar)
+                if raw_command is None:
+                    deprecated = self.DEPRECATION_MAP.get(evar)
+                    if deprecated is not None:
+                        raw_command = get_env_var(for_machine, is_cross, deprecated)
+                        if raw_command is not None:
+                            mlog.deprecation(
+                                'The', deprecated, 'environment variable is deprecated in favor of',
+                                evar, once=True)
+                if raw_command is not None:
+                    command = split_args(raw_command)
+                    break # found
+            command = None
+
 
         # Do not return empty or blank string entries
         if command is not None and (len(command) == 0 or len(command[0].strip()) == 0):
-            return None
+            command = None
         return command
 
-class Directories:
-
-    """Data class that holds information about directories for native and cross
-    builds.
-    """
-
-    def __init__(self, bindir: T.Optional[str] = None, datadir: T.Optional[str] = None,
-                 includedir: T.Optional[str] = None, infodir: T.Optional[str] = None,
-                 libdir: T.Optional[str] = None, libexecdir: T.Optional[str] = None,
-                 localedir: T.Optional[str] = None, localstatedir: T.Optional[str] = None,
-                 mandir: T.Optional[str] = None, prefix: T.Optional[str] = None,
-                 sbindir: T.Optional[str] = None, sharedstatedir: T.Optional[str] = None,
-                 sysconfdir: T.Optional[str] = None):
-        self.bindir = bindir
-        self.datadir = datadir
-        self.includedir = includedir
-        self.infodir = infodir
-        self.libdir = libdir
-        self.libexecdir = libexecdir
-        self.localedir = localedir
-        self.localstatedir = localstatedir
-        self.mandir = mandir
-        self.prefix = prefix
-        self.sbindir = sbindir
-        self.sharedstatedir = sharedstatedir
-        self.sysconfdir = sysconfdir
-
-    def __contains__(self, key: str) -> bool:
-        return hasattr(self, key)
-
-    def __getitem__(self, key: str) -> T.Optional[str]:
-        # Mypy can't figure out what to do with getattr here, so we'll case for it
-        return T.cast(T.Optional[str], getattr(self, key))
-
-    def __setitem__(self, key: str, value: T.Optional[str]) -> None:
-        setattr(self, key, value)
+class CMakeVariables:
+    def __init__(self, variables: T.Optional[T.Dict[str, T.Any]] = None) -> None:
+        variables = variables or {}
+        self.variables = {}  # type: T.Dict[str, T.List[str]]
+
+        for key, value in variables.items():
+            value = mesonlib.listify(value)
+            for i in value:
+                assert isinstance(i, str)
+            self.variables[key] = value
 
-    def __iter__(self) -> T.Iterator[T.Tuple[str, str]]:
-        return iter(self.__dict__.items())
+    def get_variables(self) -> T.Dict[str, T.List[str]]:
+        return self.variables
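
The lookup order implemented by lookup_entry above is: the [binaries] entry from the machine file first, then the per-machine environment variable from evarMap, and finally the deprecated alias from DEPRECATION_MAP (with a one-time deprecation notice). A minimal standalone sketch of that order, with a plain dict standing in for the process environment, str.split() standing in for split_args(), and lookup_entry_sketch being an illustrative name rather than part of the source:

    DEPRECATION_MAP = {'DC_LD': 'D_LD', 'FC_LD': 'F_LD',
                       'RUSTC_LD': 'RUST_LD', 'OBJCXX_LD': 'OBJCPP_LD'}

    def lookup_entry_sketch(name, binaries, evar_map, env):
        # 1. An explicit [binaries] entry from the machine file always wins.
        if name in binaries:
            return list(binaries[name])
        # 2. Otherwise fall back to the new environment variable (e.g. RUSTC_LD).
        evar = evar_map.get(name)
        if evar is None:
            return None
        raw = env.get(evar)
        # 3. Finally try the deprecated alias (e.g. RUST_LD) and warn.
        if raw is None and evar in DEPRECATION_MAP:
            raw = env.get(DEPRECATION_MAP[evar])
        return raw.split() if raw else None

    # lookup_entry_sketch('rust_ld', {}, {'rust_ld': 'RUSTC_LD'}, {'RUST_LD': 'lld'}) == ['lld']
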
diff -Nru meson-0.53.2/mesonbuild/environment.py meson-0.57.0+really0.56.2/mesonbuild/environment.py
--- meson-0.53.2/mesonbuild/environment.py	2020-02-25 18:00:46.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/environment.py	2021-01-06 10:39:48.000000000 +0000
@@ -16,9 +16,10 @@
 import tempfile
 import shlex
 import typing as T
+import collections
 
 from . import coredata
-from .linkers import ArLinker, ArmarLinker, VisualStudioLinker, DLinker, CcrxLinker, IntelVisualStudioLinker
+from .linkers import ArLinker, ArmarLinker, VisualStudioLinker, DLinker, CcrxLinker, Xc16Linker, CompCertLinker, C2000Linker, IntelVisualStudioLinker, AIXArLinker
 from . import mesonlib
 from .mesonlib import (
     MesonException, EnvironmentException, MachineChoice, Popen_safe,
@@ -27,8 +28,9 @@
 from . import mlog
 
 from .envconfig import (
-    BinaryTable, Directories, MachineInfo, MesonConfigFile,
-    Properties, known_cpu_families,
+    BinaryTable, MachineInfo,
+    Properties, known_cpu_families, get_env_var_pair,
+    CMakeVariables,
 )
 from . import compilers
 from .compilers import (
@@ -45,19 +47,27 @@
     ArmClangDynamicLinker,
     ArmDynamicLinker,
     CcrxDynamicLinker,
+    Xc16DynamicLinker,
+    CompCertDynamicLinker,
+    C2000DynamicLinker,
     ClangClDynamicLinker,
     DynamicLinker,
     GnuBFDDynamicLinker,
     GnuGoldDynamicLinker,
     LLVMDynamicLinker,
+    QualcommLLVMDynamicLinker,
     MSVCDynamicLinker,
     OptlinkDynamicLinker,
+    NvidiaHPC_DynamicLinker,
+    NvidiaHPC_StaticLinker,
     PGIDynamicLinker,
     PGIStaticLinker,
     SolarisDynamicLinker,
+    AIXDynamicLinker,
     XilinkDynamicLinker,
     CudaLinker,
     VisualStudioLikeLinkerMixin,
+    WASMDynamicLinker,
 )
 from functools import lru_cache
 from .compilers import (
@@ -67,6 +77,8 @@
     ArmclangCPPCompiler,
     AppleClangCCompiler,
     AppleClangCPPCompiler,
+    AppleClangObjCCompiler,
+    AppleClangObjCPPCompiler,
     ClangCCompiler,
     ClangCPPCompiler,
     ClangObjCCompiler,
@@ -98,22 +110,35 @@
     NAGFortranCompiler,
     Open64FortranCompiler,
     PathScaleFortranCompiler,
+    NvidiaHPC_CCompiler,
+    NvidiaHPC_CPPCompiler,
+    NvidiaHPC_FortranCompiler,
     PGICCompiler,
     PGICPPCompiler,
     PGIFortranCompiler,
     RustCompiler,
     CcrxCCompiler,
     CcrxCPPCompiler,
+    Xc16CCompiler,
+    CompCertCCompiler,
+    C2000CCompiler,
+    C2000CPPCompiler,
     SunFortranCompiler,
     ValaCompiler,
     VisualStudioCCompiler,
     VisualStudioCPPCompiler,
 )
 
+if T.TYPE_CHECKING:
+    from .dependencies import ExternalProgram
+
 build_filename = 'meson.build'
 
 CompilersDict = T.Dict[str, Compiler]
 
+if T.TYPE_CHECKING:
+    import argparse
+
 def detect_gcovr(min_version='3.3', new_rootdir_version='4.2', log=False):
     gcovr_exe = 'gcovr'
     try:
@@ -128,9 +153,18 @@
         return gcovr_exe, mesonlib.version_compare(found, '>=' + new_rootdir_version)
     return None, None
 
-def find_coverage_tools():
+def detect_llvm_cov():
+    tools = get_llvm_tool_names('llvm-cov')
+    for tool in tools:
+        if mesonlib.exe_exists([tool, '--version']):
+            return tool
+    return None
+
+def find_coverage_tools() -> T.Tuple[T.Optional[str], T.Optional[str], T.Optional[str], T.Optional[str], T.Optional[str]]:
     gcovr_exe, gcovr_new_rootdir = detect_gcovr()
 
+    llvm_cov_exe = detect_llvm_cov()
+
     lcov_exe = 'lcov'
     genhtml_exe = 'genhtml'
 
@@ -139,17 +173,21 @@
     if not mesonlib.exe_exists([genhtml_exe, '--version']):
         genhtml_exe = None
 
-    return gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe
+    return gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe, llvm_cov_exe
 
-def detect_ninja(version: str = '1.5', log: bool = False) -> str:
+def detect_ninja(version: str = '1.7', log: bool = False) -> T.List[str]:
     r = detect_ninja_command_and_version(version, log)
     return r[0] if r else None
 
-def detect_ninja_command_and_version(version: str = '1.5', log: bool = False) -> (str, str):
+def detect_ninja_command_and_version(version: str = '1.7', log: bool = False) -> (T.List[str], str):
+    from .dependencies.base import ExternalProgram
     env_ninja = os.environ.get('NINJA', None)
     for n in [env_ninja] if env_ninja else ['ninja', 'ninja-build', 'samu']:
+        prog = ExternalProgram(n, silent=True)
+        if not prog.found():
+            continue
         try:
-            p, found = Popen_safe([n, '--version'])[0:2]
+            p, found = Popen_safe(prog.command + ['--version'])[0:2]
         except (FileNotFoundError, PermissionError):
             # Doesn't exist in PATH or isn't executable
             continue
@@ -157,7 +195,6 @@
         # Perhaps we should add a way for the caller to know the failure mode
         # (not found or too old)
         if p.returncode == 0 and mesonlib.version_compare(found, '>=' + version):
-            n = shutil.which(n)
             if log:
                 name = os.path.basename(n)
                 if name.endswith('-' + found):
@@ -166,8 +203,9 @@
                     name = 'ninja'
                 if name == 'samu':
                     name = 'samurai'
-                mlog.log('Found {}-{} at {}'.format(name, found, quote_arg(n)))
-            return (n, found)
+                mlog.log('Found {}-{} at {}'.format(name, found,
+                         ' '.join([quote_arg(x) for x in prog.command])))
+            return (prog.command, found)
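
detect_ninja_command_and_version now resolves each candidate through ExternalProgram before probing '--version', so a NINJA environment variable pointing at a wrapper script or an absolute path is honoured and the returned value is a full command list rather than a single path. A rough sketch of just the candidate selection (version probing and logging above are omitted; ninja_candidates is an illustrative name, not part of the source):

    import os

    def ninja_candidates():
        # An explicit NINJA env var replaces the default search list entirely.
        env_ninja = os.environ.get('NINJA', None)
        return [env_ninja] if env_ninja else ['ninja', 'ninja-build', 'samu']
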
 
 def get_llvm_tool_names(tool: str) -> T.List[str]:
     # Ordered list of possible suffixes of LLVM executables to try. Start with
@@ -177,6 +215,7 @@
     # unless it becomes a stable release.
     suffixes = [
         '', # base (no suffix)
+        '-10',  '100',
         '-9',   '90',
         '-8',   '80',
         '-7',   '70',
@@ -188,7 +227,7 @@
         '-3.7', '37',
         '-3.6', '36',
         '-3.5', '35',
-        '-10',    # Debian development snapshot
+        '-11',    # Debian development snapshot
         '-devel', # FreeBSD development snapshot
     ]
     names = []
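
get_llvm_tool_names appends each suffix to the base tool name, so the new detect_llvm_cov above ends up probing candidates such as llvm-cov, llvm-cov-10, llvm-cov100 and so on, down to llvm-cov-devel. A reduced sketch of the name generation, assuming the (abbreviated) suffix list from this hunk; llvm_tool_names is an illustrative name:

    def llvm_tool_names(tool, suffixes=('', '-10', '100', '-9', '90', '-11', '-devel')):
        # Candidates are probed in order; the unsuffixed name is tried first.
        return [tool + suffix for suffix in suffixes]

    # llvm_tool_names('llvm-cov')[:3] == ['llvm-cov', 'llvm-cov-10', 'llvm-cov100']
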
@@ -318,7 +357,7 @@
     """
     if mesonlib.is_windows():
         trial = detect_windows_arch(compilers)
-    elif mesonlib.is_freebsd() or mesonlib.is_netbsd() or mesonlib.is_openbsd():
+    elif mesonlib.is_freebsd() or mesonlib.is_netbsd() or mesonlib.is_openbsd() or mesonlib.is_qnx() or mesonlib.is_aix():
         trial = platform.processor().lower()
     else:
         trial = platform.machine().lower()
@@ -326,18 +365,22 @@
         trial = 'x86'
     elif trial == 'bepc':
         trial = 'x86'
+    elif trial == 'arm64':
+        trial = 'aarch64'
     elif trial.startswith('arm') or trial.startswith('earm'):
         trial = 'arm'
     elif trial.startswith(('powerpc64', 'ppc64')):
         trial = 'ppc64'
-    elif trial.startswith(('powerpc', 'ppc')):
-        trial = 'ppc'
-    elif trial == 'macppc':
+    elif trial.startswith(('powerpc', 'ppc')) or trial in {'macppc', 'power macintosh'}:
         trial = 'ppc'
     elif trial in ('amd64', 'x64', 'i86pc'):
         trial = 'x86_64'
     elif trial in {'sun4u', 'sun4v'}:
         trial = 'sparc64'
+    elif trial in {'mipsel', 'mips64el'}:
+        trial = trial.rstrip('el')
+    elif trial in {'ip30', 'ip35'}:
+        trial = 'mips64'
 
     # On Linux (and maybe others) there can be any mixture of 32/64 bit code in
     # the kernel, Python, system, 32-bit chroot on 64-bit host, etc. The only
@@ -354,6 +397,10 @@
         # ATM there is no 64 bit userland for PA-RISC. Thus always
         # report it as 32 bit for simplicity.
         trial = 'parisc'
+    elif trial == 'ppc':
+        # AIX always returns powerpc, check here for 64-bit
+        if any_compiler_has_define(compilers, '__64BIT__'):
+            trial = 'ppc64'
 
     if trial not in known_cpu_families:
         mlog.warning('Unknown CPU family {!r}, please report this at '
@@ -365,10 +412,11 @@
 def detect_cpu(compilers: CompilersDict):
     if mesonlib.is_windows():
         trial = detect_windows_arch(compilers)
-    elif mesonlib.is_freebsd() or mesonlib.is_netbsd() or mesonlib.is_openbsd():
+    elif mesonlib.is_freebsd() or mesonlib.is_netbsd() or mesonlib.is_openbsd() or mesonlib.is_aix():
         trial = platform.processor().lower()
     else:
         trial = platform.machine().lower()
+
     if trial in ('amd64', 'x64', 'i86pc'):
         trial = 'x86_64'
     if trial == 'x86_64':
@@ -384,15 +432,17 @@
     elif trial == 'e2k':
         # Make more precise CPU detection for Elbrus platform.
         trial = platform.processor().lower()
+    elif trial.startswith('mips'):
+        trial = trial.rstrip('el')
+
     # Add more quirks here as bugs are reported. Keep in sync with
     # detect_cpu_family() above.
     return trial
 
 def detect_system():
-    system = platform.system().lower()
-    if system.startswith('cygwin'):
+    if sys.platform == 'cygwin':
         return 'cygwin'
-    return system
+    return platform.system().lower()
 
 def detect_msys2_arch():
     if 'MSYSTEM_CARCH' in os.environ:
@@ -428,9 +478,10 @@
     true_build_cpu_family = detect_cpu_family({})
     return \
         (machine_info.cpu_family == true_build_cpu_family) or \
-        ((true_build_cpu_family == 'x86_64') and (machine_info.cpu_family == 'x86'))
+        ((true_build_cpu_family == 'x86_64') and (machine_info.cpu_family == 'x86')) or \
+        ((true_build_cpu_family == 'aarch64') and (machine_info.cpu_family == 'arm'))
 
-def search_version(text):
+def search_version(text: str) -> str:
     # Usually of the type 4.1.4 but compiler output may contain
     # stuff like this:
     # (Sourcery CodeBench Lite 2014.05-29) 4.8.3 20140320 (prerelease)
@@ -464,6 +515,13 @@
     match = version_regex.search(text)
     if match:
         return match.group(0)
+
+    # Try a simpler regex that matches strings like "blah 2020.01.100 foo" or "blah 2020.01 foo"
+    version_regex = re.compile(r"(\d{1,4}\.\d{1,4}\.?\d{0,4})")
+    match = version_regex.search(text)
+    if match:
+        return match.group(0)
+
     return 'unknown version'
 
 class Environment:
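
The fallback added to search_version above is only reached when the primary pattern finds nothing, and catches calendar-style versions such as the ones named in the comment. A small worked example of that second pattern in isolation:

    import re

    simple = re.compile(r"(\d{1,4}\.\d{1,4}\.?\d{0,4})")
    assert simple.search('blah 2020.01.100 foo').group(0) == '2020.01.100'
    assert simple.search('blah 2020.01 foo').group(0) == '2020.01'
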
@@ -471,7 +529,7 @@
     log_dir = 'meson-logs'
     info_dir = 'meson-info'
 
-    def __init__(self, source_dir, build_dir, options):
+    def __init__(self, source_dir: T.Optional[str], build_dir: T.Optional[str], options: 'argparse.Namespace') -> None:
         self.source_dir = source_dir
         self.build_dir = build_dir
         # Do not try to create build directories when build_dir is none.
@@ -484,10 +542,15 @@
             os.makedirs(self.log_dir, exist_ok=True)
             os.makedirs(self.info_dir, exist_ok=True)
             try:
-                self.coredata = coredata.load(self.get_build_dir())
+                self.coredata = coredata.load(self.get_build_dir())  # type: coredata.CoreData
                 self.first_invocation = False
             except FileNotFoundError:
                 self.create_new_coredata(options)
+            except coredata.MesonVersionMismatchException as e:
+                # This is routine, but tell the user the update happened
+                mlog.log('Regenerating configuration from scratch:', str(e))
+                coredata.read_cmd_line_file(self.build_dir, options)
+                self.create_new_coredata(options)
             except MesonException as e:
                 # If we stored previous command line options, we can recover from
                 # a broken/outdated coredata.
@@ -508,19 +571,17 @@
         # Stores machine infos, the only *three* machine one because we have a
         # target machine info on for the user (Meson never cares about the
         # target machine.)
-        machines = PerThreeMachineDefaultable()
+        machines = PerThreeMachineDefaultable()  # type: PerMachineDefaultable[MachineInfo]
 
         # Similar to coredata.compilers, but lower level in that there is no
         # meta data, only names/paths.
-        binaries = PerMachineDefaultable()
+        binaries = PerMachineDefaultable()  # type: PerMachineDefaultable[BinaryTable]
 
         # Misc other properties about each machine.
-        properties = PerMachineDefaultable()
+        properties = PerMachineDefaultable()  # type: PerMachineDefaultable[Properties]
 
-        # Store paths for native and cross build files. There is no target
-        # machine information here because nothing is installed for the target
-        # architecture, just the build and host architectures
-        paths = PerMachineDefaultable()
+        # CMake toolchain variables
+        cmakevars = PerMachineDefaultable()  # type: PerMachineDefaultable[CMakeVariables]
 
         ## Setup build machine defaults
 
@@ -532,45 +593,66 @@
         binaries.build = BinaryTable()
         properties.build = Properties()
 
+        # Unparsed options as given by the user in machine files, command line,
+        # and project()'s default_options. Keys are in the command line format:
+        # "[:][build.]option_name".
+        # Note that order matters because of 'buildtype', if it is after
+        # 'optimization' and 'debug' keys, it override them.
+        self.raw_options = collections.OrderedDict() # type: collections.OrderedDict[str, str]
+
         ## Read in native file(s) to override build machine configuration
 
         if self.coredata.config_files is not None:
-            config = MesonConfigFile.from_config_parser(
-                coredata.load_configs(self.coredata.config_files))
+            config = coredata.parse_machine_files(self.coredata.config_files)
             binaries.build = BinaryTable(config.get('binaries', {}))
-            paths.build = Directories(**config.get('paths', {}))
+            properties.build = Properties(config.get('properties', {}))
+            cmakevars.build = CMakeVariables(config.get('cmake', {}))
+            self.load_machine_file_options(config, properties.build)
 
         ## Read in cross file(s) to override host machine configuration
 
         if self.coredata.cross_files:
-            config = MesonConfigFile.from_config_parser(
-                coredata.load_configs(self.coredata.cross_files))
-            properties.host = Properties(config.get('properties', {}), False)
-            binaries.host = BinaryTable(config.get('binaries', {}), False)
+            config = coredata.parse_machine_files(self.coredata.cross_files)
+            properties.host = Properties(config.get('properties', {}))
+            binaries.host = BinaryTable(config.get('binaries', {}))
+            cmakevars.host = CMakeVariables(config.get('cmake', {}))
             if 'host_machine' in config:
                 machines.host = MachineInfo.from_literal(config['host_machine'])
             if 'target_machine' in config:
                 machines.target = MachineInfo.from_literal(config['target_machine'])
-            paths.host = Directories(**config.get('paths', {}))
+            # Keep only per machine options from the native file and prefix them
+            # with "build.". The cross file takes precedence over all other options.
+            self.keep_per_machine_options()
+            self.load_machine_file_options(config, properties.host)
 
         ## "freeze" now initialized configuration, and "save" to the class.
 
         self.machines = machines.default_missing()
         self.binaries = binaries.default_missing()
         self.properties = properties.default_missing()
-        self.paths = paths.default_missing()
+        self.cmakevars = cmakevars.default_missing()
+
+        # Command line options override those from cross/native files
+        self.raw_options.update(options.cmd_line_options)
+
+        # Take default value from env if not set in cross/native files or command line.
+        self.set_default_options_from_env()
 
-        exe_wrapper = self.binaries.host.lookup_entry('exe_wrapper')
+        # Warn if the user is using two different ways of setting build-type
+        # options that override each other
+        if 'buildtype' in self.raw_options and \
+           ('optimization' in self.raw_options or 'debug' in self.raw_options):
+            mlog.warning('Recommend using either -Dbuildtype or -Doptimization + -Ddebug. '
+                         'Using both is redundant since they override each other. '
+                         'See: https://mesonbuild.com/Builtin-options.html#build-type-options')
+
+        exe_wrapper = self.lookup_binary_entry(MachineChoice.HOST, 'exe_wrapper')
         if exe_wrapper is not None:
             from .dependencies import ExternalProgram
-            self.exe_wrapper = ExternalProgram.from_bin_list(
-                self.binaries.host,
-                'exe_wrapper')
+            self.exe_wrapper = ExternalProgram.from_bin_list(self, MachineChoice.HOST, 'exe_wrapper')
         else:
             self.exe_wrapper = None
 
-        self.cmd_line_options = options.cmd_line_options.copy()
-
         # List of potential compilers.
         if mesonlib.is_windows():
             # Intel C and C++ compiler is icl on Windows, but icc and icpc elsewhere.
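
The ordering comments above describe how raw_options is layered: native-file options first; on a cross build only their per-machine entries are kept, prefixed with 'build.'; then cross-file options; then command-line options; and finally environment-derived defaults, which go through setdefault so they never override anything already set. A compressed sketch of that layering with plain dicts standing in for the parsed machine files and the argparse namespace (simplified: the real keep_per_machine_options keeps only per-machine options):

    from collections import OrderedDict

    raw_options = OrderedDict()
    raw_options.update({'c_args': ['-O2']})               # native file [built-in options]
    raw_options = OrderedDict(                            # cross build: prefix kept options
        ('build.' + k, v) for k, v in raw_options.items())
    raw_options.update({'c_args': ['--target=arm']})      # cross file takes precedence
    raw_options.update({'buildtype': 'release'})          # command line overrides files
    raw_options.setdefault('pkg_config_path', ['/opt/pc'])  # env only fills gaps
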
@@ -595,11 +677,11 @@
                 self.default_objc = []
                 self.default_objcpp = []
             else:
-                self.default_c = ['cc', 'gcc', 'clang', 'pgcc', 'icc']
-                self.default_cpp = ['c++', 'g++', 'clang++', 'pgc++', 'icpc']
+                self.default_c = ['cc', 'gcc', 'clang', 'nvc', 'pgcc', 'icc']
+                self.default_cpp = ['c++', 'g++', 'clang++', 'nvc++', 'pgc++', 'icpc']
                 self.default_objc = ['cc', 'gcc', 'clang']
                 self.default_objcpp = ['c++', 'g++', 'clang++']
-            self.default_fortran = ['gfortran', 'flang', 'pgfortran', 'ifort', 'g95']
+            self.default_fortran = ['gfortran', 'flang', 'nvfortran', 'pgfortran', 'ifort', 'g95']
             self.default_cs = ['mcs', 'csc']
         self.default_d = ['ldc2', 'ldc', 'gdc', 'dmd']
         self.default_java = ['javac']
@@ -616,30 +698,81 @@
         self.clang_static_linker = ['llvm-ar']
         self.default_cmake = ['cmake']
         self.default_pkgconfig = ['pkg-config']
+        self.wrap_resolver = None
 
-    def create_new_coredata(self, options):
+    def load_machine_file_options(self, config, properties):
+        paths = config.get('paths')
+        if paths:
+            mlog.deprecation('The [paths] section is deprecated, use the [built-in options] section instead.')
+            self.raw_options.update(paths)
+        deprecated_properties = set()
+        for lang in compilers.all_languages:
+            deprecated_properties.add(lang + '_args')
+            deprecated_properties.add(lang + '_link_args')
+        for k, v in properties.properties.copy().items():
+            if k in deprecated_properties:
+                mlog.deprecation('{} in the [properties] section of the machine file is deprecated, use the [built-in options] section.'.format(k))
+                self.raw_options[k] = v
+                del properties.properties[k]
+        for section, values in config.items():
+            prefix = ''
+            if ':' in section:
+                subproject, section = section.split(':')
+                prefix = subproject + ':'
+            if section in ['project options', 'built-in options']:
+                self.raw_options.update({prefix + k: v for k, v in values.items()})
+
+    def keep_per_machine_options(self):
+        per_machine_options = {}
+        for optname, value in self.raw_options.items():
+            if self.coredata.is_per_machine_option(optname):
+                build_optname = self.coredata.insert_build_prefix(optname)
+                per_machine_options[build_optname] = value
+        self.raw_options = per_machine_options
+
+    def set_default_options_from_env(self):
+        for for_machine in MachineChoice:
+            p_env_pair = get_env_var_pair(for_machine, self.is_cross_build(), 'PKG_CONFIG_PATH')
+            if p_env_pair is not None:
+                p_env_var, p_env = p_env_pair
+
+                # PKG_CONFIG_PATH may contain duplicates, which must be
+                # removed, else a duplicates-in-array-option warning arises.
+                p_list = list(mesonlib.OrderedSet(p_env.split(':')))
+
+                key = 'pkg_config_path'
+                if for_machine == MachineChoice.BUILD:
+                    key = 'build.' + key
+
+                # Take env vars only on the first invocation; if the env changes
+                # when reconfiguring, it gets ignored.
+                # FIXME: We should remember if we took the value from env to warn
+                # if it changes on future invocations.
+                if self.first_invocation:
+                    self.raw_options.setdefault(key, p_list)
+
+    def create_new_coredata(self, options: 'argparse.Namespace') -> None:
         # WARNING: Don't use any values from coredata in __init__. It gets
         # re-initialized with project options by the interpreter during
         # build file parsing.
-        self.coredata = coredata.CoreData(options, self.scratch_dir)
-        # Used by the regenchecker script, which runs meson
-        self.coredata.meson_command = mesonlib.meson_command
+        # meson_command is used by the regenchecker script, which runs meson
+        self.coredata = coredata.CoreData(options, self.scratch_dir, mesonlib.meson_command)
         self.first_invocation = True
 
-    def is_cross_build(self) -> bool:
-        return self.coredata.is_cross_build()
+    def is_cross_build(self, when_building_for: MachineChoice = MachineChoice.HOST) -> bool:
+        return self.coredata.is_cross_build(when_building_for)
 
-    def dump_coredata(self):
+    def dump_coredata(self) -> str:
         return coredata.save(self.coredata, self.get_build_dir())
 
-    def get_script_dir(self):
+    def get_script_dir(self) -> str:
         import mesonbuild.scripts
         return os.path.dirname(mesonbuild.scripts.__file__)
 
-    def get_log_dir(self):
+    def get_log_dir(self) -> str:
         return self.log_dir
 
-    def get_coredata(self):
+    def get_coredata(self) -> coredata.CoreData:
         return self.coredata
 
     def get_build_command(self, unbuffered=False):
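
set_default_options_from_env above splits PKG_CONFIG_PATH on ':' and de-duplicates it while preserving order before seeding pkg_config_path, and only as a default on the first invocation. The real code uses mesonlib.OrderedSet; dict.fromkeys gives the same effect in a standalone sketch (pkg_config_path_from_env is an illustrative name):

    def pkg_config_path_from_env(value):
        # ':'-separated, duplicates removed, first occurrence kept.
        return list(dict.fromkeys(value.split(':')))

    # pkg_config_path_from_env('/usr/lib/pkgconfig:/opt/pc:/usr/lib/pkgconfig')
    # == ['/usr/lib/pkgconfig', '/opt/pc']
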
@@ -667,6 +800,12 @@
     def is_library(self, fname):
         return is_library(fname)
 
+    def lookup_binary_entry(self, for_machine: MachineChoice, name: str):
+        return self.binaries[for_machine].lookup_entry(
+            for_machine,
+            self.is_cross_build(),
+            name)
+
     @staticmethod
     def get_gnu_compiler_defines(compiler):
         """
@@ -711,12 +850,34 @@
         minor = defines.get('__LCC_MINOR__', '0')
         return dot.join((generation, major, minor))
 
-    def _get_compilers(self, lang, for_machine):
+    @staticmethod
+    def get_clang_compiler_defines(compiler):
+        """
+        Get the list of Clang pre-processor defines
+        """
+        args = compiler + ['-E', '-dM', '-']
+        p, output, error = Popen_safe(args, write='', stdin=subprocess.PIPE)
+        if p.returncode != 0:
+            raise EnvironmentException('Unable to get clang pre-processor defines:\n' + output + error)
+        defines = {}
+        for line in output.split('\n'):
+            if not line:
+                continue
+            d, *rest = line.split(' ', 2)
+            if d != '#define':
+                continue
+            if len(rest) == 1:
+                defines[rest[0]] = True
+            elif len(rest) == 2:
+                defines[rest[0]] = rest[1]
+        return defines
+
+    def _get_compilers(self, lang: str, for_machine: MachineChoice) -> T.Tuple[T.List[T.List[str]], T.List[str], T.Optional['ExternalProgram']]:
         '''
         The list of compilers is detected in the exact same way for
         C, C++, ObjC, ObjC++, Fortran, CS so consolidate it here.
         '''
-        value = self.binaries[for_machine].lookup_entry(lang)
+        value = self.lookup_binary_entry(for_machine, lang)
         if value is not None:
             compilers, ccache = BinaryTable.parse_entry(value)
             # Return value has to be a list of compiler 'choices'
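
get_clang_compiler_defines above runs the compiler with '-E -dM' on empty input and turns each '#define NAME VALUE' line into a dictionary entry, with value-less defines mapping to True. The same parsing loop applied to canned output, as a standalone sketch (parse_defines is an illustrative name):

    def parse_defines(output):
        defines = {}
        for line in output.split('\n'):
            if not line:
                continue
            d, *rest = line.split(' ', 2)
            if d != '#define':
                continue
            if len(rest) == 1:
                defines[rest[0]] = True
            elif len(rest) == 2:
                defines[rest[0]] = rest[1]
        return defines

    # parse_defines('#define __clang__ 1\n#define __APPLE__\n')
    # == {'__clang__': '1', '__APPLE__': True}
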
@@ -742,9 +903,16 @@
                 errmsg += '\nRunning "{0}" gave "{1}"'.format(c, e)
         raise EnvironmentException(errmsg)
 
+    @staticmethod
+    def __failed_to_detect_linker(compiler: T.List[str], args: T.List[str], stdout: str, stderr: str) -> 'T.NoReturn':
+        msg = 'Unable to detect linker for compiler "{} {}"\nstdout: {}\nstderr: {}'.format(
+            ' '.join(compiler), ' '.join(args), stdout, stderr)
+        raise EnvironmentException(msg)
+
     def _guess_win_linker(self, compiler: T.List[str], comp_class: Compiler,
                           for_machine: MachineChoice, *,
-                          use_linker_prefix: bool = True) -> 'DynamicLinker':
+                          use_linker_prefix: bool = True, invoked_directly: bool = True,
+                          extra_args: T.Optional[T.List[str]] = None) -> 'DynamicLinker':
         self.coredata.add_lang_args(comp_class.language, comp_class, for_machine, self)
 
         # Explicitly pass logo here so that we can get the version of link.exe
@@ -755,14 +923,17 @@
         elif isinstance(comp_class.LINKER_PREFIX, list):
             check_args = comp_class.LINKER_PREFIX + ['/logo'] + comp_class.LINKER_PREFIX + ['--version']
 
-        check_args += self.coredata.compiler_options[for_machine][comp_class.language + '_args'].value
+        check_args += self.coredata.compiler_options[for_machine][comp_class.language]['args'].value
 
         override = []  # type: T.List[str]
-        value = self.binaries[for_machine].lookup_entry(comp_class.language + '_ld')
+        value = self.lookup_binary_entry(for_machine, comp_class.language + '_ld')
         if value is not None:
             override = comp_class.use_linker_args(value[0])
             check_args += override
 
+        if extra_args is not None:
+            check_args.extend(extra_args)
+
         p, o, _ = Popen_safe(compiler + check_args)
         if o.startswith('LLD'):
             if '(compatible with GNU linkers)' in o:
@@ -770,18 +941,19 @@
                     compiler, for_machine, comp_class.LINKER_PREFIX,
                     override, version=search_version(o))
 
-        if value is not None:
+        if value is not None and invoked_directly:
             compiler = value
+            # We've already handled the non-direct case above
 
         p, o, e = Popen_safe(compiler + check_args)
         if o.startswith('LLD'):
             return ClangClDynamicLinker(
                 for_machine, [],
                 prefix=comp_class.LINKER_PREFIX if use_linker_prefix else [],
-                exelist=compiler, version=search_version(o))
+                exelist=compiler, version=search_version(o), direct=invoked_directly)
         elif 'OPTLINK' in o:
             # Optlink's stdout *may* begin with a \r character.
-            return OptlinkDynamicLinker(for_machine, version=search_version(o))
+            return OptlinkDynamicLinker(compiler, for_machine, version=search_version(o))
         elif o.startswith('Microsoft') or e.startswith('Microsoft'):
             out = o or e
             match = re.search(r'.*(X86|X64|ARM|ARM64).*', out)
@@ -793,13 +965,13 @@
             return MSVCDynamicLinker(
                 for_machine, [], machine=target, exelist=compiler,
                 prefix=comp_class.LINKER_PREFIX if use_linker_prefix else [],
-                version=search_version(out))
+                version=search_version(out), direct=invoked_directly)
         elif 'GNU coreutils' in o:
             raise EnvironmentException(
                 "Found GNU link.exe instead of MSVC link.exe. This link.exe "
                 "is not a linker. You may need to reorder entries to your "
                 "%PATH% variable to resolve this.")
-        raise EnvironmentException('Unable to determine dynamic linker')
+        self.__failed_to_detect_linker(compiler, check_args, o, e)
 
     def _guess_nix_linker(self, compiler: T.List[str], comp_class: T.Type[Compiler],
                           for_machine: MachineChoice, *,
@@ -812,8 +984,8 @@
         :extra_args: Any additional arguments required (such as a source file)
         """
         self.coredata.add_lang_args(comp_class.language, comp_class, for_machine, self)
-        extra_args = T.cast(T.List[str], extra_args or [])
-        extra_args += self.coredata.compiler_options[for_machine][comp_class.language + '_args'].value
+        extra_args = extra_args or []
+        extra_args += self.coredata.compiler_options[for_machine][comp_class.language]['args'].value
 
         if isinstance(comp_class.LINKER_PREFIX, str):
             check_args = [comp_class.LINKER_PREFIX + '--version'] + extra_args
@@ -821,18 +993,23 @@
             check_args = comp_class.LINKER_PREFIX + ['--version'] + extra_args
 
         override = []  # type: T.List[str]
-        value = self.binaries[for_machine].lookup_entry(comp_class.language + '_ld')
+        value = self.lookup_binary_entry(for_machine, comp_class.language + '_ld')
         if value is not None:
             override = comp_class.use_linker_args(value[0])
             check_args += override
 
         _, o, e = Popen_safe(compiler + check_args)
-        v = search_version(o)
+        v = search_version(o + e)
         if o.startswith('LLD'):
             linker = LLVMDynamicLinker(
                 compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v)  # type: DynamicLinker
+        elif 'Snapdragon' in e and 'LLVM' in e:
+            linker = QualcommLLVMDynamicLinker(
+                compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v)  # type: DynamicLinker
         elif e.startswith('lld-link: '):
-            # Toolchain wrapper got in the way; this happens with e.g. https://github.com/mstorsjo/llvm-mingw
+            # The LLD MinGW frontend didn't respond to --version before version 9.0.0,
+            # and produced an error message about failing to link (when no object
+            # files were specified), instead of printing the version number.
             # Let's try to extract the linker invocation command to grab the version.
 
             _, o, e = Popen_safe(compiler + check_args + ['-v'])
@@ -860,24 +1037,46 @@
             else:
                 v = 'unknown version'
             linker = AppleDynamicLinker(compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v)
-        elif 'GNU' in o:
-            if 'gold' in o:
+        elif 'GNU' in o or 'GNU' in e:
+            if 'gold' in o or 'gold' in e:
                 cls = GnuGoldDynamicLinker
             else:
                 cls = GnuBFDDynamicLinker
             linker = cls(compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v)
         elif 'Solaris' in e or 'Solaris' in o:
+            for line in (o+e).split('\n'):
+                if 'ld: Software Generation Utilities' in line:
+                    v = line.split(':')[2].lstrip()
+                    break
+            else:
+                v = 'unknown version'
             linker = SolarisDynamicLinker(
                 compiler, for_machine, comp_class.LINKER_PREFIX, override,
+                version=v)
+        elif 'ld: 0706-012 The -- flag is not recognized' in e:
+            if isinstance(comp_class.LINKER_PREFIX, str):
+                _, _, e = Popen_safe(compiler + [comp_class.LINKER_PREFIX + '-V'] + extra_args)
+            else:
+                _, _, e = Popen_safe(compiler + comp_class.LINKER_PREFIX + ['-V'] + extra_args)
+            linker = AIXDynamicLinker(
+                compiler, for_machine, comp_class.LINKER_PREFIX, override,
                 version=search_version(e))
         else:
-            raise EnvironmentException('Unable to determine dynamic linker')
+            self.__failed_to_detect_linker(compiler, check_args, o, e)
         return linker
 
-    def _detect_c_or_cpp_compiler(self, lang: str, for_machine: MachineChoice) -> Compiler:
+    def _detect_c_or_cpp_compiler(self, lang: str, for_machine: MachineChoice, *, override_compiler: T.Optional[T.List[str]] = None) -> Compiler:
+        """Shared implementation for finding the C or C++ compiler to use.
+
+        the override_compiler option is provided to allow compilers which use
+        the compiler (GCC or Clang usually) as their shared linker, to find
+        the linker they need.
+        """
         popen_exceptions = {}
         compilers, ccache, exe_wrap = self._get_compilers(lang, for_machine)
-        is_cross = not self.machines.matches_build_machine(for_machine)
+        if override_compiler is not None:
+            compilers = [override_compiler]
+        is_cross = self.is_cross_build(for_machine)
         info = self.machines[for_machine]
 
         for compiler in compilers:
@@ -909,7 +1108,13 @@
                 arg = '--vsn'
             elif 'ccrx' in compiler_name:
                 arg = '-v'
-            elif 'icl' in compiler_name:
+            elif 'xc16' in compiler_name:
+                arg = '--version'
+            elif 'ccomp' in compiler_name:
+                arg = '-version'
+            elif 'cl2000' in compiler_name:
+                arg = '-version'
+            elif compiler_name in {'icl', 'icl.exe'}:
                 # if you pass anything to icl you get stuck in a pager
                 arg = ''
             else:
@@ -932,6 +1137,9 @@
                 guess_gcc_or_lcc = 'gcc'
             if 'e2k' in out and 'lcc' in out:
                 guess_gcc_or_lcc = 'lcc'
+            if 'Microchip Technology' in out:
+                # Its version output contains "Free Software Foundation", so it must not be mistaken for GCC
+                guess_gcc_or_lcc = False
 
             if guess_gcc_or_lcc:
                 defines = self.get_gnu_compiler_defines(compiler)
@@ -950,15 +1158,27 @@
 
                 return cls(
                     ccache + compiler, version, for_machine, is_cross,
-                    info, exe_wrap, defines, full_version=full_version,
+                    info, exe_wrap, defines=defines, full_version=full_version,
                     linker=linker)
 
             if 'Emscripten' in out:
                 cls = EmscriptenCCompiler if lang == 'c' else EmscriptenCPPCompiler
                 self.coredata.add_lang_args(cls.language, cls, for_machine, self)
+
+                # emcc requires a file input in order to pass arguments to the
+                # linker. It'll exit with an error code, but still print the
+                # linker version. Old emcc versions ignore -Wl,--version completely,
+                # however. We'll report "unknown version" in that case.
+                with tempfile.NamedTemporaryFile(suffix='.c') as f:
+                    cmd = compiler + [cls.LINKER_PREFIX + "--version", f.name]
+                    _, o, _ = Popen_safe(cmd)
+
+                linker = WASMDynamicLinker(
+                    compiler, for_machine, cls.LINKER_PREFIX,
+                    [], version=search_version(o))
                 return cls(
                     ccache + compiler, version, for_machine, is_cross, info,
-                    exe_wrap, full_version=full_version)
+                    exe_wrap, linker=linker, full_version=full_version)
 
             if 'armclang' in out:
                 # The compiler version is not present in the first line of output,
@@ -997,11 +1217,13 @@
                 cls = ClangClCCompiler if lang == 'c' else ClangClCPPCompiler
                 linker = self._guess_win_linker(['lld-link'], cls, for_machine)
                 return cls(
-                    compiler, version, for_machine, is_cross, info, exe_wrap,
-                    target, linker=linker)
-            if 'clang' in out:
+                    compiler, version, for_machine, is_cross, info, target,
+                    exe_wrap, linker=linker)
+            if 'clang' in out or 'Clang' in out:
                 linker = None
 
+                defines = self.get_clang_compiler_defines(compiler)
+
                 # Even if the for_machine is darwin, we could be using vanilla
                 # clang.
                 if 'Apple' in out:
@@ -1022,7 +1244,7 @@
 
                 return cls(
                     ccache + compiler, version, for_machine, is_cross, info,
-                    exe_wrap, full_version=full_version, linker=linker)
+                    exe_wrap, defines=defines, full_version=full_version, linker=linker)
 
             if 'Intel(R) C++ Intel(R)' in err:
                 version = search_version(err)
@@ -1031,8 +1253,8 @@
                 self.coredata.add_lang_args(cls.language, cls, for_machine, self)
                 linker = XilinkDynamicLinker(for_machine, [], version=version)
                 return cls(
-                    compiler, version, for_machine, is_cross, info=info,
-                    exe_wrap=exe_wrap, target=target, linker=linker)
+                    compiler, version, for_machine, is_cross, info, target,
+                    exe_wrap, linker=linker)
             if 'Microsoft' in out or 'Microsoft' in err:
                 # Latest versions of Visual Studio print version
                 # number to stderr but earlier ones print version
@@ -1046,7 +1268,7 @@
                     m = 'Failed to detect MSVC compiler version: stderr was\n{!r}'
                     raise EnvironmentException(m.format(err))
                 cl_signature = lookat.split('\n')[0]
-                match = re.search('.*(x86|x64|ARM|ARM64)$', cl_signature)
+                match = re.search(r'.*(x86|x64|ARM|ARM64)([^_A-Za-z0-9]|$)', cl_signature)
                 if match:
                     target = match.group(1)
                 else:
@@ -1055,8 +1277,8 @@
                 cls = VisualStudioCCompiler if lang == 'c' else VisualStudioCPPCompiler
                 linker = self._guess_win_linker(['link'], cls, for_machine)
                 return cls(
-                    compiler, version, for_machine, is_cross, info, exe_wrap,
-                    target, linker=linker)
+                    compiler, version, for_machine, is_cross, info, target,
+                    exe_wrap, full_version=cl_signature, linker=linker)
             if 'PGI Compilers' in out:
                 cls = PGICCompiler if lang == 'c' else PGICPPCompiler
                 self.coredata.add_lang_args(cls.language, cls, for_machine, self)
@@ -1064,6 +1286,13 @@
                 return cls(
                     ccache + compiler, version, for_machine, is_cross,
                     info, exe_wrap, linker=linker)
+            if 'NVIDIA Compilers and Tools' in out:
+                cls = NvidiaHPC_CCompiler if lang == 'c' else NvidiaHPC_CPPCompiler
+                self.coredata.add_lang_args(cls.language, cls, for_machine, self)
+                linker = NvidiaHPC_DynamicLinker(compiler, for_machine, cls.LINKER_PREFIX, [], version=version)
+                return cls(
+                    ccache + compiler, version, for_machine, is_cross,
+                    info, exe_wrap, linker=linker)
             if '(ICC)' in out:
                 cls = IntelCCompiler if lang == 'c' else IntelCPPCompiler
                 l = self._guess_nix_linker(compiler, cls, for_machine)
@@ -1085,6 +1314,31 @@
                     ccache + compiler, version, for_machine, is_cross, info,
                     exe_wrap, full_version=full_version, linker=linker)
 
+            if 'Microchip Technology' in out:
+                cls = Xc16CCompiler if lang == 'c' else Xc16CCompiler
+                self.coredata.add_lang_args(cls.language, cls, for_machine, self)
+                linker = Xc16DynamicLinker(for_machine, version=version)
+                return cls(
+                    ccache + compiler, version, for_machine, is_cross, info,
+                    exe_wrap, full_version=full_version, linker=linker)
+
+            if 'CompCert' in out:
+                cls = CompCertCCompiler
+                self.coredata.add_lang_args(cls.language, cls, for_machine, self)
+                linker = CompCertDynamicLinker(for_machine, version=version)
+                return cls(
+                    ccache + compiler, version, for_machine, is_cross, info,
+                    exe_wrap, full_version=full_version, linker=linker)
+
+            if 'TMS320C2000 C/C++' in out:
+                cls = C2000CCompiler if lang == 'c' else C2000CPPCompiler
+                self.coredata.add_lang_args(cls.language, cls, for_machine, self)
+                linker = C2000DynamicLinker(for_machine, version=version)
+                return cls(
+                    ccache + compiler, version, for_machine, is_cross, info,
+                    exe_wrap, full_version=full_version, linker=linker)
+
+
         self._handle_exceptions(popen_exceptions, compilers)
 
     def detect_c_compiler(self, for_machine):
@@ -1095,14 +1349,12 @@
 
     def detect_cuda_compiler(self, for_machine):
         popen_exceptions = {}
-        is_cross = not self.machines.matches_build_machine(for_machine)
+        is_cross = self.is_cross_build(for_machine)
         compilers, ccache, exe_wrap = self._get_compilers('cuda', for_machine)
         info = self.machines[for_machine]
         for compiler in compilers:
             if isinstance(compiler, str):
                 compiler = [compiler]
-            else:
-                raise EnvironmentException()
             arg = '--version'
             try:
                 p, out, err = Popen_safe(compiler + [arg])
@@ -1128,14 +1380,14 @@
             cpp_compiler = self.detect_cpp_compiler(for_machine)
             cls = CudaCompiler
             self.coredata.add_lang_args(cls.language, cls, for_machine, self)
-            linker = CudaLinker(compiler, for_machine, 'nvlink', CudaCompiler.LINKER_PREFIX, [], version=CudaLinker.parse_version())
+            linker = CudaLinker(compiler, for_machine, CudaCompiler.LINKER_PREFIX, [], version=CudaLinker.parse_version())
             return cls(ccache + compiler, version, for_machine, is_cross, exe_wrap, host_compiler=cpp_compiler, info=info, linker=linker)
         raise EnvironmentException('Could not find suitable CUDA compiler: "' + ' '.join(compilers) + '"')
 
     def detect_fortran_compiler(self, for_machine: MachineChoice):
         popen_exceptions = {}
         compilers, ccache, exe_wrap = self._get_compilers('fortran', for_machine)
-        is_cross = not self.machines.matches_build_machine(for_machine)
+        is_cross = self.is_cross_build(for_machine)
         info = self.machines[for_machine]
         for compiler in compilers:
             if isinstance(compiler, str):
@@ -1196,8 +1448,8 @@
                     self.coredata.add_lang_args(cls.language, cls, for_machine, self)
                     linker = XilinkDynamicLinker(for_machine, [], version=version)
                     return cls(
-                        compiler, version, for_machine, is_cross, target,
-                        info, exe_wrap, linker=linker)
+                        compiler, version, for_machine, is_cross, info,
+                        target, exe_wrap, linker=linker)
 
                 if 'ifort (IFORT)' in out:
                     linker = self._guess_nix_linker(compiler, IntelFortranCompiler, for_machine)
@@ -1219,6 +1471,15 @@
                         compiler, version, for_machine, is_cross, info, exe_wrap,
                         full_version=full_version, linker=linker)
 
+                if 'NVIDIA Compilers and Tools' in out:
+                    cls = NvidiaHPC_FortranCompiler
+                    self.coredata.add_lang_args(cls.language, cls, for_machine, self)
+                    linker = PGIDynamicLinker(compiler, for_machine,
+                                              cls.LINKER_PREFIX, [], version=version)
+                    return cls(
+                        compiler, version, for_machine, is_cross, info, exe_wrap,
+                        full_version=full_version, linker=linker)
+
                 if 'flang' in out or 'clang' in out:
                     linker = self._guess_nix_linker(
                         compiler, FlangFortranCompiler, for_machine)
@@ -1242,7 +1503,7 @@
 
         self._handle_exceptions(popen_exceptions, compilers)
 
-    def get_scratch_dir(self):
+    def get_scratch_dir(self) -> str:
         return self.scratch_dir
 
     def detect_objc_compiler(self, for_machine: MachineInfo) -> 'Compiler':
@@ -1251,10 +1512,10 @@
     def detect_objcpp_compiler(self, for_machine: MachineInfo) -> 'Compiler':
         return self._detect_objc_or_objcpp_compiler(for_machine, False)
 
-    def _detect_objc_or_objcpp_compiler(self, for_machine: MachineInfo, objc: bool) -> 'Compiler':
+    def _detect_objc_or_objcpp_compiler(self, for_machine: MachineChoice, objc: bool) -> 'Compiler':
         popen_exceptions = {}
         compilers, ccache, exe_wrap = self._get_compilers('objc' if objc else 'objcpp', for_machine)
-        is_cross = not self.machines.matches_build_machine(for_machine)
+        is_cross = self.is_cross_build(for_machine)
         info = self.machines[for_machine]
 
         for compiler in compilers:
@@ -1280,7 +1541,14 @@
                     exe_wrap, defines, linker=linker)
             if 'clang' in out:
                 linker = None
-                comp = ClangObjCCompiler if objc else ClangObjCPPCompiler
+                defines = self.get_clang_compiler_defines(compiler)
+                if not defines:
+                    popen_exceptions[' '.join(compiler)] = 'no pre-processor defines'
+                    continue
+                if 'Apple' in out:
+                    comp = AppleClangObjCCompiler if objc else AppleClangObjCPPCompiler
+                else:
+                    comp = ClangObjCCompiler if objc else ClangObjCPPCompiler
                 if 'windows' in out or self.machines[for_machine].is_windows():
                     # If we're in a MINGW context this actually will use a gnu style ld
                     try:
@@ -1293,11 +1561,11 @@
                         compiler, comp, for_machine)
                 return comp(
                     ccache + compiler, version, for_machine,
-                    is_cross, info, exe_wrap, linker=linker)
+                    is_cross, info, exe_wrap, linker=linker, defines=defines)
         self._handle_exceptions(popen_exceptions, compilers)
 
     def detect_java_compiler(self, for_machine):
-        exelist = self.binaries.host.lookup_entry('java')
+        exelist = self.lookup_binary_entry(for_machine, 'java')
         info = self.machines[for_machine]
         if exelist is None:
             # TODO support fallback
@@ -1306,7 +1574,7 @@
         try:
             p, out, err = Popen_safe(exelist + ['-version'])
         except OSError:
-            raise EnvironmentException('Could not execute Java compiler "%s"' % ' '.join(exelist))
+            raise EnvironmentException('Could not execute Java compiler "{}"'.format(' '.join(exelist)))
         if 'javac' in out or 'javac' in err:
             version = search_version(err if 'javac' in err else out)
             if not version or version == 'unknown version':
@@ -1336,14 +1604,16 @@
                 cls = MonoCompiler
             elif "Visual C#" in out:
                 cls = VisualStudioCsCompiler
+            else:
+                continue
             self.coredata.add_lang_args(cls.language, cls, for_machine, self)
             return cls(comp, version, for_machine, info)
 
         self._handle_exceptions(popen_exceptions, compilers)
 
     def detect_vala_compiler(self, for_machine):
-        exelist = self.binaries.host.lookup_entry('vala')
-        is_cross = not self.machines.matches_build_machine(for_machine)
+        exelist = self.lookup_binary_entry(for_machine, 'vala')
+        is_cross = self.is_cross_build(for_machine)
         info = self.machines[for_machine]
         if exelist is None:
             # TODO support fallback
@@ -1352,7 +1622,7 @@
         try:
             p, out = Popen_safe(exelist + ['--version'])[0:2]
         except OSError:
-            raise EnvironmentException('Could not execute Vala compiler "%s"' % ' '.join(exelist))
+            raise EnvironmentException('Could not execute Vala compiler "{}"'.format(' '.join(exelist)))
         version = search_version(out)
         if 'Vala' in out:
             comp_class = ValaCompiler
@@ -1360,22 +1630,22 @@
             return comp_class(exelist, version, for_machine, info, is_cross)
         raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"')
 
-    def detect_rust_compiler(self, for_machine):
-        popen_exceptions = {}
-        compilers, ccache, exe_wrap = self._get_compilers('rust', for_machine)
-        is_cross = not self.machines.matches_build_machine(for_machine)
+    def detect_rust_compiler(self, for_machine: MachineChoice) -> RustCompiler:
+        popen_exceptions = {}  # type: T.Dict[str, Exception]
+        compilers, _, exe_wrap = self._get_compilers('rust', for_machine)
+        is_cross = self.is_cross_build(for_machine)
         info = self.machines[for_machine]
 
         cc = self.detect_c_compiler(for_machine)
         is_link_exe = isinstance(cc.linker, VisualStudioLikeLinkerMixin)
-        override = self.binaries[for_machine].lookup_entry('rust_ld')
+        override = self.lookup_binary_entry(for_machine, 'rust_ld')
 
         for compiler in compilers:
             if isinstance(compiler, str):
                 compiler = [compiler]
             arg = ['--version']
             try:
-                p, out = Popen_safe(compiler + arg)[0:2]
+                out = Popen_safe(compiler + arg)[1]
             except OSError as e:
                 popen_exceptions[' '.join(compiler + arg)] = e
                 continue
@@ -1392,17 +1662,30 @@
                 # the default use that, and second add the necessary arguments
                 # to rust to use -fuse-ld
 
+                if any(a.startswith('linker=') for a in compiler):
+                    mlog.warning(
+                        'Please do not put -C linker= in your compiler '
+                        'command; set rust_ld=command in your cross file '
+                        'or use the RUST_LD environment variable. Meson '
+                        'will override your selection otherwise.')
+
                 if override is None:
                     extra_args = {}
                     always_args = []
                     if is_link_exe:
-                        compiler.extend(['-C', 'linker={}'.format(cc.linker.exelist[0])])
+                        compiler.extend(RustCompiler.use_linker_args(cc.linker.exelist[0]))
                         extra_args['direct'] = True
                         extra_args['machine'] = cc.linker.machine
-                    elif not ((info.is_darwin() and isinstance(cc, AppleClangCCompiler)) or
-                              isinstance(cc, GnuCCompiler)):
-                        c = cc.exelist[1] if cc.exelist[0].endswith('ccache') else cc.exelist[0]
-                        compiler.extend(['-C', 'linker={}'.format(c)])
+                    else:
+                        exelist = cc.linker.exelist.copy()
+                        if 'ccache' in exelist[0]:
+                            del exelist[0]
+                        c = exelist.pop(0)
+                        compiler.extend(RustCompiler.use_linker_args(c))
+
+                        # Also ensure that we pass any extra arguments to the linker
+                        for l in exelist:
+                            compiler.extend(['-C', 'link-arg={}'.format(l)])
 
                     # This trickery with type() gets us the class of the linker
                     # so we can initialize a new copy for the Rust Compiler
@@ -1416,21 +1699,22 @@
                 elif 'link' in override[0]:
                     linker = self._guess_win_linker(
                         override, RustCompiler, for_machine, use_linker_prefix=False)
+                    # rustc takes linker arguments without a prefix, and
+                    # inserts the correct prefix itself.
                     linker.direct = True
+                    compiler.extend(RustCompiler.use_linker_args(linker.exelist[0]))
                 else:
-                    # We're creating a new type of "C" compiler, that has rust
-                    # as it's language. This is gross, but I can't figure out
-                    # another way to handle this, because rustc is actually
-                    # invoking the c compiler as it's linker.
-                    b = type('b', (type(cc), ), {})
-                    b.language = RustCompiler.language
-                    linker = self._guess_nix_linker(cc.exelist, b, for_machine)
+                    # On Linux and macOS rust will invoke the C compiler for
+                    # linking; on Windows it will use lld-link or link.exe.
+                    # We simply ask for the C compiler that corresponds to it,
+                    # and use that.
+                    cc = self._detect_c_or_cpp_compiler('c', for_machine, override_compiler=override)
+                    linker = cc.linker
 
                     # Of course, we're not going to use any of that, we just
                     # need it to get the proper arguments to pass to rustc
-                    c = cc.exelist[1] if cc.exelist[0].endswith('ccache') else cc.exelist[0]
-                    compiler.extend(['-C', 'linker={}'.format(c)])
-                    compiler.extend(['-C', 'link-args={}'.format(' '.join(cc.use_linker_args(override[0])))])
+                    c = linker.exelist[1] if linker.exelist[0].endswith('ccache') else linker.exelist[0]
+                    compiler.extend(RustCompiler.use_linker_args(c))
 
                 self.coredata.add_lang_args(RustCompiler.language, RustCompiler, for_machine, self)
                 return RustCompiler(
@@ -1454,7 +1738,7 @@
             arch = 'x86_mscoff'
 
         popen_exceptions = {}
-        is_cross = not self.machines.matches_build_machine(for_machine)
+        is_cross = self.is_cross_build(for_machine)
         results, ccache, exe_wrap = self._get_compilers('d', for_machine)
         for exelist in results:
             # Search for a D compiler.
@@ -1477,52 +1761,66 @@
 
             if 'LLVM D compiler' in out:
                 # LDC seems to require a file
-                if info.is_windows() or info.is_cygwin():
-                    # Getting LDC on windows to give useful linker output when
-                    # not doing real work is painfully hard. It ships with a
-                    # version of lld-link, so unless we think the user wants
-                    # link.exe, just assume that we're going to use lld-link
-                    # with it.
-                    linker = self._guess_win_linker(
-                        ['link' if is_msvc else 'lld-link'],
-                        compilers.LLVMDCompiler, for_machine, use_linker_prefix=False)
-                else:
-                    with tempfile.NamedTemporaryFile(suffix='.d') as f:
+                # We cannot use NamedTemporaryFile on Windows: it is documented
+                # not to work for our use case. So just use mkstemp and keep a
+                # single code path for simplicity.
+                o, f = tempfile.mkstemp('.d')
+                os.close(o)
+
+                try:
+                    if info.is_windows() or info.is_cygwin():
+                        objfile = os.path.basename(f)[:-1] + 'obj'
+                        linker = self._guess_win_linker(
+                            exelist,
+                            compilers.LLVMDCompiler, for_machine,
+                            use_linker_prefix=True, invoked_directly=False,
+                            extra_args=[f])
+                    else:
                         # LDC writes an object file to the current working directory.
                         # Clean it up.
-                        objectfile = os.path.basename(f.name)[:-1] + 'o'
+                        objfile = os.path.basename(f)[:-1] + 'o'
                         linker = self._guess_nix_linker(
                             exelist, compilers.LLVMDCompiler, for_machine,
-                            extra_args=[f.name])
-                        try:
-                            os.unlink(objectfile)
-                        except Exception:
-                            # Thank you Windows file system semantics and virus scanners.
-                            pass
+                            extra_args=[f])
+                finally:
+                    mesonlib.windows_proof_rm(f)
+                    mesonlib.windows_proof_rm(objfile)
+
                 return compilers.LLVMDCompiler(
                     exelist, version, for_machine, info, arch,
                     full_version=full_version, linker=linker)
             elif 'gdc' in out:
                 linker = self._guess_nix_linker(exelist, compilers.GnuDCompiler, for_machine)
                 return compilers.GnuDCompiler(
-                    exelist, version, for_machine, info, arch, is_cross, exe_wrap,
+                    exelist, version, for_machine, info, arch,
+                    exe_wrapper=exe_wrap, is_cross=is_cross,
                     full_version=full_version, linker=linker)
             elif 'The D Language Foundation' in out or 'Digital Mars' in out:
                 # DMD seems to require a file
-                if info.is_windows() or info.is_cygwin():
-                    if is_msvc:
-                        linker_cmd = ['link']
-                    elif arch == 'x86':
-                        linker_cmd = ['optlink']
+                # We cannot use NamedTemporaryFile on Windows: it is documented
+                # not to work for our use case. So just use mkstemp and keep a
+                # single code path for simplicity.
+                o, f = tempfile.mkstemp('.d')
+                os.close(o)
+
+                # DMD has different detection logic for x86 and x86_64
+                arch_arg = '-m64' if arch == 'x86_64' else '-m32'
+
+                try:
+                    if info.is_windows() or info.is_cygwin():
+                        objfile = os.path.basename(f)[:-1] + 'obj'
+                        linker = self._guess_win_linker(
+                            exelist, compilers.DmdDCompiler, for_machine,
+                            invoked_directly=False, extra_args=[f, arch_arg])
                     else:
-                        linker_cmd = ['lld-link']
-                    linker = self._guess_win_linker(linker_cmd, compilers.DmdDCompiler, for_machine,
-                                                    use_linker_prefix=False)
-                else:
-                    with tempfile.NamedTemporaryFile(suffix='.d') as f:
+                        objfile = os.path.basename(f)[:-1] + 'o'
                         linker = self._guess_nix_linker(
                             exelist, compilers.DmdDCompiler, for_machine,
-                            extra_args=[f.name])
+                            extra_args=[f, arch_arg])
+                finally:
+                    mesonlib.windows_proof_rm(f)
+                    mesonlib.windows_proof_rm(objfile)
+
                 return compilers.DmdDCompiler(
                     exelist, version, for_machine, info, arch,
                     full_version=full_version, linker=linker)
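Both the LDC and DMD branches above now share the same probing pattern: create a temporary .d file with mkstemp (NamedTemporaryFile keeps the file open, which breaks this use on Windows), hand its path to the linker-guessing call, and unconditionally remove both the source and the object file the compiler drops into the working directory. A minimal sketch of the pattern, with a stand-in probe callable in place of the real _guess_*_linker helpers:

    import os
    import tempfile
    import typing as T

    def probe_with_temp_source(probe: T.Callable[[str], None]) -> None:
        # 'probe' stands in for the real linker-guessing call.
        fd, src = tempfile.mkstemp('.d')
        os.close(fd)                                  # only the path is needed
        objfile = os.path.basename(src)[:-1] + 'o'    # e.g. foo.d -> foo.o in cwd
        try:
            probe(src)
        finally:
            for leftover in (src, objfile):
                try:
                    os.unlink(leftover)               # tolerate scanners still holding the file
                except OSError:
                    pass

    # probe_with_temp_source(lambda path: print('would compile', path))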
@@ -1531,8 +1829,8 @@
         self._handle_exceptions(popen_exceptions, compilers)
 
     def detect_swift_compiler(self, for_machine):
-        exelist = self.binaries.host.lookup_entry('swift')
-        is_cross = not self.machines.matches_build_machine(for_machine)
+        exelist = self.lookup_binary_entry(for_machine, 'swift')
+        is_cross = self.is_cross_build(for_machine)
         info = self.machines[for_machine]
         if exelist is None:
             # TODO support fallback
@@ -1541,7 +1839,7 @@
         try:
             p, _, err = Popen_safe(exelist + ['-v'])
         except OSError:
-            raise EnvironmentException('Could not execute Swift compiler "%s"' % ' '.join(exelist))
+            raise EnvironmentException('Could not execute Swift compiler "{}"'.format(' '.join(exelist)))
         version = search_version(err)
         if 'Swift' in err:
             # As for 5.0.1 swiftc *requires* a file to check the linker:
@@ -1591,16 +1889,13 @@
         return comp
 
     def detect_static_linker(self, compiler):
-        linker = self.binaries[compiler.for_machine].lookup_entry('ar')
+        linker = self.lookup_binary_entry(compiler.for_machine, 'ar')
         if linker is not None:
             linkers = [linker]
         else:
-            evar = 'AR'
             defaults = [[l] for l in self.default_static_linker]
             if isinstance(compiler, compilers.CudaCompiler):
                 linkers = [self.cuda_static_linker] + defaults
-            elif evar in os.environ:
-                linkers = [split_args(os.environ[evar])]
             elif isinstance(compiler, compilers.VisualStudioLikeCompiler):
                 linkers = [self.vs_static_linker, self.clang_cl_static_linker]
             elif isinstance(compiler, compilers.GnuCompiler):
@@ -1626,6 +1921,8 @@
         for linker in linkers:
             if not {'lib', 'lib.exe', 'llvm-lib', 'llvm-lib.exe', 'xilib', 'xilib.exe'}.isdisjoint(linker):
                 arg = '/?'
+            elif not {'ar2000', 'ar2000.exe'}.isdisjoint(linker):
+                arg = '?'
             else:
                 arg = '--version'
             try:
@@ -1649,21 +1946,27 @@
                 return DLinker(linker, compiler.arch)
             if err.startswith('Renesas') and ('rlink' in linker or 'rlink.exe' in linker):
                 return CcrxLinker(linker)
+            if out.startswith('GNU ar') and ('xc16-ar' in linker or 'xc16-ar.exe' in linker):
+                return Xc16Linker(linker)
+            if out.startswith('TMS320C2000') and ('ar2000' in linker or 'ar2000.exe' in linker):
+                return C2000Linker(linker)
+            if out.startswith('The CompCert'):
+                return CompCertLinker(linker)
             if p.returncode == 0:
                 return ArLinker(linker)
             if p.returncode == 1 and err.startswith('usage'): # OSX
                 return ArLinker(linker)
             if p.returncode == 1 and err.startswith('Usage'): # AIX
-                return ArLinker(linker)
+                return AIXArLinker(linker)
             if p.returncode == 1 and err.startswith('ar: bad option: --'): # Solaris
                 return ArLinker(linker)
         self._handle_exceptions(popen_exceptions, linkers, 'linker')
-        raise EnvironmentException('Unknown static linker "%s"' % ' '.join(linkers))
+        raise EnvironmentException('Unknown static linker "{}"'.format(' '.join(linkers)))
 
-    def get_source_dir(self):
+    def get_source_dir(self) -> str:
         return self.source_dir
 
-    def get_build_dir(self):
+    def get_build_dir(self) -> str:
         return self.build_dir
 
     def get_import_lib_dir(self) -> str:
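The detect_static_linker() hunk above probes each candidate archiver with the one flag it understands ('/?' for MSVC-style lib tools, '?' for TI's ar2000, '--version' for everything else) and classifies it from the output. A standalone sketch of that dispatch, returning a label instead of a linker object:

    import subprocess
    import typing as T

    def classify_archiver(cmd: T.List[str]) -> str:
        # Simplified version of the probe loop above; labels instead of linker classes.
        if not {'lib', 'lib.exe', 'llvm-lib', 'llvm-lib.exe', 'xilib', 'xilib.exe'}.isdisjoint(cmd):
            arg = '/?'
        elif not {'ar2000', 'ar2000.exe'}.isdisjoint(cmd):
            arg = '?'
        else:
            arg = '--version'
        p = subprocess.run(cmd + [arg], capture_output=True, text=True)
        if p.stdout.startswith('TMS320C2000'):
            return 'c2000'
        if p.stdout.startswith('The CompCert'):
            return 'compcert'
        if p.returncode == 1 and p.stderr.startswith('Usage'):   # AIX ar prints usage to stderr
            return 'aix-ar'
        return 'ar' if p.returncode == 0 else 'unknown'

    # classify_archiver(['ar'])  -> 'ar' on most GNU/Linux systems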
diff -Nru meson-0.53.2/mesonbuild/interpreterbase.py meson-0.57.0+really0.56.2/mesonbuild/interpreterbase.py
--- meson-0.53.2/mesonbuild/interpreterbase.py	2019-12-29 22:47:27.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/interpreterbase.py	2021-01-06 10:39:48.000000000 +0000
@@ -18,20 +18,60 @@
 from . import mparser, mesonlib, mlog
 from . import environment, dependencies
 
+import abc
 import os, copy, re
+import collections.abc
 from functools import wraps
+import typing as T
 
-class ObjectHolder:
-    def __init__(self, obj, subproject=None):
-        self.held_object = obj
-        self.subproject = subproject
+TV_fw_var = T.Union[str, int, float, bool, list, dict, 'InterpreterObject', 'ObjectHolder']
+TV_fw_args = T.List[T.Union[mparser.BaseNode, TV_fw_var]]
+TV_fw_kwargs = T.Dict[str, T.Union[mparser.BaseNode, TV_fw_var]]
+
+TV_func = T.TypeVar('TV_func', bound=T.Callable[..., T.Any])
+
+TYPE_elementary = T.Union[str, int, float, bool]
+TYPE_var = T.Union[TYPE_elementary, T.List[T.Any], T.Dict[str, T.Any], 'InterpreterObject', 'ObjectHolder']
+TYPE_nvar = T.Union[TYPE_var, mparser.BaseNode]
+TYPE_nkwargs = T.Dict[str, TYPE_nvar]
+TYPE_key_resolver = T.Callable[[mparser.BaseNode], str]
+
+class InterpreterObject:
+    def __init__(self) -> None:
+        self.methods = {}  # type: T.Dict[str, T.Callable[[T.List[TYPE_nvar], TYPE_nkwargs], TYPE_var]]
+        # Current node set during a method call. This can be used as the
+        # location when printing a warning message.
+        self.current_node = None  # type: mparser.BaseNode
+
+    def method_call(
+                self,
+                method_name: str,
+                args: TV_fw_args,
+                kwargs: TV_fw_kwargs
+            ) -> TYPE_var:
+        if method_name in self.methods:
+            method = self.methods[method_name]
+            if not getattr(method, 'no-args-flattening', False):
+                args = flatten(args)
+            return method(args, kwargs)
+        raise InvalidCode('Unknown method "%s" in object.' % method_name)
+
+TV_InterpreterObject = T.TypeVar('TV_InterpreterObject')
 
-    def __repr__(self):
+class ObjectHolder(T.Generic[TV_InterpreterObject]):
+    def __init__(self, obj: InterpreterObject, subproject: T.Optional[str] = None) -> None:
+        self.held_object = obj        # type: InterpreterObject
+        self.subproject = subproject  # type: str
+
+    def __repr__(self) -> str:
         return '<Holder: {!r}>'.format(self.held_object)
 
+class MesonVersionString(str):
+    pass
+
 # Decorators for method calls.
 
-def check_stringlist(a, msg='Arguments must be strings.'):
+def check_stringlist(a: T.Any, msg: str = 'Arguments must be strings.') -> None:
     if not isinstance(a, list):
         mlog.debug('Not a list:', str(a))
         raise InvalidArguments('Argument not a list.')
@@ -39,11 +79,11 @@
         mlog.debug('Element not a string:', str(a))
         raise InvalidArguments(msg)
 
-def _get_callee_args(wrapped_args, want_subproject=False):
+def _get_callee_args(wrapped_args: T.Sequence[T.Any], want_subproject: bool = False) -> T.Tuple[T.Any, mparser.BaseNode, TV_fw_args, TV_fw_kwargs, T.Optional[str]]:
     s = wrapped_args[0]
     n = len(wrapped_args)
     # Raise an error if the codepaths are not there
-    subproject = None
+    subproject = None  # type: T.Optional[str]
     if want_subproject and n == 2:
         if hasattr(s, 'subproject'):
             # Interpreter base types have 2 args: self, node
@@ -100,12 +140,13 @@
     kwargs = kwargs if kwargs is not None else {}
     return s, node, args, kwargs, subproject
 
-def flatten(args):
+def flatten(args: T.Union[TYPE_nvar, T.List[TYPE_nvar]]) -> T.List[TYPE_nvar]:
     if isinstance(args, mparser.StringNode):
-        return args.value
-    if isinstance(args, (int, str, mesonlib.File, InterpreterObject)):
-        return args
-    result = []
+        assert isinstance(args.value, str)
+        return [args.value]
+    if not isinstance(args, collections.abc.Sequence):
+        return [args]
+    result = []  # type: T.List[TYPE_nvar]
     for a in args:
         if isinstance(a, list):
             rest = flatten(a)
@@ -116,86 +157,108 @@
             result.append(a)
     return result
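flatten() now always returns a list: a StringNode is unwrapped into a one-element list, non-sequence scalars are wrapped, and nested lists are spliced in depth-first order. The behaviour on plain Python values (parser nodes left out) can be illustrated with a simplified version:

    def flatten_plain(args):
        # Simplified flatten(): plain values only, no mparser nodes.
        if not isinstance(args, list):
            return [args]
        result = []
        for a in args:
            result.extend(flatten_plain(a) if isinstance(a, list) else [a])
        return result

    # flatten_plain([1, [2, [3]], 'x'])  -> [1, 2, 3, 'x']
    # flatten_plain('x')                 -> ['x']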
 
-def noPosargs(f):
+def noPosargs(f: TV_func) -> TV_func:
     @wraps(f)
-    def wrapped(*wrapped_args, **wrapped_kwargs):
+    def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
         args = _get_callee_args(wrapped_args)[2]
         if args:
             raise InvalidArguments('Function does not take positional arguments.')
         return f(*wrapped_args, **wrapped_kwargs)
-    return wrapped
+    return T.cast(TV_func, wrapped)
 
-def noKwargs(f):
+def builtinMethodNoKwargs(f: TV_func) -> TV_func:
     @wraps(f)
-    def wrapped(*wrapped_args, **wrapped_kwargs):
+    def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+        node = wrapped_args[0].current_node
+        method_name = wrapped_args[2]
+        kwargs = wrapped_args[4]
+        if kwargs:
+            mlog.warning('Method {!r} does not take keyword arguments.'.format(method_name),
+                         'This will become a hard error in the future',
+                         location=node)
+        return f(*wrapped_args, **wrapped_kwargs)
+    return T.cast(TV_func, wrapped)
+
+def noKwargs(f: TV_func) -> TV_func:
+    @wraps(f)
+    def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
         kwargs = _get_callee_args(wrapped_args)[3]
         if kwargs:
             raise InvalidArguments('Function does not take keyword arguments.')
         return f(*wrapped_args, **wrapped_kwargs)
-    return wrapped
+    return T.cast(TV_func, wrapped)
 
-def stringArgs(f):
+def stringArgs(f: TV_func) -> TV_func:
     @wraps(f)
-    def wrapped(*wrapped_args, **wrapped_kwargs):
+    def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
         args = _get_callee_args(wrapped_args)[2]
         assert(isinstance(args, list))
         check_stringlist(args)
         return f(*wrapped_args, **wrapped_kwargs)
-    return wrapped
+    return T.cast(TV_func, wrapped)
 
-def noArgsFlattening(f):
+def noArgsFlattening(f: TV_func) -> TV_func:
     setattr(f, 'no-args-flattening', True)  # noqa: B010
     return f
 
-def disablerIfNotFound(f):
+def disablerIfNotFound(f: TV_func) -> TV_func:
     @wraps(f)
-    def wrapped(*wrapped_args, **wrapped_kwargs):
+    def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
         kwargs = _get_callee_args(wrapped_args)[3]
         disabler = kwargs.pop('disabler', False)
         ret = f(*wrapped_args, **wrapped_kwargs)
         if disabler and not ret.held_object.found():
             return Disabler()
         return ret
-    return wrapped
+    return T.cast(TV_func, wrapped)
 
 class permittedKwargs:
 
-    def __init__(self, permitted):
-        self.permitted = permitted
+    def __init__(self, permitted: T.Set[str]):
+        self.permitted = permitted  # type: T.Set[str]
 
-    def __call__(self, f):
+    def __call__(self, f: TV_func) -> TV_func:
         @wraps(f)
-        def wrapped(*wrapped_args, **wrapped_kwargs):
+        def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
             s, node, args, kwargs, _ = _get_callee_args(wrapped_args)
             for k in kwargs:
                 if k not in self.permitted:
                     mlog.warning('''Passed invalid keyword argument "{}".'''.format(k), location=node)
                     mlog.warning('This will become a hard error in the future.')
             return f(*wrapped_args, **wrapped_kwargs)
-        return wrapped
-
+        return T.cast(TV_func, wrapped)
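The typed decorators in this hunk (noPosargs, noKwargs, stringArgs, disablerIfNotFound, permittedKwargs) all follow the same shape: look up the call site via _get_callee_args(), enforce one rule, delegate to the wrapped function, and cast the wrapper back so mypy keeps the decorated signature. A compact sketch of that shape with a generic predicate; the names here are illustrative, not Meson API:

    import typing as T
    from functools import wraps

    F = T.TypeVar('F', bound=T.Callable[..., T.Any])

    def forbid_kwargs(f: F) -> F:
        # Illustrative decorator following the same wrap-check-delegate pattern.
        @wraps(f)
        def wrapped(*args: T.Any, **kwargs: T.Any) -> T.Any:
            if kwargs:
                raise TypeError('{}() does not take keyword arguments'.format(f.__name__))
            return f(*args)
        return T.cast(F, wrapped)

    @forbid_kwargs
    def greet(name: str) -> str:
        return 'hello ' + name

    # greet('meson') is fine; greet(name='meson') raises TypeError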
 
-class FeatureCheckBase:
+class FeatureCheckBase(metaclass=abc.ABCMeta):
     "Base class for feature version checks"
 
-    def __init__(self, feature_name, version):
-        self.feature_name = feature_name
-        self.feature_version = version
+    # In Python 3.6 we could just forward-declare this, but in 3.5 we can't.
+    # It will be overwritten by the subclasses out of necessity.
+    feature_registry = {}  # type: T.ClassVar[T.Dict[str, T.Dict[str, T.Set[str]]]]
+
+    def __init__(self, feature_name: str, version: str, extra_message: T.Optional[str] = None):
+        self.feature_name = feature_name  # type: str
+        self.feature_version = version    # type: str
+        self.extra_message = extra_message or ''  # type: str
 
     @staticmethod
-    def get_target_version(subproject):
+    def get_target_version(subproject: str) -> str:
         # Don't do any checks if project() has not been parsed yet
         if subproject not in mesonlib.project_meson_versions:
             return ''
         return mesonlib.project_meson_versions[subproject]
 
-    def use(self, subproject):
+    @staticmethod
+    @abc.abstractmethod
+    def check_version(target_version: str, feature_version: str) -> bool:
+        pass
+
+    def use(self, subproject: str) -> None:
         tv = self.get_target_version(subproject)
         # No target version
         if tv == '':
             return
         # Target version is new enough
-        if mesonlib.version_compare_condition_with_min(tv, self.feature_version):
+        if self.check_version(tv, self.feature_version):
             return
         # Feature is too new for target version, register it
         if subproject not in self.feature_registry:
@@ -212,7 +275,7 @@
         self.log_usage_warning(tv)
 
     @classmethod
-    def report(cls, subproject):
+    def report(cls, subproject: str) -> None:
         if subproject not in cls.feature_registry:
             return
         warning_str = cls.get_warning_str_prefix(cls.get_target_version(subproject))
@@ -221,59 +284,103 @@
             warning_str += '\n * {}: {}'.format(version, fv[version])
         mlog.warning(warning_str)
 
-    def __call__(self, f):
+    def log_usage_warning(self, tv: str) -> None:
+        raise InterpreterException('log_usage_warning not implemented')
+
+    @staticmethod
+    def get_warning_str_prefix(tv: str) -> str:
+        raise InterpreterException('get_warning_str_prefix not implemented')
+
+    def __call__(self, f: TV_func) -> TV_func:
         @wraps(f)
-        def wrapped(*wrapped_args, **wrapped_kwargs):
+        def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
             subproject = _get_callee_args(wrapped_args, want_subproject=True)[4]
             if subproject is None:
                 raise AssertionError('{!r}'.format(wrapped_args))
             self.use(subproject)
             return f(*wrapped_args, **wrapped_kwargs)
-        return wrapped
+        return T.cast(TV_func, wrapped)
+
+    @classmethod
+    def single_use(cls, feature_name: str, version: str, subproject: str,
+                   extra_message: T.Optional[str] = None) -> None:
+        """Oneline version that instantiates and calls use()."""
+        cls(feature_name, version, extra_message).use(subproject)
+
 
 class FeatureNew(FeatureCheckBase):
     """Checks for new features"""
+
     # Class variable, shared across all instances
     #
     # Format: {subproject: {feature_version: set(feature_names)}}
-    feature_registry = {}
+    feature_registry = {}  # type: T.ClassVar[T.Dict[str, T.Dict[str, T.Set[str]]]]
 
     @staticmethod
-    def get_warning_str_prefix(tv):
+    def check_version(target_version: str, feature_version: str) -> bool:
+        return mesonlib.version_compare_condition_with_min(target_version, feature_version)
+
+    @staticmethod
+    def get_warning_str_prefix(tv: str) -> str:
         return 'Project specifies a minimum meson_version \'{}\' but uses features which were added in newer versions:'.format(tv)
 
-    def log_usage_warning(self, tv):
-        mlog.warning('Project targeting \'{}\' but tried to use feature introduced '
-                     'in \'{}\': {}'.format(tv, self.feature_version, self.feature_name))
+    def log_usage_warning(self, tv: str) -> None:
+        args = [
+            'Project targeting', "'{}'".format(tv),
+            'but tried to use feature introduced in',
+            "'{}':".format(self.feature_version),
+            '{}.'.format(self.feature_name),
+        ]
+        if self.extra_message:
+            args.append(self.extra_message)
+        mlog.warning(*args)
 
 class FeatureDeprecated(FeatureCheckBase):
     """Checks for deprecated features"""
+
     # Class variable, shared across all instances
     #
     # Format: {subproject: {feature_version: set(feature_names)}}
-    feature_registry = {}
+    feature_registry = {}  # type: T.ClassVar[T.Dict[str, T.Dict[str, T.Set[str]]]]
 
     @staticmethod
-    def get_warning_str_prefix(tv):
-        return 'Deprecated features used:'
+    def check_version(target_version: str, feature_version: str) -> bool:
+        # For deprecation checks we need to return the inverse of FeatureNew checks
+        return not mesonlib.version_compare_condition_with_min(target_version, feature_version)
 
-    def log_usage_warning(self, tv):
-        mlog.deprecation('Project targeting \'{}\' but tried to use feature '
-                         'deprecated since \'{}\': {}'
-                         ''.format(tv, self.feature_version, self.feature_name))
+    @staticmethod
+    def get_warning_str_prefix(tv: str) -> str:
+        return 'Deprecated features used:'
 
+    def log_usage_warning(self, tv: str) -> None:
+        args = [
+            'Project targeting', "'{}'".format(tv),
+            'but tried to use feature deprecated since',
+            "'{}':".format(self.feature_version),
+            '{}.'.format(self.feature_name),
+        ]
+        if self.extra_message:
+            args.append(self.extra_message)
+        mlog.warning(*args)
+
+
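FeatureNew and FeatureDeprecated above differ only in check_version(): a feature is flagged either because it is newer than the project's meson_version target, or because the target is already new enough that the feature counts as deprecated. Flagged uses are collected per subproject in feature_registry as {subproject: {feature_version: set(feature_names)}} and reported in a single warning. A tiny standalone model of that bookkeeping (not the Meson classes themselves):

    import typing as T

    registry = {}  # type: T.Dict[str, T.Dict[str, T.Set[str]]]

    def register_use(subproject: str, feature_version: str, feature_name: str) -> None:
        # Mirrors the registry layout used by FeatureCheckBase.use().
        registry.setdefault(subproject, {}).setdefault(feature_version, set()).add(feature_name)

    register_use('', '0.47.0', 'dict')
    register_use('', '0.53.0', 'Dictionary entry using non literal key')
    # registry == {'': {'0.47.0': {'dict'},
    #                   '0.53.0': {'Dictionary entry using non literal key'}}}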
+class FeatureCheckKwargsBase(metaclass=abc.ABCMeta):
+
+    @property
+    @abc.abstractmethod
+    def feature_check_class(self) -> T.Type[FeatureCheckBase]:
+        pass
 
-class FeatureCheckKwargsBase:
-    def __init__(self, feature_name, feature_version, kwargs):
+    def __init__(self, feature_name: str, feature_version: str,
+                 kwargs: T.List[str], extra_message: T.Optional[str] = None):
         self.feature_name = feature_name
         self.feature_version = feature_version
         self.kwargs = kwargs
+        self.extra_message = extra_message
 
-    def __call__(self, f):
+    def __call__(self, f: TV_func) -> TV_func:
         @wraps(f)
-        def wrapped(*wrapped_args, **wrapped_kwargs):
-            # Which FeatureCheck class to invoke
-            FeatureCheckClass = self.feature_check_class
+        def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
             kwargs, subproject = _get_callee_args(wrapped_args, want_subproject=True)[3:5]
             if subproject is None:
                 raise AssertionError('{!r}'.format(wrapped_args))
@@ -281,9 +388,10 @@
                 if arg not in kwargs:
                     continue
                 name = arg + ' arg in ' + self.feature_name
-                FeatureCheckClass(name, self.feature_version).use(subproject)
+                self.feature_check_class.single_use(
+                        name, self.feature_version, subproject, self.extra_message)
             return f(*wrapped_args, **wrapped_kwargs)
-        return wrapped
+        return T.cast(TV_func, wrapped)
 
 class FeatureNewKwargs(FeatureCheckKwargsBase):
     feature_check_class = FeatureNew
@@ -310,37 +418,22 @@
 class BreakRequest(BaseException):
     pass
 
-class InterpreterObject:
-    def __init__(self):
-        self.methods = {}
-        # Current node set during a method call. This can be used as location
-        # when printing a warning message during a method call.
-        self.current_node = None
-
-    def method_call(self, method_name, args, kwargs):
-        if method_name in self.methods:
-            method = self.methods[method_name]
-            if not getattr(method, 'no-args-flattening', False):
-                args = flatten(args)
-            return method(args, kwargs)
-        raise InvalidCode('Unknown method "%s" in object.' % method_name)
-
 class MutableInterpreterObject(InterpreterObject):
-    def __init__(self):
+    def __init__(self) -> None:
         super().__init__()
 
 class Disabler(InterpreterObject):
-    def __init__(self):
+    def __init__(self) -> None:
         super().__init__()
         self.methods.update({'found': self.found_method})
 
-    def found_method(self, args, kwargs):
+    def found_method(self, args: T.Sequence[T.Any], kwargs: T.Dict[str, T.Any]) -> bool:
         return False
 
-def is_disabler(i) -> bool:
+def is_disabler(i: T.Any) -> bool:
     return isinstance(i, Disabler)
 
-def is_arg_disabled(arg) -> bool:
+def is_arg_disabled(arg: T.Any) -> bool:
     if is_disabler(arg):
         return True
     if isinstance(arg, list):
@@ -349,7 +442,7 @@
                 return True
     return False
 
-def is_disabled(args, kwargs) -> bool:
+def is_disabled(args: T.Sequence[T.Any], kwargs: T.Dict[str, T.Any]) -> bool:
     for i in args:
         if is_arg_disabled(i):
             return True
@@ -358,20 +451,34 @@
             return True
     return False
 
+def default_resolve_key(key: mparser.BaseNode) -> str:
+    if not isinstance(key, mparser.IdNode):
+        raise InterpreterException('Invalid kwargs format.')
+    return key.value
+
 class InterpreterBase:
-    def __init__(self, source_root, subdir):
+    elementary_types = (int, float, str, bool, list)
+
+    def __init__(self, source_root: str, subdir: str, subproject: str):
         self.source_root = source_root
-        self.funcs = {}
-        self.builtin = {}
+        self.funcs = {}    # type: T.Dict[str, T.Callable[[mparser.BaseNode, T.List[TYPE_nvar], T.Dict[str, TYPE_nvar]], TYPE_var]]
+        self.builtin = {}  # type: T.Dict[str, InterpreterObject]
         self.subdir = subdir
-        self.variables = {}
+        self.root_subdir = subdir
+        self.subproject = subproject
+        self.variables = {}  # type: T.Dict[str, TYPE_var]
         self.argument_depth = 0
         self.current_lineno = -1
         # Current node set during a function call. This can be used as location
         # when printing a warning message during a method call.
-        self.current_node = None
+        self.current_node = None  # type: mparser.BaseNode
+        # This is set to `version_string` when this statement is evaluated:
+        # meson.version().version_compare(version_string)
+        # If it was part of an if-clause, it is used to temporarily override the
+        # current meson version target within that if-block.
+        self.tmp_meson_version = None # type: T.Optional[str]
 
-    def load_root_meson_file(self):
+    def load_root_meson_file(self) -> None:
         mesonfile = os.path.join(self.source_root, self.subdir, environment.build_filename)
         if not os.path.isfile(mesonfile):
             raise InvalidArguments('Missing Meson file in %s' % mesonfile)
@@ -381,22 +488,22 @@
             raise InvalidCode('Builder file is empty.')
         assert(isinstance(code, str))
         try:
-            self.ast = mparser.Parser(code, self.subdir).parse()
+            self.ast = mparser.Parser(code, mesonfile).parse()
         except mesonlib.MesonException as me:
             me.file = mesonfile
             raise me
 
-    def join_path_strings(self, args):
+    def join_path_strings(self, args: T.Sequence[str]) -> str:
         return os.path.join(*args).replace('\\', '/')
 
-    def parse_project(self):
+    def parse_project(self) -> None:
         """
         Parses project() and initializes languages, compilers etc. Do this
         early because we need this before we parse the rest of the AST.
         """
         self.evaluate_codeblock(self.ast, end=1)
 
-    def sanity_check_ast(self):
+    def sanity_check_ast(self) -> None:
         if not isinstance(self.ast, mparser.CodeBlockNode):
             raise InvalidCode('AST is of invalid type. Possibly a bug in the parser.')
         if not self.ast.lines:
@@ -405,7 +512,7 @@
         if not isinstance(first, mparser.FunctionNode) or first.func_name != 'project':
             raise InvalidCode('First statement must be a call to project')
 
-    def run(self):
+    def run(self) -> None:
         # Evaluate everything after the first line, which is project() because
         # we already parsed that in self.parse_project()
         try:
@@ -413,7 +520,7 @@
         except SubdirDoneRequest:
             pass
 
-    def evaluate_codeblock(self, node, start=0, end=None):
+    def evaluate_codeblock(self, node: mparser.CodeBlockNode, start: int = 0, end: T.Optional[int] = None) -> None:
         if node is None:
             return
         if not isinstance(node, mparser.CodeBlockNode):
@@ -429,18 +536,20 @@
                 self.current_lineno = cur.lineno
                 self.evaluate_statement(cur)
             except Exception as e:
-                if not hasattr(e, 'lineno'):
-                    e.lineno = cur.lineno
-                    e.colno = cur.colno
-                    e.file = os.path.join(self.subdir, 'meson.build')
+                if getattr(e, 'lineno', None) is None:
+                    # We are doing the equivalent of setattr here and mypy does not like it
+                    e.lineno = cur.lineno                                                             # type: ignore
+                    e.colno = cur.colno                                                               # type: ignore
+                    e.file = os.path.join(self.source_root, self.subdir, environment.build_filename)  # type: ignore
                 raise e
             i += 1 # In THE FUTURE jump over blocks and stuff.
 
-    def evaluate_statement(self, cur):
+    def evaluate_statement(self, cur: mparser.BaseNode) -> T.Optional[TYPE_var]:
+        self.current_node = cur
         if isinstance(cur, mparser.FunctionNode):
             return self.function_call(cur)
         elif isinstance(cur, mparser.AssignmentNode):
-            return self.assignment(cur)
+            self.assignment(cur)
         elif isinstance(cur, mparser.MethodNode):
             return self.method_call(cur)
         elif isinstance(cur, mparser.StringNode):
@@ -470,9 +579,9 @@
         elif isinstance(cur, mparser.ArithmeticNode):
             return self.evaluate_arithmeticstatement(cur)
         elif isinstance(cur, mparser.ForeachClauseNode):
-            return self.evaluate_foreach(cur)
+            self.evaluate_foreach(cur)
         elif isinstance(cur, mparser.PlusAssignmentNode):
-            return self.evaluate_plusassign(cur)
+            self.evaluate_plusassign(cur)
         elif isinstance(cur, mparser.IndexNode):
             return self.evaluate_indexing(cur)
         elif isinstance(cur, mparser.TernaryNode):
@@ -481,75 +590,81 @@
             raise ContinueRequest()
         elif isinstance(cur, mparser.BreakNode):
             raise BreakRequest()
-        elif self.is_elementary_type(cur):
+        elif isinstance(cur, self.elementary_types):
             return cur
         else:
             raise InvalidCode("Unknown statement.")
+        return None
 
-    def evaluate_arraystatement(self, cur):
+    def evaluate_arraystatement(self, cur: mparser.ArrayNode) -> list:
         (arguments, kwargs) = self.reduce_arguments(cur.args)
         if len(kwargs) > 0:
             raise InvalidCode('Keyword arguments are invalid in array construction.')
         return arguments
 
     @FeatureNew('dict', '0.47.0')
-    def evaluate_dictstatement(self, cur):
-        (arguments, kwargs) = self.reduce_arguments(cur.args)
-        assert (not arguments)
-        result = {}
-        self.argument_depth += 1
-        for key, value in kwargs.items():
+    def evaluate_dictstatement(self, cur: mparser.DictNode) -> TYPE_nkwargs:
+        def resolve_key(key: mparser.BaseNode) -> str:
             if not isinstance(key, mparser.StringNode):
-                FeatureNew('Dictionary entry using non literal key', '0.53.0').use(self.subproject)
-            key = self.evaluate_statement(key)
-            if not isinstance(key, str):
+                FeatureNew.single_use('Dictionary entry using non literal key', '0.53.0', self.subproject)
+            str_key = self.evaluate_statement(key)
+            if not isinstance(str_key, str):
                 raise InvalidArguments('Key must be a string')
-            if key in result:
-                raise InvalidArguments('Duplicate dictionary key: {}'.format(key))
-            result[key] = value
-        self.argument_depth -= 1
-        return result
+            return str_key
+        arguments, kwargs = self.reduce_arguments(cur.args, key_resolver=resolve_key, duplicate_key_error='Duplicate dictionary key: {}')
+        assert not arguments
+        return kwargs
 
-    def evaluate_notstatement(self, cur):
+    def evaluate_notstatement(self, cur: mparser.NotNode) -> T.Union[bool, Disabler]:
         v = self.evaluate_statement(cur.value)
-        if is_disabler(v):
+        if isinstance(v, Disabler):
             return v
         if not isinstance(v, bool):
             raise InterpreterException('Argument to "not" is not a boolean.')
         return not v
 
-    def evaluate_if(self, node):
+    def evaluate_if(self, node: mparser.IfClauseNode) -> T.Optional[Disabler]:
         assert(isinstance(node, mparser.IfClauseNode))
         for i in node.ifs:
+            # Reset self.tmp_meson_version to know if it gets set during this
+            # statement evaluation.
+            self.tmp_meson_version = None
             result = self.evaluate_statement(i.condition)
-            if is_disabler(result):
+            if isinstance(result, Disabler):
                 return result
             if not(isinstance(result, bool)):
                 raise InvalidCode('If clause {!r} does not evaluate to true or false.'.format(result))
             if result:
-                self.evaluate_codeblock(i.block)
-                return
+                prev_meson_version = mesonlib.project_meson_versions[self.subproject]
+                if self.tmp_meson_version:
+                    mesonlib.project_meson_versions[self.subproject] = self.tmp_meson_version
+                try:
+                    self.evaluate_codeblock(i.block)
+                finally:
+                    mesonlib.project_meson_versions[self.subproject] = prev_meson_version
+                return None
         if not isinstance(node.elseblock, mparser.EmptyNode):
             self.evaluate_codeblock(node.elseblock)
+        return None
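evaluate_if() above temporarily narrows the effective meson_version while an if-block guarded by meson.version().version_compare(...) runs, and restores the previous value in a finally clause even if the block raises. The save/override/restore pattern in isolation looks like this (project_meson_versions here stands in for the real mesonlib dict):

    import typing as T

    project_meson_versions = {'': '>=0.50.0'}  # type: T.Dict[str, str]

    def run_block_with_version(subproject: str, tmp_version: T.Optional[str],
                               block: T.Callable[[], None]) -> None:
        # Same shape as the override in evaluate_if(): set, run, always restore.
        prev = project_meson_versions[subproject]
        if tmp_version:
            project_meson_versions[subproject] = tmp_version
        try:
            block()
        finally:
            project_meson_versions[subproject] = prev

    # run_block_with_version('', '>=0.56.0', lambda: None)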
 
-    def validate_comparison_types(self, val1, val2):
+    def validate_comparison_types(self, val1: T.Any, val2: T.Any) -> bool:
         if type(val1) != type(val2):
             return False
         return True
 
-    def evaluate_in(self, val1, val2):
+    def evaluate_in(self, val1: T.Any, val2: T.Any) -> bool:
         if not isinstance(val1, (str, int, float, ObjectHolder)):
             raise InvalidArguments('lvalue of "in" operator must be a string, integer, float, or object')
         if not isinstance(val2, (list, dict)):
             raise InvalidArguments('rvalue of "in" operator must be an array or a dict')
         return val1 in val2
 
-    def evaluate_comparison(self, node):
+    def evaluate_comparison(self, node: mparser.ComparisonNode) -> T.Union[bool, Disabler]:
         val1 = self.evaluate_statement(node.left)
-        if is_disabler(val1):
+        if isinstance(val1, Disabler):
             return val1
         val2 = self.evaluate_statement(node.right)
-        if is_disabler(val2):
+        if isinstance(val2, Disabler):
             return val2
         if node.ctype == 'in':
             return self.evaluate_in(val1, val2)
@@ -572,68 +687,70 @@
                 'Values of different types ({}, {}) cannot be compared using {}.'.format(type(val1).__name__,
                                                                                          type(val2).__name__,
                                                                                          node.ctype))
-        elif not self.is_elementary_type(val1):
-            raise InterpreterException('{} can only be compared for equality.'.format(node.left.value))
-        elif not self.is_elementary_type(val2):
-            raise InterpreterException('{} can only be compared for equality.'.format(node.right.value))
+        elif not isinstance(val1, self.elementary_types):
+            raise InterpreterException('{} can only be compared for equality.'.format(getattr(node.left, 'value', '')))
+        elif not isinstance(val2, self.elementary_types):
+            raise InterpreterException('{} can only be compared for equality.'.format(getattr(node.right, 'value', '')))
+        # Use type: ignore because mypy will complain that we are comparing two Unions,
+        # but we actually guarantee earlier that both types are the same
         elif node.ctype == '<':
-            return val1 < val2
+            return val1 < val2   # type: ignore
         elif node.ctype == '<=':
-            return val1 <= val2
+            return val1 <= val2  # type: ignore
         elif node.ctype == '>':
-            return val1 > val2
+            return val1 > val2   # type: ignore
         elif node.ctype == '>=':
-            return val1 >= val2
+            return val1 >= val2  # type: ignore
         else:
             raise InvalidCode('You broke my compare eval.')
 
-    def evaluate_andstatement(self, cur):
+    def evaluate_andstatement(self, cur: mparser.AndNode) -> T.Union[bool, Disabler]:
         l = self.evaluate_statement(cur.left)
-        if is_disabler(l):
+        if isinstance(l, Disabler):
             return l
         if not isinstance(l, bool):
             raise InterpreterException('First argument to "and" is not a boolean.')
         if not l:
             return False
         r = self.evaluate_statement(cur.right)
-        if is_disabler(r):
+        if isinstance(r, Disabler):
             return r
         if not isinstance(r, bool):
             raise InterpreterException('Second argument to "and" is not a boolean.')
         return r
 
-    def evaluate_orstatement(self, cur):
+    def evaluate_orstatement(self, cur: mparser.OrNode) -> T.Union[bool, Disabler]:
         l = self.evaluate_statement(cur.left)
-        if is_disabler(l):
+        if isinstance(l, Disabler):
             return l
         if not isinstance(l, bool):
             raise InterpreterException('First argument to "or" is not a boolean.')
         if l:
             return True
         r = self.evaluate_statement(cur.right)
-        if is_disabler(r):
+        if isinstance(r, Disabler):
             return r
         if not isinstance(r, bool):
             raise InterpreterException('Second argument to "or" is not a boolean.')
         return r
 
-    def evaluate_uminusstatement(self, cur):
+    def evaluate_uminusstatement(self, cur: mparser.UMinusNode) -> T.Union[int, Disabler]:
         v = self.evaluate_statement(cur.value)
-        if is_disabler(v):
+        if isinstance(v, Disabler):
             return v
         if not isinstance(v, int):
             raise InterpreterException('Argument to negation is not an integer.')
         return -v
 
     @FeatureNew('/ with string arguments', '0.49.0')
-    def evaluate_path_join(self, l, r):
+    def evaluate_path_join(self, l: str, r: str) -> str:
         if not isinstance(l, str):
             raise InvalidCode('The division operator can only append to a string.')
         if not isinstance(r, str):
             raise InvalidCode('The division operator can only append a string.')
         return self.join_path_strings((l, r))
 
-    def evaluate_division(self, l, r):
+    def evaluate_division(self, l: T.Any, r: T.Any) -> T.Union[int, str]:
         if isinstance(l, str) or isinstance(r, str):
             return self.evaluate_path_join(l, r)
         if isinstance(l, int) and isinstance(r, int):
@@ -642,19 +759,20 @@
             return l // r
         raise InvalidCode('Division works only with strings or integers.')
 
-    def evaluate_arithmeticstatement(self, cur):
+    def evaluate_arithmeticstatement(self, cur: mparser.ArithmeticNode) -> T.Union[int, str, dict, list, Disabler]:
         l = self.evaluate_statement(cur.left)
-        if is_disabler(l):
+        if isinstance(l, Disabler):
             return l
         r = self.evaluate_statement(cur.right)
-        if is_disabler(r):
+        if isinstance(r, Disabler):
             return r
 
         if cur.operation == 'add':
             if isinstance(l, dict) and isinstance(r, dict):
                 return {**l, **r}
             try:
-                return l + r
+                # MyPy error due to handling two Unions (we are catching all exceptions anyway)
+                return l + r  # type: ignore
             except Exception as e:
                 raise InvalidCode('Invalid use of addition: ' + str(e))
         elif cur.operation == 'sub':
@@ -674,10 +792,10 @@
         else:
             raise InvalidCode('You broke me.')
 
-    def evaluate_ternary(self, node):
+    def evaluate_ternary(self, node: mparser.TernaryNode) -> TYPE_var:
         assert(isinstance(node, mparser.TernaryNode))
         result = self.evaluate_statement(node.condition)
-        if is_disabler(result):
+        if isinstance(result, Disabler):
             return result
         if not isinstance(result, bool):
             raise InterpreterException('Ternary condition is not boolean.')
@@ -686,14 +804,14 @@
         else:
             return self.evaluate_statement(node.falseblock)
 
-    def evaluate_foreach(self, node):
+    def evaluate_foreach(self, node: mparser.ForeachClauseNode) -> None:
         assert(isinstance(node, mparser.ForeachClauseNode))
         items = self.evaluate_statement(node.items)
 
         if isinstance(items, list):
             if len(node.varnames) != 1:
                 raise InvalidArguments('Foreach on array does not unpack')
-            varname = node.varnames[0].value
+            varname = node.varnames[0]
             for item in items:
                 self.set_variable(varname, item)
                 try:
@@ -706,8 +824,8 @@
             if len(node.varnames) != 2:
                 raise InvalidArguments('Foreach on dict unpacks key and value')
             for key, value in items.items():
-                self.set_variable(node.varnames[0].value, key)
-                self.set_variable(node.varnames[1].value, value)
+                self.set_variable(node.varnames[0], key)
+                self.set_variable(node.varnames[1], value)
                 try:
                     self.evaluate_codeblock(node.block)
                 except ContinueRequest:
@@ -717,16 +835,15 @@
         else:
             raise InvalidArguments('Items of foreach loop must be an array or a dict')
 
-    def evaluate_plusassign(self, node):
+    def evaluate_plusassign(self, node: mparser.PlusAssignmentNode) -> None:
         assert(isinstance(node, mparser.PlusAssignmentNode))
         varname = node.var_name
         addition = self.evaluate_statement(node.value)
-        if is_disabler(addition):
-            self.set_variable(varname, addition)
-            return
+
         # Remember that all variables are immutable. We must always create a
         # full new variable and then assign it.
         old_variable = self.get_variable(varname)
+        new_value = None  # type: T.Union[str, int, float, bool, dict, list]
         if isinstance(old_variable, str):
             if not isinstance(addition, str):
                 raise InvalidArguments('The += operator requires a string on the right hand side if the variable on the left is a string')
@@ -746,13 +863,13 @@
             new_value = {**old_variable, **addition}
         # Add other data types here.
         else:
-            raise InvalidArguments('The += operator currently only works with arrays, dicts, strings or ints ')
+            raise InvalidArguments('The += operator currently only works with arrays, dicts, strings or ints')
         self.set_variable(varname, new_value)
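Interpreter variables are immutable, so += always builds a fresh value from the old one and rebinds the name: strings and ints add, lists splice (or append a single element), and dicts merge with the right-hand side winning. A small standalone illustration of that rebuild, with the type validation of the real code omitted:

    def plus_assign(old, addition):
        # Standalone illustration of the += rebuild; the real code also validates types.
        if isinstance(old, dict):
            return {**old, **addition}
        if isinstance(old, list):
            return old + (addition if isinstance(addition, list) else [addition])
        return old + addition

    # plus_assign({'a': 1}, {'b': 2})  -> {'a': 1, 'b': 2}
    # plus_assign([1, 2], 3)           -> [1, 2, 3]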
 
-    def evaluate_indexing(self, node):
+    def evaluate_indexing(self, node: mparser.IndexNode) -> TYPE_var:
         assert(isinstance(node, mparser.IndexNode))
         iobject = self.evaluate_statement(node.iobject)
-        if is_disabler(iobject):
+        if isinstance(iobject, Disabler):
             return iobject
         if not hasattr(iobject, '__getitem__'):
             raise InterpreterException(
@@ -763,33 +880,38 @@
             if not isinstance(index, str):
                 raise InterpreterException('Key is not a string')
             try:
-                return iobject[index]
+                # The cast is required because we don't have recursive types...
+                return T.cast(TYPE_var, iobject[index])
             except KeyError:
                 raise InterpreterException('Key %s is not in dict' % index)
         else:
             if not isinstance(index, int):
                 raise InterpreterException('Index value is not an integer.')
             try:
-                return iobject[index]
+                # Ignore the MyPy error, since we don't know all indexable types here
+                # and we handle non-indexable types with an exception
+                # TODO maybe find a better solution
+                return iobject[index]  # type: ignore
             except IndexError:
-                raise InterpreterException('Index %d out of bounds of array of size %d.' % (index, len(iobject)))
+                # We are already checking for the existence of __getitem__, so this should be safe
+                raise InterpreterException('Index %d out of bounds of array of size %d.' % (index, len(iobject)))  # type: ignore
 
-    def function_call(self, node):
+    def function_call(self, node: mparser.FunctionNode) -> T.Optional[TYPE_var]:
         func_name = node.func_name
         (posargs, kwargs) = self.reduce_arguments(node.args)
-        if is_disabled(posargs, kwargs) and func_name != 'set_variable' and func_name != 'is_disabler':
+        if is_disabled(posargs, kwargs) and func_name not in {'get_variable', 'set_variable', 'is_disabler'}:
             return Disabler()
         if func_name in self.funcs:
             func = self.funcs[func_name]
+            func_args = posargs  # type: T.Any
             if not getattr(func, 'no-args-flattening', False):
-                posargs = flatten(posargs)
-
-            self.current_node = node
-            return func(node, posargs, kwargs)
+                func_args = flatten(posargs)
+            return func(node, func_args, kwargs)
         else:
             self.unknown_function_called(func_name)
+            return None
 
-    def method_call(self, node):
+    def method_call(self, node: mparser.MethodNode) -> TYPE_var:
         invokable = node.source_object
         if isinstance(invokable, mparser.IdNode):
             object_name = invokable.value
@@ -797,22 +919,23 @@
         else:
             obj = self.evaluate_statement(invokable)
         method_name = node.name
-        args = node.args
+        (args, kwargs) = self.reduce_arguments(node.args)
+        if is_disabled(args, kwargs):
+            return Disabler()
         if isinstance(obj, str):
-            return self.string_method_call(obj, method_name, args)
+            return self.string_method_call(obj, method_name, args, kwargs)
         if isinstance(obj, bool):
-            return self.bool_method_call(obj, method_name, args)
+            return self.bool_method_call(obj, method_name, args, kwargs)
         if isinstance(obj, int):
-            return self.int_method_call(obj, method_name, args)
+            return self.int_method_call(obj, method_name, args, kwargs)
         if isinstance(obj, list):
-            return self.array_method_call(obj, method_name, args)
+            return self.array_method_call(obj, method_name, args, kwargs)
         if isinstance(obj, dict):
-            return self.dict_method_call(obj, method_name, args)
+            return self.dict_method_call(obj, method_name, args, kwargs)
         if isinstance(obj, mesonlib.File):
             raise InvalidArguments('File object "%s" is not callable.' % obj)
         if not isinstance(obj, InterpreterObject):
             raise InvalidArguments('Variable "%s" is not callable.' % object_name)
-        (args, kwargs) = self.reduce_arguments(args)
         # Special case. This is the only thing you can do with a disabler
         # object. Every other use immediately returns the disabler object.
         if isinstance(obj, Disabler):
@@ -820,17 +943,15 @@
                 return False
             else:
                 return Disabler()
-        if is_disabled(args, kwargs):
-            return Disabler()
         if method_name == 'extract_objects':
+            if not isinstance(obj, ObjectHolder):
+                raise InvalidArguments('Invalid operation "extract_objects" on variable "{}"'.format(object_name))
             self.validate_extraction(obj.held_object)
         obj.current_node = node
         return obj.method_call(method_name, args, kwargs)
 
-    def bool_method_call(self, obj, method_name, args):
-        (posargs, kwargs) = self.reduce_arguments(args)
-        if is_disabled(posargs, kwargs):
-            return Disabler()
+    @builtinMethodNoKwargs
+    def bool_method_call(self, obj: bool, method_name: str, posargs: T.List[TYPE_nvar], kwargs: T.Dict[str, T.Any]) -> T.Union[str, int]:
         if method_name == 'to_string':
             if not posargs:
                 if obj:
@@ -852,10 +973,8 @@
         else:
             raise InterpreterException('Unknown method "%s" for a boolean.' % method_name)
 
-    def int_method_call(self, obj, method_name, args):
-        (posargs, kwargs) = self.reduce_arguments(args)
-        if is_disabled(posargs, kwargs):
-            return Disabler()
+    @builtinMethodNoKwargs
+    def int_method_call(self, obj: int, method_name: str, posargs: T.List[TYPE_nvar], kwargs: T.Dict[str, T.Any]) -> T.Union[str, bool]:
         if method_name == 'is_even':
             if not posargs:
                 return obj % 2 == 0
@@ -875,7 +994,7 @@
             raise InterpreterException('Unknown method "%s" for an integer.' % method_name)
 
     @staticmethod
-    def _get_one_string_posarg(posargs, method_name):
+    def _get_one_string_posarg(posargs: T.List[TYPE_nvar], method_name: str) -> str:
         if len(posargs) > 1:
             m = '{}() must have zero or one arguments'
             raise InterpreterException(m.format(method_name))
@@ -887,17 +1006,15 @@
             return s
         return None
 
-    def string_method_call(self, obj, method_name, args):
-        (posargs, kwargs) = self.reduce_arguments(args)
-        if is_disabled(posargs, kwargs):
-            return Disabler()
+    @builtinMethodNoKwargs
+    def string_method_call(self, obj: str, method_name: str, posargs: T.List[TYPE_nvar], kwargs: T.Dict[str, T.Any]) -> T.Union[str, int, bool, T.List[str]]:
         if method_name == 'strip':
-            s = self._get_one_string_posarg(posargs, 'strip')
-            if s is not None:
-                return obj.strip(s)
+            s1 = self._get_one_string_posarg(posargs, 'strip')
+            if s1 is not None:
+                return obj.strip(s1)
             return obj.strip()
         elif method_name == 'format':
-            return self.format_string(obj, args)
+            return self.format_string(obj, posargs)
         elif method_name == 'to_upper':
             return obj.upper()
         elif method_name == 'to_lower':
@@ -905,19 +1022,19 @@
         elif method_name == 'underscorify':
             return re.sub(r'[^a-zA-Z0-9]', '_', obj)
         elif method_name == 'split':
-            s = self._get_one_string_posarg(posargs, 'split')
-            if s is not None:
-                return obj.split(s)
+            s2 = self._get_one_string_posarg(posargs, 'split')
+            if s2 is not None:
+                return obj.split(s2)
             return obj.split()
         elif method_name == 'startswith' or method_name == 'contains' or method_name == 'endswith':
-            s = posargs[0]
-            if not isinstance(s, str):
+            s3 = posargs[0]
+            if not isinstance(s3, str):
                 raise InterpreterException('Argument must be a string.')
             if method_name == 'startswith':
-                return obj.startswith(s)
+                return obj.startswith(s3)
             elif method_name == 'contains':
-                return obj.find(s) >= 0
-            return obj.endswith(s)
+                return obj.find(s3) >= 0
+            return obj.endswith(s3)
         elif method_name == 'to_int':
             try:
                 return int(obj)
@@ -928,6 +1045,7 @@
                 raise InterpreterException('Join() takes exactly one argument.')
             strlist = posargs[0]
             check_stringlist(strlist)
+            assert isinstance(strlist, list)  # Required for mypy
             return obj.join(strlist)
         elif method_name == 'version_compare':
             if len(posargs) != 1:
@@ -935,20 +1053,35 @@
             cmpr = posargs[0]
             if not isinstance(cmpr, str):
                 raise InterpreterException('Version_compare() argument must be a string.')
+            if isinstance(obj, MesonVersionString):
+                self.tmp_meson_version = cmpr
             return mesonlib.version_compare(obj, cmpr)
+        elif method_name == 'substring':
+            if len(posargs) > 2:
+                raise InterpreterException('substring() takes maximum two arguments.')
+            start = 0
+            end = len(obj)
+            if len (posargs) > 0:
+                if not isinstance(posargs[0], int):
+                    raise InterpreterException('substring() argument must be an int')
+                start = posargs[0]
+            if len (posargs) > 1:
+                if not isinstance(posargs[1], int):
+                    raise InterpreterException('substring() argument must be an int')
+                end = posargs[1]
+            return obj[start:end]
         raise InterpreterException('Unknown method "%s" for a string.' % method_name)
 
-    def format_string(self, templ, args):
-        if isinstance(args, mparser.ArgumentNode):
-            args = args.arguments
+    def format_string(self, templ: str, args: T.List[TYPE_nvar]) -> str:
         arg_strings = []
         for arg in args:
-            arg = self.evaluate_statement(arg)
+            if isinstance(arg, mparser.BaseNode):
+                arg = self.evaluate_statement(arg)
             if isinstance(arg, bool): # Python boolean is upper case.
                 arg = str(arg).lower()
             arg_strings.append(str(arg))
 
-        def arg_replace(match):
+        def arg_replace(match: T.Match[str]) -> str:
             idx = int(match.group(1))
             if idx >= len(arg_strings):
                 raise InterpreterException('Format placeholder @{}@ out of range.'.format(idx))
@@ -956,15 +1089,25 @@
 
         return re.sub(r'@(\d+)@', arg_replace, templ)
 
-    def unknown_function_called(self, func_name):
+    def unknown_function_called(self, func_name: str) -> None:
         raise InvalidCode('Unknown function "%s".' % func_name)
 
-    def array_method_call(self, obj, method_name, args):
-        (posargs, kwargs) = self.reduce_arguments(args)
-        if is_disabled(posargs, kwargs):
-            return Disabler()
+    @builtinMethodNoKwargs
+    def array_method_call(self, obj: T.List[TYPE_var], method_name: str, posargs: T.List[TYPE_nvar], kwargs: T.Dict[str, T.Any]) -> TYPE_var:
         if method_name == 'contains':
-            return self.check_contains(obj, posargs)
+            def check_contains(el: list) -> bool:
+                if len(posargs) != 1:
+                    raise InterpreterException('Contains method takes exactly one argument.')
+                item = posargs[0]
+                for element in el:
+                    if isinstance(element, list):
+                        found = check_contains(element)
+                        if found:
+                            return True
+                    if element == item:
+                        return True
+                return False
+            return check_contains(obj)
         elif method_name == 'length':
             return len(obj)
         elif method_name == 'get':
@@ -983,16 +1126,15 @@
                 if fallback is None:
                     m = 'Array index {!r} is out of bounds for array of size {!r}.'
                     raise InvalidArguments(m.format(index, len(obj)))
+                if isinstance(fallback, mparser.BaseNode):
+                    return self.evaluate_statement(fallback)
                 return fallback
             return obj[index]
         m = 'Arrays do not have a method called {!r}.'
         raise InterpreterException(m.format(method_name))
 
-    def dict_method_call(self, obj, method_name, args):
-        (posargs, kwargs) = self.reduce_arguments(args)
-        if is_disabled(posargs, kwargs):
-            return Disabler()
-
+    @builtinMethodNoKwargs
+    def dict_method_call(self, obj: T.Dict[str, TYPE_var], method_name: str, posargs: T.List[TYPE_nvar], kwargs: T.Dict[str, T.Any]) -> TYPE_var:
         if method_name in ('has_key', 'get'):
             if method_name == 'has_key':
                 if len(posargs) != 1:
@@ -1014,7 +1156,10 @@
                 return obj[key]
 
             if len(posargs) == 2:
-                return posargs[1]
+                fallback = posargs[1]
+                if isinstance(fallback, mparser.BaseNode):
+                    return self.evaluate_statement(fallback)
+                return fallback
 
             raise InterpreterException('Key {!r} is not in the dictionary.'.format(key))
 
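A small sketch of the lazy fallback behaviour these get() hunks introduce: the fallback is kept as an unevaluated node and is only evaluated when the key or index is actually missing. Node and its evaluate() method are stand-ins for the mparser/interpreter machinery.

class Node:
    # Stand-in for an unevaluated mparser.BaseNode.
    def __init__(self, fn):
        self.fn = fn
    def evaluate(self):
        return self.fn()

def dict_get(d, key, fallback=None):
    if key in d:
        return d[key]
    if isinstance(fallback, Node):
        return fallback.evaluate()   # evaluated only on the miss path
    if fallback is not None:
        return fallback
    raise KeyError(key)

assert dict_get({'a': 1}, 'a', Node(lambda: 1 // 0)) == 1   # fallback never runs
assert dict_get({}, 'a', Node(lambda: 42)) == 42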
@@ -1025,21 +1170,31 @@
 
         raise InterpreterException('Dictionaries do not have a method called "%s".' % method_name)
 
-    def reduce_arguments(self, args):
+    def reduce_arguments(
+                self,
+                args: mparser.ArgumentNode,
+                key_resolver: T.Callable[[mparser.BaseNode], str] = default_resolve_key,
+                duplicate_key_error: T.Optional[str] = None,
+            ) -> T.Tuple[T.List[TYPE_nvar], TYPE_nkwargs]:
         assert(isinstance(args, mparser.ArgumentNode))
         if args.incorrect_order():
             raise InvalidArguments('All keyword arguments must be after positional arguments.')
         self.argument_depth += 1
-        reduced_pos = [self.evaluate_statement(arg) for arg in args.arguments]
-        reduced_kw = {}
-        for key in args.kwargs.keys():
-            a = args.kwargs[key]
-            reduced_kw[key] = self.evaluate_statement(a)
+        reduced_pos = [self.evaluate_statement(arg) for arg in args.arguments]  # type: T.List[TYPE_nvar]
+        reduced_kw = {}  # type: TYPE_nkwargs
+        for key, val in args.kwargs.items():
+            reduced_key = key_resolver(key)
+            reduced_val = val  # type: TYPE_nvar
+            if isinstance(reduced_val, mparser.BaseNode):
+                reduced_val = self.evaluate_statement(reduced_val)
+            if duplicate_key_error and reduced_key in reduced_kw:
+                raise InvalidArguments(duplicate_key_error.format(reduced_key))
+            reduced_kw[reduced_key] = reduced_val
         self.argument_depth -= 1
         final_kw = self.expand_default_kwargs(reduced_kw)
         return reduced_pos, final_kw
 
-    def expand_default_kwargs(self, kwargs):
+    def expand_default_kwargs(self, kwargs: TYPE_nkwargs) -> TYPE_nkwargs:
         if 'kwargs' not in kwargs:
             return kwargs
         to_expand = kwargs.pop('kwargs')
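An illustrative-only sketch of the keyword reduction added in reduce_arguments() above: keys go through a resolver and, when duplicate_key_error is given, a repeated key becomes a hard error.

def reduce_kwargs(pairs, key_resolver=lambda k: k, duplicate_key_error=None):
    reduced = {}
    for key, val in pairs:
        key = key_resolver(key)
        if duplicate_key_error and key in reduced:
            raise ValueError(duplicate_key_error.format(key))
        reduced[key] = val
    return reduced

assert reduce_kwargs([('a', 1), ('b', 2)]) == {'a': 1, 'b': 2}
# reduce_kwargs([('a', 1), ('a', 2)], duplicate_key_error='duplicate key {}')  # would raise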
@@ -1053,7 +1208,7 @@
             kwargs[k] = v
         return kwargs
 
-    def assignment(self, node):
+    def assignment(self, node: mparser.AssignmentNode) -> None:
         assert(isinstance(node, mparser.AssignmentNode))
         if self.argument_depth != 0:
             raise InvalidArguments('''Tried to assign values inside an argument list.
@@ -1070,7 +1225,7 @@
         self.set_variable(var_name, value)
         return None
 
-    def set_variable(self, varname, variable):
+    def set_variable(self, varname: str, variable: TYPE_var) -> None:
         if variable is None:
             raise InvalidCode('Can not assign None to variable.')
         if not isinstance(varname, str):
@@ -1083,16 +1238,16 @@
             raise InvalidCode('Tried to overwrite internal variable "%s"' % varname)
         self.variables[varname] = variable
 
-    def get_variable(self, varname):
+    def get_variable(self, varname: str) -> TYPE_var:
         if varname in self.builtin:
             return self.builtin[varname]
         if varname in self.variables:
             return self.variables[varname]
         raise InvalidCode('Unknown variable "%s".' % varname)
 
-    def is_assignable(self, value):
+    def is_assignable(self, value: T.Any) -> bool:
         return isinstance(value, (InterpreterObject, dependencies.Dependency,
                                   str, int, list, dict, mesonlib.File))
 
-    def is_elementary_type(self, v):
-        return isinstance(v, (int, float, str, bool, list))
+    def validate_extraction(self, buildtarget: InterpreterObject) -> None:
+        raise InterpreterException('validate_extraction is not implemented in this context (please file a bug)')
diff -Nru meson-0.53.2/mesonbuild/interpreter.py meson-0.57.0+really0.56.2/mesonbuild/interpreter.py
--- meson-0.53.2/mesonbuild/interpreter.py	2020-02-25 18:00:46.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/interpreter.py	2021-01-06 10:39:48.000000000 +0000
@@ -1,4 +1,4 @@
-# Copyright 2012-2018 The Meson development team
+# Copyright 2012-2019 The Meson development team
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
@@ -21,7 +21,7 @@
 from . import compilers
 from .wrap import wrap, WrapMode
 from . import mesonlib
-from .mesonlib import FileMode, MachineChoice, Popen_safe, listify, extract_as_list, has_path_sep
+from .mesonlib import FileMode, MachineChoice, Popen_safe, listify, extract_as_list, has_path_sep, unholder
 from .dependencies import ExternalProgram
 from .dependencies import InternalDependency, Dependency, NotFoundDependency, DependencyException
 from .depfile import DepFile
@@ -29,17 +29,22 @@
 from .interpreterbase import check_stringlist, flatten, noPosargs, noKwargs, stringArgs, permittedKwargs, noArgsFlattening
 from .interpreterbase import InterpreterException, InvalidArguments, InvalidCode, SubdirDoneRequest
 from .interpreterbase import InterpreterObject, MutableInterpreterObject, Disabler, disablerIfNotFound
-from .interpreterbase import FeatureNew, FeatureDeprecated, FeatureNewKwargs
-from .interpreterbase import ObjectHolder
-from .modules import ModuleReturnValue
+from .interpreterbase import FeatureNew, FeatureDeprecated, FeatureNewKwargs, FeatureDeprecatedKwargs
+from .interpreterbase import ObjectHolder, MesonVersionString
+from .interpreterbase import TYPE_var, TYPE_nkwargs
+from .modules import ModuleReturnValue, ExtensionModule
 from .cmake import CMakeInterpreter
+from .backend.backends import TestProtocol, Backend
 
-from pathlib import Path, PurePath
-import os, shutil, uuid
-import re, shlex
+from ._pathlib import Path, PurePath
+import os
+import shutil
+import uuid
+import re
+import shlex
+import stat
 import subprocess
 import collections
-from itertools import chain
 import functools
 import typing as T
 
@@ -426,6 +431,7 @@
                              'partial_dependency': self.partial_dependency_method,
                              'include_type': self.include_type_method,
                              'as_system': self.as_system_method,
+                             'as_link_whole': self.as_link_whole_method,
                              })
 
     def found(self):
@@ -453,6 +459,8 @@
     def name_method(self, args, kwargs):
         return self.held_object.get_name()
 
+    @FeatureDeprecated('Dependency.get_pkgconfig_variable', '0.56.0',
+                       'use Dependency.get_variable(pkgconfig : ...) instead')
     @permittedKwargs({'define_variable', 'default'})
     def pkgconfig_method(self, args, kwargs):
         args = listify(args)
@@ -464,6 +472,8 @@
         return self.held_object.get_pkgconfig_variable(varname, kwargs)
 
     @FeatureNew('dep.get_configtool_variable', '0.44.0')
+    @FeatureDeprecated('Dependency.get_configtool_variable', '0.56.0',
+                       'use Dependency.get_variable(configtool : ...) instead')
     @permittedKwargs({})
     def configtool_method(self, args, kwargs):
         args = listify(args)
@@ -483,7 +493,8 @@
 
     @FeatureNew('dep.get_variable', '0.51.0')
     @noPosargs
-    @permittedKwargs({'cmake', 'pkgconfig', 'configtool', 'default_value', 'pkgconfig_define'})
+    @permittedKwargs({'cmake', 'pkgconfig', 'configtool', 'internal', 'default_value', 'pkgconfig_define'})
+    @FeatureNewKwargs('dep.get_variable', '0.54.0', ['internal'])
     def variable_method(self, args, kwargs):
         return self.held_object.get_variable(**kwargs)
 
@@ -505,12 +516,24 @@
         new_dep = self.held_object.generate_system_dependency(new_is_system)
         return DependencyHolder(new_dep, self.subproject)
 
+    @FeatureNew('dep.as_link_whole', '0.56.0')
+    @permittedKwargs({})
+    @noPosargs
+    def as_link_whole_method(self, args, kwargs):
+        if not isinstance(self.held_object, InternalDependency):
+            raise InterpreterException('as_link_whole method is only supported on declare_dependency() objects')
+        new_dep = self.held_object.generate_link_whole_dependency()
+        return DependencyHolder(new_dep, self.subproject)
+
 class ExternalProgramHolder(InterpreterObject, ObjectHolder):
-    def __init__(self, ep):
+    def __init__(self, ep, subproject, backend=None):
         InterpreterObject.__init__(self)
         ObjectHolder.__init__(self, ep)
+        self.subproject = subproject
+        self.backend = backend
         self.methods.update({'found': self.found_method,
-                             'path': self.path_method})
+                             'path': self.path_method,
+                             'full_path': self.full_path_method})
         self.cached_version = None
 
     @noPosargs
@@ -520,8 +543,22 @@
 
     @noPosargs
     @permittedKwargs({})
+    @FeatureDeprecated('ExternalProgram.path', '0.55.0',
+                       'use ExternalProgram.full_path() instead')
     def path_method(self, args, kwargs):
-        return self.held_object.get_path()
+        return self._full_path()
+
+    @noPosargs
+    @permittedKwargs({})
+    @FeatureNew('ExternalProgram.full_path', '0.55.0')
+    def full_path_method(self, args, kwargs):
+        return self._full_path()
+
+    def _full_path(self):
+        exe = self.held_object
+        if isinstance(exe, build.Executable):
+            return self.backend.get_target_filename_abs(exe)
+        return exe.get_path()
 
     def found(self):
         return isinstance(self.held_object, build.Executable) or self.held_object.found()
@@ -530,9 +567,14 @@
         return self.held_object.get_command()
 
     def get_name(self):
-        return self.held_object.get_name()
+        exe = self.held_object
+        if isinstance(exe, build.Executable):
+            return exe.name
+        return exe.get_name()
 
     def get_version(self, interpreter):
+        if isinstance(self.held_object, build.Executable):
+            return self.held_object.project_version
         if not self.cached_version:
             raw_cmd = self.get_command() + ['--version']
             cmd = [self, '--version']
@@ -646,22 +688,22 @@
 
     @noPosargs
     @permittedKwargs({})
-    def cpu_family_method(self, args, kwargs):
+    def cpu_family_method(self, args: T.List[TYPE_var], kwargs: TYPE_nkwargs) -> str:
         return self.held_object.cpu_family
 
     @noPosargs
     @permittedKwargs({})
-    def cpu_method(self, args, kwargs):
+    def cpu_method(self, args: T.List[TYPE_var], kwargs: TYPE_nkwargs) -> str:
         return self.held_object.cpu
 
     @noPosargs
     @permittedKwargs({})
-    def system_method(self, args, kwargs):
+    def system_method(self, args: T.List[TYPE_var], kwargs: TYPE_nkwargs) -> str:
         return self.held_object.system
 
     @noPosargs
     @permittedKwargs({})
-    def endian_method(self, args, kwargs):
+    def endian_method(self, args: T.List[TYPE_var], kwargs: TYPE_nkwargs) -> str:
         return self.held_object.endian
 
 class IncludeDirsHolder(InterpreterObject, ObjectHolder):
@@ -713,7 +755,8 @@
         return self.held_object.install_dir
 
 class InstallDir(InterpreterObject):
-    def __init__(self, src_subdir, inst_subdir, install_dir, install_mode, exclude, strip_directory):
+    def __init__(self, src_subdir, inst_subdir, install_dir, install_mode,
+                 exclude, strip_directory, from_source_dir=True):
         InterpreterObject.__init__(self)
         self.source_subdir = src_subdir
         self.installable_subdir = inst_subdir
@@ -721,6 +764,7 @@
         self.install_mode = install_mode
         self.exclude = exclude
         self.strip_directory = strip_directory
+        self.from_source_dir = from_source_dir
 
 class Man(InterpreterObject):
 
@@ -767,6 +811,7 @@
         super().__init__(target, interp)
         self.methods.update({'extract_objects': self.extract_objects_method,
                              'extract_all_objects': self.extract_all_objects_method,
+                             'name': self.name_method,
                              'get_id': self.get_id_method,
                              'outdir': self.outdir_method,
                              'full_path': self.full_path_method,
@@ -821,6 +866,12 @@
     def get_id_method(self, args, kwargs):
         return self.held_object.get_id()
 
+    @FeatureNew('name', '0.54.0')
+    @noPosargs
+    @permittedKwargs({})
+    def name_method(self, args, kwargs):
+        return self.held_object.name
+
 class ExecutableHolder(BuildTargetHolder):
     def __init__(self, target, interp):
         super().__init__(target, interp)
@@ -870,15 +921,23 @@
     def __init__(self, target, interp):
         super().__init__(target, interp)
 
-class CustomTargetIndexHolder(InterpreterObject, ObjectHolder):
-    def __init__(self, object_to_hold):
-        InterpreterObject.__init__(self)
-        ObjectHolder.__init__(self, object_to_hold)
+class CustomTargetIndexHolder(TargetHolder):
+    def __init__(self, target, interp):
+        super().__init__(target, interp)
+        self.methods.update({'full_path': self.full_path_method,
+                             })
+
+    @FeatureNew('custom_target[i].full_path', '0.54.0')
+    @noPosargs
+    @permittedKwargs({})
+    def full_path_method(self, args, kwargs):
+        return self.interpreter.backend.get_target_filename_abs(self.held_object)
 
 class CustomTargetHolder(TargetHolder):
     def __init__(self, target, interp):
         super().__init__(target, interp)
         self.methods.update({'full_path': self.full_path_method,
+                             'to_list': self.to_list_method,
                              })
 
     def __repr__(self):
@@ -891,8 +950,17 @@
     def full_path_method(self, args, kwargs):
         return self.interpreter.backend.get_target_filename_abs(self.held_object)
 
+    @FeatureNew('custom_target.to_list', '0.54.0')
+    @noPosargs
+    @permittedKwargs({})
+    def to_list_method(self, args, kwargs):
+        result = []
+        for i in self.held_object:
+            result.append(CustomTargetIndexHolder(i, self.interpreter))
+        return result
+
     def __getitem__(self, index):
-        return CustomTargetIndexHolder(self.held_object[index])
+        return CustomTargetIndexHolder(self.held_object[index], self.interpreter)
 
     def __setitem__(self, index, value):  # lgtm[py/unexpected-raise-in-special-method]
         raise InterpreterException('Cannot set a member of a CustomTarget')
@@ -931,7 +999,7 @@
         self.should_fail = should_fail
         self.timeout = timeout
         self.workdir = workdir
-        self.protocol = protocol
+        self.protocol = TestProtocol.from_str(protocol)
         self.priority = priority
 
     def get_exe(self):
@@ -942,11 +1010,14 @@
 
 class SubprojectHolder(InterpreterObject, ObjectHolder):
 
-    def __init__(self, subinterpreter, subproject_dir, name):
+    def __init__(self, subinterpreter, subdir, warnings=0, disabled_feature=None,
+                 exception=None):
         InterpreterObject.__init__(self)
         ObjectHolder.__init__(self, subinterpreter)
-        self.name = name
-        self.subproject_dir = subproject_dir
+        self.warnings = warnings
+        self.disabled_feature = disabled_feature
+        self.exception = exception
+        self.subdir = PurePath(subdir).as_posix()
         self.methods.update({'get_variable': self.get_variable_method,
                              'found': self.found_method,
                              })
@@ -965,8 +1036,7 @@
         if len(args) < 1 or len(args) > 2:
             raise InterpreterException('Get_variable takes one or two arguments.')
         if not self.found():
-            raise InterpreterException('Subproject "%s/%s" disabled can\'t get_variable on it.' % (
-                self.subproject_dir, self.name))
+            raise InterpreterException('Subproject "%s" disabled can\'t get_variable on it.' % (self.subdir))
         varname = args[0]
         if not isinstance(varname, str):
             raise InterpreterException('Get_variable first argument must be a string.')
@@ -1080,7 +1150,7 @@
                 args += self.compiler.get_include_args(idir, False)
         if not nobuiltins:
             for_machine = Interpreter.machine_from_native_kwarg(kwargs)
-            opts = self.environment.coredata.compiler_options[for_machine]
+            opts = self.environment.coredata.compiler_options[for_machine][self.compiler.language]
             args += self.compiler.get_option_compile_args(opts)
             if mode == 'link':
                 args += self.compiler.get_option_link_args(opts)
@@ -1582,21 +1652,26 @@
             libtype = mesonlib.LibType.STATIC if kwargs['static'] else mesonlib.LibType.SHARED
         linkargs = self.compiler.find_library(libname, self.environment, search_dirs, libtype)
         if required and not linkargs:
-            raise InterpreterException(
-                '{} library {!r} not found'.format(self.compiler.get_display_language(), libname))
+            if libtype == mesonlib.LibType.PREFER_SHARED:
+                libtype = 'shared or static'
+            else:
+                libtype = libtype.name.lower()
+            raise InterpreterException('{} {} library {!r} not found'
+                                       .format(self.compiler.get_display_language(),
+                                               libtype, libname))
         lib = dependencies.ExternalLibrary(libname, linkargs, self.environment,
                                            self.compiler.language)
         return ExternalLibraryHolder(lib, self.subproject)
 
     @permittedKwargs({})
-    def has_argument_method(self, args, kwargs):
+    def has_argument_method(self, args: T.Sequence[str], kwargs) -> bool:
         args = mesonlib.stringlistify(args)
         if len(args) != 1:
             raise InterpreterException('has_argument takes exactly one argument.')
         return self.has_multi_arguments_method(args, kwargs)
 
     @permittedKwargs({})
-    def has_multi_arguments_method(self, args, kwargs):
+    def has_multi_arguments_method(self, args: T.Sequence[str], kwargs: dict):
         args = mesonlib.stringlistify(args)
         result, cached = self.compiler.has_multi_arguments(args, self.environment)
         if result:
@@ -1621,11 +1696,11 @@
         return supported_args
 
     @permittedKwargs({})
-    def first_supported_argument_method(self, args, kwargs):
-        for i in mesonlib.stringlistify(args):
-            if self.has_argument_method(i, kwargs):
-                mlog.log('First supported argument:', mlog.bold(i))
-                return [i]
+    def first_supported_argument_method(self, args: T.Sequence[str], kwargs: dict) -> T.List[str]:
+        for arg in mesonlib.stringlistify(args):
+            if self.has_argument_method(arg, kwargs):
+                mlog.log('First supported argument:', mlog.bold(arg))
+                return [arg]
         mlog.log('First supported argument:', mlog.red('None'))
         return []
 
@@ -1749,6 +1824,11 @@
             target_machine=self.interpreter.builtin['target_machine'].held_object,
             current_node=self.current_node
         )
+        # Many modules do for example self.interpreter.find_program_impl(),
+        # so we have to ensure they use the current interpreter and not the one
+        # that first imported that module, otherwise it will use outdated
+        # overrides.
+        self.held_object.interpreter = self.interpreter
         if self.held_object.is_snippet(method_name):
             value = fn(self.interpreter, state, args, kwargs)
             return self.interpreter.holderify(value)
@@ -1770,6 +1850,9 @@
         bool_yn = kwargs.get('bool_yn', False)
         if not isinstance(bool_yn, bool):
             raise InterpreterException('bool_yn keyword argument must be boolean')
+        list_sep = kwargs.get('list_sep')
+        if list_sep is not None and not isinstance(list_sep, str):
+            raise InterpreterException('list_sep keyword argument must be string')
         for k, v in values.items():
             if k in self.sections[section]:
                 raise InterpreterException('Summary section {!r} already have key {!r}'.format(section, k))
@@ -1781,12 +1864,18 @@
                 if bool_yn and isinstance(i, bool):
                     formatted_values.append(mlog.green('YES') if i else mlog.red('NO'))
                 else:
-                    formatted_values.append(i)
-            if not formatted_values:
-                formatted_values = ['']
-            self.sections[section][k] = formatted_values
+                    formatted_values.append(str(i))
+            self.sections[section][k] = (formatted_values, list_sep)
             self.max_key_len = max(self.max_key_len, len(k))
 
+    def text_len(self, v):
+        if isinstance(v, str):
+            return len(v)
+        elif isinstance(v, mlog.AnsiDecorator):
+            return len(v.text)
+        else:
+            raise RuntimeError('Expecting only strings or AnsiDecorator')
+
     def dump(self):
         mlog.log(self.project_name, mlog.normal_cyan(self.project_version))
         for section, values in self.sections.items():
@@ -1794,13 +1883,32 @@
             if section:
                 mlog.log(' ', mlog.bold(section))
             for k, v in values.items():
+                v, list_sep = v
                 indent = self.max_key_len - len(k) + 3
-                mlog.log(' ' * indent, k + ':', v[0])
-                indent = self.max_key_len + 5
-                for i in v[1:]:
-                    mlog.log(' ' * indent, i)
+                end = ' ' if v else ''
+                mlog.log(' ' * indent, k + ':', end=end)
+                indent = self.max_key_len + 6
+                self.dump_value(v, list_sep, indent)
         mlog.log('')  # newline
 
+    def dump_value(self, arr, list_sep, indent):
+        lines_sep = '\n' + ' ' * indent
+        if list_sep is None:
+            mlog.log(*arr, sep=lines_sep)
+            return
+        max_len = shutil.get_terminal_size().columns
+        line = []
+        line_len = indent
+        lines_sep = list_sep.rstrip() + lines_sep
+        for v in arr:
+            v_len = self.text_len(v) + len(list_sep)
+            if line and line_len + v_len > max_len:
+                mlog.log(*line, sep=list_sep, end=lines_sep)
+                line_len = indent
+                line = []
+            line.append(v)
+            line_len += v_len
+        mlog.log(*line, sep=list_sep)
 
 class MesonMain(InterpreterObject):
     def __init__(self, build, interpreter):
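A minimal sketch (not the Meson code verbatim) of the list_sep wrapping performed by dump_value() above: items are packed onto one line until the terminal width would be exceeded, then continue on an indented new line.

def wrap_values(values, list_sep, indent, max_len=80):
    lines, line, line_len = [], [], indent
    for v in values:
        v_len = len(v) + len(list_sep)
        if line and line_len + v_len > max_len:
            lines.append(list_sep.join(line))
            line, line_len = [], indent
        line.append(v)
        line_len += v_len
    lines.append(list_sep.join(line))
    return ('\n' + ' ' * indent).join(lines)

print(wrap_values(['glib-2.0', 'gtk+-3.0', 'json-glib-1.0'], ', ', indent=4, max_len=30))
# glib-2.0, gtk+-3.0
#     json-glib-1.0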
@@ -1811,67 +1919,125 @@
         self.methods.update({'get_compiler': self.get_compiler_method,
                              'is_cross_build': self.is_cross_build_method,
                              'has_exe_wrapper': self.has_exe_wrapper_method,
+                             'can_run_host_binaries': self.can_run_host_binaries_method,
                              'is_unity': self.is_unity_method,
                              'is_subproject': self.is_subproject_method,
                              'current_source_dir': self.current_source_dir_method,
                              'current_build_dir': self.current_build_dir_method,
                              'source_root': self.source_root_method,
                              'build_root': self.build_root_method,
+                             'project_source_root': self.project_source_root_method,
+                             'project_build_root': self.project_build_root_method,
                              'add_install_script': self.add_install_script_method,
                              'add_postconf_script': self.add_postconf_script_method,
                              'add_dist_script': self.add_dist_script_method,
                              'install_dependency_manifest': self.install_dependency_manifest_method,
+                             'override_dependency': self.override_dependency_method,
                              'override_find_program': self.override_find_program_method,
                              'project_version': self.project_version_method,
                              'project_license': self.project_license_method,
                              'version': self.version_method,
                              'project_name': self.project_name_method,
                              'get_cross_property': self.get_cross_property_method,
+                             'get_external_property': self.get_external_property_method,
                              'backend': self.backend_method,
                              })
 
-    def _find_source_script(self, name, args):
+    def _find_source_script(self, prog: T.Union[str, ExecutableHolder], args):
+        if isinstance(prog, ExecutableHolder):
+            prog_path = self.interpreter.backend.get_target_filename(prog.held_object)
+            return build.RunScript([prog_path], args)
+        elif isinstance(prog, ExternalProgramHolder):
+            return build.RunScript(prog.get_command(), args)
+
         # Prefer scripts in the current source directory
         search_dir = os.path.join(self.interpreter.environment.source_dir,
                                   self.interpreter.subdir)
-        key = (name, search_dir)
+        key = (prog, search_dir)
         if key in self._found_source_scripts:
             found = self._found_source_scripts[key]
         else:
-            found = dependencies.ExternalProgram(name, search_dir=search_dir)
+            found = dependencies.ExternalProgram(prog, search_dir=search_dir)
             if found.found():
                 self._found_source_scripts[key] = found
             else:
                 m = 'Script or command {!r} not found or not executable'
-                raise InterpreterException(m.format(name))
+                raise InterpreterException(m.format(prog))
         return build.RunScript(found.get_command(), args)
 
-    @permittedKwargs({})
-    def add_install_script_method(self, args, kwargs):
+    def _process_script_args(
+            self, name: str, args: T.List[T.Union[
+                str, mesonlib.File, CustomTargetHolder,
+                CustomTargetIndexHolder, ConfigureFileHolder,
+                ExternalProgramHolder, ExecutableHolder,
+            ]], allow_built: bool = False) -> T.List[str]:
+        script_args = []  # T.List[str]
+        new = False
+        for a in args:
+            a = unholder(a)
+            if isinstance(a, str):
+                script_args.append(a)
+            elif isinstance(a, mesonlib.File):
+                new = True
+                script_args.append(a.rel_to_builddir(self.interpreter.environment.source_dir))
+            elif isinstance(a, (build.BuildTarget, build.CustomTarget, build.CustomTargetIndex)):
+                if not allow_built:
+                    raise InterpreterException('Arguments to {} cannot be built'.format(name))
+                new = True
+                script_args.extend([os.path.join(a.get_subdir(), o) for o in a.get_outputs()])
+
+                # This feels really hacky, but I'm not sure how else to fix
+                # this without completely rewriting install script handling.
+                # This is complicated by the fact that the install target
+                # depends on all.
+                if isinstance(a, build.CustomTargetIndex):
+                    a.target.build_by_default = True
+                else:
+                    a.build_by_default = True
+            elif isinstance(a, build.ConfigureFile):
+                new = True
+                script_args.append(os.path.join(a.subdir, a.targetname))
+            elif isinstance(a, dependencies.ExternalProgram):
+                script_args.extend(a.command)
+                new = True
+            else:
+                raise InterpreterException(
+                    'Arguments to {} must be strings, Files, CustomTargets, '
+                    'Indexes of CustomTargets, or ConfigureFiles'.format(name))
+        if new:
+            FeatureNew.single_use(
+                'Calling "{}" with File, CustomTaget, Index of CustomTarget, '
+                'ConfigureFile, Executable, or ExternalProgram'.format(name),
+                '0.55.0', self.interpreter.subproject)
+        return script_args
+
+    @permittedKwargs(set())
+    def add_install_script_method(self, args: 'T.Tuple[T.Union[str, ExecutableHolder], T.Union[str, mesonlib.File, CustomTargetHolder, CustomTargetIndexHolder, ConfigureFileHolder], ...]', kwargs):
         if len(args) < 1:
             raise InterpreterException('add_install_script takes one or more arguments')
-        check_stringlist(args, 'add_install_script args must be strings')
-        script = self._find_source_script(args[0], args[1:])
+        script_args = self._process_script_args('add_install_script', args[1:], allow_built=True)
+        script = self._find_source_script(args[0], script_args)
         self.build.install_scripts.append(script)
 
-    @permittedKwargs({})
+    @permittedKwargs(set())
     def add_postconf_script_method(self, args, kwargs):
         if len(args) < 1:
             raise InterpreterException('add_postconf_script takes one or more arguments')
-        check_stringlist(args, 'add_postconf_script arguments must be strings')
-        script = self._find_source_script(args[0], args[1:])
+        script_args = self._process_script_args('add_postconf_script', args[1:], allow_built=True)
+        script = self._find_source_script(args[0], script_args)
         self.build.postconf_scripts.append(script)
 
-    @permittedKwargs({})
+    @permittedKwargs(set())
     def add_dist_script_method(self, args, kwargs):
         if len(args) < 1:
             raise InterpreterException('add_dist_script takes one or more arguments')
         if len(args) > 1:
-            FeatureNew('Calling "add_dist_script" with multiple arguments', '0.49.0').use(self.interpreter.subproject)
-        check_stringlist(args, 'add_dist_script argument must be a string')
+            FeatureNew.single_use('Calling "add_dist_script" with multiple arguments',
+                                  '0.49.0', self.interpreter.subproject)
         if self.interpreter.subproject != '':
             raise InterpreterException('add_dist_script may not be used in a subproject.')
-        script = self._find_source_script(args[0], args[1:])
+        script_args = self._process_script_args('add_dist_script', args[1:], allow_built=True)
+        script = self._find_source_script(args[0], script_args)
         self.build.dist_scripts.append(script)
 
     @noPosargs
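A rough, self-contained sketch of the dispatch done by _process_script_args() above: strings pass through, file objects become build-relative paths, built targets expand to their output paths (only when allowed), and anything else is rejected. File and Target are stand-ins for mesonlib.File and build.CustomTarget.

import os
from dataclasses import dataclass

@dataclass
class File:                      # stand-in for mesonlib.File
    path: str
    def rel_to_builddir(self):
        return self.path

@dataclass
class Target:                    # stand-in for build.CustomTarget
    subdir: str
    outputs: list

def process_script_args(name, args, allow_built=False):
    script_args = []
    for a in args:
        if isinstance(a, str):
            script_args.append(a)
        elif isinstance(a, File):
            script_args.append(a.rel_to_builddir())
        elif isinstance(a, Target):
            if not allow_built:
                raise ValueError('Arguments to {} cannot be built'.format(name))
            script_args.extend(os.path.join(a.subdir, o) for o in a.outputs)
        else:
            raise TypeError('unsupported argument to {}: {!r}'.format(name, a))
    return script_args

print(process_script_args('add_install_script', ['--mode', Target('sub', ['gen.h'])], allow_built=True))
# ['--mode', 'sub/gen.h'] (on POSIX)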
@@ -1899,19 +2065,51 @@
 
     @noPosargs
     @permittedKwargs({})
+    @FeatureDeprecated('meson.source_root', '0.56.0', 'use meson.current_source_dir instead.')
     def source_root_method(self, args, kwargs):
         return self.interpreter.environment.source_dir
 
     @noPosargs
     @permittedKwargs({})
+    @FeatureDeprecated('meson.build_root', '0.56.0', 'use meson.current_build_dir instead.')
     def build_root_method(self, args, kwargs):
         return self.interpreter.environment.build_dir
 
     @noPosargs
     @permittedKwargs({})
-    def has_exe_wrapper_method(self, args, kwargs):
-        if self.is_cross_build_method(None, None) and \
-           self.build.environment.need_exe_wrapper():
+    @FeatureNew('meson.project_source_root', '0.56.0')
+    def project_source_root_method(self, args, kwargs):
+        src = self.interpreter.environment.source_dir
+        sub = self.interpreter.root_subdir
+        if sub == '':
+            return src
+        return os.path.join(src, sub)
+
+    @noPosargs
+    @permittedKwargs({})
+    @FeatureNew('meson.project_build_root', '0.56.0')
+    def project_build_root_method(self, args, kwargs):
+        src = self.interpreter.environment.build_dir
+        sub = self.interpreter.root_subdir
+        if sub == '':
+            return src
+        return os.path.join(src, sub)
+
+    @noPosargs
+    @permittedKwargs({})
+    @FeatureDeprecated('meson.has_exe_wrapper', '0.55.0', 'use meson.can_run_host_binaries instead.')
+    def has_exe_wrapper_method(self, args: T.Tuple[object, ...], kwargs: T.Dict[str, object]) -> bool:
+        return self.can_run_host_binaries_impl(args, kwargs)
+
+    @noPosargs
+    @permittedKwargs({})
+    @FeatureNew('meson.can_run_host_binaries', '0.55.0')
+    def can_run_host_binaries_method(self, args: T.Tuple[object, ...], kwargs: T.Dict[str, object]) -> bool:
+        return self.can_run_host_binaries_impl(args, kwargs)
+
+    def can_run_host_binaries_impl(self, args, kwargs):
+        if (self.is_cross_build_method(None, None) and
+                self.build.environment.need_exe_wrapper()):
             if self.build.environment.exe_wrapper is None:
                 return False
         # We return True when exe_wrap is defined, when it's not needed, and
@@ -1933,7 +2131,7 @@
         clist = self.interpreter.coredata.compilers[for_machine]
         if cname in clist:
             return CompilerHolder(clist[cname], self.build.environment, self.interpreter.subproject)
-        raise InterpreterException('Tried to access compiler for unspecified language "%s".' % cname)
+        raise InterpreterException('Tried to access compiler for language "%s", not specified for %s machine.' % (cname, for_machine.get_lower_case_name()))
 
     @noPosargs
     @permittedKwargs({})
@@ -1971,11 +2169,34 @@
                                         self.interpreter.environment.build_dir)
             if not os.path.exists(abspath):
                 raise InterpreterException('Tried to override %s with a file that does not exist.' % name)
-            exe = OverrideProgram(abspath)
+            exe = OverrideProgram(name, abspath)
         if not isinstance(exe, (dependencies.ExternalProgram, build.Executable)):
             raise InterpreterException('Second argument must be an external program or executable.')
         self.interpreter.add_find_program_override(name, exe)
 
+    @FeatureNew('meson.override_dependency', '0.54.0')
+    @permittedKwargs({'native'})
+    def override_dependency_method(self, args, kwargs):
+        if len(args) != 2:
+            raise InterpreterException('Override needs two arguments')
+        name = args[0]
+        dep = args[1]
+        if not isinstance(name, str) or not name:
+            raise InterpreterException('First argument must be a string and cannot be empty')
+        if hasattr(dep, 'held_object'):
+            dep = dep.held_object
+        if not isinstance(dep, dependencies.Dependency):
+            raise InterpreterException('Second argument must be a dependency object')
+        identifier = dependencies.get_dep_identifier(name, kwargs)
+        for_machine = self.interpreter.machine_from_native_kwarg(kwargs)
+        override = self.build.dependency_overrides[for_machine].get(identifier)
+        if override:
+            m = 'Tried to override dependency {!r} which has already been resolved or overridden at {}'
+            location = mlog.get_error_location_string(override.node.filename, override.node.lineno)
+            raise InterpreterException(m.format(name, location))
+        self.build.dependency_overrides[for_machine][identifier] = \
+            build.DependencyOverride(dep, self.interpreter.current_node)
+
     @noPosargs
     @permittedKwargs({})
     def project_version_method(self, args, kwargs):
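An illustrative sketch of the bookkeeping behind meson.override_dependency() above: one override table per machine, keyed by a dependency identifier, and overriding an already resolved or overridden name is an error.

def override_dependency(overrides, for_machine, identifier, dep):
    if identifier in overrides[for_machine]:
        raise RuntimeError('Tried to override dependency {!r} which has already '
                           'been resolved or overridden'.format(identifier))
    overrides[for_machine][identifier] = dep

overrides = {'host': {}, 'build': {}}
override_dependency(overrides, 'host', ('zlib',), object())
# override_dependency(overrides, 'host', ('zlib',), object())  # would raise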
@@ -1990,7 +2211,7 @@
     @noPosargs
     @permittedKwargs({})
     def version_method(self, args, kwargs):
-        return coredata.version
+        return MesonVersionString(coredata.version)
 
     @noPosargs
     @permittedKwargs({})
@@ -1999,7 +2220,7 @@
 
     @noArgsFlattening
     @permittedKwargs({})
-    def get_cross_property_method(self, args, kwargs):
+    def get_cross_property_method(self, args, kwargs) -> str:
         if len(args) < 1 or len(args) > 2:
             raise InterpreterException('Must have one or two arguments.')
         propname = args[0]
@@ -2013,6 +2234,34 @@
                 return args[1]
             raise InterpreterException('Unknown cross property: %s.' % propname)
 
+    @noArgsFlattening
+    @permittedKwargs({'native'})
+    @FeatureNew('meson.get_external_property', '0.54.0')
+    def get_external_property_method(self, args: T.Sequence[str], kwargs: dict) -> str:
+        if len(args) < 1 or len(args) > 2:
+            raise InterpreterException('Must have one or two positional arguments.')
+        propname = args[0]
+        if not isinstance(propname, str):
+            raise InterpreterException('Property name must be string.')
+
+        def _get_native() -> str:
+            try:
+                props = self.interpreter.environment.properties.build
+                return props[propname]
+            except Exception:
+                if len(args) == 2:
+                    return args[1]
+                raise InterpreterException('Unknown native property: %s.' % propname)
+        if 'native' in kwargs:
+            if kwargs['native']:
+                return _get_native()
+            else:
+                return self.get_cross_property_method(args, {})
+        else:  # native: not specified
+            if self.build.environment.is_cross_build():
+                return self.get_cross_property_method(args, kwargs)
+            else:
+                return _get_native()
 
 known_library_kwargs = (
     build.known_shlib_kwargs |
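A simplified sketch of the lookup order in meson.get_external_property() above: an explicit native keyword selects the machine, otherwise cross builds read the cross properties and native builds read the native ones, with an optional fallback value.

def get_external_property(name, native_props, cross_props, is_cross,
                          native=None, fallback=None):
    use_native = native if native is not None else not is_cross
    props = native_props if use_native else cross_props
    if name in props:
        return props[name]
    if fallback is not None:
        return fallback
    raise KeyError('Unknown property: {}'.format(name))

assert get_external_property('sys_root', {}, {'sys_root': '/sysroot'}, is_cross=True) == '/sysroot'
assert get_external_property('needs_exe_wrapper', {}, {}, is_cross=False, fallback=False) is False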
@@ -2030,7 +2279,7 @@
 
 permitted_kwargs = {'add_global_arguments': {'language', 'native'},
                     'add_global_link_arguments': {'language', 'native'},
-                    'add_languages': {'required'},
+                    'add_languages': {'required', 'native'},
                     'add_project_link_arguments': {'language', 'native'},
                     'add_project_arguments': {'language', 'native'},
                     'add_test_setup': {'exe_wrapper', 'gdb', 'timeout_multiplier', 'env', 'is_default'},
@@ -2070,6 +2319,7 @@
                                    'main',
                                    'method',
                                    'modules',
+                                   'components',
                                    'cmake_module_path',
                                    'optional_modules',
                                    'native',
@@ -2089,6 +2339,7 @@
                                            'link_args',
                                            'link_whole',
                                            'version',
+                                           'variables',
                                            },
                     'executable': build.known_exe_kwargs,
                     'find_program': {'required', 'native', 'version', 'dirs'},
@@ -2121,15 +2372,25 @@
 
 class Interpreter(InterpreterBase):
 
-    def __init__(self, build, backend=None, subproject='', subdir='', subproject_dir='subprojects',
-                 modules = None, default_project_options=None, mock=False, ast=None):
-        super().__init__(build.environment.get_source_dir(), subdir)
+    def __init__(
+                self,
+                build: build.Build,
+                backend: T.Optional[Backend] = None,
+                subproject: str = '',
+                subdir: str = '',
+                subproject_dir: str = 'subprojects',
+                modules: T.Optional[T.Dict[str, ExtensionModule]] = None,
+                default_project_options: T.Optional[T.Dict[str, str]] = None,
+                mock: bool = False,
+                ast: T.Optional[mparser.CodeBlockNode] = None,
+                is_translated: bool = False,
+            ) -> None:
+        super().__init__(build.environment.get_source_dir(), subdir, subproject)
         self.an_unpicklable_object = mesonlib.an_unpicklable_object
         self.build = build
         self.environment = build.environment
         self.coredata = self.environment.get_coredata()
         self.backend = backend
-        self.subproject = subproject
         self.summary = {}
         if modules is None:
             self.modules = {}
@@ -2161,11 +2422,21 @@
             self.default_project_options = {}
         self.project_default_options = {}
         self.build_func_dict()
+
         # build_def_files needs to be defined before parse_project is called
-        self.build_def_files = [os.path.join(self.subdir, environment.build_filename)]
+        #
+        # For non-meson subprojects, we'll be using the ast. Even if it does
+        # exist we don't want to add a dependency on it, it's autogenerated
+        # from the actual build files, and is just for reference.
+        self.build_def_files = []
+        build_filename = os.path.join(self.subdir, environment.build_filename)
+        if not is_translated:
+            self.build_def_files.append(build_filename)
         if not mock:
             self.parse_project()
+        self._redetect_machines()
 
+    def _redetect_machines(self):
         # Re-initialize machine descriptions. We can do a better job now because we
         # have the compilers needed to gain more knowledge, so wipe out old
         # inference and start over.
@@ -2183,7 +2454,8 @@
         self.builtin['target_machine'] = \
             MachineHolder(self.build.environment.machines.target)
 
-    def get_non_matching_default_options(self):
+    # TODO: Why is this in interpreter.py and not CoreData or Environment?
+    def get_non_matching_default_options(self) -> T.Iterator[T.Tuple[str, str, coredata.UserOption]]:
         env = self.environment
         for def_opt_name, def_opt_value in self.project_default_options.items():
             for opts in env.coredata.get_all_options():
@@ -2260,7 +2532,7 @@
 
         if isinstance(item, build.CustomTarget):
             return CustomTargetHolder(item, self)
-        elif isinstance(item, (int, str, bool)) or item is None:
+        elif isinstance(item, (int, str, bool, Disabler, InterpreterObject)) or item is None:
             return item
         elif isinstance(item, build.Executable):
             return ExecutableHolder(item, self)
@@ -2275,9 +2547,11 @@
         elif isinstance(item, dependencies.Dependency):
             return DependencyHolder(item, self.subproject)
         elif isinstance(item, dependencies.ExternalProgram):
-            return ExternalProgramHolder(item)
+            return ExternalProgramHolder(item, self.subproject)
         elif hasattr(item, 'held_object'):
             return item
+        elif isinstance(item, InterpreterObject):
+            return item
         else:
             raise InterpreterException('Module returned a value of unknown type.')
 
@@ -2298,14 +2572,16 @@
             elif isinstance(v, build.Data):
                 self.build.data.append(v)
             elif isinstance(v, dependencies.ExternalProgram):
-                return ExternalProgramHolder(v)
+                return ExternalProgramHolder(v, self.subproject)
             elif isinstance(v, dependencies.InternalDependency):
                 # FIXME: This is special cased and not ideal:
                 # The first source is our new VapiTarget, the rest are deps
                 self.process_new_values(v.sources[0])
+            elif isinstance(v, InstallDir):
+                self.build.install_dirs.append(v)
             elif hasattr(v, 'held_object'):
                 pass
-            elif isinstance(v, (int, str, bool)):
+            elif isinstance(v, (int, str, bool, Disabler)):
                 pass
             else:
                 raise InterpreterException('Module returned a value of unknown type.')
@@ -2317,7 +2593,7 @@
         self.process_new_values(invalues)
         return self.holderify(return_object.return_value)
 
-    def get_build_def_files(self):
+    def get_build_def_files(self) -> T.List[str]:
         return self.build_def_files
 
     def add_build_def_file(self, f):
@@ -2330,14 +2606,26 @@
                 return
             f = os.path.normpath(f.relative_name())
         elif os.path.isfile(f) and not f.startswith('/dev'):
-            srcdir = self.environment.get_source_dir()
-            builddir = self.environment.get_build_dir()
-            f = os.path.normpath(f)
-            rel_path = mesonlib.relpath(f, start=srcdir)
-            if not rel_path.startswith('..'):
-                f = rel_path
-            elif not mesonlib.relpath(f, start=builddir).startswith('..'):
+            srcdir = Path(self.environment.get_source_dir())
+            builddir = Path(self.environment.get_build_dir())
+            try:
+                f = Path(f).resolve()
+            except OSError:
+                f = Path(f)
+                s = f.stat()
+                if (hasattr(s, 'st_file_attributes') and
+                        s.st_file_attributes & stat.FILE_ATTRIBUTE_REPARSE_POINT != 0 and
+                        s.st_reparse_tag == stat.IO_REPARSE_TAG_APPEXECLINK):
+                    # This is a Windows Store link which we can't
+                    # resolve, so just do our best otherwise.
+                    f = f.parent.resolve() / f.name
+                else:
+                    raise
+            if builddir in f.parents:
                 return
+            if srcdir in f.parents:
+                f = f.relative_to(srcdir)
+            f = str(f)
         else:
             return
         if f not in self.build_def_files:
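A simplified sketch of the pathlib-based classification that add_build_def_file() now performs above: files under the build dir are ignored, files under the source dir are stored relative to it, anything else keeps its full path. The example paths are made up, and the real code additionally resolves symlinks and handles Windows Store reparse points.

from pathlib import PurePosixPath

def classify(f, srcdir, builddir):
    f, srcdir, builddir = PurePosixPath(f), PurePosixPath(srcdir), PurePosixPath(builddir)
    if builddir in f.parents:
        return None                      # generated file, not a build definition
    if srcdir in f.parents:
        return str(f.relative_to(srcdir))
    return str(f)

assert classify('/src/app/meson.build', '/src/app', '/src/app/_build') == 'meson.build'
assert classify('/src/app/_build/config.h', '/src/app', '/src/app/_build') is None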
@@ -2347,21 +2635,35 @@
         return self.variables
 
     def check_stdlibs(self):
-        for for_machine in MachineChoice:
+        machine_choices = [MachineChoice.HOST]
+        if self.coredata.is_cross_build():
+            machine_choices.append(MachineChoice.BUILD)
+        for for_machine in machine_choices:
             props = self.build.environment.properties[for_machine]
             for l in self.coredata.compilers[for_machine].keys():
                 try:
                     di = mesonlib.stringlistify(props.get_stdlib(l))
-                    if len(di) != 2:
-                        raise InterpreterException('Stdlib definition for %s should have exactly two elements.'
-                                                   % l)
-                    projname, depname = di
-                    subproj = self.do_subproject(projname, 'meson', {})
-                    self.build.stdlibs.host[l] = subproj.get_variable_method([depname], {})
                 except KeyError:
-                    pass
-                except InvalidArguments:
-                    pass
+                    continue
+                if len(di) == 1:
+                    FeatureNew.single_use('stdlib without variable name', '0.56.0', self.subproject)
+                kwargs = {'fallback': di,
+                          'native': for_machine is MachineChoice.BUILD,
+                          }
+                name = display_name = l + '_stdlib'
+                dep = self.dependency_impl(name, display_name, kwargs, force_fallback=True)
+                self.build.stdlibs[for_machine][l] = dep
+
+    def import_module(self, modname):
+        if modname in self.modules:
+            return
+        try:
+            module = importlib.import_module('mesonbuild.modules.' + modname)
+        except ImportError:
+            raise InvalidArguments('Module "%s" does not exist' % (modname, ))
+        ext_module = module.initialize(self)
+        assert isinstance(ext_module, ExtensionModule)
+        self.modules[modname] = ext_module
 
     @stringArgs
     @noKwargs
@@ -2371,14 +2673,15 @@
         modname = args[0]
         if modname.startswith('unstable-'):
             plainname = modname.split('-', 1)[1]
-            mlog.warning('Module %s has no backwards or forwards compatibility and might not exist in future releases.' % modname, location=node)
-            modname = 'unstable_' + plainname
-        if modname not in self.modules:
             try:
-                module = importlib.import_module('mesonbuild.modules.' + modname)
-            except ImportError:
-                raise InvalidArguments('Module "%s" does not exist' % (modname, ))
-            self.modules[modname] = module.initialize(self)
+                # check if stable module exists
+                self.import_module(plainname)
+                mlog.warning('Module %s is now stable, please use the %s module instead.' % (modname, plainname))
+                modname = plainname
+            except InvalidArguments:
+                mlog.warning('Module %s has no backwards or forwards compatibility and might not exist in future releases.' % modname, location=node)
+                modname = 'unstable_' + plainname
+        self.import_module(modname)
         return ModuleHolder(modname, self.modules[modname], self)
 
     @stringArgs
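A quick sketch of the unstable- module resolution shown above: importing unstable-foo now tries the stable foo module first, warns if it exists, and only falls back to the unstable_ name otherwise. The module names in the example are hypothetical.

def resolve_module_name(modname, available_stable_modules):
    if modname.startswith('unstable-'):
        plainname = modname.split('-', 1)[1]
        if plainname in available_stable_modules:
            print('Module %s is now stable, please use the %s module instead.'
                  % (modname, plainname))
            return plainname
        return 'unstable_' + plainname
    return modname

assert resolve_module_name('unstable-foo', {'foo'}) == 'foo'
assert resolve_module_name('unstable-bar', set()) == 'unstable_bar'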
@@ -2386,7 +2689,34 @@
     def func_files(self, node, args, kwargs):
         return [mesonlib.File.from_source_file(self.environment.source_dir, self.subdir, fname) for fname in args]
 
+    # Used by declare_dependency() and pkgconfig.generate()
+    def extract_variables(self, kwargs, argname='variables', list_new=False, dict_new=False):
+        variables = kwargs.get(argname, {})
+        if isinstance(variables, dict):
+            if dict_new and variables:
+                FeatureNew.single_use('variables as dictionary', '0.56.0', self.subproject)
+        else:
+            varlist = mesonlib.stringlistify(variables)
+            if list_new:
+                FeatureNew.single_use('variables as list of strings', '0.56.0', self.subproject)
+            variables = collections.OrderedDict()
+            for v in varlist:
+                try:
+                    (key, value) = v.split('=', 1)
+                except ValueError:
+                    raise InterpreterException('Variable {!r} must have a value separated by equals sign.'.format(v))
+                variables[key.strip()] = value.strip()
+        for k, v in variables.items():
+            if not k or not v:
+                raise InterpreterException('Empty variable name or value')
+            if any(c.isspace() for c in k):
+                raise InterpreterException('Invalid whitespace in variable name "{}"'.format(k))
+            if not isinstance(v, str):
+                raise InterpreterException('variables values must be strings.')
+        return variables
+
     @FeatureNewKwargs('declare_dependency', '0.46.0', ['link_whole'])
+    @FeatureNewKwargs('declare_dependency', '0.54.0', ['variables'])
     @permittedKwargs(permitted_kwargs['declare_dependency'])
     @noPosargs
     def func_declare_dependency(self, node, args, kwargs):
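A minimal sketch of the 'variables' parsing that extract_variables() above adds for declare_dependency() and pkgconfig.generate(): a list of key=value strings becomes a dict, and empty or malformed entries are rejected.

def parse_variables(varlist):
    variables = {}
    for v in varlist:
        try:
            key, value = v.split('=', 1)
        except ValueError:
            raise ValueError('Variable {!r} must have a value separated by equals sign.'.format(v))
        key, value = key.strip(), value.strip()
        if not key or not value:
            raise ValueError('Empty variable name or value')
        variables[key] = value
    return variables

assert parse_variables(['datadir=share', 'plugindir = lib/plugins']) == {
    'datadir': 'share', 'plugindir': 'lib/plugins'}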
@@ -2394,13 +2724,14 @@
         if not isinstance(version, str):
             raise InterpreterException('Version must be a string.')
         incs = self.extract_incdirs(kwargs)
-        libs = extract_as_list(kwargs, 'link_with', unholder=True)
-        libs_whole = extract_as_list(kwargs, 'link_whole', unholder=True)
+        libs = unholder(extract_as_list(kwargs, 'link_with'))
+        libs_whole = unholder(extract_as_list(kwargs, 'link_whole'))
         sources = extract_as_list(kwargs, 'sources')
-        sources = listify(self.source_strings_to_files(sources), unholder=True)
-        deps = extract_as_list(kwargs, 'dependencies', unholder=True)
+        sources = unholder(listify(self.source_strings_to_files(sources)))
+        deps = unholder(extract_as_list(kwargs, 'dependencies'))
         compile_args = mesonlib.stringlistify(kwargs.get('compile_args', []))
         link_args = mesonlib.stringlistify(kwargs.get('link_args', []))
+        variables = self.extract_variables(kwargs, list_new=True)
         final_deps = []
         for d in deps:
             try:
@@ -2415,13 +2746,14 @@
                 raise InterpreterException('''Entries in "link_with" may only be self-built targets,
 external dependencies (including libraries) must go to "dependencies".''')
         dep = dependencies.InternalDependency(version, incs, compile_args,
-                                              link_args, libs, libs_whole, sources, final_deps)
+                                              link_args, libs, libs_whole, sources, final_deps,
+                                              variables)
         return DependencyHolder(dep, self.subproject)
 
     @noKwargs
     def func_assert(self, node, args, kwargs):
         if len(args) == 1:
-            FeatureNew('assert function without message argument', '0.53.0').use(self.subproject)
+            FeatureNew.single_use('assert function without message argument', '0.53.0', self.subproject)
             value = args[0]
             message = None
         elif len(args) == 2:
@@ -2481,7 +2813,7 @@
                       ' and therefore cannot be used during configuration'
                 raise InterpreterException(msg.format(progname, cmd.description()))
             if not cmd.found():
-                raise InterpreterException('command {!r} not found or not executable'.format(cmd))
+                raise InterpreterException('command {!r} not found or not executable'.format(cmd.get_name()))
         elif isinstance(cmd, CompilerHolder):
             exelist = cmd.compiler.get_exelist()
             cmd = exelist[0]
@@ -2536,79 +2868,75 @@
     def func_subproject(self, nodes, args, kwargs):
         if len(args) != 1:
             raise InterpreterException('Subproject takes exactly one argument')
-        dirname = args[0]
-        return self.do_subproject(dirname, 'meson', kwargs)
+        subp_name = args[0]
+        return self.do_subproject(subp_name, 'meson', kwargs)
 
-    def disabled_subproject(self, dirname, feature=None):
-        sub = SubprojectHolder(None, self.subproject_dir, dirname)
-        if feature:
-            sub.disabled_feature = feature
-        self.subprojects[dirname] = sub
+    def disabled_subproject(self, subp_name, disabled_feature=None, exception=None):
+        sub = SubprojectHolder(None, os.path.join(self.subproject_dir, subp_name),
+                               disabled_feature=disabled_feature, exception=exception)
+        self.subprojects[subp_name] = sub
         return sub
 
-    def do_subproject(self, dirname: str, method: str, kwargs):
+    def get_subproject(self, subp_name):
+        sub = self.subprojects.get(subp_name)
+        if sub and sub.found():
+            return sub
+        return None
+
+    def do_subproject(self, subp_name: str, method: str, kwargs):
         disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
         if disabled:
-            mlog.log('Subproject', mlog.bold(dirname), ':', 'skipped: feature', mlog.bold(feature), 'disabled')
-            return self.disabled_subproject(dirname, feature)
+            mlog.log('Subproject', mlog.bold(subp_name), ':', 'skipped: feature', mlog.bold(feature), 'disabled')
+            return self.disabled_subproject(subp_name, disabled_feature=feature)
 
         default_options = mesonlib.stringlistify(kwargs.get('default_options', []))
         default_options = coredata.create_options_dict(default_options)
-        if dirname == '':
-            raise InterpreterException('Subproject dir name must not be empty.')
-        if dirname[0] == '.':
-            raise InterpreterException('Subproject dir name must not start with a period.')
-        if '..' in dirname:
+
+        if subp_name == '':
+            raise InterpreterException('Subproject name must not be empty.')
+        if subp_name[0] == '.':
+            raise InterpreterException('Subproject name must not start with a period.')
+        if '..' in subp_name:
             raise InterpreterException('Subproject name must not contain a ".." path segment.')
-        if os.path.isabs(dirname):
+        if os.path.isabs(subp_name):
             raise InterpreterException('Subproject name must not be an absolute path.')
-        if has_path_sep(dirname):
+        if has_path_sep(subp_name):
             mlog.warning('Subproject name has a path separator. This may cause unexpected behaviour.',
                          location=self.current_node)
-        if dirname in self.subproject_stack:
-            fullstack = self.subproject_stack + [dirname]
+        if subp_name in self.subproject_stack:
+            fullstack = self.subproject_stack + [subp_name]
             incpath = ' => '.join(fullstack)
             raise InvalidCode('Recursive include of subprojects: %s.' % incpath)
-        if dirname in self.subprojects:
-            subproject = self.subprojects[dirname]
+        if subp_name in self.subprojects:
+            subproject = self.subprojects[subp_name]
             if required and not subproject.found():
-                raise InterpreterException('Subproject "%s/%s" required but not found.' % (
-                                           self.subproject_dir, dirname))
+                raise InterpreterException('Subproject "%s" required but not found.' % (subproject.subdir))
             return subproject
 
-        subproject_dir_abs = os.path.join(self.environment.get_source_dir(), self.subproject_dir)
-        r = wrap.Resolver(subproject_dir_abs, self.coredata.get_builtin_option('wrap_mode'))
+        r = self.environment.wrap_resolver
         try:
-            resolved = r.resolve(dirname, method)
+            subdir = r.resolve(subp_name, method, self.subproject)
         except wrap.WrapException as e:
-            subprojdir = os.path.join(self.subproject_dir, r.directory)
-            if isinstance(e, wrap.WrapNotFoundException):
-                # if the reason subproject execution failed was because
-                # the directory doesn't exist, try to give some helpful
-                # advice if it's a nested subproject that needs
-                # promotion...
-                self.print_nested_info(dirname)
             if not required:
                 mlog.log(e)
-                mlog.log('Subproject ', mlog.bold(subprojdir), 'is buildable:', mlog.red('NO'), '(disabling)')
-                return self.disabled_subproject(dirname)
+                mlog.log('Subproject ', mlog.bold(subp_name), 'is buildable:', mlog.red('NO'), '(disabling)')
+                return self.disabled_subproject(subp_name, exception=e)
             raise e
 
-        subdir = os.path.join(self.subproject_dir, resolved)
-        subdir_abs = os.path.join(subproject_dir_abs, resolved)
+        subdir_abs = os.path.join(self.environment.get_source_dir(), subdir)
         os.makedirs(os.path.join(self.build.environment.get_build_dir(), subdir), exist_ok=True)
         self.global_args_frozen = True
 
         mlog.log()
         with mlog.nested():
-            mlog.log('Executing subproject', mlog.bold(dirname), 'method', mlog.bold(method), '\n')
+            mlog.log('Executing subproject', mlog.bold(subp_name), 'method', mlog.bold(method), '\n')
         try:
             if method == 'meson':
-                return self._do_subproject_meson(dirname, subdir, default_options, kwargs)
+                return self._do_subproject_meson(subp_name, subdir, default_options, kwargs)
             elif method == 'cmake':
-                return self._do_subproject_cmake(dirname, subdir, subdir_abs, default_options, kwargs)
+                return self._do_subproject_cmake(subp_name, subdir, subdir_abs, default_options, kwargs)
             else:
-                raise InterpreterException('The method {} is invalid for the subproject {}'.format(method, dirname))
+                raise InterpreterException('The method {} is invalid for the subproject {}'.format(method, subp_name))
         # Invalid code is always an error
         except InvalidCode:
             raise
@@ -2618,21 +2946,29 @@
                     # Suppress the 'ERROR:' prefix because this exception is not
                     # fatal and VS CI treats any logs with "ERROR:" as fatal.
                     mlog.exception(e, prefix=mlog.yellow('Exception:'))
-                mlog.log('\nSubproject', mlog.bold(dirname), 'is buildable:', mlog.red('NO'), '(disabling)')
-                return self.disabled_subproject(dirname)
+                mlog.log('\nSubproject', mlog.bold(subdir), 'is buildable:', mlog.red('NO'), '(disabling)')
+                return self.disabled_subproject(subp_name, exception=e)
             raise e
 
-    def _do_subproject_meson(self, dirname, subdir, default_options, kwargs, ast=None, build_def_files=None):
+    def _do_subproject_meson(self, subp_name: str, subdir: str, default_options, kwargs,
+                             ast: T.Optional[mparser.CodeBlockNode] = None,
+                             build_def_files: T.Optional[T.List[str]] = None,
+                             is_translated: bool = False) -> SubprojectHolder:
         with mlog.nested():
             new_build = self.build.copy()
-            subi = Interpreter(new_build, self.backend, dirname, subdir, self.subproject_dir,
-                               self.modules, default_options, ast=ast)
+            subi = Interpreter(new_build, self.backend, subp_name, subdir, self.subproject_dir,
+                               self.modules, default_options, ast=ast, is_translated=is_translated)
             subi.subprojects = self.subprojects
 
-            subi.subproject_stack = self.subproject_stack + [dirname]
+            subi.subproject_stack = self.subproject_stack + [subp_name]
             current_active = self.active_projectname
+            current_warnings_counter = mlog.log_warnings_counter
+            mlog.log_warnings_counter = 0
             subi.run()
-            mlog.log('Subproject', mlog.bold(dirname), 'finished.')
+            subi_warnings = mlog.log_warnings_counter
+            mlog.log_warnings_counter = current_warnings_counter
+
+            mlog.log('Subproject', mlog.bold(subp_name), 'finished.')
 
         mlog.log()
 
@@ -2640,31 +2976,39 @@
             pv = subi.project_version
             wanted = kwargs['version']
             if pv == 'undefined' or not mesonlib.version_compare_many(pv, wanted)[0]:
-                raise InterpreterException('Subproject %s version is %s but %s required.' % (dirname, pv, wanted))
+                raise InterpreterException('Subproject %s version is %s but %s required.' % (subp_name, pv, wanted))
         self.active_projectname = current_active
         self.subprojects.update(subi.subprojects)
-        self.subprojects[dirname] = SubprojectHolder(subi, self.subproject_dir, dirname)
+        self.subprojects[subp_name] = SubprojectHolder(subi, subdir, warnings=subi_warnings)
         # Duplicates are possible when subproject uses files from project root
         if build_def_files:
             self.build_def_files = list(set(self.build_def_files + build_def_files))
-        else:
-            self.build_def_files = list(set(self.build_def_files + subi.build_def_files))
+        # We always need the subi.build_def_files, to propagate sub-sub-projects
+        self.build_def_files = list(set(self.build_def_files + subi.build_def_files))
         self.build.merge(subi.build)
-        self.build.subprojects[dirname] = subi.project_version
+        self.build.subprojects[subp_name] = subi.project_version
         self.summary.update(subi.summary)
-        return self.subprojects[dirname]
+        return self.subprojects[subp_name]
 
-    def _do_subproject_cmake(self, dirname, subdir, subdir_abs, default_options, kwargs):
+    def _do_subproject_cmake(self, subp_name, subdir, subdir_abs, default_options, kwargs):
         with mlog.nested():
             new_build = self.build.copy()
             prefix = self.coredata.builtins['prefix'].value
+
+            from .modules.cmake import CMakeSubprojectOptions
+            options = kwargs.get('options', CMakeSubprojectOptions())
+            if not isinstance(options, CMakeSubprojectOptions):
+                raise InterpreterException('"options" kwarg must be CMakeSubprojectOptions'
+                                           ' object (created by cmake.subproject_options())')
+
             cmake_options = mesonlib.stringlistify(kwargs.get('cmake_options', []))
-            cm_int = CMakeInterpreter(new_build, subdir, subdir_abs, prefix, new_build.environment, self.backend)
+            cmake_options += options.cmake_options
+            cm_int = CMakeInterpreter(new_build, Path(subdir), Path(subdir_abs), Path(prefix), new_build.environment, self.backend)
             cm_int.initialise(cmake_options)
             cm_int.analyse()
 
             # Generate a meson ast and execute it with the normal do_subproject_meson
-            ast = cm_int.pretend_to_be_meson()
+            ast = cm_int.pretend_to_be_meson(options.target_options)
 
             mlog.log()
             with mlog.nested():
@@ -2684,26 +3028,30 @@
                 mlog.cmd_ci_include(meson_filename)
                 mlog.log()
 
-            result = self._do_subproject_meson(dirname, subdir, default_options, kwargs, ast, cm_int.bs_files)
+            result = self._do_subproject_meson(subp_name, subdir, default_options, kwargs, ast, cm_int.bs_files, is_translated=True)
             result.cm_interpreter = cm_int
 
         mlog.log()
         return result
 
     def get_option_internal(self, optname):
-        for opts in chain(
-                [self.coredata.base_options, compilers.base_options, self.coredata.builtins],
-                self.coredata.get_prefixed_options_per_machine(self.coredata.builtins_per_machine),
-                self.coredata.get_prefixed_options_per_machine(self.coredata.compiler_options),
-        ):
-            v = opts.get(optname)
-            if v is not None:
-                return v
-
         raw_optname = optname
         if self.is_subproject():
             optname = self.subproject + ':' + optname
 
+
+        for opts in [
+                self.coredata.base_options, compilers.base_options, self.coredata.builtins,
+                dict(self.coredata.get_prefixed_options_per_machine(self.coredata.builtins_per_machine)),
+                dict(self.coredata.flatten_lang_iterator(
+                    self.coredata.get_prefixed_options_per_machine(self.coredata.compiler_options))),
+        ]:
+            v = opts.get(optname)
+            if v is None or v.yielding:
+                v = opts.get(raw_optname)
+            if v is not None:
+                return v
+
         try:
             opt = self.coredata.user_options[optname]
             if opt.yielding and ':' in optname and raw_optname in self.coredata.user_options:
@@ -2749,7 +3097,7 @@
         if len(args) > 1:
             raise InterpreterException('configuration_data takes only one optional positional argument')
         elif len(args) == 1:
-            FeatureNew('configuration_data dictionary', '0.49.0').use(self.subproject)
+            FeatureNew.single_use('configuration_data dictionary', '0.49.0', self.subproject)
             initial_values = args[0]
             if not isinstance(initial_values, dict):
                 raise InterpreterException('configuration_data first argument must be a dictionary')
@@ -2763,7 +3111,7 @@
             return
         backend = self.coredata.get_builtin_option('backend')
         from .backend import backends
-        self.backend = backends.get_backend_from_name(backend, self.build)
+        self.backend = backends.get_backend_from_name(backend, self.build, self)
 
         if self.backend is None:
             raise InterpreterException('Unknown backend "%s".' % backend)
@@ -2777,7 +3125,7 @@
         if self.environment.first_invocation:
             self.coredata.init_backend_options(backend)
 
-        options = {k: v for k, v in self.environment.cmd_line_options.items() if k.startswith('backend_')}
+        options = {k: v for k, v in self.environment.raw_options.items() if k.startswith('backend_')}
         self.coredata.set_options(options)
 
     @stringArgs
@@ -2789,16 +3137,20 @@
         if ':' in proj_name:
             raise InvalidArguments("Project name {!r} must not contain ':'".format(proj_name))
 
+        # This needs to be evaluated as early as possible, as meson uses this
+        # for things like deprecation testing.
         if 'meson_version' in kwargs:
             cv = coredata.version
             pv = kwargs['meson_version']
             if not mesonlib.version_compare(cv, pv):
                 raise InterpreterException('Meson version is %s but project requires %s' % (cv, pv))
+            mesonlib.project_meson_versions[self.subproject] = kwargs['meson_version']
 
         if os.path.exists(self.option_file):
             oi = optinterpreter.OptionInterpreter(self.subproject)
             oi.process(self.option_file)
             self.coredata.merge_user_options(oi.options)
+            self.add_build_def_file(self.option_file)
 
         # Do not set default_options on reconfigure otherwise it would override
         # values previously set from command line. That means that changing
@@ -2807,8 +3159,9 @@
         self.project_default_options = mesonlib.stringlistify(kwargs.get('default_options', []))
         self.project_default_options = coredata.create_options_dict(self.project_default_options)
         if self.environment.first_invocation:
-            default_options = self.project_default_options
+            default_options = self.project_default_options.copy()
             default_options.update(self.default_project_options)
+            self.coredata.init_builtins(self.subproject)
         else:
             default_options = {}
         self.coredata.set_default_options(default_options, self.subproject, self.environment)
@@ -2824,8 +3177,11 @@
                                               'license': proj_license}
         if self.subproject in self.build.projects:
             raise InvalidCode('Second call to project().')
-        if not self.is_subproject() and 'subproject_dir' in kwargs:
-            spdirname = kwargs['subproject_dir']
+
+        # spdirname is the subproject_dir for this project, relative to self.subdir.
+        # self.subproject_dir is the subproject_dir for the main project, relative to top source dir.
+        spdirname = kwargs.get('subproject_dir')
+        if spdirname:
             if not isinstance(spdirname, str):
                 raise InterpreterException('Subproject_dir must be a string')
             if os.path.isabs(spdirname):
@@ -2834,22 +3190,34 @@
                 raise InterpreterException('Subproject_dir must not begin with a period.')
             if '..' in spdirname:
                 raise InterpreterException('Subproject_dir must not contain a ".." segment.')
-            self.subproject_dir = spdirname
-
+            if not self.is_subproject():
+                self.subproject_dir = spdirname
+        else:
+            spdirname = 'subprojects'
         self.build.subproject_dir = self.subproject_dir
 
-        mesonlib.project_meson_versions[self.subproject] = ''
-        if 'meson_version' in kwargs:
-            mesonlib.project_meson_versions[self.subproject] = kwargs['meson_version']
+        # Load wrap files from this (sub)project.
+        wrap_mode = self.coredata.get_builtin_option('wrap_mode')
+        if not self.is_subproject() or wrap_mode != WrapMode.nopromote:
+            subdir = os.path.join(self.subdir, spdirname)
+            r = wrap.Resolver(self.environment.get_source_dir(), subdir, wrap_mode)
+            if self.is_subproject():
+                self.environment.wrap_resolver.merge_wraps(r)
+            else:
+                self.environment.wrap_resolver = r
 
         self.build.projects[self.subproject] = proj_name
         mlog.log('Project name:', mlog.bold(proj_name))
         mlog.log('Project version:', mlog.bold(self.project_version))
-        self.add_languages(proj_langs, True)
+
+        self.add_languages(proj_langs, True, MachineChoice.HOST)
+        self.add_languages(proj_langs, False, MachineChoice.BUILD)
+
         self.set_backend()
         if not self.is_subproject():
             self.check_stdlibs()
 
+    @FeatureNewKwargs('add_languages', '0.54.0', ['native'])
     @permittedKwargs(permitted_kwargs['add_languages'])
     @stringArgs
     def func_add_languages(self, node, args, kwargs):
@@ -2858,15 +3226,20 @@
             for lang in sorted(args, key=compilers.sort_clink):
                 mlog.log('Compiler for language', mlog.bold(lang), 'skipped: feature', mlog.bold(feature), 'disabled')
             return False
-        return self.add_languages(args, required)
+        if 'native' in kwargs:
+            return self.add_languages(args, required, self.machine_from_native_kwarg(kwargs))
+        else:
+            # absent 'native' means 'both' for backwards compatibility
+            tv = FeatureNew.get_target_version(self.subproject)
+            if FeatureNew.check_version(tv, '0.54.0'):
+                mlog.warning('add_languages is missing native:, assuming languages are wanted for both host and build.',
+                             location=self.current_node)
 
-    def get_message_string_arg(self, node):
-        # reduce arguments again to avoid flattening posargs
-        (posargs, _) = self.reduce_arguments(node.args)
-        if len(posargs) != 1:
-            raise InvalidArguments('Expected 1 argument, got %d' % len(posargs))
+            success = self.add_languages(args, False, MachineChoice.BUILD)
+            success &= self.add_languages(args, required, MachineChoice.HOST)
+            return success
 
-        arg = posargs[0]
+    def get_message_string_arg(self, arg):
         if isinstance(arg, list):
             argstr = stringifyUserArguments(arg)
         elif isinstance(arg, dict):
@@ -2880,16 +3253,20 @@
 
         return argstr
 
+    @noArgsFlattening
     @noKwargs
     def func_message(self, node, args, kwargs):
-        argstr = self.get_message_string_arg(node)
-        self.message_impl(argstr)
+        if len(args) > 1:
+            FeatureNew.single_use('message with more than one argument', '0.54.0', self.subproject)
+        args_str = [self.get_message_string_arg(i) for i in args]
+        self.message_impl(args_str)
 
-    def message_impl(self, argstr):
-        mlog.log(mlog.bold('Message:'), argstr)
+    def message_impl(self, args):
+        mlog.log(mlog.bold('Message:'), *args)
 
     @noArgsFlattening
-    @permittedKwargs({'section', 'bool_yn'})
+    @FeatureNewKwargs('summary', '0.54.0', ['list_sep'])
+    @permittedKwargs({'section', 'bool_yn', 'list_sep'})
     @FeatureNew('summary', '0.53.0')
     def func_summary(self, node, args, kwargs):
         if len(args) == 1:
@@ -2917,11 +3294,18 @@
         all_subprojects = collections.OrderedDict()
         for name, subp in sorted(self.subprojects.items()):
             value = subp.found()
-            if not value and hasattr(subp, 'disabled_feature'):
-                value = 'Feature {!r} disabled'.format(subp.disabled_feature)
+            if subp.disabled_feature:
+                value = [value, 'Feature {!r} disabled'.format(subp.disabled_feature)]
+            elif subp.exception:
+                value = [value, str(subp.exception)]
+            elif subp.warnings > 0:
+                value = [value, '{} warnings'.format(subp.warnings)]
             all_subprojects[name] = value
         if all_subprojects:
-            self.summary_impl('Subprojects', all_subprojects, {'bool_yn': True})
+            self.summary_impl('Subprojects', all_subprojects,
+                              {'bool_yn': True,
+                               'list_sep': ' ',
+                              })
         # Print all summaries, main project last.
         mlog.log('')  # newline
         main_summary = self.summary.pop('', None)
@@ -2930,11 +3314,14 @@
         if main_summary:
             main_summary.dump()
 
+    @noArgsFlattening
     @FeatureNew('warning', '0.44.0')
     @noKwargs
     def func_warning(self, node, args, kwargs):
-        argstr = self.get_message_string_arg(node)
-        mlog.warning(argstr, location=node)
+        if len(args) > 1:
+            FeatureNew.single_use('warning with more than one argument', '0.54.0', self.subproject)
+        args_str = [self.get_message_string_arg(i) for i in args]
+        mlog.warning(*args_str, location=node)
 
     @noKwargs
     def func_error(self, node, args, kwargs):
@@ -2946,17 +3333,33 @@
         self.validate_arguments(args, 0, [])
         raise Exception()
 
-    def add_languages(self, args: T.Sequence[str], required: bool) -> bool:
-        success = self.add_languages_for(args, required, MachineChoice.BUILD)
-        success &= self.add_languages_for(args, required, MachineChoice.HOST)
+    def add_languages(self, args: T.Sequence[str], required: bool, for_machine: MachineChoice) -> bool:
+        success = self.add_languages_for(args, required, for_machine)
         if not self.coredata.is_cross_build():
             self.coredata.copy_build_options_from_regular_ones()
+        self._redetect_machines()
         return success
 
+    def should_skip_sanity_check(self, for_machine: MachineChoice) -> bool:
+        should = self.environment.properties.host.get('skip_sanity_check', False)
+        if not isinstance(should, bool):
+            raise InterpreterException('Option skip_sanity_check must be a boolean.')
+        if for_machine != MachineChoice.HOST and not should:
+            return False
+        if not self.environment.is_cross_build() and not should:
+            return False
+        return should
+
     def add_languages_for(self, args, required, for_machine: MachineChoice):
+        args = [a.lower() for a in args]
+        langs = set(self.coredata.compilers[for_machine].keys())
+        langs.update(args)
+        if 'vala' in langs:
+            if 'c' not in langs:
+                raise InterpreterException('Compiling Vala requires C. Add C to your project languages and rerun Meson.')
+
         success = True
         for lang in sorted(args, key=compilers.sort_clink):
-            lang = lang.lower()
             clist = self.coredata.compilers[for_machine]
             machine_name = for_machine.get_lower_case_name()
             if lang in clist:
@@ -2966,7 +3369,10 @@
                     comp = self.environment.detect_compiler_for(lang, for_machine)
                     if comp is None:
                         raise InvalidArguments('Tried to use unknown language "%s".' % lang)
-                    comp.sanity_check(self.environment.get_scratch_dir(), self.environment)
+                    if self.should_skip_sanity_check(for_machine):
+                        mlog.log_once('Cross compiler sanity tests disabled via the cross file.')
+                    else:
+                        comp.sanity_check(self.environment.get_scratch_dir(), self.environment)
                 except Exception:
                     if not required:
                         mlog.log('Compiler for language',
@@ -2988,28 +3394,20 @@
                            mlog.bold(' '.join(comp.linker.get_exelist())), comp.linker.id, comp.linker.version)
             self.build.ensure_static_linker(comp)
 
-        langs = self.coredata.compilers[for_machine].keys()
-        if 'vala' in langs:
-            if 'c' not in langs:
-                raise InterpreterException('Compiling Vala requires C. Add C to your project languages and rerun Meson.')
-
         return success
 
-    def program_from_file_for(self, for_machine, prognames, silent):
-        bins = self.environment.binaries[for_machine]
-        for p in prognames:
-            if hasattr(p, 'held_object'):
-                p = p.held_object
+    def program_from_file_for(self, for_machine, prognames):
+        for p in unholder(prognames):
             if isinstance(p, mesonlib.File):
                 continue # Always points to a local (i.e. self generated) file.
             if not isinstance(p, str):
                 raise InterpreterException('Executable name must be a string')
-            prog = ExternalProgram.from_bin_list(bins, p)
+            prog = ExternalProgram.from_bin_list(self.environment, for_machine, p)
             if prog.found():
-                return ExternalProgramHolder(prog)
+                return ExternalProgramHolder(prog, self.subproject)
         return None
 
-    def program_from_system(self, args, search_dirs, silent=False):
+    def program_from_system(self, args, search_dirs, extra_info):
         # Search for scripts relative to current subdir.
         # Do not cache found programs because find_program('foobar')
         # might give different results when run from different source dirs.
@@ -3032,21 +3430,20 @@
                                        'files, not {!r}'.format(exename))
             extprog = dependencies.ExternalProgram(exename, search_dir=search_dir,
                                                    extra_search_dirs=extra_search_dirs,
-                                                   silent=silent)
-            progobj = ExternalProgramHolder(extprog)
+                                                   silent=True)
+            progobj = ExternalProgramHolder(extprog, self.subproject)
             if progobj.found():
+                extra_info.append('({})'.format(' '.join(progobj.get_command())))
                 return progobj
 
-    def program_from_overrides(self, command_names, silent=False):
+    def program_from_overrides(self, command_names, extra_info):
         for name in command_names:
             if not isinstance(name, str):
                 continue
             if name in self.build.find_overrides:
                 exe = self.build.find_overrides[name]
-                if not silent:
-                    mlog.log('Program', mlog.bold(name), 'found:', mlog.green('YES'),
-                             '(overridden: %s)' % exe.description())
-                return ExternalProgramHolder(exe)
+                extra_info.append(mlog.blue('(overridden)'))
+                return ExternalProgramHolder(exe, self.subproject, self.backend)
         return None
 
     def store_name_lookups(self, command_names):
@@ -3063,40 +3460,79 @@
                                        % name)
         self.build.find_overrides[name] = exe
 
+    def notfound_program(self, args):
+        return ExternalProgramHolder(dependencies.NonExistingExternalProgram(' '.join(args)), self.subproject)
+
     # TODO update modules to always pass `for_machine`. It is bad-form to assume
     # the host machine.
     def find_program_impl(self, args, for_machine: MachineChoice = MachineChoice.HOST,
-                          required=True, silent=True, wanted='', search_dirs=None):
-        if not isinstance(args, list):
-            args = [args]
+                          required=True, silent=True, wanted='', search_dirs=None,
+                          version_func=None):
+        args = mesonlib.listify(args)
 
-        progobj = self.program_from_overrides(args, silent=silent)
-        if progobj is None:
-            progobj = self.program_from_file_for(for_machine, args, silent=silent)
-        if progobj is None:
-            progobj = self.program_from_system(args, search_dirs, silent=silent)
-        if progobj is None and args[0].endswith('python3'):
-            prog = dependencies.ExternalProgram('python3', mesonlib.python_command, silent=True)
-            progobj = ExternalProgramHolder(prog)
-        if required and (progobj is None or not progobj.found()):
-            raise InvalidArguments('Program(s) {!r} not found or not executable'.format(args))
+        extra_info = []
+        progobj = self.program_lookup(args, for_machine, required, search_dirs, extra_info)
         if progobj is None:
-            return ExternalProgramHolder(dependencies.NonExistingExternalProgram())
-        # Only store successful lookups
-        self.store_name_lookups(args)
+            progobj = self.notfound_program(args)
+
+        if not progobj.found():
+            mlog.log('Program', mlog.bold(progobj.get_name()), 'found:', mlog.red('NO'))
+            if required:
+                m = 'Program {!r} not found'
+                raise InterpreterException(m.format(progobj.get_name()))
+            return progobj
+
         if wanted:
-            version = progobj.get_version(self)
+            if version_func:
+                version = version_func(progobj)
+            else:
+                version = progobj.get_version(self)
             is_found, not_found, found = mesonlib.version_compare_many(version, wanted)
             if not is_found:
                 mlog.log('Program', mlog.bold(progobj.get_name()), 'found:', mlog.red('NO'),
-                         'found {!r} but need:'.format(version),
-                         ', '.join(["'{}'".format(e) for e in not_found]))
+                         'found', mlog.normal_cyan(version), 'but need:',
+                         mlog.bold(', '.join(["'{}'".format(e) for e in not_found])), *extra_info)
                 if required:
                     m = 'Invalid version of program, need {!r} {!r} found {!r}.'
-                    raise InvalidArguments(m.format(progobj.get_name(), not_found, version))
-                return ExternalProgramHolder(dependencies.NonExistingExternalProgram())
+                    raise InterpreterException(m.format(progobj.get_name(), not_found, version))
+                return self.notfound_program(args)
+            extra_info.insert(0, mlog.normal_cyan(version))
+
+        # Only store successful lookups
+        self.store_name_lookups(args)
+        mlog.log('Program', mlog.bold(progobj.get_name()), 'found:', mlog.green('YES'), *extra_info)
+        return progobj
+
+    def program_lookup(self, args, for_machine, required, search_dirs, extra_info):
+        progobj = self.program_from_overrides(args, extra_info)
+        if progobj:
+            return progobj
+
+        fallback = None
+        wrap_mode = self.coredata.get_builtin_option('wrap_mode')
+        if wrap_mode != WrapMode.nofallback and self.environment.wrap_resolver:
+            fallback = self.environment.wrap_resolver.find_program_provider(args)
+        if fallback and wrap_mode == WrapMode.forcefallback:
+            return self.find_program_fallback(fallback, args, required, extra_info)
+
+        progobj = self.program_from_file_for(for_machine, args)
+        if progobj is None:
+            progobj = self.program_from_system(args, search_dirs, extra_info)
+        if progobj is None and args[0].endswith('python3'):
+            prog = dependencies.ExternalProgram('python3', mesonlib.python_command, silent=True)
+            progobj = ExternalProgramHolder(prog, self.subproject) if prog.found() else None
+        if progobj is None and fallback and required:
+            progobj = self.find_program_fallback(fallback, args, required, extra_info)
+
         return progobj
 
+    def find_program_fallback(self, fallback, args, required, extra_info):
+        mlog.log('Fallback to subproject', mlog.bold(fallback), 'which provides program',
+                 mlog.bold(' '.join(args)))
+        sp_kwargs = { 'required': required }
+        self.do_subproject(fallback, 'meson', sp_kwargs)
+        return self.program_from_overrides(args, extra_info)
+
     @FeatureNewKwargs('find_program', '0.53.0', ['dirs'])
     @FeatureNewKwargs('find_program', '0.52.0', ['version'])
     @FeatureNewKwargs('find_program', '0.49.0', ['disabler'])
@@ -3109,7 +3545,7 @@
         disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
         if disabled:
             mlog.log('Program', mlog.bold(' '.join(args)), 'skipped: feature', mlog.bold(feature), 'disabled')
-            return ExternalProgramHolder(dependencies.NonExistingExternalProgram())
+            return self.notfound_program(args)
 
         search_dirs = extract_search_dirs(kwargs)
         wanted = mesonlib.stringlistify(kwargs.get('version', []))
@@ -3124,34 +3560,51 @@
                           'Look here for example: http://mesonbuild.com/howtox.html#add-math-library-lm-portably\n'
                           )
 
-    def _find_cached_dep(self, name, kwargs):
+    def _find_cached_dep(self, name, display_name, kwargs):
         # Check if we want this as a build-time / build machine or runt-time /
         # host machine dep.
         for_machine = self.machine_from_native_kwarg(kwargs)
-
         identifier = dependencies.get_dep_identifier(name, kwargs)
-        cached_dep = self.coredata.deps[for_machine].get(identifier)
-        if cached_dep:
+        wanted_vers = mesonlib.stringlistify(kwargs.get('version', []))
+
+        override = self.build.dependency_overrides[for_machine].get(identifier)
+        if override:
+            info = [mlog.blue('(overridden)' if override.explicit else '(cached)')]
+            cached_dep = override.dep
+            # We don't implicitly override not-found dependencies, but the user could
+            # have explicitly called meson.override_dependency() with a not-found
+            # dep.
             if not cached_dep.found():
-                mlog.log('Dependency', mlog.bold(name),
-                         'found:', mlog.red('NO'), mlog.blue('(cached)'))
+                mlog.log('Dependency', mlog.bold(display_name),
+                         'found:', mlog.red('NO'), *info)
                 return identifier, cached_dep
-
-            # Verify the cached dep version match
-            wanted_vers = mesonlib.stringlistify(kwargs.get('version', []))
             found_vers = cached_dep.get_version()
-            if not wanted_vers or mesonlib.version_compare_many(found_vers, wanted_vers)[0]:
-                info = [mlog.blue('(cached)')]
-                if found_vers:
-                    info = [mlog.normal_cyan(found_vers), *info]
+            if not self.check_version(wanted_vers, found_vers):
                 mlog.log('Dependency', mlog.bold(name),
-                         'found:', mlog.green('YES'), *info)
-                return identifier, cached_dep
+                         'found:', mlog.red('NO'),
+                         'found', mlog.normal_cyan(found_vers), 'but need:',
+                         mlog.bold(', '.join(["'{}'".format(e) for e in wanted_vers])),
+                         *info)
+                return identifier, NotFoundDependency(self.environment)
+        else:
+            info = [mlog.blue('(cached)')]
+            cached_dep = self.coredata.deps[for_machine].get(identifier)
+            if cached_dep:
+                found_vers = cached_dep.get_version()
+                if not self.check_version(wanted_vers, found_vers):
+                    return identifier, None
+
+        if cached_dep:
+            if found_vers:
+                info = [mlog.normal_cyan(found_vers), *info]
+            mlog.log('Dependency', mlog.bold(display_name),
+                     'found:', mlog.green('YES'), *info)
+            return identifier, cached_dep
 
         return identifier, None
 
     @staticmethod
-    def check_subproject_version(wanted, found):
+    def check_version(wanted, found):
         if not wanted:
             return True
         if found == 'undefined' or not mesonlib.version_compare_many(found, wanted)[0]:
@@ -3161,63 +3614,102 @@
     def notfound_dependency(self):
         return DependencyHolder(NotFoundDependency(self.environment), self.subproject)
 
-    def get_subproject_dep(self, display_name, dirname, varname, kwargs):
+    def verify_fallback_consistency(self, subp_name, varname, cached_dep):
+        subi = self.get_subproject(subp_name)
+        if not cached_dep or not varname or not subi or not cached_dep.found():
+            return
+        dep = subi.get_variable_method([varname], {})
+        if dep.held_object != cached_dep:
+            m = 'Inconsistency: Subproject has overridden the dependency with a variable other than {!r}'
+            raise DependencyException(m.format(varname))
+
+    def get_subproject_dep(self, name, display_name, subp_name, varname, kwargs):
+        required = kwargs.get('required', True)
+        wanted = mesonlib.stringlistify(kwargs.get('version', []))
         dep = self.notfound_dependency()
+
+        # Verify the subproject is found
+        subproject = self.subprojects.get(subp_name)
+        if not subproject or not subproject.found():
+            mlog.log('Dependency', mlog.bold(display_name), 'from subproject',
+                     mlog.bold(subproject.subdir), 'found:', mlog.red('NO'),
+                     mlog.blue('(subproject failed to configure)'))
+            if required:
+                m = 'Subproject {} failed to configure for dependency {}'
+                raise DependencyException(m.format(subproject.subdir, display_name))
+            return dep
+
+        extra_info = []
         try:
-            subproject = self.subprojects[dirname]
-            if subproject.found():
-                dep = self.subprojects[dirname].get_variable_method([varname], {})
+            # Check if the subproject has overridden the dependency
+            _, cached_dep = self._find_cached_dep(name, display_name, kwargs)
+            if cached_dep:
+                if varname:
+                    self.verify_fallback_consistency(subp_name, varname, cached_dep)
+                if required and not cached_dep.found():
+                    m = 'Dependency {!r} is not satisfied'
+                    raise DependencyException(m.format(display_name))
+                return DependencyHolder(cached_dep, self.subproject)
+            elif varname is None:
+                mlog.log('Dependency', mlog.bold(display_name), 'from subproject',
+                         mlog.bold(subproject.subdir), 'found:', mlog.red('NO'))
+                if required:
+                    m = 'Subproject {} did not override dependency {}'
+                    raise DependencyException(m.format(subproject.subdir, display_name))
+                return self.notfound_dependency()
+            else:
+                # The subproject did not override the dependency, but we know the
+                # variable name to take.
+                dep = subproject.get_variable_method([varname], {})
         except InvalidArguments:
-            pass
+            # This is raised by get_variable_method() if varname does not exist
+            # in the subproject. Just add the reason to the not-found message
+            # that will be printed later.
+            extra_info.append(mlog.blue('(Variable {!r} not found)'.format(varname)))
 
         if not isinstance(dep, DependencyHolder):
             raise InvalidCode('Fetched variable {!r} in the subproject {!r} is '
-                              'not a dependency object.'.format(varname, dirname))
-
-        required = kwargs.get('required', True)
-        wanted = mesonlib.stringlistify(kwargs.get('version', []))
-        subproj_path = os.path.join(self.subproject_dir, dirname)
+                              'not a dependency object.'.format(varname, subp_name))
 
         if not dep.found():
+            mlog.log('Dependency', mlog.bold(display_name), 'from subproject',
+                     mlog.bold(subproject.subdir), 'found:', mlog.red('NO'), *extra_info)
             if required:
                 raise DependencyException('Could not find dependency {} in subproject {}'
-                                          ''.format(varname, dirname))
-            # If the dependency is not required, don't raise an exception
-            mlog.log('Dependency', mlog.bold(display_name), 'from subproject',
-                     mlog.bold(subproj_path), 'found:', mlog.red('NO'))
+                                          ''.format(varname, subp_name))
             return dep
 
         found = dep.held_object.get_version()
-        if not self.check_subproject_version(wanted, found):
-            if required:
-                raise DependencyException('Version {} of subproject dependency {} already '
-                                          'cached, requested incompatible version {} for '
-                                          'dep {}'.format(found, dirname, wanted, display_name))
-
+        if not self.check_version(wanted, found):
             mlog.log('Dependency', mlog.bold(display_name), 'from subproject',
-                     mlog.bold(subproj_path), 'found:', mlog.red('NO'),
+                     mlog.bold(subproject.subdir), 'found:', mlog.red('NO'),
                      'found', mlog.normal_cyan(found), 'but need:',
                      mlog.bold(', '.join(["'{}'".format(e) for e in wanted])))
+            if required:
+                raise DependencyException('Version {} of subproject dependency {} already '
+                                          'cached, requested incompatible version {} for '
+                                          'dep {}'.format(found, subp_name, wanted, display_name))
             return self.notfound_dependency()
 
         found = mlog.normal_cyan(found) if found else None
         mlog.log('Dependency', mlog.bold(display_name), 'from subproject',
-                 mlog.bold(subproj_path), 'found:', mlog.green('YES'), found)
+                 mlog.bold(subproject.subdir), 'found:', mlog.green('YES'), found)
         return dep
 
     def _handle_featurenew_dependencies(self, name):
         'Do a feature check on dependencies used by this subproject'
         if name == 'mpi':
-            FeatureNew('MPI Dependency', '0.42.0').use(self.subproject)
+            FeatureNew.single_use('MPI Dependency', '0.42.0', self.subproject)
         elif name == 'pcap':
-            FeatureNew('Pcap Dependency', '0.42.0').use(self.subproject)
+            FeatureNew.single_use('Pcap Dependency', '0.42.0', self.subproject)
         elif name == 'vulkan':
-            FeatureNew('Vulkan Dependency', '0.42.0').use(self.subproject)
+            FeatureNew.single_use('Vulkan Dependency', '0.42.0', self.subproject)
         elif name == 'libwmf':
-            FeatureNew('LibWMF Dependency', '0.44.0').use(self.subproject)
+            FeatureNew.single_use('LibWMF Dependency', '0.44.0', self.subproject)
         elif name == 'openmp':
-            FeatureNew('OpenMP Dependency', '0.46.0').use(self.subproject)
+            FeatureNew.single_use('OpenMP Dependency', '0.46.0', self.subproject)
 
+    @FeatureNewKwargs('dependency', '0.54.0', ['components'])
     @FeatureNewKwargs('dependency', '0.52.0', ['include_type'])
     @FeatureNewKwargs('dependency', '0.50.0', ['not_found_message', 'cmake_module_path', 'cmake_args'])
     @FeatureNewKwargs('dependency', '0.49.0', ['disabler'])
@@ -3229,6 +3721,9 @@
         self.validate_arguments(args, 1, [str])
         name = args[0]
         display_name = name if name else '(anonymous)'
+        mods = extract_as_list(kwargs, 'modules')
+        if mods:
+            display_name += ' (modules: {})'.format(', '.join(str(i) for i in mods))
         not_found_message = kwargs.get('not_found_message', '')
         if not isinstance(not_found_message, str):
             raise InvalidArguments('The not_found_message must be a string.')
@@ -3236,52 +3731,98 @@
             d = self.dependency_impl(name, display_name, kwargs)
         except Exception:
             if not_found_message:
-                self.message_impl(not_found_message)
+                self.message_impl([not_found_message])
             raise
+        assert isinstance(d, DependencyHolder)
         if not d.found() and not_found_message:
-            self.message_impl(not_found_message)
+            self.message_impl([not_found_message])
+        # Ensure the correct include type
+        if 'include_type' in kwargs:
+            wanted = kwargs['include_type']
+            actual = d.include_type_method([], {})
+            if wanted != actual:
+                mlog.debug('Current include type of {} is {}. Converting to requested {}'.format(name, actual, wanted))
+                d = d.as_system_method([wanted], {})
+        # Override this dependency to have consistent results in subsequent
+        # dependency lookups.
+        if name and d.found():
+            for_machine = self.machine_from_native_kwarg(kwargs)
+            identifier = dependencies.get_dep_identifier(name, kwargs)
+            if identifier not in self.build.dependency_overrides[for_machine]:
+                self.build.dependency_overrides[for_machine][identifier] = \
+                    build.DependencyOverride(d.held_object, node, explicit=False)
         return d
 
-    def dependency_impl(self, name, display_name, kwargs):
+    def dependency_impl(self, name, display_name, kwargs, force_fallback=False):
         disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
         if disabled:
             mlog.log('Dependency', mlog.bold(display_name), 'skipped: feature', mlog.bold(feature), 'disabled')
             return self.notfound_dependency()
 
-        has_fallback = 'fallback' in kwargs
-        if 'default_options' in kwargs and not has_fallback:
-            mlog.warning('The "default_options" keyworg argument does nothing without a "fallback" keyword argument.',
+        fallback = kwargs.get('fallback', None)
+        allow_fallback = kwargs.get('allow_fallback', None)
+        if allow_fallback is not None:
+            FeatureNew.single_use('"allow_fallback" keyword argument for dependency', '0.56.0', self.subproject)
+            if fallback is not None:
+                raise InvalidArguments('"fallback" and "allow_fallback" arguments are mutually exclusive')
+            if not isinstance(allow_fallback, bool):
+                raise InvalidArguments('"allow_fallback" argument must be boolean')
+
+        # If "fallback" is absent, look for an implicit fallback.
+        if name and fallback is None and allow_fallback is not False:
+            # Add an implicit fallback if we have a wrap file or a directory with the same name,
+            # but only if this dependency is required. It is common to first check for a pkg-config,
+            # then fallback to use find_library() and only afterward check again the dependency
+            # with a fallback. If the fallback has already been configured then we have to use it
+            # even if the dependency is not required.
+            provider = self.environment.wrap_resolver.find_dep_provider(name)
+            if not provider and allow_fallback is True:
+                raise InvalidArguments('Fallback wrap or subproject not found for dependency \'%s\'' % name)
+            subp_name = mesonlib.listify(provider)[0]
+            if provider and (allow_fallback is True or required or self.get_subproject(subp_name)):
+                fallback = provider
+
+        if 'default_options' in kwargs and not fallback:
+            mlog.warning('The "default_options" keyword argument does nothing without a fallback subproject.',
                          location=self.current_node)
 
         # writing just "dependency('')" is an error, because it can only fail
-        if name == '' and required and not has_fallback:
+        if name == '' and required and not fallback:
             raise InvalidArguments('Dependency is both required and not-found')
 
         if '<' in name or '>' in name or '=' in name:
             raise InvalidArguments('Characters <, > and = are forbidden in dependency names. To specify\n'
                                    'version requirements use the \'version\' keyword argument instead.')
 
-        identifier, cached_dep = self._find_cached_dep(name, kwargs)
+        identifier, cached_dep = self._find_cached_dep(name, display_name, kwargs)
         if cached_dep:
+            if fallback:
+                subp_name, varname = self.get_subproject_infos(fallback)
+                self.verify_fallback_consistency(subp_name, varname, cached_dep)
             if required and not cached_dep.found():
                 m = 'Dependency {!r} was already checked and was not found'
                 raise DependencyException(m.format(display_name))
             return DependencyHolder(cached_dep, self.subproject)
 
-        # If the dependency has already been configured, possibly by
-        # a higher level project, try to use it first.
-        if has_fallback:
-            dirname, varname = self.get_subproject_infos(kwargs)
-            if dirname in self.subprojects:
-                return self.get_subproject_dep(name, dirname, varname, kwargs)
+        if fallback:
+            # If the dependency has already been configured, possibly by
+            # a higher level project, try to use it first.
+            subp_name, varname = self.get_subproject_infos(fallback)
+            if self.get_subproject(subp_name):
+                return self.get_subproject_dep(name, display_name, subp_name, varname, kwargs)
+
+            wrap_mode = self.coredata.get_builtin_option('wrap_mode')
+            force_fallback_for = self.coredata.get_builtin_option('force_fallback_for')
+            force_fallback = (force_fallback or
+                              wrap_mode == WrapMode.forcefallback or
+                              name in force_fallback_for or
+                              subp_name in force_fallback_for)
 
-        wrap_mode = self.coredata.get_builtin_option('wrap_mode')
-        forcefallback = wrap_mode == WrapMode.forcefallback and has_fallback
-        if name != '' and not forcefallback:
+        if name != '' and (not fallback or not force_fallback):
             self._handle_featurenew_dependencies(name)
-            kwargs['required'] = required and not has_fallback
+            kwargs['required'] = required and not fallback
             dep = dependencies.find_external_dependency(name, self.environment, kwargs)
-
             kwargs['required'] = required
             # Only store found-deps in the cache
             # Never add fallback deps to self.coredata.deps since we
@@ -3292,8 +3833,8 @@
                 self.coredata.deps[for_machine].put(identifier, dep)
                 return DependencyHolder(dep, self.subproject)
 
-        if has_fallback:
-            return self.dependency_fallback(display_name, kwargs)
+        if fallback:
+            return self.dependency_fallback(name, display_name, fallback, kwargs)
 
         return self.notfound_dependency()
 
@@ -3303,35 +3844,32 @@
     def func_disabler(self, node, args, kwargs):
         return Disabler()
 
-    def print_nested_info(self, dependency_name):
-        message = ['Dependency', mlog.bold(dependency_name), 'not found but it is available in a sub-subproject.\n' +
-                   'To use it in the current project, promote it by going in the project source\n'
-                   'root and issuing']
-        sprojs = mesonlib.detect_subprojects('subprojects', self.source_root)
-        if dependency_name not in sprojs:
-            return
-        found = sprojs[dependency_name]
-        if len(found) > 1:
-            message.append('one of the following commands:')
-        else:
-            message.append('the following command:')
-        command_templ = '\nmeson wrap promote {}'
-        for l in found:
-            message.append(mlog.bold(command_templ.format(l[len(self.source_root) + 1:])))
-        mlog.warning(*message, location=self.current_node)
-
-    def get_subproject_infos(self, kwargs):
-        fbinfo = kwargs['fallback']
-        check_stringlist(fbinfo)
-        if len(fbinfo) != 2:
-            raise InterpreterException('Fallback info must have exactly two items.')
+    def get_subproject_infos(self, fbinfo):
+        fbinfo = mesonlib.stringlistify(fbinfo)
+        if len(fbinfo) == 1:
+            FeatureNew.single_use('Fallback without variable name', '0.53.0', self.subproject)
+            return fbinfo[0], None
+        elif len(fbinfo) != 2:
+            raise InterpreterException('Fallback info must have one or two items.')
         return fbinfo
 
-    def dependency_fallback(self, display_name, kwargs):
-        if self.coredata.get_builtin_option('wrap_mode') == WrapMode.nofallback:
+    def dependency_fallback(self, name, display_name, fallback, kwargs):
+        subp_name, varname = self.get_subproject_infos(fallback)
+        required = kwargs.get('required', True)
+
+        # Explicitly listed fallback preferences for specific subprojects
+        # take precedence over wrap-mode
+        force_fallback_for = self.coredata.get_builtin_option('force_fallback_for')
+        if name in force_fallback_for or subp_name in force_fallback_for:
+            mlog.log('Looking for a fallback subproject for the dependency',
+                     mlog.bold(display_name), 'because:\nUse of fallback was forced for that specific subproject')
+        elif self.coredata.get_builtin_option('wrap_mode') == WrapMode.nofallback:
             mlog.log('Not looking for a fallback subproject for the dependency',
-                     mlog.bold(display_name), 'because:\nUse of fallback'
+                     mlog.bold(display_name), 'because:\nUse of fallback '
                      'dependencies is disabled.')
+            if required:
+                m = 'Dependency {!r} not found and fallback is disabled'
+                raise DependencyException(m.format(display_name))
             return self.notfound_dependency()
         elif self.coredata.get_builtin_option('wrap_mode') == WrapMode.forcefallback:
             mlog.log('Looking for a fallback subproject for the dependency',
@@ -3339,15 +3877,16 @@
         else:
             mlog.log('Looking for a fallback subproject for the dependency',
                      mlog.bold(display_name))
-        dirname, varname = self.get_subproject_infos(kwargs)
         sp_kwargs = {
             'default_options': kwargs.get('default_options', []),
-            'required': kwargs.get('required', True),
+            'required': required,
         }
-        self.do_subproject(dirname, 'meson', sp_kwargs)
-        return self.get_subproject_dep(display_name, dirname, varname, kwargs)
+        self.do_subproject(subp_name, 'meson', sp_kwargs)
+        return self.get_subproject_dep(name, display_name, subp_name, varname, kwargs)
 
     @FeatureNewKwargs('executable', '0.42.0', ['implib'])
+    @FeatureNewKwargs('executable', '0.56.0', ['win_subsystem'])
+    @FeatureDeprecatedKwargs('executable', '0.56.0', ['gui_app'], extra_message="Use 'win_subsystem' instead.")
     @permittedKwargs(permitted_kwargs['executable'])
     def func_executable(self, node, args, kwargs):
         return self.build_target(node, args, kwargs, ExecutableHolder)
@@ -3405,11 +3944,13 @@
             raise InterpreterException('Unknown target_type.')
 
     @permittedKwargs(permitted_kwargs['vcs_tag'])
+    @FeatureDeprecatedKwargs('custom_target', '0.47.0', ['build_always'],
+                             'combine build_by_default and build_always_stale instead.')
     def func_vcs_tag(self, node, args, kwargs):
         if 'input' not in kwargs or 'output' not in kwargs:
             raise InterpreterException('Keyword arguments input and output must exist')
         if 'fallback' not in kwargs:
-            FeatureNew('T.Optional fallback in vcs_tag', '0.41.0').use(self.subproject)
+            FeatureNew.single_use('Optional fallback in vcs_tag', '0.41.0', self.subproject)
         fallback = kwargs.pop('fallback', self.project_version)
         if not isinstance(fallback, str):
             raise InterpreterException('Keyword argument fallback must be a string.')
@@ -3462,7 +4003,7 @@
         if len(args) != 1:
             raise InterpreterException('custom_target: Only one positional argument is allowed, and it must be a string name')
         if 'depfile' in kwargs and ('@BASENAME@' in kwargs['depfile'] or '@PLAINNAME@' in kwargs['depfile']):
-            FeatureNew('substitutions in custom_target depfile', '0.47.0').use(self.subproject)
+            FeatureNew.single_use('substitutions in custom_target depfile', '0.47.0', self.subproject)
         return self._func_custom_target_impl(node, args, kwargs)
 
     def _func_custom_target_impl(self, node, args, kwargs):
@@ -3487,12 +4028,12 @@
             if 'command' not in kwargs:
                 raise InterpreterException('Missing "command" keyword argument')
             all_args = extract_as_list(kwargs, 'command')
-            deps = extract_as_list(kwargs, 'depends', unholder=True)
+            deps = unholder(extract_as_list(kwargs, 'depends'))
         else:
             raise InterpreterException('Run_target needs at least one positional argument.')
 
         cleaned_args = []
-        for i in listify(all_args, unholder=True):
+        for i in unholder(listify(all_args)):
             if not isinstance(i, (str, build.BuildTarget, build.CustomTarget, dependencies.ExternalProgram, mesonlib.File)):
                 mlog.debug('Wrong type:', str(i))
                 raise InterpreterException('Invalid argument to run_target.')
@@ -3523,7 +4064,7 @@
         name = args[0]
         if not isinstance(name, str):
             raise InterpreterException('First argument must be a string.')
-        deps = listify(args[1:], unholder=True)
+        deps = unholder(listify(args[1:]))
         for d in deps:
             if not isinstance(d, (build.BuildTarget, build.CustomTarget)):
                 raise InterpreterException('Depends items must be build targets.')
@@ -3550,6 +4091,8 @@
     @FeatureNewKwargs('test', '0.52.0', ['priority'])
     @permittedKwargs(permitted_kwargs['test'])
     def func_test(self, node, args, kwargs):
+        if kwargs.get('protocol') == 'gtest':
+            FeatureNew.single_use('"gtest" protocol for tests', '0.55.0', self.subproject)
         self.add_test(node, args, kwargs, True)
 
     def unpack_env_kwarg(self, kwargs) -> build.EnvironmentVariables:
@@ -3557,7 +4100,7 @@
         if isinstance(envlist, EnvironmentVariablesHolder):
             env = envlist.held_object
         elif isinstance(envlist, dict):
-            FeatureNew('environment dictionary', '0.52.0').use(self.subproject)
+            FeatureNew.single_use('environment dictionary', '0.52.0', self.subproject)
             env = EnvironmentVariablesHolder(envlist)
             env = env.held_object
         else:
@@ -3569,9 +4112,14 @@
 
     def add_test(self, node, args, kwargs, is_base_test):
         if len(args) != 2:
-            raise InterpreterException('Incorrect number of arguments')
-        if not isinstance(args[0], str):
+            raise InterpreterException('test expects 2 arguments, {} given'.format(len(args)))
+        name = args[0]
+        if not isinstance(name, str):
             raise InterpreterException('First argument of test must be a string.')
+        if ':' in name:
+            mlog.deprecation('":" is not allowed in test name "{}", it has been replaced with "_"'.format(name),
+                             location=node)
+            name = name.replace(':', '_')
         exe = args[1]
         if not isinstance(exe, (ExecutableHolder, JarHolder, ExternalProgramHolder)):
             if isinstance(exe, mesonlib.File):
@@ -3581,7 +4129,7 @@
         par = kwargs.get('is_parallel', True)
         if not isinstance(par, bool):
             raise InterpreterException('Keyword argument is_parallel must be a boolean.')
-        cmd_args = extract_as_list(kwargs, 'args', unholder=True)
+        cmd_args = unholder(extract_as_list(kwargs, 'args'))
         for i in cmd_args:
             if not isinstance(i, (str, mesonlib.File, build.Target)):
                 raise InterpreterException('Command line arguments must be strings, files or targets.')
@@ -3601,29 +4149,29 @@
         if not isinstance(timeout, int):
             raise InterpreterException('Timeout must be an integer.')
         protocol = kwargs.get('protocol', 'exitcode')
-        if protocol not in ('exitcode', 'tap'):
-            raise InterpreterException('Protocol must be "exitcode" or "tap".')
+        if protocol not in {'exitcode', 'tap', 'gtest'}:
+            raise InterpreterException('Protocol must be "exitcode", "tap", or "gtest".')
         suite = []
         prj = self.subproject if self.is_subproject() else self.build.project_name
         for s in mesonlib.stringlistify(kwargs.get('suite', '')):
             if len(s) > 0:
                 s = ':' + s
             suite.append(prj.replace(' ', '_').replace(':', '_') + s)
-        depends = extract_as_list(kwargs, 'depends', unholder=True)
+        depends = unholder(extract_as_list(kwargs, 'depends'))
         for dep in depends:
             if not isinstance(dep, (build.CustomTarget, build.BuildTarget)):
                 raise InterpreterException('Depends items must be build targets.')
         priority = kwargs.get('priority', 0)
         if not isinstance(priority, int):
             raise InterpreterException('Keyword argument priority must be an integer.')
-        t = Test(args[0], prj, suite, exe.held_object, depends, par, cmd_args,
+        t = Test(name, prj, suite, exe.held_object, depends, par, cmd_args,
                  env, should_fail, timeout, workdir, protocol, priority)
         if is_base_test:
             self.build.tests.append(t)
-            mlog.debug('Adding test', mlog.bold(args[0], True))
+            mlog.debug('Adding test', mlog.bold(name, True))
         else:
             self.build.benchmarks.append(t)
-            mlog.debug('Adding benchmark', mlog.bold(args[0], True))
+            mlog.debug('Adding benchmark', mlog.bold(name, True))
 
     @FeatureNewKwargs('install_headers', '0.47.0', ['install_mode'])
     @permittedKwargs(permitted_kwargs['install_headers'])
@@ -3676,14 +4224,14 @@
         absname = os.path.join(self.environment.get_source_dir(), buildfilename)
         if not os.path.isfile(absname):
             self.subdir = prev_subdir
-            raise InterpreterException('Non-existent build file {!r}'.format(buildfilename))
+            raise InterpreterException("Non-existent build file '{!s}'".format(buildfilename))
         with open(absname, encoding='utf8') as f:
             code = f.read()
         assert(isinstance(code, str))
         try:
-            codeblock = mparser.Parser(code, self.subdir).parse()
+            codeblock = mparser.Parser(code, absname).parse()
         except mesonlib.MesonException as me:
-            me.file = buildfilename
+            me.file = absname
             raise me
         try:
             self.evaluate_codeblock(codeblock)
@@ -3724,7 +4272,7 @@
             elif isinstance(s, str):
                 source_strings.append(s)
             else:
-                raise InvalidArguments('Argument {!r} must be string or file.'.format(s))
+                raise InvalidArguments('Argument must be string or file.')
         sources += self.source_strings_to_files(source_strings)
         install_dir = kwargs.get('install_dir', None)
         if not isinstance(install_dir, (str, type(None))):
@@ -3873,7 +4421,7 @@
         if 'configuration' in kwargs:
             conf = kwargs['configuration']
             if isinstance(conf, dict):
-                FeatureNew('configure_file.configuration dictionary', '0.49.0').use(self.subproject)
+                FeatureNew.single_use('configure_file.configuration dictionary', '0.49.0', self.subproject)
                 conf = ConfigurationDataHolder(self.subproject, conf)
             elif not isinstance(conf, ConfigurationDataHolder):
                 raise InterpreterException('Argument "configuration" is not of type configuration_data')
@@ -3903,7 +4451,7 @@
             conf.mark_used()
         elif 'command' in kwargs:
             if len(inputs) > 1:
-                FeatureNew('multiple inputs in configure_file()', '0.52.0').use(self.subproject)
+                FeatureNew.single_use('multiple inputs in configure_file()', '0.52.0', self.subproject)
             # We use absolute paths for input and output here because the cwd
             # that the command is run from is 'unspecified', so it could change.
             # Currently it's builddir/subdir for in_builddir else srcdir/subdir.
@@ -3938,8 +4486,7 @@
             if len(inputs_abs) != 1:
                 raise InterpreterException('Exactly one input file must be given in copy mode')
             os.makedirs(os.path.join(self.environment.build_dir, self.subdir), exist_ok=True)
-            shutil.copyfile(inputs_abs[0], ofile_abs)
-            shutil.copystat(inputs_abs[0], ofile_abs)
+            shutil.copy2(inputs_abs[0], ofile_abs)
         else:
             # Not reachable
             raise AssertionError
@@ -3972,7 +4519,7 @@
         return mesonlib.File.from_built_file(self.subdir, output)
 
     def extract_incdirs(self, kwargs):
-        prospectives = listify(kwargs.get('include_directories', []), unholder=True)
+        prospectives = unholder(extract_as_list(kwargs, 'include_directories'))
         result = []
         for p in prospectives:
             if isinstance(p, build.IncludeDirs):
@@ -3998,8 +4545,9 @@
 
         for a in incdir_strings:
             if a.startswith(src_root):
-                raise InvalidArguments('''Tried to form an absolute path to a source dir. You should not do that but use
-relative paths instead.
+                raise InvalidArguments('Tried to form an absolute path to a source dir. '
+                                       'You should not do that but use relative paths instead.'
+                                       '''
 
 To get include path to any directory relative to the current dir do
 
@@ -4033,7 +4581,7 @@
         if ":" not in setup_name:
             setup_name = (self.subproject if self.subproject else self.build.project_name) + ":" + setup_name
         try:
-            inp = extract_as_list(kwargs, 'exe_wrapper', unholder=True)
+            inp = unholder(extract_as_list(kwargs, 'exe_wrapper'))
             exe_wrapper = []
             for i in inp:
                 if isinstance(i, str):
@@ -4150,7 +4698,7 @@
         if len(args) > 1:
             raise InterpreterException('environment takes only one optional positional arguments')
         elif len(args) == 1:
-            FeatureNew('environment positional arguments', '0.52.0').use(self.subproject)
+            FeatureNew.single_use('environment positional arguments', '0.52.0', self.subproject)
             initial_values = args[0]
             if not isinstance(initial_values, dict) and not isinstance(initial_values, list):
                 raise InterpreterException('environment first argument must be a dictionary or a list')
@@ -4163,7 +4711,7 @@
     def func_join_paths(self, node, args, kwargs):
         return self.join_path_strings(args)
 
-    def run(self):
+    def run(self) -> None:
         super().run()
         mlog.log('Build targets in project:', mlog.bold(str(len(self.build.targets))))
         FeatureNew.report(self.subproject)
@@ -4173,14 +4721,14 @@
         if self.subproject == '':
             self._print_summary()
 
-    def print_extra_warnings(self):
+    def print_extra_warnings(self) -> None:
         # TODO cross compilation
         for c in self.coredata.compilers.host.values():
             if c.get_id() == 'clang':
                 self.check_clang_asan_lundef()
                 break
 
-    def check_clang_asan_lundef(self):
+    def check_clang_asan_lundef(self) -> None:
         if 'b_lundef' not in self.coredata.base_options:
             return
         if 'b_sanitize' not in self.coredata.base_options:
@@ -4192,11 +4740,11 @@
 Try setting b_lundef to false instead.'''.format(self.coredata.base_options['b_sanitize'].value),
                          location=self.current_node)
 
-    def evaluate_subproject_info(self, path_from_source_root, subproject_dirname):
+    def evaluate_subproject_info(self, path_from_source_root, subproject_dir):
         depth = 0
         subproj_name = ''
         segs = PurePath(path_from_source_root).parts
-        segs_spd = PurePath(subproject_dirname).parts
+        segs_spd = PurePath(subproject_dir).parts
         while segs and segs[0] == segs_spd[0]:
             if len(segs_spd) == 1:
                 subproj_name = segs[1]
@@ -4266,7 +4814,7 @@
         if name.startswith('meson-'):
             raise InvalidArguments("Target names starting with 'meson-' are reserved "
                                    "for Meson's internal use. Please rename.")
-        if name in coredata.forbidden_target_names:
+        if name in coredata.FORBIDDEN_TARGET_NAMES:
             raise InvalidArguments("Target name '%s' is reserved for Meson's "
                                    "internal use. Please rename." % name)
         # To permit an executable and a shared library to have the
@@ -4304,7 +4852,7 @@
         return BothLibrariesHolder(shared_holder, static_holder, self)
 
     def build_library(self, node, args, kwargs):
-        default_library = self.coredata.get_builtin_option('default_library')
+        default_library = self.coredata.get_builtin_option('default_library', self.subproject)
         if default_library == 'shared':
             return self.build_target(node, args, kwargs, SharedLibraryHolder)
         elif default_library == 'static':
@@ -4338,15 +4886,15 @@
             ef = extract_as_list(kwargs, 'extra_files')
             kwargs['extra_files'] = self.source_strings_to_files(ef)
         self.check_sources_exist(os.path.join(self.source_root, self.subdir), sources)
-        if targetholder is ExecutableHolder:
+        if targetholder == ExecutableHolder:
             targetclass = build.Executable
-        elif targetholder is SharedLibraryHolder:
+        elif targetholder == SharedLibraryHolder:
             targetclass = build.SharedLibrary
-        elif targetholder is SharedModuleHolder:
+        elif targetholder == SharedModuleHolder:
             targetclass = build.SharedModule
-        elif targetholder is StaticLibraryHolder:
+        elif targetholder == StaticLibraryHolder:
             targetclass = build.StaticLibrary
-        elif targetholder is JarHolder:
+        elif targetholder == JarHolder:
             targetclass = build.Jar
         else:
             mlog.debug('Unknown target type:', str(targetholder))
@@ -4359,9 +4907,9 @@
 
         kwargs['include_directories'] = self.extract_incdirs(kwargs)
         target = targetclass(name, self.subdir, self.subproject, for_machine, sources, objs, self.environment, kwargs)
+        target.project_version = self.project_version
 
-        if not self.environment.machines.matches_build_machine(for_machine):
-            self.add_cross_stdlib_info(target)
+        self.add_stdlib_info(target)
         l = targetholder(target, self)
         self.add_target(name, l.held_object)
         self.project_args_frozen = True
@@ -4385,23 +4933,19 @@
             kwargs['d_import_dirs'] = cleaned_items
 
     def get_used_languages(self, target):
-        result = {}
+        result = set()
         for i in target.sources:
-            # TODO other platforms
-            for lang, c in self.coredata.compilers.host.items():
+            for lang, c in self.coredata.compilers[target.for_machine].items():
                 if c.can_compile(i):
-                    result[lang] = True
+                    result.add(lang)
                     break
         return result
 
-    def add_cross_stdlib_info(self, target):
-        if target.for_machine != MachineChoice.HOST:
-            return
+    def add_stdlib_info(self, target):
         for l in self.get_used_languages(target):
-            props = self.environment.properties.host
-            if props.has_stdlib(l) \
-                    and self.subproject != props.get_stdlib(l)[0]:
-                target.add_deps(self.build.stdlibs.host[l])
+            dep = self.build.stdlibs[target.for_machine].get(l, None)
+            if dep:
+                target.add_deps(dep)
 
     def check_sources_exist(self, subdir, sources):
         for s in sources:
@@ -4412,28 +4956,9 @@
                 raise InterpreterException('Tried to add non-existing source file %s.' % s)
 
     # Only permit object extraction from the same subproject
-    def validate_extraction(self, buildtarget):
-        if not self.subdir.startswith(self.subproject_dir):
-            if buildtarget.subdir.startswith(self.subproject_dir):
-                raise InterpreterException('Tried to extract objects from a subproject target.')
-        else:
-            if not buildtarget.subdir.startswith(self.subproject_dir):
-                raise InterpreterException('Tried to extract objects from the main project from a subproject.')
-            if self.subdir.split('/')[1] != buildtarget.subdir.split('/')[1]:
-                raise InterpreterException('Tried to extract objects from a different subproject.')
-
-    def check_contains(self, obj, args):
-        if len(args) != 1:
-            raise InterpreterException('Contains method takes exactly one argument.')
-        item = args[0]
-        for element in obj:
-            if isinstance(element, list):
-                found = self.check_contains(element, args)
-                if found:
-                    return True
-            if element == item:
-                return True
-        return False
+    def validate_extraction(self, buildtarget: InterpreterObject) -> None:
+        if self.subproject != buildtarget.subproject:
+            raise InterpreterException('Tried to extract objects from a different subproject.')
 
     def is_subproject(self):
         return self.subproject != ''
@@ -4452,6 +4977,8 @@
         if len(args) < 1 or len(args) > 2:
             raise InvalidCode('Get_variable takes one or two arguments.')
         varname = args[0]
+        if isinstance(varname, Disabler):
+            return varname
         if not isinstance(varname, str):
             raise InterpreterException('First argument must be a string.')
         try:
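
The interpreter hunks above rework how a dependency's fallback is parsed: a single string (just the subproject name) has been accepted since 0.53.0, while the two-element form also names the variable to read from that subproject. The following standalone sketch, which is not part of the patch, only illustrates the accepted shapes; the parse_fallback helper name is invented for illustration.

    from typing import List, Optional, Tuple, Union

    def parse_fallback(fbinfo: Union[str, List[str]]) -> Tuple[str, Optional[str]]:
        # Mirrors the behaviour of get_subproject_infos() in the hunk above:
        # one item -> (subproject, None), two items -> (subproject, variable).
        items = [fbinfo] if isinstance(fbinfo, str) else list(fbinfo)
        if len(items) == 1:
            return items[0], None
        if len(items) == 2:
            return items[0], items[1]
        raise ValueError('Fallback info must have one or two items.')

    # Both spellings resolve to a (subproject, variable) pair:
    assert parse_fallback('zlib') == ('zlib', None)
    assert parse_fallback(['zlib', 'zlib_dep']) == ('zlib', 'zlib_dep')
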
diff -Nru meson-0.53.2/mesonbuild/linkers.py meson-0.57.0+really0.56.2/mesonbuild/linkers.py
--- meson-0.53.2/mesonbuild/linkers.py	2020-02-25 18:00:46.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/linkers.py	2021-01-06 10:39:48.000000000 +0000
@@ -17,9 +17,12 @@
 import typing as T
 
 from . import mesonlib
+from .arglist import CompilerArgs
+from .envconfig import get_env_var
 
 if T.TYPE_CHECKING:
     from .coredata import OptionDictType
+    from .envconfig import MachineChoice
     from .environment import Environment
 
 
@@ -28,6 +31,9 @@
     def __init__(self, exelist: T.List[str]):
         self.exelist = exelist
 
+    def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> CompilerArgs:
+        return CompilerArgs(self, args)
+
     def can_linker_accept_rsp(self) -> bool:
         """
         Determines whether the linker can accept arguments using the @rsp syntax.
@@ -55,8 +61,8 @@
 
     def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
                          rpath_paths: str, build_rpath: str,
-                         install_rpath: str) -> T.List[str]:
-        return []
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        return ([], set())
 
     def thread_link_flags(self, env: 'Environment') -> T.List[str]:
         return []
@@ -75,6 +81,9 @@
     def native_args_to_unix(cls, args: T.List[str]) -> T.List[str]:
         return args[:]
 
+    def get_link_debugfile_name(self, targetfile: str) -> str:
+        return None
+
     def get_link_debugfile_args(self, targetfile: str) -> T.List[str]:
         # Static libraries do not have PDB files
         return []
@@ -145,6 +154,10 @@
             self.std_args = ['csrD']
         else:
             self.std_args = ['csr']
+        self.can_rsp = '@<' in stdo
+
+    def can_linker_accept_rsp(self) -> bool:
+        return self.can_rsp
 
     def get_std_link_args(self) -> T.List[str]:
         return self.std_args
@@ -197,12 +210,68 @@
         return False
 
     def get_output_args(self, target: str) -> T.List[str]:
-        return ['-output=%s' % target]
+        return ['-output={}'.format(target)]
 
     def get_linker_always_args(self) -> T.List[str]:
         return ['-nologo', '-form=library']
 
 
+class Xc16Linker(StaticLinker):
+
+    def __init__(self, exelist: T.List[str]):
+        super().__init__(exelist)
+        self.id = 'xc16-ar'
+
+    def can_linker_accept_rsp(self) -> bool:
+        return False
+
+    def get_output_args(self, target: str) -> T.List[str]:
+        return ['{}'.format(target)]
+
+    def get_linker_always_args(self) -> T.List[str]:
+        return ['rcs']
+
+class CompCertLinker(StaticLinker):
+
+    def __init__(self, exelist: T.List[str]):
+        super().__init__(exelist)
+        self.id = 'ccomp'
+
+    def can_linker_accept_rsp(self) -> bool:
+        return False
+
+    def get_output_args(self, target: str) -> T.List[str]:
+        return ['-o{}'.format(target)]
+
+
+class C2000Linker(StaticLinker):
+
+    def __init__(self, exelist: T.List[str]):
+        super().__init__(exelist)
+        self.id = 'ar2000'
+
+    def can_linker_accept_rsp(self) -> bool:
+        return False
+
+    def get_output_args(self, target: str) -> T.List[str]:
+        return ['{}'.format(target)]
+
+    def get_linker_always_args(self) -> T.List[str]:
+        return ['-r']
+
+
+class AIXArLinker(ArLinker):
+
+    def __init__(self, exelist: T.List[str]):
+        StaticLinker.__init__(self, exelist)
+        self.id = 'aixar'
+        self.std_args = ['-csr', '-Xany']
+
+    def can_linker_accept_rsp(self) -> bool:
+        # AIXAr can't accept arguments using the @rsp syntax
+        return False
+
+
 def prepare_rpaths(raw_rpaths: str, build_dir: str, from_dir: str) -> T.List[str]:
     # The rpaths we write must be relative if they point to the build dir,
     # because otherwise they have different length depending on the build
@@ -233,7 +302,20 @@
         return os.path.relpath(os.path.join(build_dir, p), os.path.join(build_dir, from_dir))
 
 
-class DynamicLinker(metaclass=abc.ABCMeta):
+class LinkerEnvVarsMixin(metaclass=abc.ABCMeta):
+
+    """Mixin reading LDFLAGS from the environment."""
+
+    @staticmethod
+    def get_args_from_envvars(for_machine: mesonlib.MachineChoice,
+                              is_cross: bool) -> T.List[str]:
+        raw_value = get_env_var(for_machine, is_cross, 'LDFLAGS')
+        if raw_value is not None:
+            return mesonlib.split_args(raw_value)
+        else:
+            return []
+
+class DynamicLinker(LinkerEnvVarsMixin, metaclass=abc.ABCMeta):
 
     """Base class for dynamic linkers."""
 
@@ -246,6 +328,10 @@
         'custom': [],
     }  # type: T.Dict[str, T.List[str]]
 
+    @abc.abstractproperty
+    def id(self) -> str:
+        pass
+
     def _apply_prefix(self, arg: T.Union[str, T.List[str]]) -> T.List[str]:
         args = [arg] if isinstance(arg, str) else arg
         if self.prefix_arg is None:
@@ -257,15 +343,15 @@
             ret += self.prefix_arg + [arg]
         return ret
 
-    def __init__(self, id_: str, exelist: T.List[str],
+    def __init__(self, exelist: T.List[str],
                  for_machine: mesonlib.MachineChoice, prefix_arg: T.Union[str, T.List[str]],
                  always_args: T.List[str], *, version: str = 'unknown version'):
         self.exelist = exelist
         self.for_machine = for_machine
         self.version = version
-        self.id = id_
         self.prefix_arg = prefix_arg
         self.always_args = always_args
+        self.machine = None  # type: T.Optional[str]
 
     def __repr__(self) -> str:
         return '<{}: v{} `{}`>'.format(type(self).__name__, self.version, ' '.join(self.exelist))
@@ -292,12 +378,6 @@
 
     # XXX: is use_ldflags a compiler or a linker attribute?
 
-    def get_args_from_envvars(self) -> T.List[str]:
-        flags = os.environ.get('LDFLAGS')
-        if not flags:
-            return []
-        return mesonlib.split_args(flags)
-
     def get_option_args(self, options: 'OptionDictType') -> T.List[str]:
         return []
 
@@ -305,6 +385,10 @@
         m = 'Language {} does not support has_multi_link_arguments.'
         raise mesonlib.EnvironmentException(m.format(self.id))
 
+    def get_debugfile_name(self, targetfile: str) -> str:
+        '''Name of debug file written out (see below)'''
+        return None
+
     def get_debugfile_args(self, targetfile: str) -> T.List[str]:
         """Some compilers (MSVC) write debug into a separate file.
 
@@ -389,16 +473,17 @@
         """Arguments to make all warnings errors."""
         return []
 
+    def headerpad_args(self) -> T.List[str]:
+        # Only used by the Apple linker
+        return []
+
     def bitcode_args(self) -> T.List[str]:
         raise mesonlib.MesonException('This linker does not support bitcode bundles')
 
-    def get_debug_crt_args(self) -> T.List[str]:
-        return []
-
     def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
                          rpath_paths: str, build_rpath: str,
-                         install_rpath: str) -> T.List[str]:
-        return []
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        return ([], set())
 
     def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
                         suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
@@ -433,6 +518,10 @@
     other linkers like GNU-ld.
     """
 
+    if T.TYPE_CHECKING:
+        for_machine = MachineChoice.HOST
+        def _apply_prefix(self, arg: T.Union[str, T.List[str]]) -> T.List[str]: ...
+
     _BUILDTYPE_ARGS = {
         'plain': [],
         'debug': [],
@@ -504,12 +593,12 @@
 
     def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
                          rpath_paths: str, build_rpath: str,
-                         install_rpath: str) -> T.List[str]:
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
         m = env.machines[self.for_machine]
         if m.is_windows() or m.is_cygwin():
-            return []
+            return ([], set())
         if not rpath_paths and not install_rpath and not build_rpath:
-            return []
+            return ([], set())
         args = []
         origin_placeholder = '$ORIGIN'
         processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir)
@@ -517,9 +606,14 @@
         # is *very* allergic to duplicate -delete_rpath arguments
         # when calling depfixer on installation.
         all_paths = mesonlib.OrderedSet([os.path.join(origin_placeholder, p) for p in processed_rpaths])
+        rpath_dirs_to_remove = set()
+        for p in all_paths:
+            rpath_dirs_to_remove.add(p.encode('utf8'))
         # Build_rpath is used as-is (it is usually absolute).
         if build_rpath != '':
             all_paths.add(build_rpath)
+            for p in build_rpath.split(':'):
+                rpath_dirs_to_remove.add(p.encode('utf8'))
 
         # TODO: should this actually be "for (dragonfly|open)bsd"?
         if mesonlib.is_dragonflybsd() or mesonlib.is_openbsd():
@@ -543,7 +637,7 @@
 
         # TODO: should this actually be "for solaris/sunos"?
         if mesonlib.is_sunos():
-            return args
+            return (args, rpath_dirs_to_remove)
 
         # Rpaths to use while linking must be absolute. These are not
         # written to the binary. Needed only with GNU ld:
@@ -563,15 +657,14 @@
         for p in rpath_paths:
             args.extend(self._apply_prefix('-rpath-link,' + os.path.join(build_dir, p)))
 
-        return args
+        return (args, rpath_dirs_to_remove)
 
 
 class AppleDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
 
     """Apple's ld implementation."""
 
-    def __init__(self, *args, **kwargs):
-        super().__init__('ld64', *args, **kwargs)
+    id = 'ld64'
 
     def get_asneeded_args(self) -> T.List[str]:
         return self._apply_prefix('-dead_strip_dylibs')
@@ -583,7 +676,7 @@
         return ['-bundle'] + self._apply_prefix('-undefined,dynamic_lookup')
 
     def get_pie_args(self) -> T.List[str]:
-        return ['-pie']
+        return []
 
     def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
         result = []  # type: T.List[str]
@@ -603,8 +696,8 @@
     def no_undefined_args(self) -> T.List[str]:
         return self._apply_prefix('-undefined,error')
 
-    def get_always_args(self) -> T.List[str]:
-        return self._apply_prefix('-headerpad_max_install_names') + super().get_always_args()
+    def headerpad_args(self) -> T.List[str]:
+        return self._apply_prefix('-headerpad_max_install_names')
 
     def bitcode_args(self) -> T.List[str]:
         return self._apply_prefix('-bitcode_bundle')
@@ -629,12 +722,10 @@
 
     def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
                          rpath_paths: str, build_rpath: str,
-                         install_rpath: str) -> T.List[str]:
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
         if not rpath_paths and not install_rpath and not build_rpath:
-            return []
-        # Ensure that there is enough space for install_name_tool in-place
-        # editing of large RPATHs
-        args = self._apply_prefix('-headerpad_max_install_names')
+            return ([], set())
+        args = []
         # @loader_path is the equivalent of $ORIGIN on macOS
         # https://stackoverflow.com/q/26280738
         origin_placeholder = '@loader_path'
@@ -645,24 +736,25 @@
         for rp in all_paths:
             args.extend(self._apply_prefix('-rpath,' + rp))
 
-        return args
+        return (args, set())
 
 
 class GnuDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, DynamicLinker):
 
     """Representation of GNU ld.bfd and ld.gold."""
 
+    def get_accepts_rsp(self) -> bool:
+        return True
+
 
 class GnuGoldDynamicLinker(GnuDynamicLinker):
 
-    def __init__(self, *args, **kwargs):
-        super().__init__('ld.gold', *args, **kwargs)
+    id = 'ld.gold'
 
 
 class GnuBFDDynamicLinker(GnuDynamicLinker):
 
-    def __init__(self, *args, **kwargs):
-        super().__init__('ld.bfd', *args, **kwargs)
+    id = 'ld.bfd'
 
 
 class LLVMDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, DynamicLinker):
@@ -673,17 +765,58 @@
     linkers.
     """
 
-    def __init__(self, *args, **kwargs):
-        super().__init__('ld.lld', *args, **kwargs)
+    id = 'ld.lld'
+
+    def __init__(self, exelist: T.List[str],
+                 for_machine: mesonlib.MachineChoice, prefix_arg: T.Union[str, T.List[str]],
+                 always_args: T.List[str], *, version: str = 'unknown version'):
+        super().__init__(exelist, for_machine, prefix_arg, always_args, version=version)
+
+        # Some targets don't seem to support this argument (windows, wasm, ...)
+        _, _, e = mesonlib.Popen_safe(self.exelist + self._apply_prefix('--allow-shlib-undefined'))
+        self.has_allow_shlib_undefined = not ('unknown argument: --allow-shlib-undefined' in e)
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        if self.has_allow_shlib_undefined:
+            return self._apply_prefix('--allow-shlib-undefined')
+        return []
+
+
+class WASMDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, DynamicLinker):
+
+    """Emscripten's wasm-ld."""
+
+    id = 'ld.wasm'
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return ['-s', 'ERROR_ON_UNDEFINED_SYMBOLS=0']
+
+    def no_undefined_args(self) -> T.List[str]:
+        return ['-s', 'ERROR_ON_UNDEFINED_SYMBOLS=1']
+
+    def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+                        suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
+                        is_shared_module: bool) -> T.List[str]:
+        raise mesonlib.MesonException('{} does not support shared libraries.'.format(self.id))
+
+    def get_asneeded_args(self) -> T.List[str]:
+        return []
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: str, build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        return ([], set())
 
 
 class CcrxDynamicLinker(DynamicLinker):
 
     """Linker for Renesis CCrx compiler."""
 
+    id = 'rlink'
+
     def __init__(self, for_machine: mesonlib.MachineChoice,
                  *, version: str = 'unknown version'):
-        super().__init__('rlink', ['rlink.exe'], for_machine, '', [],
+        super().__init__(['rlink.exe'], for_machine, '', [],
                          version=version)
 
     def get_accepts_rsp(self) -> bool:
@@ -696,7 +829,7 @@
         return []
 
     def get_output_args(self, outputname: str) -> T.List[str]:
-        return ['-output=%s' % outputname]
+        return ['-output={}'.format(outputname)]
 
     def get_search_args(self, dirname: str) -> 'T.NoReturn':
         raise EnvironmentError('rlink.exe does not have a search dir argument')
@@ -710,13 +843,141 @@
         return []
 
 
+class Xc16DynamicLinker(DynamicLinker):
+
+    """Linker for Microchip XC16 compiler."""
+
+    id = 'xc16-gcc'
+
+    def __init__(self, for_machine: mesonlib.MachineChoice,
+                 *, version: str = 'unknown version'):
+        super().__init__(['xc16-gcc.exe'], for_machine, '', [],
+                         version=version)
+
+    def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+        if not args:
+            return args
+        return self._apply_prefix('--start-group') + args + self._apply_prefix('--end-group')
+
+    def get_accepts_rsp(self) -> bool:
+        return False
+
+    def get_lib_prefix(self) -> str:
+        return ''
+
+    def get_std_shared_lib_args(self) -> T.List[str]:
+        return []
+
+    def get_output_args(self, outputname: str) -> T.List[str]:
+        return ['-o{}'.format(outputname)]
+
+    def get_search_args(self, dirname: str) -> 'T.NoReturn':
+        raise EnvironmentError('xc16-gcc.exe does not have a search dir argument')
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return []
+
+    def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+                        suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
+                        is_shared_module: bool) -> T.List[str]:
+        return []
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: str, build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        return ([], set())
+
+class CompCertDynamicLinker(DynamicLinker):
+
+    """Linker for CompCert C compiler."""
+
+    id = 'ccomp'
+
+    def __init__(self, for_machine: mesonlib.MachineChoice,
+                 *, version: str = 'unknown version'):
+        super().__init__(['ccomp'], for_machine, '', [],
+                         version=version)
+
+    def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+        if not args:
+            return args
+        return self._apply_prefix('-Wl,--whole-archive') + args + self._apply_prefix('-Wl,--no-whole-archive')
+
+    def get_accepts_rsp(self) -> bool:
+        return False
+
+    def get_lib_prefix(self) -> str:
+        return ''
+
+    def get_std_shared_lib_args(self) -> T.List[str]:
+        return []
+
+    def get_output_args(self, outputname: str) -> T.List[str]:
+        return ['-o{}'.format(outputname)]
+
+    def get_search_args(self, dirname: str) -> T.List[str]:
+        return ['-L{}'.format(dirname)]
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return []
+
+    def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+                        suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
+                        is_shared_module: bool) -> T.List[str]:
+        raise mesonlib.MesonException('{} does not support shared libraries.'.format(self.id))
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: str, build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        return ([], set())
+
+class C2000DynamicLinker(DynamicLinker):
+
+    """Linker for Texas Instruments C2000 compiler."""
+
+    id = 'cl2000'
+
+    def __init__(self, for_machine: mesonlib.MachineChoice,
+                 *, version: str = 'unknown version'):
+        super().__init__(['cl2000.exe'], for_machine, '', [],
+                         version=version)
+
+    def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+        if not args:
+            return args
+        return self._apply_prefix('--start-group') + args + self._apply_prefix('--end-group')
+
+    def get_accepts_rsp(self) -> bool:
+        return False
+
+    def get_lib_prefix(self) -> str:
+        return '-l='
+
+    def get_std_shared_lib_args(self) -> T.List[str]:
+        return []
+
+    def get_output_args(self, outputname: str) -> T.List[str]:
+        return ['-z', '--output_file={}'.format(outputname)]
+
+    def get_search_args(self, dirname: str) -> 'T.NoReturn':
+        raise EnvironmentError('cl2000.exe does not have a search dir argument')
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return []
+
+    def get_always_args(self) -> T.List[str]:
+        return []
+
+
 class ArmDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
 
     """Linker for the ARM compiler."""
 
+    id = 'armlink'
+
     def __init__(self, for_machine: mesonlib.MachineChoice,
                  *, version: str = 'unknown version'):
-        super().__init__('armlink', ['armlink'], for_machine, '', [],
+        super().__init__(['armlink'], for_machine, '', [],
                          version=version)
 
     def get_accepts_rsp(self) -> bool:
@@ -743,13 +1004,18 @@
     def import_library_args(self, implibname: str) -> T.List[str]:
         return ['--symdefs=' + implibname]
 
+class QualcommLLVMDynamicLinker(LLVMDynamicLinker):
+
+    """ARM Linker from Snapdragon LLVM ARM Compiler."""
+
+    id = 'ld.qcld'
+
 
 class PGIDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
 
     """PGI linker."""
 
-    def __init__(self, *args, **kwargs):
-        super().__init__('pgi', *args, **kwargs)
+    id = 'pgi'
 
     def get_allow_undefined_args(self) -> T.List[str]:
         return []
@@ -769,10 +1035,12 @@
 
     def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
                          rpath_paths: str, build_rpath: str,
-                         install_rpath: str) -> T.List[str]:
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
         if not env.machines[self.for_machine].is_windows():
-            return ['-R' + os.path.join(build_dir, p) for p in rpath_paths]
-        return []
+            return (['-R' + os.path.join(build_dir, p) for p in rpath_paths], set())
+        return ([], set())
+
+NvidiaHPC_DynamicLinker = PGIDynamicLinker
 
 
 class PGIStaticLinker(StaticLinker):
@@ -787,9 +1055,17 @@
     def get_output_args(self, target: str) -> T.List[str]:
         return [target]
 
+NvidiaHPC_StaticLinker = PGIStaticLinker
+
 
 class VisualStudioLikeLinkerMixin:
 
+    """Mixin class for for dynamic linkers that act like Microsoft's link.exe."""
+
+    if T.TYPE_CHECKING:
+        for_machine = MachineChoice.HOST
+        def _apply_prefix(self, arg: T.Union[str, T.List[str]]) -> T.List[str]: ...
+
     _BUILDTYPE_ARGS = {
         'plain': [],
         'debug': [],
@@ -801,9 +1077,13 @@
         'custom': [],
     }  # type: T.Dict[str, T.List[str]]
 
-    def __init__(self, *args, direct: bool = True, machine: str = 'x86', **kwargs):
-        super().__init__(*args, **kwargs)
+    def __init__(self, exelist: T.List[str], for_machine: mesonlib.MachineChoice,
+                 prefix_arg: T.Union[str, T.List[str]], always_args: T.List[str], *,
+                 version: str = 'unknown version', direct: bool = True, machine: str = 'x86'):
+        # There's no way I can find to make mypy understand what's going on here
+        super().__init__(exelist, for_machine, prefix_arg, always_args, version=version)  # type: ignore
         self.machine = machine
+        self.direct = direct
 
     def get_buildtype_args(self, buildtype: str) -> T.List[str]:
         return mesonlib.listify([self._apply_prefix(a) for a in self._BUILDTYPE_ARGS[buildtype]])
@@ -811,21 +1091,12 @@
     def invoked_by_compiler(self) -> bool:
         return not self.direct
 
-    def get_debug_crt_args(self) -> T.List[str]:
-        """Arguments needed to select a debug crt for the linker.
-
-        Sometimes we need to manually select the CRT (C runtime) to use with
-        MSVC. One example is when trying to link with static libraries since
-        MSVC won't auto-select a CRT for us in that case and will error out
-        asking us to select one.
-        """
-        return self._apply_prefix('/MDd')
-
     def get_output_args(self, outputname: str) -> T.List[str]:
         return self._apply_prefix(['/MACHINE:' + self.machine, '/OUT:' + outputname])
 
     def get_always_args(self) -> T.List[str]:
-        return self._apply_prefix('/nologo') + super().get_always_args()
+        parent = super().get_always_args() # type: ignore
+        return self._apply_prefix('/nologo') + T.cast(T.List[str], parent)
 
     def get_search_args(self, dirname: str) -> T.List[str]:
         return self._apply_prefix('/LIBPATH:' + dirname)
@@ -833,10 +1104,12 @@
     def get_std_shared_lib_args(self) -> T.List[str]:
         return self._apply_prefix('/DLL')
 
+    def get_debugfile_name(self, targetfile: str) -> str:
+        basename = targetfile.rsplit('.', maxsplit=1)[0]
+        return basename + '.pdb'
+
     def get_debugfile_args(self, targetfile: str) -> T.List[str]:
-        pdbarr = targetfile.split('.')[:-1]
-        pdbarr += ['pdb']
-        return self._apply_prefix(['/DEBUG', '/PDB:' + '.'.join(pdbarr)])
+        return self._apply_prefix(['/DEBUG', '/PDB:' + self.get_debugfile_name(targetfile)])
 
     def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
         # Only since VS2015
@@ -854,17 +1127,23 @@
                         is_shared_module: bool) -> T.List[str]:
         return []
 
+    def import_library_args(self, implibname: str) -> T.List[str]:
+        """The command to generate the import library."""
+        return self._apply_prefix(['/IMPLIB:' + implibname])
+
 
 class MSVCDynamicLinker(VisualStudioLikeLinkerMixin, DynamicLinker):
 
     """Microsoft's Link.exe."""
 
+    id = 'link'
+
     def __init__(self, for_machine: mesonlib.MachineChoice, always_args: T.List[str], *,
                  exelist: T.Optional[T.List[str]] = None,
                  prefix: T.Union[str, T.List[str]] = '',
                  machine: str = 'x86', version: str = 'unknown version',
                  direct: bool = True):
-        super().__init__('link', exelist or ['link.exe'], for_machine,
+        super().__init__(exelist or ['link.exe'], for_machine,
                          prefix, always_args, machine=machine, version=version, direct=direct)
 
     def get_always_args(self) -> T.List[str]:
@@ -875,12 +1154,14 @@
 
     """Clang's lld-link.exe."""
 
+    id = 'lld-link'
+
     def __init__(self, for_machine: mesonlib.MachineChoice, always_args: T.List[str], *,
                  exelist: T.Optional[T.List[str]] = None,
                  prefix: T.Union[str, T.List[str]] = '',
                  machine: str = 'x86', version: str = 'unknown version',
                  direct: bool = True):
-        super().__init__('lld-link', exelist or ['lld-link.exe'], for_machine,
+        super().__init__(exelist or ['lld-link.exe'], for_machine,
                          prefix, always_args, machine=machine, version=version, direct=direct)
 
 
@@ -888,23 +1169,40 @@
 
     """Intel's Xilink.exe."""
 
-    def __init__(self, for_machine: mesonlib.MachineChoice, always_args: T.List[str],
-                 *, version: str = 'unknown version'):
-        super().__init__('xilink', ['xilink.exe'], for_machine, '', always_args, version=version)
+    id = 'xilink'
+
+    def __init__(self, for_machine: mesonlib.MachineChoice, always_args: T.List[str], *,
+                 exelist: T.Optional[T.List[str]] = None,
+                 prefix: T.Union[str, T.List[str]] = '',
+                 machine: str = 'x86', version: str = 'unknown version',
+                 direct: bool = True):
+        super().__init__(['xilink.exe'], for_machine, '', always_args, version=version)
 
 
 class SolarisDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
 
     """Sys-V derived linker used on Solaris and OpenSolaris."""
 
-    def __init__(self, *args, **kwargs):
-        super().__init__('ld.solaris', *args, **kwargs)
+    id = 'ld.solaris'
 
     def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
         if not args:
             return args
         return self._apply_prefix('--whole-archive') + args + self._apply_prefix('--no-whole-archive')
 
+    def get_pie_args(self) -> T.List[str]:
+        # Available in Solaris 11.2 and later
+        pc, stdo, stde = mesonlib.Popen_safe(self.exelist + self._apply_prefix('-zhelp'))
+        for line in (stdo + stde).split('\n'):
+            if '-z type' in line:
+                if 'pie' in line:
+                    return ['-z', 'type=pie']
+                break
+        return []
+
+    def get_asneeded_args(self) -> T.List[str]:
+        return self._apply_prefix(['-z', 'ignore'])
+
     def no_undefined_args(self) -> T.List[str]:
         return ['-z', 'defs']
 
@@ -916,13 +1214,18 @@
 
     def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
                          rpath_paths: str, build_rpath: str,
-                         install_rpath: str) -> T.List[str]:
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
         if not rpath_paths and not install_rpath and not build_rpath:
-            return []
+            return ([], set())
         processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir)
         all_paths = mesonlib.OrderedSet([os.path.join('$ORIGIN', p) for p in processed_rpaths])
+        rpath_dirs_to_remove = set()
+        for p in all_paths:
+            rpath_dirs_to_remove.add(p.encode('utf8'))
         if build_rpath != '':
             all_paths.add(build_rpath)
+            for p in build_rpath.split(':'):
+                rpath_dirs_to_remove.add(p.encode('utf8'))
 
         # In order to avoid relinking for RPATH removal, the binary needs to contain just
         # enough space in the ELF header to hold the final installation RPATH.
@@ -933,7 +1236,7 @@
                 paths = padding
             else:
                 paths = paths + ':' + padding
-        return self._apply_prefix('-rpath,{}'.format(paths))
+        return (self._apply_prefix('-rpath,{}'.format(paths)), rpath_dirs_to_remove)
 
     def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
                         suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
@@ -942,28 +1245,67 @@
         return self._apply_prefix('-soname,{}{}.{}{}'.format(prefix, shlib_name, suffix, sostr))
 
 
+class AIXDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
+
+    """Sys-V derived linker used on AIX"""
+
+    id = 'ld.aix'
+
+    def get_always_args(self) -> T.List[str]:
+        return self._apply_prefix(['-bsvr4', '-bnoipath', '-bbigtoc']) + super().get_always_args()
+
+    def no_undefined_args(self) -> T.List[str]:
+        return self._apply_prefix(['-z', 'defs'])
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return self._apply_prefix(['-z', 'nodefs'])
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: str, build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        all_paths = mesonlib.OrderedSet(['/opt/freeware/lib']) # for libgcc_s.a
+        for p in rpath_paths:
+            all_paths.add(os.path.join(build_dir, p))
+        if build_rpath != '':
+            all_paths.add(build_rpath)
+        if install_rpath != '':
+            all_paths.add(install_rpath)
+        return (self._apply_prefix([x for p in all_paths for x in ('-R', p)]), set())
+
+    def thread_flags(self, env: 'Environment') -> T.List[str]:
+        return ['-pthread']
+
+
 class OptlinkDynamicLinker(VisualStudioLikeLinkerMixin, DynamicLinker):
 
     """Digital Mars dynamic linker for windows."""
 
-    def __init__(self, for_machine: mesonlib.MachineChoice,
+    id = 'optlink'
+
+    def __init__(self, exelist: T.List[str], for_machine: mesonlib.MachineChoice,
                  *, version: str = 'unknown version'):
         # Use optlink instead of link so we don't interfer with other link.exe
         # implementations.
-        super().__init__('optlink', ['optlink.exe'], for_machine, '', [], version=version)
+        super().__init__(exelist, for_machine, '', [], version=version)
 
     def get_allow_undefined_args(self) -> T.List[str]:
         return []
 
+    def get_debugfile_args(self, targetfile: str) -> T.List[str]:
+        # Optlink does not generate pdb files.
+        return []
+
+    def get_always_args(self) -> T.List[str]:
+        return []
+
 
 class CudaLinker(PosixDynamicLinkerMixin, DynamicLinker):
     """Cuda linker (nvlink)"""
 
-    def __init__(self, *args, **kwargs):
-        super().__init__('nvlink', *args, **kwargs)
+    id = 'nvlink'
 
     @staticmethod
-    def parse_version():
+    def parse_version() -> str:
         version_cmd = ['nvlink', '--version']
         try:
             _, out, _ = mesonlib.Popen_safe(version_cmd)
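
Throughout the linkers.py hunks, build_rpath_args() changes its return type from a plain list of arguments to an (args, rpath_dirs_to_remove) tuple, where the second element is a set of byte strings that the install step can later strip from the binary's RPATH without relinking. The sketch below shows how a caller might consume the new shape; the collect_rpath_flags helper and the target attribute it sets are illustrative assumptions, not code from the patch.

    import typing as T

    def collect_rpath_flags(linker, env, target, build_dir: str, from_dir: str,
                            rpath_paths: str, build_rpath: str,
                            install_rpath: str) -> T.List[str]:
        # The linker now reports both the flags to pass at link time and the
        # rpath directories (as bytes) it expects to be removed at install time.
        args, dirs_to_remove = linker.build_rpath_args(
            env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
        # Hypothetical bookkeeping: remember the directories so the install
        # step can shrink the RPATH in place instead of relinking.
        target.rpath_dirs_to_remove = dirs_to_remove
        return args
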
diff -Nru meson-0.53.2/mesonbuild/mcompile.py meson-0.57.0+really0.56.2/mesonbuild/mcompile.py
--- meson-0.53.2/mesonbuild/mcompile.py	1970-01-01 00:00:00.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/mcompile.py	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,356 @@
+# Copyright 2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Entrypoint script for backend agnostic compile."""
+
+import os
+import json
+import re
+import sys
+import shutil
+import typing as T
+from collections import defaultdict
+from ._pathlib import Path
+
+from . import mlog
+from . import mesonlib
+from . import coredata
+from .mesonlib import MesonException
+from mesonbuild.environment import detect_ninja
+from mesonbuild.coredata import UserArrayOption
+
+if T.TYPE_CHECKING:
+    import argparse
+
+def array_arg(value: str) -> T.List[str]:
+    return UserArrayOption(None, value, allow_dups=True, user_input=True).value
+
+def validate_builddir(builddir: Path) -> None:
+    if not (builddir / 'meson-private' / 'coredata.dat' ).is_file():
+        raise MesonException('Current directory is not a meson build directory: `{}`.\n'
+                             'Please specify a valid build dir or change the working directory to it.\n'
+                             'It is also possible that the build directory was generated with an old\n'
+                             'meson version. Please regenerate it in this case.'.format(builddir))
+
+def get_backend_from_coredata(builddir: Path) -> str:
+    """
+    Gets `backend` option value from coredata
+    """
+    backend = coredata.load(str(builddir)).get_builtin_option('backend')
+    assert isinstance(backend, str)
+    return backend
+
+def parse_introspect_data(builddir: Path) -> T.Dict[str, T.List[dict]]:
+    """
+    Converts the flat list of target dictionaries into a dict mapping each target name to a list of its dictionaries (since names are not unique)
+    """
+    path_to_intro = builddir / 'meson-info' / 'intro-targets.json'
+    if not path_to_intro.exists():
+        raise MesonException('`{}` is missing! Directory is not configured yet?'.format(path_to_intro.name))
+    with path_to_intro.open() as f:
+        schema = json.load(f)
+
+    parsed_data = defaultdict(list) # type: T.Dict[str, T.List[dict]]
+    for target in schema:
+        parsed_data[target['name']] += [target]
+    return parsed_data
+
+class ParsedTargetName:
+    full_name = ''
+    name = ''
+    type = ''
+    path = ''
+
+    def __init__(self, target: str):
+        self.full_name = target
+        split = target.rsplit(':', 1)
+        if len(split) > 1:
+            self.type = split[1]
+            if not self._is_valid_type(self.type):
+                raise MesonException('Can\'t invoke target `{}`: unknown target type: `{}`'.format(target, self.type))
+
+        split = split[0].rsplit('/', 1)
+        if len(split) > 1:
+            self.path = split[0]
+            self.name = split[1]
+        else:
+            self.name = split[0]
+
+    @staticmethod
+    def _is_valid_type(type: str) -> bool:
+        # Amend docs in Commands.md when editing this list
+        allowed_types = {
+            'executable',
+            'static_library',
+            'shared_library',
+            'shared_module',
+            'custom',
+            'run',
+            'jar',
+        }
+        return type in allowed_types
+
+def get_target_from_intro_data(target: ParsedTargetName, builddir: Path, introspect_data: T.Dict[str, T.Any]) -> T.Dict[str, T.Any]:
+    if target.name not in introspect_data:
+        raise MesonException('Can\'t invoke target `{}`: target not found'.format(target.full_name))
+
+    intro_targets = introspect_data[target.name]
+    found_targets = []  # type: T.List[T.Dict[str, T.Any]]
+
+    resolved_bdir = builddir.resolve()
+
+    if not target.type and not target.path:
+        found_targets = intro_targets
+    else:
+        for intro_target in intro_targets:
+            if (intro_target['subproject'] or
+                    (target.type and target.type != intro_target['type'].replace(' ', '_')) or
+                    (target.path
+                         and intro_target['filename'] != 'no_name'
+                         and Path(target.path) != Path(intro_target['filename'][0]).relative_to(resolved_bdir).parent)):
+                continue
+            found_targets += [intro_target]
+
+    if not found_targets:
+        raise MesonException('Can\'t invoke target `{}`: target not found'.format(target.full_name))
+    elif len(found_targets) > 1:
+        raise MesonException('Can\'t invoke target `{}`: ambiguous name. Add target type and/or path: `PATH/NAME:TYPE`'.format(target.full_name))
+
+    return found_targets[0]
+
+def generate_target_names_ninja(target: ParsedTargetName, builddir: Path, introspect_data: dict) -> T.List[str]:
+    intro_target = get_target_from_intro_data(target, builddir, introspect_data)
+
+    if intro_target['type'] == 'run':
+        return [target.name]
+    else:
+        return [str(Path(out_file).relative_to(builddir.resolve())) for out_file in intro_target['filename']]
+
+def get_parsed_args_ninja(options: 'argparse.Namespace', builddir: Path) -> T.Tuple[T.List[str], T.Optional[T.Dict[str, str]]]:
+    runner = detect_ninja()
+    if runner is None:
+        raise MesonException('Cannot find ninja.')
+    mlog.log('Found runner:', str(runner))
+
+    cmd = runner + ['-C', builddir.as_posix()]
+
+    if options.targets:
+        intro_data = parse_introspect_data(builddir)
+        for t in options.targets:
+            cmd.extend(generate_target_names_ninja(ParsedTargetName(t), builddir, intro_data))
+    if options.clean:
+        cmd.append('clean')
+
+    # If the value is set to < 1 then don't set anything, which lets
+    # ninja/samu decide what to do.
+    if options.jobs > 0:
+        cmd.extend(['-j', str(options.jobs)])
+    if options.load_average > 0:
+        cmd.extend(['-l', str(options.load_average)])
+
+    if options.verbose:
+        cmd.append('-v')
+
+    cmd += options.ninja_args
+
+    return cmd, None
+
+def generate_target_name_vs(target: ParsedTargetName, builddir: Path, introspect_data: dict) -> str:
+    intro_target = get_target_from_intro_data(target, builddir, introspect_data)
+
+    assert intro_target['type'] != 'run', 'Should not reach here: `run` targets must be handled above'
+
+    # Normalize project name
+    # Source: https://docs.microsoft.com/en-us/visualstudio/msbuild/how-to-build-specific-targets-in-solutions-by-using-msbuild-exe
+    target_name = re.sub('[\%\$\@\;\.\(\)\']', '_', intro_target['id'])  # type: str
+    rel_path = Path(intro_target['filename'][0]).relative_to(builddir.resolve()).parent
+    if rel_path != Path('.'):
+        target_name = str(rel_path / target_name)
+    return target_name
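+# For example, a (hypothetical) introspection id like 'foo@exe' is normalized to
+# 'foo_exe' before being passed to msbuild's -target switch.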
+
+def get_parsed_args_vs(options: 'argparse.Namespace', builddir: Path) -> T.Tuple[T.List[str], T.Optional[T.Dict[str, str]]]:
+    slns = list(builddir.glob('*.sln'))
+    assert len(slns) == 1, 'More than one solution in a project?'
+    sln = slns[0]
+
+    cmd = ['msbuild']
+
+    if options.targets:
+        intro_data = parse_introspect_data(builddir)
+        has_run_target = any(map(
+            lambda t:
+                get_target_from_intro_data(ParsedTargetName(t), builddir, intro_data)['type'] == 'run',
+            options.targets
+        ))
+
+        if has_run_target:
+            # `run` targets can't be used the same way as other targets on the `vs`
+            # backend. They are defined as disabled projects, which can't be invoked
+            # as an `.sln` target and have to be invoked directly as a project instead.
+            # Issue: https://github.com/microsoft/msbuild/issues/4772
+
+            if len(options.targets) > 1:
+                raise MesonException('Only one target may be specified when `run` target type is used on this backend.')
+            intro_target = get_target_from_intro_data(ParsedTargetName(options.targets[0]), builddir, intro_data)
+            proj_dir = Path(intro_target['filename'][0]).parent
+            proj = proj_dir/'{}.vcxproj'.format(intro_target['id'])
+            cmd += [str(proj.resolve())]
+        else:
+            cmd += [str(sln.resolve())]
+            cmd.extend(['-target:{}'.format(generate_target_name_vs(ParsedTargetName(t), builddir, intro_data)) for t in options.targets])
+    else:
+        cmd += [str(sln.resolve())]
+
+    if options.clean:
+        cmd.extend(['-target:Clean'])
+
+    # In msbuild, `-maxCpuCount` with no number means "detect CPUs"; the default is `-maxCpuCount:1`.
+    if options.jobs > 0:
+        cmd.append('-maxCpuCount:{}'.format(options.jobs))
+    else:
+        cmd.append('-maxCpuCount')
+
+    if options.load_average:
+        mlog.warning('Msbuild does not have a load-average switch, ignoring.')
+
+    if not options.verbose:
+        cmd.append('-verbosity:minimal')
+
+    cmd += options.vs_args
+
+    # Remove PLATFORM from the environment so that msbuild does not pick the x86 platform when the solution platform is Win32.
+    env = os.environ.copy()
+    del env['PLATFORM']
+
+    return cmd, env
+
+def get_parsed_args_xcode(options: 'argparse.Namespace', builddir: Path) -> T.Tuple[T.List[str], T.Optional[T.Dict[str, str]]]:
+    runner = 'xcodebuild'
+    if not shutil.which(runner):
+        raise MesonException('Cannot find xcodebuild, did you install Xcode?')
+
+    # xcodebuild has no argument for selecting the build directory, so chdir into it instead
+    os.chdir(str(builddir))
+
+    cmd = [runner, '-parallelizeTargets']
+
+    if options.targets:
+        for t in options.targets:
+            cmd += ['-target', t]
+
+    if options.clean:
+        if options.targets:
+            cmd += ['clean']
+        else:
+            cmd += ['-alltargets', 'clean']
+        # Otherwise xcodebuild tries to delete the builddir and fails
+        cmd += ['-UseNewBuildSystem=FALSE']
+
+    if options.jobs > 0:
+        cmd.extend(['-jobs', str(options.jobs)])
+
+    if options.load_average > 0:
+        mlog.warning('xcodebuild does not have a load-average switch, ignoring')
+
+    if options.verbose:
+        # xcodebuild is already quite verbose, and -quiet doesn't print any
+        # status messages
+        pass
+
+    cmd += options.xcode_args
+    return cmd, None
+
+def add_arguments(parser: 'argparse.ArgumentParser') -> None:
+    """Add compile specific arguments."""
+    parser.add_argument(
+        'targets',
+        metavar='TARGET',
+        nargs='*',
+        default=None,
+        help='Targets to build. Target has the following format: [PATH_TO_TARGET/]TARGET_NAME[:TARGET_TYPE].')
+    parser.add_argument(
+        '--clean',
+        action='store_true',
+        help='Clean the build directory.'
+    )
+    parser.add_argument(
+        '-C',
+        action='store',
+        dest='builddir',
+        type=Path,
+        default='.',
+        help='The directory containing build files to be built.'
+    )
+    parser.add_argument(
+        '-j', '--jobs',
+        action='store',
+        default=0,
+        type=int,
+        help='The number of worker jobs to run (if supported). If the value is less than 1 the build program will guess.'
+    )
+    parser.add_argument(
+        '-l', '--load-average',
+        action='store',
+        default=0,
+        type=int,
+        help='The system load average to try to maintain (if supported).'
+    )
+    parser.add_argument(
+        '-v', '--verbose',
+        action='store_true',
+        help='Show more verbose output.'
+    )
+    parser.add_argument(
+        '--ninja-args',
+        type=array_arg,
+        default=[],
+        help='Arguments to pass to `ninja` (applied only on `ninja` backend).'
+    )
+    parser.add_argument(
+        '--vs-args',
+        type=array_arg,
+        default=[],
+        help='Arguments to pass to `msbuild` (applied only on `vs` backend).'
+    )
+    parser.add_argument(
+        '--xcode-args',
+        type=array_arg,
+        default=[],
+        help='Arguments to pass to `xcodebuild` (applied only on `xcode` backend).'
+    )
+
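+# Illustrative invocations accepted by the parser above (directory and target
+# names are made up):
+#   meson compile -C builddir
+#   meson compile -C builddir --clean
+#   meson compile -C builddir sub/dir/foo:shared_library -j 4
+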
+def run(options: 'argparse.Namespace') -> int:
+    bdir = options.builddir  # type: Path
+    validate_builddir(bdir.resolve())
+
+    cmd = []    # type: T.List[str]
+    env = None  # type: T.Optional[T.Dict[str, str]]
+
+    if options.targets and options.clean:
+        raise MesonException('`TARGET` and `--clean` can\'t be used simultaneously')
+
+    backend = get_backend_from_coredata(bdir)
+    if backend == 'ninja':
+        cmd, env = get_parsed_args_ninja(options, bdir)
+    elif backend.startswith('vs'):
+        cmd, env = get_parsed_args_vs(options, bdir)
+    elif backend == 'xcode':
+        cmd, env = get_parsed_args_xcode(options, bdir)
+    else:
+        raise MesonException(
+            'Backend `{}` is not yet supported by `compile`. Use generated project files directly instead.'.format(backend))
+
+    p, *_ = mesonlib.Popen_safe(cmd, stdout=sys.stdout.buffer, stderr=sys.stderr.buffer, env=env)
+
+    return p.returncode
diff -Nru meson-0.53.2/mesonbuild/mconf.py meson-0.57.0+really0.56.2/mesonbuild/mconf.py
--- meson-0.53.2/mesonbuild/mconf.py	2019-12-04 18:45:50.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/mconf.py	2021-01-06 10:39:48.000000000 +0000
@@ -15,15 +15,19 @@
 import os
 from . import coredata, environment, mesonlib, build, mintro, mlog
 from .ast import AstIDGenerator
+import typing as T
 
-def add_arguments(parser):
+if T.TYPE_CHECKING:
+    import argparse
+
+def add_arguments(parser: 'argparse.ArgumentParser') -> None:
     coredata.register_builtin_arguments(parser)
     parser.add_argument('builddir', nargs='?', default='.')
     parser.add_argument('--clearcache', action='store_true', default=False,
                         help='Clear cached state (e.g. found dependencies)')
 
 
-def make_lower_case(val):
+def make_lower_case(val: T.Any) -> T.Union[str, T.List[T.Any]]:  # T.Any because of recursion...
     if isinstance(val, bool):
         return str(val).lower()
     elif isinstance(val, list):
@@ -184,19 +188,7 @@
         if not self.default_values_only:
             print('  Build dir ', self.build_dir)
 
-        dir_option_names = ['bindir',
-                            'datadir',
-                            'includedir',
-                            'infodir',
-                            'libdir',
-                            'libexecdir',
-                            'localedir',
-                            'localstatedir',
-                            'mandir',
-                            'prefix',
-                            'sbindir',
-                            'sharedstatedir',
-                            'sysconfdir']
+        dir_option_names = list(coredata.BUILTIN_DIR_OPTIONS)
         test_option_names = ['errorlogs',
                              'stdsplit']
         core_option_names = [k for k in self.coredata.builtins if k not in dir_option_names + test_option_names]
@@ -205,15 +197,14 @@
         test_options = {k: o for k, o in self.coredata.builtins.items() if k in test_option_names}
         core_options = {k: o for k, o in self.coredata.builtins.items() if k in core_option_names}
 
-        def insert_build_prefix(k):
-            idx = k.find(':')
-            if idx < 0:
-                return 'build.' + k
-            return k[:idx + 1] + 'build.' + k[idx + 1:]
-
         core_options = self.split_options_per_subproject(core_options)
-        host_compiler_options = self.split_options_per_subproject(self.coredata.compiler_options.host)
-        build_compiler_options = self.split_options_per_subproject({insert_build_prefix(k): o for k, o in self.coredata.compiler_options.build.items()})
+        host_compiler_options = self.split_options_per_subproject(
+            dict(self.coredata.flatten_lang_iterator(
+                self.coredata.compiler_options.host.items())))
+        build_compiler_options = self.split_options_per_subproject(
+            dict(self.coredata.flatten_lang_iterator(
+                (self.coredata.insert_build_prefix(k), o)
+                for k, o in self.coredata.compiler_options.build.items())))
         project_options = self.split_options_per_subproject(self.coredata.user_options)
         show_build_options = self.default_values_only or self.build.environment.is_cross_build()
 
@@ -221,7 +212,7 @@
         self.print_options('Core options', core_options[''])
         self.print_options('', self.coredata.builtins_per_machine.host)
         if show_build_options:
-            self.print_options('', {insert_build_prefix(k): o for k, o in self.coredata.builtins_per_machine.build.items()})
+            self.print_options('', {self.coredata.insert_build_prefix(k): o for k, o in self.coredata.builtins_per_machine.build.items()})
         self.print_options('Backend options', self.coredata.backend_options)
         self.print_options('Base options', self.coredata.base_options)
         self.print_options('Compiler options', host_compiler_options.get('', {}))
diff -Nru meson-0.53.2/mesonbuild/mdist.py meson-0.57.0+really0.56.2/mesonbuild/mdist.py
--- meson-0.53.2/mesonbuild/mdist.py	2020-01-23 22:29:05.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/mdist.py	2021-01-06 10:39:48.000000000 +0000
@@ -21,7 +21,7 @@
 import hashlib
 import json
 from glob import glob
-from pathlib import Path
+from ._pathlib import Path
 from mesonbuild.environment import detect_ninja
 from mesonbuild.mesonlib import windows_proof_rmtree, MesonException
 from mesonbuild.wrap import wrap
@@ -35,10 +35,12 @@
 def add_arguments(parser):
     parser.add_argument('-C', default='.', dest='wd',
                         help='directory to cd into before running')
-    parser.add_argument('--formats', default='xztar',
+    parser.add_argument('--formats', default='xztar', choices=archive_choices,
                         help='Comma separated list of archive types to create.')
     parser.add_argument('--include-subprojects', action='store_true',
                         help='Include source code of subprojects that have been used for the build.')
+    parser.add_argument('--no-tests', action='store_true',
+                        help='Do not build and test generated packages.')
 
 
 def create_hash(fname):
@@ -46,7 +48,8 @@
     m = hashlib.sha256()
     m.update(open(fname, 'rb').read())
     with open(hashname, 'w') as f:
-        f.write('%s %s\n' % (m.hexdigest(), os.path.basename(fname)))
+        f.write('{} {}\n'.format(m.hexdigest(), os.path.basename(fname)))
+    print(os.path.relpath(fname), m.hexdigest())
 
 
 def del_gitfiles(dirname):
@@ -73,10 +76,12 @@
         del_gitfiles(os.path.join(dirname, v))
 
 
-def run_dist_scripts(dist_root, dist_scripts):
+def run_dist_scripts(src_root, bld_root, dist_root, dist_scripts):
     assert(os.path.isabs(dist_root))
     env = os.environ.copy()
     env['MESON_DIST_ROOT'] = dist_root
+    env['MESON_SOURCE_ROOT'] = src_root
+    env['MESON_BUILD_ROOT'] = bld_root
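+    # Dist scripts therefore see MESON_DIST_ROOT, MESON_SOURCE_ROOT and
+    # MESON_BUILD_ROOT in their environment.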
     for d in dist_scripts:
         script = d['exe']
         args = d['args']
@@ -121,7 +126,7 @@
             git_clone(sub_src_root, sub_distdir)
         else:
             shutil.copytree(sub_src_root, sub_distdir)
-    run_dist_scripts(distdir, dist_scripts)
+    run_dist_scripts(src_root, bld_root, distdir, dist_scripts)
     output_names = []
     for a in archives:
         compressed_name = distdir + archive_extension[a]
@@ -141,16 +146,25 @@
 def create_dist_hg(dist_name, archives, src_root, bld_root, dist_sub, dist_scripts):
     if hg_have_dirty_index(src_root):
         mlog.warning('Repository has uncommitted changes that will not be included in the dist tarball')
+    if dist_scripts:
+        mlog.warning('dist scripts are not supported in Mercurial projects')
 
     os.makedirs(dist_sub, exist_ok=True)
     tarname = os.path.join(dist_sub, dist_name + '.tar')
     xzname = tarname + '.xz'
     gzname = tarname + '.gz'
     zipname = os.path.join(dist_sub, dist_name + '.zip')
-    subprocess.check_call(['hg', 'archive', '-R', src_root, '-S', '-t', 'tar', tarname])
+    # Note that -X interprets relative paths using the current working
+    # directory, not the repository root, so this must be an absolute path:
+    # https://bz.mercurial-scm.org/show_bug.cgi?id=6267
+    #
+    # .hg[a-z]* is used instead of .hg* to keep .hg_archival.txt, which may
+    # be useful to link the tarball to the Mercurial revision for either
+    # manual inspection or in case any code interprets it for a --version or
+    # similar.
+    subprocess.check_call(['hg', 'archive', '-R', src_root, '-S', '-t', 'tar',
+                           '-X', src_root + '/.hg[a-z]*', tarname])
     output_names = []
-    if dist_scripts:
-        mlog.warning('dist scripts are not supported in Mercurial projects')
     if 'xztar' in archives:
         import lzma
         with lzma.open(xzname, 'wb') as xf, open(tarname, 'rb') as tf:
@@ -166,46 +180,51 @@
         output_names.append(zipname)
     return output_names
 
+def run_dist_steps(meson_command, unpacked_src_dir, builddir, installdir, ninja_args):
+    if subprocess.call(meson_command + ['--backend=ninja', unpacked_src_dir, builddir]) != 0:
+        print('Running Meson on distribution package failed')
+        return 1
+    if subprocess.call(ninja_args, cwd=builddir) != 0:
+        print('Compiling the distribution package failed')
+        return 1
+    if subprocess.call(ninja_args + ['test'], cwd=builddir) != 0:
+        print('Running unit tests on the distribution package failed')
+        return 1
+    myenv = os.environ.copy()
+    myenv['DESTDIR'] = installdir
+    if subprocess.call(ninja_args + ['install'], cwd=builddir, env=myenv) != 0:
+        print('Installing the distribution package failed')
+        return 1
+    return 0
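+# i.e. configure the unpacked tarball, build it, run its tests and finally do a
+# DESTDIR install, reporting the first step that fails.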
 
 def check_dist(packagename, meson_command, extra_meson_args, bld_root, privdir):
-    print('Testing distribution package %s' % packagename)
+    print('Testing distribution package {}'.format(packagename))
     unpackdir = os.path.join(privdir, 'dist-unpack')
     builddir = os.path.join(privdir, 'dist-build')
     installdir = os.path.join(privdir, 'dist-install')
     for p in (unpackdir, builddir, installdir):
         if os.path.exists(p):
-            shutil.rmtree(p)
+            windows_proof_rmtree(p)
         os.mkdir(p)
-    ninja_bin = detect_ninja()
-    try:
-        shutil.unpack_archive(packagename, unpackdir)
-        unpacked_files = glob(os.path.join(unpackdir, '*'))
-        assert(len(unpacked_files) == 1)
-        unpacked_src_dir = unpacked_files[0]
-        with open(os.path.join(bld_root, 'meson-info', 'intro-buildoptions.json')) as boptions:
-            meson_command += ['-D{name}={value}'.format(**o) for o in json.load(boptions)
-                              if o['name'] not in ['backend', 'install_umask']]
-        meson_command += extra_meson_args
-        if subprocess.call(meson_command + ['--backend=ninja', unpacked_src_dir, builddir]) != 0:
-            print('Running Meson on distribution package failed')
-            return 1
-        if subprocess.call([ninja_bin], cwd=builddir) != 0:
-            print('Compiling the distribution package failed')
-            return 1
-        if subprocess.call([ninja_bin, 'test'], cwd=builddir) != 0:
-            print('Running unit tests on the distribution package failed')
-            return 1
-        myenv = os.environ.copy()
-        myenv['DESTDIR'] = installdir
-        if subprocess.call([ninja_bin, 'install'], cwd=builddir, env=myenv) != 0:
-            print('Installing the distribution package failed')
-            return 1
-    finally:
-        shutil.rmtree(unpackdir)
-        shutil.rmtree(builddir)
-        shutil.rmtree(installdir)
-    print('Distribution package %s tested' % packagename)
-    return 0
+    ninja_args = detect_ninja()
+    shutil.unpack_archive(packagename, unpackdir)
+    unpacked_files = glob(os.path.join(unpackdir, '*'))
+    assert(len(unpacked_files) == 1)
+    unpacked_src_dir = unpacked_files[0]
+    with open(os.path.join(bld_root, 'meson-info', 'intro-buildoptions.json')) as boptions:
+        meson_command += ['-D{name}={value}'.format(**o) for o in json.load(boptions)
+                          if o['name'] not in ['backend', 'install_umask', 'buildtype']]
+    meson_command += extra_meson_args
+
+    ret = run_dist_steps(meson_command, unpacked_src_dir, builddir, installdir, ninja_args)
+    if ret > 0:
+        print('Dist check build directory was {}'.format(builddir))
+    else:
+        windows_proof_rmtree(unpackdir)
+        windows_proof_rmtree(builddir)
+        windows_proof_rmtree(installdir)
+        print('Distribution package {} tested'.format(packagename))
+    return ret
 
 def determine_archives_to_generate(options):
     result = []
@@ -240,7 +259,7 @@
     if options.include_subprojects:
         subproject_dir = os.path.join(src_root, b.subproject_dir)
         for sub in b.subprojects:
-            _, directory = wrap.get_directory(subproject_dir, sub)
+            directory = wrap.get_directory(subproject_dir, sub)
             subprojects.append(os.path.join(b.subproject_dir, directory))
         extra_meson_args.append('-Dwrap_mode=nodownload')
 
@@ -256,8 +275,10 @@
         return 1
     if names is None:
         return 1
-    # Check only one.
-    rc = check_dist(names[0], meson_command, extra_meson_args, bld_root, priv_dir)
+    rc = 0
+    if not options.no_tests:
+        # Check only one.
+        rc = check_dist(names[0], meson_command, extra_meson_args, bld_root, priv_dir)
     if rc == 0:
         for name in names:
             create_hash(name)
diff -Nru meson-0.53.2/mesonbuild/mesondata.py meson-0.57.0+really0.56.2/mesonbuild/mesondata.py
--- meson-0.53.2/mesonbuild/mesondata.py	1970-01-01 00:00:00.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/mesondata.py	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,390 @@
+# Copyright 2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+####
+####  WARNING: This is an automatically generated file! Do not edit!
+####           Generated by tools/gen_data.py
+####
+
+
+# TODO: Remember to remove this also from tools/gen_data.py
+from ._pathlib import Path
+import typing as T
+
+if T.TYPE_CHECKING:
+    from .environment import Environment
+
+######################
+# BEGIN Data section #
+######################
+
+file_0_data_CMakeListsLLVM_txt = '''\
+cmake_minimum_required(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}.${CMAKE_PATCH_VERSION} )
+
+set(PACKAGE_FOUND FALSE)
+
+while(TRUE)
+  find_package(LLVM REQUIRED CONFIG QUIET)
+
+  # ARCHS has to be set via the CMD interface
+  if(LLVM_FOUND OR "${ARCHS}" STREQUAL "")
+    break()
+  endif()
+
+  list(GET       ARCHS 0 CMAKE_LIBRARY_ARCHITECTURE)
+  list(REMOVE_AT ARCHS 0)
+endwhile()
+
+if(LLVM_FOUND)
+  set(PACKAGE_FOUND TRUE)
+
+  foreach(mod IN LISTS LLVM_MESON_MODULES)
+    # Reset variables
+    set(out_mods)
+    set(real_mods)
+
+    # Generate a lower and upper case version
+    string(TOLOWER "${mod}" mod_L)
+    string(TOUPPER "${mod}" mod_U)
+
+    # Get the mapped components
+    llvm_map_components_to_libnames(out_mods ${mod} ${mod_L} ${mod_U})
+    list(SORT              out_mods)
+    list(REMOVE_DUPLICATES out_mods)
+
+    # Make sure that the modules exist
+    foreach(i IN LISTS out_mods)
+      if(TARGET ${i})
+        list(APPEND real_mods ${i})
+      endif()
+    endforeach()
+
+    # Set the output variables
+    set(MESON_LLVM_TARGETS_${mod} ${real_mods})
+    foreach(i IN LISTS real_mods)
+      set(MESON_TARGET_TO_LLVM_${i} ${mod})
+    endforeach()
+  endforeach()
+
+  # Check the following variables:
+  # LLVM_PACKAGE_VERSION
+  # LLVM_VERSION
+  # LLVM_VERSION_STRING
+  if(NOT DEFINED PACKAGE_VERSION)
+    if(DEFINED LLVM_PACKAGE_VERSION)
+      set(PACKAGE_VERSION "${LLVM_PACKAGE_VERSION}")
+    elseif(DEFINED LLVM_VERSION)
+      set(PACKAGE_VERSION "${LLVM_VERSION}")
+    elseif(DEFINED LLVM_VERSION_STRING)
+      set(PACKAGE_VERSION "${LLVM_VERSION_STRING}")
+    endif()
+  endif()
+
+  # Check the following variables:
+  # LLVM_LIBRARIES
+  # LLVM_LIBS
+  set(libs)
+  if(DEFINED LLVM_LIBRARIES)
+    set(libs LLVM_LIBRARIES)
+  elseif(DEFINED LLVM_LIBS)
+    set(libs LLVM_LIBS)
+  endif()
+
+  # Check the following variables:
+  # LLVM_INCLUDE_DIRS
+  # LLVM_INCLUDES
+  # LLVM_INCLUDE_DIR
+  set(includes)
+  if(DEFINED LLVM_INCLUDE_DIRS)
+    set(includes LLVM_INCLUDE_DIRS)
+  elseif(DEFINED LLVM_INCLUDES)
+    set(includes LLVM_INCLUDES)
+  elseif(DEFINED LLVM_INCLUDE_DIR)
+    set(includes LLVM_INCLUDE_DIR)
+  endif()
+
+  # Check the following variables:
+  # LLVM_DEFINITIONS
+  set(definitions)
+  if(DEFINED LLVM_DEFINITIONS)
+    set(definitions LLVM_DEFINITIONS)
+  endif()
+
+  set(PACKAGE_INCLUDE_DIRS "${${includes}}")
+  set(PACKAGE_DEFINITIONS  "${${definitions}}")
+  set(PACKAGE_LIBRARIES    "${${libs}}")
+endif()
+'''
+
+file_1_data_CMakePathInfo_txt = '''\
+cmake_minimum_required(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}.${CMAKE_PATCH_VERSION})
+
+set(TMP_PATHS_LIST)
+list(APPEND TMP_PATHS_LIST ${CMAKE_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_APPBUNDLE_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_APPBUNDLE_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_APPBUNDLE_PATH})
+
+set(LIB_ARCH_LIST)
+if(CMAKE_LIBRARY_ARCHITECTURE_REGEX)
+  file(GLOB implicit_dirs RELATIVE /lib /lib/*-linux-gnu* )
+  foreach(dir ${implicit_dirs})
+    if("${dir}" MATCHES "${CMAKE_LIBRARY_ARCHITECTURE_REGEX}")
+      list(APPEND LIB_ARCH_LIST "${dir}")
+    endif()
+  endforeach()
+endif()
+
+# "Export" these variables:
+set(MESON_ARCH_LIST ${LIB_ARCH_LIST})
+set(MESON_PATHS_LIST ${TMP_PATHS_LIST})
+set(MESON_CMAKE_ROOT ${CMAKE_ROOT})
+set(MESON_CMAKE_SYSROOT ${CMAKE_SYSROOT})
+set(MESON_FIND_ROOT_PATH ${CMAKE_FIND_ROOT_PATH})
+
+message(STATUS ${TMP_PATHS_LIST})
+'''
+
+file_2_data_CMakeLists_txt = '''\
+# fail noisily if attempt to use this file without setting:
+# cmake_minimum_required(VERSION ${CMAKE_VERSION})
+# project(... LANGUAGES ...)
+
+cmake_policy(SET CMP0000 NEW)
+
+set(PACKAGE_FOUND FALSE)
+set(_packageName "${NAME}")
+string(TOUPPER "${_packageName}" PACKAGE_NAME)
+
+while(TRUE)
+  find_package("${NAME}" QUIET COMPONENTS ${COMPS})
+
+  # ARCHS has to be set via the CMD interface
+  if(${_packageName}_FOUND OR ${PACKAGE_NAME}_FOUND OR "${ARCHS}" STREQUAL "")
+    break()
+  endif()
+
+  list(GET       ARCHS 0 CMAKE_LIBRARY_ARCHITECTURE)
+  list(REMOVE_AT ARCHS 0)
+endwhile()
+
+if(${_packageName}_FOUND  OR  ${PACKAGE_NAME}_FOUND)
+  set(PACKAGE_FOUND TRUE)
+
+  # Check the following variables:
+  # FOO_VERSION
+  # Foo_VERSION
+  # FOO_VERSION_STRING
+  # Foo_VERSION_STRING
+  if(NOT DEFINED PACKAGE_VERSION)
+    if(DEFINED ${_packageName}_VERSION)
+      set(PACKAGE_VERSION "${${_packageName}_VERSION}")
+    elseif(DEFINED ${PACKAGE_NAME}_VERSION)
+      set(PACKAGE_VERSION "${${PACKAGE_NAME}_VERSION}")
+    elseif(DEFINED ${_packageName}_VERSION_STRING)
+      set(PACKAGE_VERSION "${${_packageName}_VERSION_STRING}")
+    elseif(DEFINED ${PACKAGE_NAME}_VERSION_STRING)
+      set(PACKAGE_VERSION "${${PACKAGE_NAME}_VERSION_STRING}")
+    endif()
+  endif()
+
+  # Check the following variables:
+  # FOO_LIBRARIES
+  # Foo_LIBRARIES
+  # FOO_LIBS
+  # Foo_LIBS
+  set(libs)
+  if(DEFINED ${_packageName}_LIBRARIES)
+    set(libs ${_packageName}_LIBRARIES)
+  elseif(DEFINED ${PACKAGE_NAME}_LIBRARIES)
+    set(libs ${PACKAGE_NAME}_LIBRARIES)
+  elseif(DEFINED ${_packageName}_LIBS)
+    set(libs ${_packageName}_LIBS)
+  elseif(DEFINED ${PACKAGE_NAME}_LIBS)
+    set(libs ${PACKAGE_NAME}_LIBS)
+  endif()
+
+  # Check the following variables:
+  # FOO_INCLUDE_DIRS
+  # Foo_INCLUDE_DIRS
+  # FOO_INCLUDES
+  # Foo_INCLUDES
+  # FOO_INCLUDE_DIR
+  # Foo_INCLUDE_DIR
+  set(includes)
+  if(DEFINED ${_packageName}_INCLUDE_DIRS)
+    set(includes ${_packageName}_INCLUDE_DIRS)
+  elseif(DEFINED ${PACKAGE_NAME}_INCLUDE_DIRS)
+    set(includes ${PACKAGE_NAME}_INCLUDE_DIRS)
+  elseif(DEFINED ${_packageName}_INCLUDES)
+    set(includes ${_packageName}_INCLUDES)
+  elseif(DEFINED ${PACKAGE_NAME}_INCLUDES)
+    set(includes ${PACKAGE_NAME}_INCLUDES)
+  elseif(DEFINED ${_packageName}_INCLUDE_DIR)
+    set(includes ${_packageName}_INCLUDE_DIR)
+  elseif(DEFINED ${PACKAGE_NAME}_INCLUDE_DIR)
+    set(includes ${PACKAGE_NAME}_INCLUDE_DIR)
+  endif()
+
+  # Check the following variables:
+  # FOO_DEFINITIONS
+  # Foo_DEFINITIONS
+  set(definitions)
+  if(DEFINED ${_packageName}_DEFINITIONS)
+    set(definitions ${_packageName}_DEFINITIONS)
+  elseif(DEFINED ${PACKAGE_NAME}_DEFINITIONS)
+    set(definitions ${PACKAGE_NAME}_DEFINITIONS)
+  endif()
+
+  set(PACKAGE_INCLUDE_DIRS "${${includes}}")
+  set(PACKAGE_DEFINITIONS  "${${definitions}}")
+  set(PACKAGE_LIBRARIES    "${${libs}}")
+endif()
+'''
+
+file_3_data_preload_cmake = '''\
+if(MESON_PS_LOADED)
+  return()
+endif()
+
+set(MESON_PS_LOADED ON)
+
+cmake_policy(PUSH)
+cmake_policy(SET CMP0054 NEW) # https://cmake.org/cmake/help/latest/policy/CMP0054.html
+
+# Dummy macros that have a special meaning in the meson code
+macro(meson_ps_execute_delayed_calls)
+endmacro()
+
+macro(meson_ps_reload_vars)
+endmacro()
+
+macro(meson_ps_disabled_function)
+  message(WARNING "The function '${ARGV0}' is disabled in the context of CMake subporjects.\n"
+                  "This should not be an issue but may lead to compilaton errors.")
+endmacro()
+
+# Helper macro to inspect the current CMake state
+macro(meson_ps_inspect_vars)
+  set(MESON_PS_CMAKE_CURRENT_BINARY_DIR "${CMAKE_CURRENT_BINARY_DIR}")
+  set(MESON_PS_CMAKE_CURRENT_SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}")
+  meson_ps_execute_delayed_calls()
+endmacro()
+
+
+# Override some system functions with custom code and forward the args
+# to the original function
+macro(add_custom_command)
+  meson_ps_inspect_vars()
+  _add_custom_command(${ARGV})
+endmacro()
+
+macro(add_custom_target)
+  meson_ps_inspect_vars()
+  _add_custom_target(${ARGV})
+endmacro()
+
+macro(set_property)
+  meson_ps_inspect_vars()
+  _set_property(${ARGV})
+endmacro()
+
+function(set_source_files_properties)
+  set(FILES)
+  set(I 0)
+  set(PROPERTIES OFF)
+
+  while(I LESS ARGC)
+    if(NOT PROPERTIES)
+      if("${ARGV${I}}" STREQUAL "PROPERTIES")
+        set(PROPERTIES ON)
+      else()
+        list(APPEND FILES "${ARGV${I}}")
+      endif()
+
+      math(EXPR I "${I} + 1")
+    else()
+      set(ID_IDX ${I})
+      math(EXPR PROP_IDX "${ID_IDX} + 1")
+
+      set(ID   "${ARGV${ID_IDX}}")
+      set(PROP "${ARGV${PROP_IDX}}")
+
+      set_property(SOURCE ${FILES} PROPERTY "${ID}" "${PROP}")
+      math(EXPR I "${I} + 2")
+    endif()
+  endwhile()
+endfunction()
+
+# Disable some functions that would mess up the CMake meson integration
+macro(target_precompile_headers)
+  meson_ps_disabled_function(target_precompile_headers)
+endmacro()
+
+set(MESON_PS_DELAYED_CALLS add_custom_command;add_custom_target;set_property)
+meson_ps_reload_vars()
+
+cmake_policy(POP)
+'''
+
+
+####################
+# END Data section #
+####################
+
+class DataFile:
+    def __init__(self, path: Path, sha256sum: str, data: str) -> None:
+        self.path = path
+        self.sha256sum = sha256sum
+        self.data = data
+
+    def write_once(self, path: Path) -> None:
+        if not path.exists():
+            path.write_text(self.data)
+
+    def write_to_private(self, env: 'Environment') -> Path:
+        out_file = Path(env.scratch_dir) / 'data' / self.path.name
+        out_file.parent.mkdir(exist_ok=True)
+        self.write_once(out_file)
+        return out_file
+
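+# DataFile.write_to_private() materialises an embedded file under
+# '<scratch_dir>/data/' inside the build directory, but only if it does not
+# already exist (write_once).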
+
+mesondata = {
+    'dependencies/data/CMakeListsLLVM.txt': DataFile(
+        Path('dependencies/data/CMakeListsLLVM.txt'),
+        '412cec3315597041a978d018cdaca282dcd47693793540da88ae2f80d0cbd7cd',
+        file_0_data_CMakeListsLLVM_txt,
+    ),
+    'dependencies/data/CMakePathInfo.txt': DataFile(
+        Path('dependencies/data/CMakePathInfo.txt'),
+        '90da8b443982d9c87139b7dc84228eb58cab4315764949637208f25e2bda7db2',
+        file_1_data_CMakePathInfo_txt,
+    ),
+    'dependencies/data/CMakeLists.txt': DataFile(
+        Path('dependencies/data/CMakeLists.txt'),
+        '71a2d58381f912bbfb1c8709884d34d721f682edf2fca001e1f582f0bffd0da7',
+        file_2_data_CMakeLists_txt,
+    ),
+    'cmake/data/preload.cmake': DataFile(
+        Path('cmake/data/preload.cmake'),
+        '2b4e632aeb74acb2b441880cf85c0b6fcab03e75b182d3077715a97e739a7918',
+        file_3_data_preload_cmake,
+    ),
+}
diff -Nru meson-0.53.2/mesonbuild/mesonlib.py meson-0.57.0+really0.56.2/mesonbuild/mesonlib.py
--- meson-0.53.2/mesonbuild/mesonlib.py	2020-02-25 18:00:46.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/mesonlib.py	2021-01-06 10:39:48.000000000 +0000
@@ -1,4 +1,4 @@
-# Copyright 2012-2015 The Meson development team
+# Copyright 2012-2020 The Meson development team
 
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -13,27 +13,38 @@
 # limitations under the License.
 
 """A library of random helper functionality."""
-from pathlib import Path
+from ._pathlib import Path
 import sys
 import stat
 import time
 import platform, subprocess, operator, os, shlex, shutil, re
 import collections
-from enum import Enum
-from functools import lru_cache, update_wrapper
+from enum import IntEnum
+from functools import lru_cache, wraps
 from itertools import tee, filterfalse
 import typing as T
 import uuid
+import textwrap
 
 from mesonbuild import mlog
 
+if T.TYPE_CHECKING:
+    from .build import ConfigurationData
+    from .coredata import OptionDictType, UserOption
+    from .compilers.compilers import CompilerType
+    from .interpreterbase import ObjectHolder
+
+    FileOrString = T.Union['File', str]
+
 _T = T.TypeVar('_T')
 _U = T.TypeVar('_U')
 
 have_fcntl = False
 have_msvcrt = False
+# TODO: this is such a hack, this really should be either in coredata or in the
+# interpreter
 # {subproject: project_meson_version}
-project_meson_versions = {}
+project_meson_versions = collections.defaultdict(str)  # type: T.DefaultDict[str, str]
 
 try:
     import fcntl
@@ -56,20 +67,50 @@
     python_command = [sys.executable]
 meson_command = None
 
+class MesonException(Exception):
+    '''Exceptions thrown by Meson'''
+
+    file = None    # type: T.Optional[str]
+    lineno = None  # type: T.Optional[int]
+    colno = None   # type: T.Optional[int]
+
+class EnvironmentException(MesonException):
+    '''Exceptions thrown while processing and creating the build environment'''
+
+class GitException(MesonException):
+    def __init__(self, msg: str, output: T.Optional[str] = None):
+        super().__init__(msg)
+        self.output = output.strip() if output else ''
+
 GIT = shutil.which('git')
-def git(cmd: T.List[str], workingdir: str, **kwargs) -> subprocess.CompletedProcess:
-    pc = subprocess.run([GIT, '-C', workingdir] + cmd,
-                        # Redirect stdin to DEVNULL otherwise git messes up the
-                        # console and ANSI colors stop working on Windows.
-                        stdin=subprocess.DEVNULL, **kwargs)
-    # Sometimes git calls git recursively, such as `git submodule update
-    # --recursive` which will be without the above workaround, so set the
-    # console mode again just in case.
-    mlog.setup_console()
-    return pc
+def git(cmd: T.List[str], workingdir: str, check: bool = False, **kwargs: T.Any) -> T.Tuple[subprocess.Popen, str, str]:
+    cmd = [GIT] + cmd
+    p, o, e = Popen_safe(cmd, cwd=workingdir, **kwargs)
+    if check and p.returncode != 0:
+        raise GitException('Git command failed: ' + str(cmd), e)
+    return p, o, e
 
+def quiet_git(cmd: T.List[str], workingdir: str, check: bool = False) -> T.Tuple[bool, str]:
+    if not GIT:
+        m = 'Git program not found.'
+        if check:
+            raise GitException(m)
+        return False, m
+    p, o, e = git(cmd, workingdir, check)
+    if p.returncode != 0:
+        return False, e
+    return True, o
+
+def verbose_git(cmd: T.List[str], workingdir: str, check: bool = False) -> bool:
+    if not GIT:
+        m = 'Git program not found.'
+        if check:
+            raise GitException(m)
+        return False
+    p, _, _ = git(cmd, workingdir, check, stdout=None, stderr=None)
+    return p.returncode == 0
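+# quiet_git() captures output and returns (success, stdout-or-error-message),
+# while verbose_git() lets the subprocess write straight to the console and only
+# reports success as a bool.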
 
-def set_meson_command(mainfile):
+def set_meson_command(mainfile: str) -> None:
     global python_command
     global meson_command
     # On UNIX-like systems `meson` is a Python script
@@ -86,7 +127,8 @@
     if 'MESON_COMMAND_TESTS' in os.environ:
         mlog.log('meson_command is {!r}'.format(meson_command))
 
-def is_ascii_string(astring) -> bool:
+
+def is_ascii_string(astring: T.Union[str, bytes]) -> bool:
     try:
         if isinstance(astring, str):
             astring.encode('ascii')
@@ -96,7 +138,8 @@
         return False
     return True
 
-def check_direntry_issues(direntry_array):
+
+def check_direntry_issues(direntry_array: T.Union[T.List[T.Union[str, bytes]], str, bytes]) -> None:
     import locale
     # Warn if the locale is not UTF-8. This can cause various unfixable issues
     # such as os.stat not being able to decode filenames with unicode in them.
@@ -109,29 +152,18 @@
         for de in direntry_array:
             if is_ascii_string(de):
                 continue
-            mlog.warning('''You are using {!r} which is not a Unicode-compatible '
-locale but you are trying to access a file system entry called {!r} which is
-not pure ASCII. This may cause problems.
-'''.format(e, de), file=sys.stderr)
+            mlog.warning(textwrap.dedent('''
+                You are using {!r} which is not a Unicode-compatible
+                locale but you are trying to access a file system entry called {!r} which is
+                not pure ASCII. This may cause problems.
+                '''.format(e, de)), file=sys.stderr)
+
 
 # Put this in objects that should not get dumped to pickle files
 # by accident.
 import threading
 an_unpicklable_object = threading.Lock()
 
-class MesonException(Exception):
-    '''Exceptions thrown by Meson'''
-
-    def get_msg_with_context(self):
-        s = ''
-        if hasattr(self, 'lineno') and hasattr(self, 'file'):
-            s = get_error_location_string(self.file, self.lineno) + ' '
-        s += str(self)
-        return s
-
-class EnvironmentException(MesonException):
-    '''Exceptions thrown while processing and creating the build environment'''
-
 class FileMode:
     # The first triad is for owner permissions, the second for group permissions,
     # and the third for others (everyone else).
@@ -158,18 +190,19 @@
                                       '[r-][w-][xsS-]' # Group perms
                                       '[r-][w-][xtT-]') # Others perms
 
-    def __init__(self, perms=None, owner=None, group=None):
+    def __init__(self, perms: T.Optional[str] = None, owner: T.Optional[str] = None,
+                 group: T.Optional[str] = None):
         self.perms_s = perms
         self.perms = self.perms_s_to_bits(perms)
         self.owner = owner
         self.group = group
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         ret = '<FileMode: {!r} owner={} group={}>'
         return ret.format(self.perms_s or 'defaults',
                           self.owner or 'default',
                           self.group or 'default')
 
     @classmethod
-    def perms_s_to_bits(cls, perms_s):
+    def perms_s_to_bits(cls, perms_s: T.Optional[str]) -> int:
         '''
         Does the opposite of stat.filemode(), converts strings of the form
         'rwxr-xr-x' to st_mode enums which can be passed to os.chmod()
@@ -228,8 +261,7 @@
         self.is_built = is_built
         self.subdir = subdir
         self.fname = fname
-        assert(isinstance(self.subdir, str))
-        assert(isinstance(self.fname, str))
+        self.hash = hash((is_built, subdir, fname))
 
     def __str__(self) -> str:
         return self.relative_name()
@@ -243,17 +275,17 @@
 
     @staticmethod
     @lru_cache(maxsize=None)
-    def from_source_file(source_root: str, subdir: str, fname: str):
+    def from_source_file(source_root: str, subdir: str, fname: str) -> 'File':
         if not os.path.isfile(os.path.join(source_root, subdir, fname)):
             raise MesonException('File %s does not exist.' % fname)
         return File(False, subdir, fname)
 
     @staticmethod
-    def from_built_file(subdir: str, fname: str):
+    def from_built_file(subdir: str, fname: str) -> 'File':
         return File(True, subdir, fname)
 
     @staticmethod
-    def from_absolute_file(fname: str):
+    def from_absolute_file(fname: str) -> 'File':
         return File(False, '', fname)
 
     @lru_cache(maxsize=None)
@@ -276,25 +308,31 @@
     def split(self, s: str) -> T.List[str]:
         return self.fname.split(s)
 
-    def __eq__(self, other) -> bool:
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, File):
+            return NotImplemented
+        if self.hash != other.hash:
+            return False
         return (self.fname, self.subdir, self.is_built) == (other.fname, other.subdir, other.is_built)
 
     def __hash__(self) -> int:
-        return hash((self.fname, self.subdir, self.is_built))
+        return self.hash
 
     @lru_cache(maxsize=None)
     def relative_name(self) -> str:
         return os.path.join(self.subdir, self.fname)
 
 
-def get_compiler_for_source(compilers, src):
+def get_compiler_for_source(compilers: T.Iterable['CompilerType'], src: str) -> 'CompilerType':
+    """Given a set of compilers and a source, find the compiler for that source type."""
     for comp in compilers:
         if comp.can_compile(src):
             return comp
     raise MesonException('No specified compiler can handle file {!s}'.format(src))
 
-def classify_unity_sources(compilers, sources):
-    compsrclist = {}
+
+def classify_unity_sources(compilers: T.Iterable['CompilerType'], sources: T.Iterable[str]) -> T.Dict['CompilerType', T.List[str]]:
+    compsrclist = {}  # type: T.Dict[CompilerType, T.List[str]]
     for src in sources:
         comp = get_compiler_for_source(compilers, src)
         if comp not in compsrclist:
@@ -303,32 +341,8 @@
             compsrclist[comp].append(src)
     return compsrclist
 
-class OrderedEnum(Enum):
-    """
-    An Enum which additionally offers homogeneous ordered comparison.
-    """
-    def __ge__(self, other):
-        if self.__class__ is other.__class__:
-            return self.value >= other.value
-        return NotImplemented
-
-    def __gt__(self, other):
-        if self.__class__ is other.__class__:
-            return self.value > other.value
-        return NotImplemented
 
-    def __le__(self, other):
-        if self.__class__ is other.__class__:
-            return self.value <= other.value
-        return NotImplemented
-
-    def __lt__(self, other):
-        if self.__class__ is other.__class__:
-            return self.value < other.value
-        return NotImplemented
-
-
-class MachineChoice(OrderedEnum):
+class MachineChoice(IntEnum):
 
     """Enum class representing one of the two abstract machine names used in
     most places: the build, and host, machines.
@@ -337,15 +351,15 @@
     BUILD = 0
     HOST = 1
 
-    def get_lower_case_name(self):
+    def get_lower_case_name(self) -> str:
         return PerMachine('build', 'host')[self]
 
-    def get_prefix(self):
+    def get_prefix(self) -> str:
         return PerMachine('build.', '')[self]
 
 
 class PerMachine(T.Generic[_T]):
-    def __init__(self, build: _T, host: _T):
+    def __init__(self, build: _T, host: _T) -> None:
         self.build = build
         self.host = host
 
@@ -372,6 +386,9 @@
             unfreeze.host = None
         return unfreeze
 
+    def __repr__(self) -> str:
+        return 'PerMachine({!r}, {!r})'.format(self.build, self.host)
+
 
 class PerThreeMachine(PerMachine[_T]):
     """Like `PerMachine` but includes `target` too.
@@ -380,7 +397,7 @@
     need to compute the `target` field so we don't bother overriding the
     `__getitem__`/`__setitem__` methods.
     """
-    def __init__(self, build: _T, host: _T, target: _T):
+    def __init__(self, build: _T, host: _T, target: _T) -> None:
         super().__init__(build, host)
         self.target = target
 
@@ -404,6 +421,10 @@
     def matches_build_machine(self, machine: MachineChoice) -> bool:
         return self.build == self[machine]
 
+    def __repr__(self) -> str:
+        return 'PerThreeMachine({!r}, {!r}, {!r})'.format(self.build, self.host, self.target)
+
+
 class PerMachineDefaultable(PerMachine[T.Optional[_T]]):
     """Extends `PerMachine` with the ability to default from `None`s.
     """
@@ -421,6 +442,9 @@
             freeze.host = freeze.build
         return freeze
 
+    def __repr__(self) -> str:
+        return 'PerMachineDefaultable({!r}, {!r})'.format(self.build, self.host)
+
 
 class PerThreeMachineDefaultable(PerMachineDefaultable, PerThreeMachine[T.Optional[_T]]):
     """Extends `PerThreeMachine` with the ability to default from `None`s.
@@ -442,44 +466,70 @@
             freeze.target = freeze.host
         return freeze
 
+    def __repr__(self) -> str:
+        return 'PerThreeMachineDefaultable({!r}, {!r}, {!r})'.format(self.build, self.host, self.target)
+
 
 def is_sunos() -> bool:
     return platform.system().lower() == 'sunos'
 
+
 def is_osx() -> bool:
     return platform.system().lower() == 'darwin'
 
+
 def is_linux() -> bool:
     return platform.system().lower() == 'linux'
 
+
 def is_android() -> bool:
     return platform.system().lower() == 'android'
 
+
 def is_haiku() -> bool:
     return platform.system().lower() == 'haiku'
 
+
 def is_openbsd() -> bool:
     return platform.system().lower() == 'openbsd'
 
+
 def is_windows() -> bool:
     platname = platform.system().lower()
-    return platname == 'windows' or 'mingw' in platname
+    return platname == 'windows'
+
 
 def is_cygwin() -> bool:
-    return platform.system().lower().startswith('cygwin')
+    return sys.platform == 'cygwin'
+
 
 def is_debianlike() -> bool:
     return os.path.isfile('/etc/debian_version')
 
+
 def is_dragonflybsd() -> bool:
     return platform.system().lower() == 'dragonfly'
 
+
 def is_netbsd() -> bool:
     return platform.system().lower() == 'netbsd'
 
+
 def is_freebsd() -> bool:
     return platform.system().lower() == 'freebsd'
 
+def is_irix() -> bool:
+    return platform.system().startswith('irix')
+
+def is_hurd() -> bool:
+    return platform.system().lower() == 'gnu'
+
+def is_qnx() -> bool:
+    return platform.system().lower() == 'qnx'
+
+def is_aix() -> bool:
+    return platform.system().lower() == 'aix'
+
 def exe_exists(arglist: T.List[str]) -> bool:
     try:
         if subprocess.run(arglist, timeout=10).returncode == 0:
@@ -488,8 +538,9 @@
         pass
     return False
 
+
 @lru_cache(maxsize=None)
-def darwin_get_object_archs(objpath):
+def darwin_get_object_archs(objpath: str) -> T.List[str]:
     '''
     For a specific object (executable, static library, dylib, etc), run `lipo`
     to fetch the list of archs supported by it. Supports both thin objects and
@@ -508,74 +559,81 @@
         stdo += ' arm'
     return stdo.split()
 
-def detect_vcs(source_dir):
+
+def detect_vcs(source_dir: T.Union[str, Path]) -> T.Optional[T.Dict[str, str]]:
     vcs_systems = [
         dict(name = 'git',        cmd = 'git', repo_dir = '.git', get_rev = 'git describe --dirty=+', rev_regex = '(.*)', dep = '.git/logs/HEAD'),
         dict(name = 'mercurial',  cmd = 'hg',  repo_dir = '.hg',  get_rev = 'hg id -i',               rev_regex = '(.*)', dep = '.hg/dirstate'),
         dict(name = 'subversion', cmd = 'svn', repo_dir = '.svn', get_rev = 'svn info',               rev_regex = 'Revision: (.*)', dep = '.svn/wc.db'),
         dict(name = 'bazaar',     cmd = 'bzr', repo_dir = '.bzr', get_rev = 'bzr revno',              rev_regex = '(.*)', dep = '.bzr'),
     ]
-    # FIXME: this is much cleaner with pathlib.Path
-    segs = source_dir.replace('\\', '/').split('/')
-    for i in range(len(segs), -1, -1):
-        curdir = '/'.join(segs[:i])
+    if isinstance(source_dir, str):
+        source_dir = Path(source_dir)
+
+    parent_paths_and_self = collections.deque(source_dir.parents)
+    # Prepend the source directory to the front so we can check it;
+    # source_dir.parents doesn't include source_dir
+    parent_paths_and_self.appendleft(source_dir)
+    for curdir in parent_paths_and_self:
         for vcs in vcs_systems:
-            if os.path.isdir(os.path.join(curdir, vcs['repo_dir'])) and shutil.which(vcs['cmd']):
-                vcs['wc_dir'] = curdir
+            if Path.is_dir(curdir.joinpath(vcs['repo_dir'])) and shutil.which(vcs['cmd']):
+                vcs['wc_dir'] = str(curdir)
                 return vcs
     return None
 
 # a helper class which implements the same version ordering as RPM
 class Version:
-    def __init__(self, s):
+    def __init__(self, s: str) -> None:
         self._s = s
 
         # split into numeric, alphabetic and non-alphanumeric sequences
-        sequences = re.finditer(r'(\d+|[a-zA-Z]+|[^a-zA-Z\d]+)', s)
+        sequences1 = re.finditer(r'(\d+|[a-zA-Z]+|[^a-zA-Z\d]+)', s)
+
         # non-alphanumeric separators are discarded
-        sequences = [m for m in sequences if not re.match(r'[^a-zA-Z\d]+', m.group(1))]
+        sequences2 = [m for m in sequences1 if not re.match(r'[^a-zA-Z\d]+', m.group(1))]
+
         # numeric sequences are converted from strings to ints
-        sequences = [int(m.group(1)) if m.group(1).isdigit() else m.group(1) for m in sequences]
+        sequences3 = [int(m.group(1)) if m.group(1).isdigit() else m.group(1) for m in sequences2]
 
-        self._v = sequences
+        self._v = sequences3
 
-    def __str__(self):
+    def __str__(self) -> str:
         return '%s (V=%s)' % (self._s, str(self._v))
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         return '<Version: {}>'.format(self._s)
 
-    def __lt__(self, other):
+    def __lt__(self, other: object) -> bool:
         if isinstance(other, Version):
             return self.__cmp(other, operator.lt)
         return NotImplemented
 
-    def __gt__(self, other):
+    def __gt__(self, other: object) -> bool:
         if isinstance(other, Version):
             return self.__cmp(other, operator.gt)
         return NotImplemented
 
-    def __le__(self, other):
+    def __le__(self, other: object) -> bool:
         if isinstance(other, Version):
             return self.__cmp(other, operator.le)
         return NotImplemented
 
-    def __ge__(self, other):
+    def __ge__(self, other: object) -> bool:
         if isinstance(other, Version):
             return self.__cmp(other, operator.ge)
         return NotImplemented
 
-    def __eq__(self, other):
+    def __eq__(self, other: object) -> bool:
         if isinstance(other, Version):
             return self._v == other._v
         return NotImplemented
 
-    def __ne__(self, other):
+    def __ne__(self, other: object) -> bool:
         if isinstance(other, Version):
             return self._v != other._v
         return NotImplemented
 
-    def __cmp(self, other, comparator):
+    def __cmp(self, other: 'Version', comparator: T.Callable[[T.Any, T.Any], bool]) -> bool:
         # compare each sequence in order
         for ours, theirs in zip(self._v, other._v):
             # sort a non-digit sequence before a digit sequence
@@ -591,6 +649,7 @@
         # otherwise, the version with a suffix remaining is greater
         return comparator(len(self._v), len(other._v))
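+        # e.g. Version('1.2.3') < Version('1.2.10') and Version('1.2') < Version('1.2.0')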
 
+
 def _version_extract_cmpop(vstr2: str) -> T.Tuple[T.Callable[[T.Any, T.Any], bool], str]:
     if vstr2.startswith('>='):
         cmpop = operator.ge
@@ -618,12 +677,14 @@
 
     return (cmpop, vstr2)
 
+
 def version_compare(vstr1: str, vstr2: str) -> bool:
     (cmpop, vstr2) = _version_extract_cmpop(vstr2)
     return cmpop(Version(vstr1), Version(vstr2))
 
-def version_compare_many(vstr1, conditions):
-    if not isinstance(conditions, (list, tuple, frozenset)):
+
+def version_compare_many(vstr1: str, conditions: T.Union[str, T.Iterable[str]]) -> T.Tuple[bool, T.List[str], T.List[str]]:
+    if isinstance(conditions, str):
         conditions = [conditions]
     found = []
     not_found = []
@@ -634,6 +695,7 @@
             found.append(req)
     return not_found == [], not_found, found
 
+
 # determine if the minimum version satisfying the condition |condition| exceeds
 # the minimum version for a feature |minimum|
 def version_compare_condition_with_min(condition: str, minimum: str) -> bool:
@@ -672,9 +734,10 @@
     if re.match(r'^\d+.\d+$', condition):
         condition += '.0'
 
-    return cmpop(Version(minimum), Version(condition))
+    return T.cast(bool, cmpop(Version(minimum), Version(condition)))
 
-def default_libdir():
+
+def default_libdir() -> str:
     if is_debianlike():
         try:
             pc = subprocess.Popen(['dpkg-architecture', '-qDEB_HOST_MULTIARCH'],
@@ -686,19 +749,22 @@
                 return 'lib/' + archpath
         except Exception:
             pass
-    if is_freebsd():
+    if is_freebsd() or is_irix():
         return 'lib'
     if os.path.isdir('/usr/lib64') and not os.path.islink('/usr/lib64'):
         return 'lib64'
     return 'lib'
 
-def default_libexecdir():
+
+def default_libexecdir() -> str:
     # There is no way to auto-detect this, so it must be set at build time
     return 'libexec'
 
-def default_prefix():
+
+def default_prefix() -> str:
     return 'c:/' if is_windows() else '/usr/local'
 
+
 def get_library_dirs() -> T.List[str]:
     if is_windows():
         return ['C:/mingw/lib'] # TODO: get programmatically
@@ -744,7 +810,8 @@
 
     return unixdirs
 
-def has_path_sep(name, sep='/\\'):
+
+def has_path_sep(name: str, sep: str = '/\\') -> bool:
     'Checks if any of the specified @sep path separators are in @name'
     for each in sep:
         if each in name:
@@ -761,7 +828,7 @@
     _whitespace = ' \t\n\r'
     _find_unsafe_char = re.compile(r'[{}"]'.format(_whitespace)).search
 
-    def quote_arg(arg):
+    def quote_arg(arg: str) -> str:
         if arg and not _find_unsafe_char(arg):
             return arg
 
@@ -783,7 +850,7 @@
         result += (num_backslashes * 2) * '\\' + '"'
         return result
 
-    def split_args(cmd: T.Sequence[str]) -> T.List[str]:
+    def split_args(cmd: str) -> T.List[str]:
         result = []
         arg = ''
         num_backslashes = 0
@@ -818,26 +885,29 @@
 
         return result
 else:
-    def quote_arg(arg):
+    def quote_arg(arg: str) -> str:
         return shlex.quote(arg)
 
-    def split_args(cmd):
+    def split_args(cmd: str) -> T.List[str]:
         return shlex.split(cmd)
 
 
-def join_args(args):
+def join_args(args: T.Iterable[str]) -> str:
     return ' '.join([quote_arg(x) for x in args])
 
 
-def do_replacement(regex, line, variable_format, confdata):
-    missing_variables = set()
-    start_tag = '@'
-    backslash_tag = '\\@'
+def do_replacement(regex: T.Pattern[str], line: str, variable_format: str,
+                   confdata: 'ConfigurationData') -> T.Tuple[str, T.Set[str]]:
+    missing_variables = set()  # type: T.Set[str]
     if variable_format == 'cmake':
         start_tag = '${'
         backslash_tag = '\\${'
+    else:
+        assert variable_format in ['meson', 'cmake@']
+        start_tag = '@'
+        backslash_tag = '\\@'
 
-    def variable_replace(match):
+    def variable_replace(match: T.Match[str]) -> str:
         # Pairs of escape characters before '@' or '\@'
         if match.group(0).endswith('\\'):
             num_escapes = match.end(0) - match.start(0)
@@ -848,26 +918,38 @@
         # Template variable to be replaced
         else:
             varname = match.group(1)
+            var_str = ''
             if varname in confdata:
                 (var, desc) = confdata.get(varname)
                 if isinstance(var, str):
-                    pass
+                    var_str = var
                 elif isinstance(var, int):
-                    var = str(var)
+                    var_str = str(var)
                 else:
                     msg = 'Tried to replace variable {!r} value with ' \
                           'something other than a string or int: {!r}'
                     raise MesonException(msg.format(varname, var))
             else:
                 missing_variables.add(varname)
-                var = ''
-            return var
+            return var_str
     return re.sub(regex, variable_replace, line), missing_variables
 
-def do_mesondefine(line, confdata):
+def do_define(regex: T.Pattern[str], line: str, confdata: 'ConfigurationData', variable_format: str) -> str:
+    def get_cmake_define(line: str, confdata: 'ConfigurationData') -> str:
+        arr = line.split()
+        define_value = []
+        for token in arr[2:]:
+            try:
+                (v, desc) = confdata.get(token)
+                define_value += [str(v)]
+            except KeyError:
+                define_value += [token]
+        return ' '.join(define_value)
+
     arr = line.split()
-    if len(arr) != 2:
+    if variable_format == 'meson' and len(arr) != 2:
         raise MesonException('#mesondefine does not contain exactly two tokens: %s' % line.strip())
+
     varname = arr[1]
     try:
         (v, desc) = confdata.get(varname)
@@ -881,17 +963,17 @@
     elif isinstance(v, int):
         return '#define %s %d\n' % (varname, v)
     elif isinstance(v, str):
-        return '#define %s %s\n' % (varname, v)
+        if variable_format == 'meson':
+            result = v
+        else:
+            result = get_cmake_define(line, confdata)
+        result = '#define %s %s\n' % (varname, result)
+        (result, missing_variable) = do_replacement(regex, result, variable_format, confdata)
+        return result
     else:
         raise MesonException('#mesondefine argument "%s" is of unknown type.' % varname)
 
-
-def do_conf_file(src, dst, confdata, variable_format, encoding='utf-8'):
-    try:
-        with open(src, encoding=encoding, newline='') as f:
-            data = f.readlines()
-    except Exception as e:
-        raise MesonException('Could not read input file %s: %s' % (src, str(e)))
+def get_variable_regex(variable_format: str = 'meson') -> T.Pattern[str]:
     # Only allow (a-z, A-Z, 0-9, _, -) as valid characters for a define
     # Also allow escaping '@' with '\@'
     if variable_format in ['meson', 'cmake@']:
@@ -900,6 +982,20 @@
         regex = re.compile(r'(?:\\\\)+(?=\\?\$)|\\\${|\${([-a-zA-Z0-9_]+)}')
     else:
         raise MesonException('Format "{}" not handled'.format(variable_format))
+    return regex
+
+def do_conf_str(data: list, confdata: 'ConfigurationData', variable_format: str,
+                encoding: str = 'utf-8') -> T.Tuple[T.List[str], T.Set[str], bool]:
+    def line_is_valid(line: str, variable_format: str) -> bool:
+        if variable_format == 'meson':
+            if '#cmakedefine' in line:
+                return False
+        else:  # cmake format
+            if '#mesondefine' in line:
+                return False
+        return True
+
+    regex = get_variable_regex(variable_format)
 
     search_token = '#mesondefine'
     if variable_format != 'meson':
@@ -913,13 +1009,27 @@
     for line in data:
         if line.startswith(search_token):
             confdata_useless = False
-            line = do_mesondefine(line, confdata)
+            line = do_define(regex, line, confdata, variable_format)
         else:
+            if not line_is_valid(line, variable_format):
+                raise MesonException('Format "{}" mismatched'.format(variable_format))
             line, missing = do_replacement(regex, line, variable_format, confdata)
             missing_variables.update(missing)
             if missing:
                 confdata_useless = False
         result.append(line)
+
+    return result, missing_variables, confdata_useless
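
This refactor splits the per-line work (do_conf_str) from the file I/O (do_conf_file), and do_define now handles both #mesondefine and #cmakedefine. A simplified standalone sketch of the '@VAR@' substitution that do_replacement performs for the 'meson' format, using plain re and a dict standing in for ConfigurationData; the real regex from get_variable_regex also handles '\@' escapes and missing-variable tracking:

    import re

    confdata = {'name': 'demo', 'version': '0.56.2'}   # stand-in for ConfigurationData
    regex = re.compile(r'@([-a-zA-Z0-9_]+)@')          # same character class as get_variable_regex
    line = '#define PROJECT "@name@ @version@"\n'
    print(regex.sub(lambda m: str(confdata.get(m.group(1), '')), line))
    # -> #define PROJECT "demo 0.56.2"
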
+
+def do_conf_file(src: str, dst: str, confdata: 'ConfigurationData', variable_format: str,
+                 encoding: str = 'utf-8') -> T.Tuple[T.Set[str], bool]:
+    try:
+        with open(src, encoding=encoding, newline='') as f:
+            data = f.readlines()
+    except Exception as e:
+        raise MesonException('Could not read input file %s: %s' % (src, str(e)))
+
+    (result, missing_variables, confdata_useless) = do_conf_str(data, confdata, variable_format, encoding)
     dst_tmp = dst + '~'
     try:
         with open(dst_tmp, 'w', encoding=encoding, newline='') as f:
@@ -944,7 +1054,7 @@
 
 '''
 
-def dump_conf_header(ofilename, cdata, output_format):
+def dump_conf_header(ofilename: str, cdata: 'ConfigurationData', output_format: str) -> None:
     if output_format == 'c':
         prelude = CONF_C_PRELUDE
         prefix = '#'
@@ -974,7 +1084,8 @@
                 raise MesonException('Unknown data type in configuration file entry: ' + k)
     replace_if_different(ofilename, ofilename_tmp)
 
-def replace_if_different(dst, dst_tmp):
+
+def replace_if_different(dst: str, dst_tmp: str) -> None:
     # If contents are identical, don't touch the file to prevent
     # unnecessary rebuilds.
     different = True
@@ -989,49 +1100,57 @@
     else:
         os.unlink(dst_tmp)
 
-def listify(item: T.Any,
-            flatten: bool = True,
-            unholder: bool = False) -> T.List[T.Any]:
+
+@T.overload
+def unholder(item: 'ObjectHolder[_T]') -> _T: ...
+
+@T.overload
+def unholder(item: T.List['ObjectHolder[_T]']) -> T.List[_T]: ...
+
+@T.overload
+def unholder(item: T.List[_T]) -> T.List[_T]: ...
+
+@T.overload
+def unholder(item: T.List[T.Union[_T, 'ObjectHolder[_T]']]) -> T.List[_T]: ...
+
+def unholder(item):  # type: ignore  # TODO fix overload (somehow)
+    """Get the held item of an object holder or list of object holders."""
+    if isinstance(item, list):
+        return [i.held_object if hasattr(i, 'held_object') else i for i in item]
+    if hasattr(item, 'held_object'):
+        return item.held_object
+    return item
+
+
+def listify(item: T.Any, flatten: bool = True) -> T.List[T.Any]:
     '''
     Returns a list with all args embedded in a list if they are not a list.
     This function preserves order.
     @flatten: Convert lists of lists to a flat list
-    @unholder: Replace each item with the object it holds, if required
-
-    Note: unholding only works recursively when flattening
     '''
     if not isinstance(item, list):
-        if unholder and hasattr(item, 'held_object'):
-            item = item.held_object
         return [item]
-    result = []
+    result = []  # type: T.List[T.Any]
     for i in item:
-        if unholder and hasattr(i, 'held_object'):
-            i = i.held_object
         if flatten and isinstance(i, list):
-            result += listify(i, flatten=True, unholder=unholder)
+            result += listify(i, flatten=True)
         else:
             result.append(i)
     return result
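
listify no longer unholds; that job moved to the new unholder helper above. A short usage sketch, assuming the mesonbuild package from this tree is importable (FakeHolder is a hypothetical stand-in for an ObjectHolder):

    from mesonbuild.mesonlib import listify, unholder

    class FakeHolder:                    # hypothetical stand-in for an ObjectHolder
        def __init__(self, obj):
            self.held_object = obj

    assert listify([1, [2, [3]], 4]) == [1, 2, 3, 4]       # nested lists are flattened
    assert listify('x') == ['x']                           # scalars are wrapped
    assert unholder([FakeHolder('a'), 'b']) == ['a', 'b']  # held objects are unwrapped
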
 
 
-def extract_as_list(dict_object, *keys, pop=False, **kwargs):
+def extract_as_list(dict_object: T.Dict[_T, _U], key: _T, pop: bool = False) -> T.List[_U]:
     '''
     Extracts all values from given dict_object and listifies them.
     '''
-    result = []
     fetch = dict_object.get
     if pop:
         fetch = dict_object.pop
     # If there's only one key, we don't return a list with one element
-    if len(keys) == 1:
-        return listify(fetch(keys[0], []), **kwargs)
-    # Return a list of values corresponding to *keys
-    for key in keys:
-        result.append(listify(fetch(key, []), **kwargs))
-    return result
+    return listify(fetch(key, []), flatten=True)
+
 
-def typeslistify(item: 'T.Union[_T, T.List[_T]]',
+def typeslistify(item: 'T.Union[_T, T.Sequence[_T]]',
                  types: 'T.Union[T.Type[_T], T.Tuple[T.Type[_T]]]') -> T.List[_T]:
     '''
     Ensure that type(@item) is one of @types or a
@@ -1046,11 +1165,13 @@
             raise MesonException('List item must be one of {!r}'.format(types))
     return item
 
-def stringlistify(item: T.Union[str, T.List[str]]) -> T.List[str]:
+
+def stringlistify(item: T.Union[T.Any, T.Sequence[T.Any]]) -> T.List[str]:
     return typeslistify(item, str)
 
-def expand_arguments(args):
-    expended_args = []
+
+def expand_arguments(args: T.Iterable[str]) -> T.Optional[T.List[str]]:
+    expended_args = []  # type: T.List[str]
     for arg in args:
         if not arg.startswith('@'):
             expended_args.append(arg)
@@ -1062,17 +1183,24 @@
                 extended_args = f.read().split()
             expended_args += extended_args
         except Exception as e:
-            print('Error expanding command line arguments, %s not found' % args_file)
-            print(e)
+            mlog.error('Expanding command line arguments:', args_file, 'not found')
+            mlog.exception(e)
             return None
     return expended_args
 
-def partition(pred, iterable):
-    'Use a predicate to partition entries into false entries and true entries'
-    # partition(is_odd, range(10)) --> 0 2 4 6 8   and  1 3 5 7 9
+
+def partition(pred: T.Callable[[_T], object], iterable: T.Iterator[_T]) -> T.Tuple[T.Iterator[_T], T.Iterator[_T]]:
+    """Use a predicate to partition entries into false entries and true
+    entries.
+
+    >>> x, y = partition(is_odd, range(10))
+    >>> (list(x), list(y))
+    ([0, 2, 4, 6, 8], [1, 3, 5, 7, 9])
+    """
     t1, t2 = tee(iterable)
     return filterfalse(pred, t1), filter(pred, t2)
 
+
 def Popen_safe(args: T.List[str], write: T.Optional[str] = None,
                stdout: T.Union[T.BinaryIO, int] = subprocess.PIPE,
                stderr: T.Union[T.BinaryIO, int] = subprocess.PIPE,
@@ -1095,6 +1223,7 @@
     mlog.setup_console()
     return p, o, e
 
+
 def Popen_safe_legacy(args: T.List[str], write: T.Optional[str] = None,
                       stdout: T.Union[T.BinaryIO, int] = subprocess.PIPE,
                       stderr: T.Union[T.BinaryIO, int] = subprocess.PIPE,
@@ -1117,7 +1246,8 @@
             e = e.decode(errors='replace').replace('\r\n', '\n')
     return p, o, e
 
-def iter_regexin_iter(regexiter, initer):
+
+def iter_regexin_iter(regexiter: T.Iterable[str], initer: T.Iterable[str]) -> T.Optional[str]:
     '''
     Takes each regular expression in @regexiter and tries to search for it in
     every item in @initer. If there is a match, returns that match.
@@ -1130,12 +1260,13 @@
             match = re.search(regex, ii)
             if match:
                 return match.group()
-    return False
+    return None
 
-def _substitute_values_check_errors(command, values):
+
+def _substitute_values_check_errors(command: T.List[str], values: T.Dict[str, str]) -> None:
     # Error checking
-    inregex = ('@INPUT([0-9]+)?@', '@PLAINNAME@', '@BASENAME@')
-    outregex = ('@OUTPUT([0-9]+)?@', '@OUTDIR@')
+    inregex = ['@INPUT([0-9]+)?@', '@PLAINNAME@', '@BASENAME@']  # type: T.List[str]
+    outregex = ['@OUTPUT([0-9]+)?@', '@OUTDIR@']                 # type: T.List[str]
     if '@INPUT@' not in values:
         # Error out if any input-derived templates are present in the command
         match = iter_regexin_iter(inregex, command)
@@ -1153,10 +1284,10 @@
         for each in command:
             if not isinstance(each, str):
                 continue
-            match = re.search(inregex[0], each)
-            if match and match.group() not in values:
+            match2 = re.search(inregex[0], each)
+            if match2 and match2.group() not in values:
                 m = 'Command cannot have {!r} since there are only {!r} inputs'
-                raise MesonException(m.format(match.group(), len(values['@INPUT@'])))
+                raise MesonException(m.format(match2.group(), len(values['@INPUT@'])))
     if '@OUTPUT@' not in values:
         # Error out if any output-derived templates are present in the command
         match = iter_regexin_iter(outregex, command)
@@ -1168,12 +1299,13 @@
         for each in command:
             if not isinstance(each, str):
                 continue
-            match = re.search(outregex[0], each)
-            if match and match.group() not in values:
+            match2 = re.search(outregex[0], each)
+            if match2 and match2.group() not in values:
                 m = 'Command cannot have {!r} since there are only {!r} outputs'
-                raise MesonException(m.format(match.group(), len(values['@OUTPUT@'])))
+                raise MesonException(m.format(match2.group(), len(values['@OUTPUT@'])))
+
 
-def substitute_values(command, values):
+def substitute_values(command: T.List[str], values: T.Dict[str, str]) -> T.List[str]:
     '''
     Substitute the template strings in the @values dict into the list of
     strings @command and return a new list. For a full list of the templates,
@@ -1186,7 +1318,7 @@
     # Error checking
     _substitute_values_check_errors(command, values)
     # Substitution
-    outcmd = []
+    outcmd = []  # type: T.List[str]
     rx_keys = [re.escape(key) for key in values if key not in ('@INPUT@', '@OUTPUT@')]
     value_rx = re.compile('|'.join(rx_keys)) if rx_keys else None
     for vv in command:
@@ -1221,7 +1353,8 @@
             outcmd.append(vv)
     return outcmd
 
-def get_filenames_templates_dict(inputs, outputs):
+
+def get_filenames_templates_dict(inputs: T.List[str], outputs: T.List[str]) -> T.Dict[str, T.Union[str, T.List[str]]]:
     '''
     Create a dictionary with template strings as keys and values as values for
     the following templates:
@@ -1243,7 +1376,7 @@
 
     @OUTPUT0@, @OUTPUT1@, ... one for each output file
     '''
-    values = {}
+    values = {}  # type: T.Dict[str, T.Union[str, T.List[str]]]
     # Gather values derived from the input
     if inputs:
         # We want to substitute all the inputs.
@@ -1268,7 +1401,7 @@
     return values
 
 
-def _make_tree_writable(topdir):
+def _make_tree_writable(topdir: str) -> None:
     # Ensure all files and directories under topdir are writable
     # (and readable) by owner.
     for d, _, files in os.walk(topdir):
@@ -1279,7 +1412,7 @@
                 os.chmod(fpath, os.stat(fpath).st_mode | stat.S_IWRITE | stat.S_IREAD)
 
 
-def windows_proof_rmtree(f):
+def windows_proof_rmtree(f: str) -> None:
     # On Windows if anyone is holding a file open you can't
     # delete it. As an example an anti virus scanner might
     # be scanning files you are trying to delete. The only
@@ -1299,7 +1432,7 @@
     shutil.rmtree(f)
 
 
-def windows_proof_rm(fpath):
+def windows_proof_rm(fpath: str) -> None:
     """Like windows_proof_rmtree, but for a single file."""
     if os.path.isfile(fpath):
         os.chmod(fpath, os.stat(fpath).st_mode | stat.S_IWRITE | stat.S_IREAD)
@@ -1315,7 +1448,8 @@
     os.unlink(fpath)
 
 
-def detect_subprojects(spdir_name, current_dir='', result=None):
+def detect_subprojects(spdir_name: str, current_dir: str = '',
+                       result: T.Optional[T.Dict[str, T.List[str]]] = None) -> T.Optional[T.Dict[str, T.List[str]]]:
     if result is None:
         result = {}
     spdir = os.path.join(current_dir, spdir_name)
@@ -1339,15 +1473,6 @@
                 result[basename] = [trial]
     return result
 
-# This isn't strictly correct. What we really want here is something like:
-# class StringProtocol(typing_extensions.Protocol):
-#
-#      def __str__(self) -> str: ...
-#
-# This would more accurately embody what this funcitonc an handle, but we
-# don't have that yet, so instead we'll do some casting to work around it
-def get_error_location_string(fname: str, lineno: str) -> str:
-    return '{}:{}:'.format(fname, lineno)
 
 def substring_is_in_list(substr: str, strlist: T.List[str]) -> bool:
     for s in strlist:
@@ -1355,54 +1480,59 @@
             return True
     return False
 
-class OrderedSet(collections.abc.MutableSet):
+
+class OrderedSet(T.MutableSet[_T]):
     """A set that preserves the order in which items are added, by first
     insertion.
     """
-    def __init__(self, iterable=None):
-        self.__container = collections.OrderedDict()
+    def __init__(self, iterable: T.Optional[T.Iterable[_T]] = None):
+        # typing.OrderedDict is new in 3.7.2, so we can't use that, but we can
+        # use MutableMapping, which is fine in this case.
+        self.__container = collections.OrderedDict()  # type: T.MutableMapping[_T, None]
         if iterable:
             self.update(iterable)
 
-    def __contains__(self, value):
+    def __contains__(self, value: object) -> bool:
         return value in self.__container
 
-    def __iter__(self):
+    def __iter__(self) -> T.Iterator[_T]:
         return iter(self.__container.keys())
 
-    def __len__(self):
+    def __len__(self) -> int:
         return len(self.__container)
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         # Don't print 'OrderedSet("")' for an empty set.
         if self.__container:
             return 'OrderedSet("{}")'.format(
                 '", "'.join(repr(e) for e in self.__container.keys()))
         return 'OrderedSet()'
 
-    def __reversed__(self):
-        return reversed(self.__container)
+    def __reversed__(self) -> T.Iterator[_T]:
+        # Mypy complains that sets can't be reversed, which is true for
+        # unordered sets, but this is an ordered set, so reversing it makes sense.
+        return reversed(self.__container.keys())  # type: ignore
 
-    def add(self, value):
+    def add(self, value: _T) -> None:
         self.__container[value] = None
 
-    def discard(self, value):
+    def discard(self, value: _T) -> None:
         if value in self.__container:
             del self.__container[value]
 
-    def update(self, iterable):
+    def update(self, iterable: T.Iterable[_T]) -> None:
         for item in iterable:
             self.__container[item] = None
 
-    def difference(self, set_):
+    def difference(self, set_: T.Union[T.Set[_T], 'OrderedSet[_T]']) -> 'OrderedSet[_T]':
         return type(self)(e for e in self if e not in set_)
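
The typing change does not alter behaviour: OrderedSet still deduplicates while preserving first-insertion order, which is what get_wine_shortpath below now relies on instead of a manual seen-set. A small usage sketch, assuming mesonbuild.mesonlib from this tree is importable:

    from mesonbuild.mesonlib import OrderedSet

    paths = OrderedSet(['/usr/lib', '/opt/lib', '/usr/lib'])
    assert list(paths) == ['/usr/lib', '/opt/lib']   # duplicate dropped, order preserved
    paths.add('/usr/local/lib')
    assert '/opt/lib' in paths
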
 
 class BuildDirLock:
 
-    def __init__(self, builddir):
+    def __init__(self, builddir: str) -> None:
         self.lockfilename = os.path.join(builddir, 'meson-private/meson.lock')
 
-    def __enter__(self):
+    def __enter__(self) -> None:
         self.lockfile = open(self.lockfilename, 'w')
         try:
             if have_fcntl:
@@ -1413,7 +1543,7 @@
             self.lockfile.close()
             raise MesonException('Some other Meson process is already using this build directory. Exiting.')
 
-    def __exit__(self, *args):
+    def __exit__(self, *args: T.Any) -> None:
         if have_fcntl:
             fcntl.flock(self.lockfile, fcntl.LOCK_UN)
         elif have_msvcrt:
@@ -1429,8 +1559,27 @@
     except (TypeError, ValueError):
         return path
 
+def path_is_in_root(path: Path, root: Path, resolve: bool = False) -> bool:
+    # Check whether a path is inside the given root directory
+    try:
+        if resolve:
+            path.resolve().relative_to(root.resolve())
+        else:
+            path.relative_to(root)
+    except ValueError:
+        return False
+    return True
+
+def relative_to_if_possible(path: Path, root: Path, resolve: bool = False) -> Path:
+    try:
+        if resolve:
+            return path.resolve().relative_to(root.resolve())
+        else:
+            return path.relative_to(root)
+    except ValueError:
+        return path
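
Both new helpers lean on pathlib's relative_to(), which raises ValueError when the path lies outside the root. A standalone sketch of the same check using plain pathlib (not the meson helpers themselves):

    from pathlib import Path

    def is_in_root(path: Path, root: Path) -> bool:
        try:
            path.relative_to(root)      # raises ValueError if path is outside root
        except ValueError:
            return False
        return True

    assert is_in_root(Path('/srv/build/obj.o'), Path('/srv/build'))
    assert not is_in_root(Path('/etc/passwd'), Path('/srv/build'))
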
 
-class LibType(Enum):
+class LibType(IntEnum):
 
     """Enumeration for library types."""
 
@@ -1448,7 +1597,8 @@
     fallback, it is safe to ignore the 'Iterator does not return self from
     __iter__ method' warning.
     '''
-    def __init__(self, iterable=None, total=None, bar_type=None, desc=None):
+    def __init__(self, iterable: T.Optional[T.Iterable[str]] = None, total: T.Optional[int] = None,
+                 bar_type: T.Optional[str] = None, desc: T.Optional[str] = None):
         if iterable is not None:
             self.iterable = iter(iterable)
             return
@@ -1462,18 +1612,18 @@
 
     # Pretend to be an iterator when called as one and don't print any
     # progress
-    def __iter__(self):
+    def __iter__(self) -> T.Iterator[str]:
         return self.iterable
 
-    def __next__(self):
+    def __next__(self) -> str:
         return next(self.iterable)
 
-    def print_dot(self):
+    def print_dot(self) -> None:
         print('.', end='')
         sys.stdout.flush()
         self.printed_dots += 1
 
-    def update(self, progress):
+    def update(self, progress: int) -> None:
         self.done += progress
         if not self.total:
             # Just print one dot per call if we don't have a total length
@@ -1483,32 +1633,33 @@
         while self.printed_dots < ratio:
             self.print_dot()
 
-    def close(self):
+    def close(self) -> None:
         print('')
 
 try:
     from tqdm import tqdm
-
-    class ProgressBar(tqdm):
-        def __init__(self, *args, bar_type=None, **kwargs):
+except ImportError:
+    # ideally we would use a typing.Protocol here, but it's part of typing_extensions until 3.8
+    ProgressBar = ProgressBarFallback  # type: T.Union[T.Type[ProgressBarFallback], T.Type[ProgressBarTqdm]]
+else:
+    class ProgressBarTqdm(tqdm):
+        def __init__(self, *args: T.Any, bar_type: T.Optional[str] = None, **kwargs: T.Any) -> None:
             if bar_type == 'download':
                 kwargs.update({'unit': 'bytes', 'leave': True})
             else:
                 kwargs.update({'leave': False})
             kwargs['ncols'] = 100
             super().__init__(*args, **kwargs)
-except ImportError:
-    ProgressBar = ProgressBarFallback
 
+    ProgressBar = ProgressBarTqdm
 
-def get_wine_shortpath(winecmd, wine_paths):
 
-    """ Get A short version of @wine_paths to avoid
-    reaching WINEPATH number of char limit.
+def get_wine_shortpath(winecmd: T.List[str], wine_paths: T.Sequence[str]) -> str:
+    """Get A short version of @wine_paths to avoid reaching WINEPATH number
+    of char limit.
     """
 
-    seen = set()
-    wine_paths = [p for p in wine_paths if not (p in seen or seen.add(p))]
+    wine_paths = list(OrderedSet(wine_paths))
 
     getShortPathScript = '%s.bat' % str(uuid.uuid4()).lower()[:5]
     with open(getShortPathScript, mode='w') as f:
@@ -1533,10 +1684,12 @@
 
     return wine_path.strip(';')
 
-def run_once(func):
-    ret = []
 
-    def wrapper(*args, **kwargs):
+def run_once(func: T.Callable[..., _T]) -> T.Callable[..., _T]:
+    ret = []  # type: T.List[_T]
+
+    @wraps(func)
+    def wrapper(*args: T.Any, **kwargs: T.Any) -> _T:
         if ret:
             return ret[0]
 
@@ -1544,34 +1697,48 @@
         ret.append(val)
         return val
 
-    return update_wrapper(wrapper, func)
+    return wrapper
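
Apart from the annotations and the switch to @wraps, run_once keeps its semantics: the first call runs the function and every later call returns the cached result. A usage sketch, assuming mesonbuild.mesonlib is importable; expensive_probe is a hypothetical example function:

    from mesonbuild.mesonlib import run_once

    calls = []

    @run_once
    def expensive_probe() -> str:        # hypothetical example function
        calls.append(1)
        return 'result'

    assert expensive_probe() == 'result'
    assert expensive_probe() == 'result'
    assert len(calls) == 1               # the body ran only once
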
 
 
-class OptionProxy:
-    def __init__(self, value):
+class OptionProxy(T.Generic[_T]):
+    def __init__(self, value: _T):
         self.value = value
 
-class OptionOverrideProxy:
-    '''Mimic an option list but transparently override
-    selected option values.'''
-    def __init__(self, overrides, *options):
-        self.overrides = overrides
-        self.options = options
-
-    def __getitem__(self, option_name):
-        for opts in self.options:
-            if option_name in opts:
-                return self._get_override(option_name, opts[option_name])
-        raise KeyError('Option not found', option_name)
-
-    def _get_override(self, option_name, base_opt):
-        if option_name in self.overrides:
-            return OptionProxy(base_opt.validate_value(self.overrides[option_name]))
-        return base_opt
 
-    def copy(self):
-        result = {}
-        for opts in self.options:
-            for option_name in opts:
-                result[option_name] = self._get_override(option_name, opts[option_name])
-        return result
+class OptionOverrideProxy(collections.abc.MutableMapping):
+
+    '''Mimic an option list but transparently override selected option
+    values.
+    '''
+
+    # TODO: the typing here could be made more explicit using a TypeDict from
+    # python 3.8 or typing_extensions
+
+    def __init__(self, overrides: T.Dict[str, T.Any], *options: 'OptionDictType'):
+        self.overrides = overrides.copy()
+        self.options = {}  # type: T.Dict[str, UserOption]
+        for o in options:
+            self.options.update(o)
+
+    def __getitem__(self, key: str) -> T.Union['UserOption', OptionProxy]:
+        if key in self.options:
+            opt = self.options[key]
+            if key in self.overrides:
+                return OptionProxy(opt.validate_value(self.overrides[key]))
+            return opt
+        raise KeyError('Option not found', key)
+
+    def __setitem__(self, key: str, value: T.Union['UserOption', OptionProxy]) -> None:
+        self.overrides[key] = value.value
+
+    def __delitem__(self, key: str) -> None:
+        del self.overrides[key]
+
+    def __iter__(self) -> T.Iterator[str]:
+        return iter(self.options)
+
+    def __len__(self) -> int:
+        return len(self.options)
+
+    def copy(self) -> 'OptionOverrideProxy':
+        return OptionOverrideProxy(self.overrides.copy(), self.options.copy())
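
Making OptionOverrideProxy a MutableMapping lets callers iterate and copy it like a plain dict of options, while lookups still return a validated OptionProxy for overridden keys. A simplified standalone sketch of that lookup pattern (FakeOption is a hypothetical stand-in for a UserOption):

    class FakeOption:                    # hypothetical stand-in for a UserOption
        def __init__(self, value):
            self.value = value

        def validate_value(self, value):
            return value                 # real options validate and coerce here

    options = {'warning_level': FakeOption('1')}
    overrides = {'warning_level': '3'}

    def lookup(key):
        opt = options[key]
        if key in overrides:
            return opt.validate_value(overrides[key])   # overridden, validated value
        return opt.value                                 # untouched option value

    assert lookup('warning_level') == '3'
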
diff -Nru meson-0.53.2/mesonbuild/mesonmain.py meson-0.57.0+really0.56.2/mesonbuild/mesonmain.py
--- meson-0.53.2/mesonbuild/mesonmain.py	2019-12-04 18:45:50.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/mesonmain.py	2021-01-06 10:39:48.000000000 +0000
@@ -22,7 +22,7 @@
 
 from . import mesonlib
 from . import mlog
-from . import mconf, mdist, minit, minstall, mintro, msetup, mtest, rewriter, msubprojects, munstable_coredata
+from . import mconf, mdist, minit, minstall, mintro, msetup, mtest, rewriter, msubprojects, munstable_coredata, mcompile
 from .mesonlib import MesonException
 from .environment import detect_msys2_arch
 from .wrap import wraptool
@@ -38,7 +38,7 @@
         self.commands = {}
         self.hidden_commands = []
         self.parser = argparse.ArgumentParser(prog='meson', formatter_class=self.formatter)
-        self.subparsers = self.parser.add_subparsers(title='Commands',
+        self.subparsers = self.parser.add_subparsers(title='Commands', dest='command',
                                                      description='If no command is specified it defaults to setup command.')
         self.add_command('setup', msetup.add_arguments, msetup.run,
                          help_msg='Configure the project')
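
Passing dest='command' to add_subparsers puts the chosen subcommand name on the parsed namespace, which run() uses further down to decide whether to print the Python 3.5 notice. A minimal argparse sketch of that mechanism (standalone, not meson's parser):

    import argparse

    parser = argparse.ArgumentParser(prog='demo')
    subparsers = parser.add_subparsers(title='Commands', dest='command')
    subparsers.add_parser('setup')
    subparsers.add_parser('compile')

    options = parser.parse_args(['compile'])
    assert options.command == 'compile'
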
@@ -62,6 +62,8 @@
                          help_msg='Print help of a subcommand')
         self.add_command('rewrite', lambda parser: rewriter.add_arguments(parser, self.formatter), rewriter.run,
                          help_msg='Modify the project definition')
+        self.add_command('compile', mcompile.add_arguments, mcompile.run,
+                         help_msg='Build the project')
 
         # Hidden commands
         self.add_command('runpython', self.add_runpython_arguments, self.run_runpython_command,
@@ -109,6 +111,7 @@
         return 0
 
     def run(self, args):
+        print_py35_notice = False
         # If first arg is not a known command, assume user wants to run the setup
         # command.
         known_commands = list(self.commands.keys()) + ['-h', '--help']
@@ -117,14 +120,22 @@
 
         # Hidden commands have their own parser instead of using the global one
         if args[0] in self.hidden_commands:
-            parser = self.commands[args[0]]
+            command = args[0]
+            parser = self.commands[command]
             args = args[1:]
         else:
             parser = self.parser
+            command = None
 
         args = mesonlib.expand_arguments(args)
         options = parser.parse_args(args)
 
+        if command is None:
+            command = options.command
+
+        if command in ('setup', 'compile', 'test', 'install') and sys.version_info < (3, 6):
+            print_py35_notice = True
+
         try:
             return options.run_func(options)
         except MesonException as e:
@@ -141,6 +152,9 @@
             traceback.print_exc()
             return 2
         finally:
+            if print_py35_notice:
+                mlog.notice('You are using Python 3.5 which is EOL. Starting with v0.57, '
+                            'Meson will require Python 3.6 or newer', fatal=False)
             mlog.shutdown()
 
 def run_script_command(script_name, script_args):
@@ -180,7 +194,7 @@
 def run(original_args, mainfile):
     if sys.version_info < (3, 5):
         print('Meson works correctly only with python 3.5+.')
-        print('You have python %s.' % sys.version)
+        print('You have python {}.'.format(sys.version))
         print('Please update your environment')
         return 1
 
diff -Nru meson-0.53.2/mesonbuild/minit.py meson-0.57.0+really0.56.2/mesonbuild/minit.py
--- meson-0.53.2/mesonbuild/minit.py	2019-12-04 18:45:50.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/minit.py	2021-01-06 10:39:48.000000000 +0000
@@ -14,76 +14,64 @@
 
 """Code that creates simple startup projects."""
 
-from pathlib import Path
-import re, shutil, subprocess
+from ._pathlib import Path
+from enum import Enum
+import subprocess
+import shutil
+import sys
+import os
+import re
 from glob import glob
 from mesonbuild import mesonlib
 from mesonbuild.environment import detect_ninja
+from mesonbuild.templates.samplefactory import sameple_generator
+import typing as T
 
-from mesonbuild.templates.ctemplates import (create_exe_c_sample, create_lib_c_sample)
-from mesonbuild.templates.cpptemplates import (create_exe_cpp_sample, create_lib_cpp_sample)
-from mesonbuild.templates.objctemplates import (create_exe_objc_sample, create_lib_objc_sample)
-from mesonbuild.templates.dlangtemplates import (create_exe_d_sample, create_lib_d_sample)
-from mesonbuild.templates.fortrantemplates import (create_exe_fortran_sample, create_lib_fortran_sample)
-from mesonbuild.templates.rusttemplates import (create_exe_rust_sample, create_lib_rust_sample)
+if T.TYPE_CHECKING:
+    import argparse
 
-FORTRAN_SUFFIXES = ['.f', '.for', '.F', '.f90', '.F90']
+'''
+We currently have only one Meson template.
+'''
+from mesonbuild.templates.mesontemplates import create_meson_build
 
-info_message = '''Sample project created. To build it run the
+FORTRAN_SUFFIXES = {'.f', '.for', '.F', '.f90', '.F90'}
+LANG_SUFFIXES = {'.c', '.cc', '.cpp', '.cs', '.cu', '.d', '.m', '.mm', '.rs', '.java'} | FORTRAN_SUFFIXES
+LANG_SUPPORTED = {'c', 'cpp', 'cs', 'cuda', 'd', 'fortran', 'java', 'rust', 'objc', 'objcpp'}
+
+DEFAULT_PROJECT = 'executable'
+DEFAULT_VERSION = '0.1'
+class DEFAULT_TYPES(Enum):
+    EXE = 'executable'
+    LIB = 'library'
+
+INFO_MESSAGE = '''Sample project created. To build it run the
 following commands:
 
-meson builddir
-ninja -C builddir
+meson setup builddir
+meson compile -C builddir
 '''
 
-def create_sample(options):
-    if options.language == 'c':
-        if options.type == 'executable':
-            create_exe_c_sample(options.name, options.version)
-        elif options.type == 'library':
-            create_lib_c_sample(options.name, options.version)
-        else:
-            raise RuntimeError('Unreachable code')
-    elif options.language == 'cpp':
-        if options.type == 'executable':
-            create_exe_cpp_sample(options.name, options.version)
-        elif options.type == 'library':
-            create_lib_cpp_sample(options.name, options.version)
-        else:
-            raise RuntimeError('Unreachable code')
-    elif options.language == 'd':
-        if options.type == 'executable':
-            create_exe_d_sample(options.name, options.version)
-        elif options.type == 'library':
-            create_lib_d_sample(options.name, options.version)
-        else:
-            raise RuntimeError('Unreachable code')
-    elif options.language == 'fortran':
-        if options.type == 'executable':
-            create_exe_fortran_sample(options.name, options.version)
-        elif options.type == 'library':
-            create_lib_fortran_sample(options.name, options.version)
-        else:
-            raise RuntimeError('Unreachable code')
-    elif options.language == 'rust':
-        if options.type == 'executable':
-            create_exe_rust_sample(options.name, options.version)
-        elif options.type == 'library':
-            create_lib_rust_sample(options.name, options.version)
-        else:
-            raise RuntimeError('Unreachable code')
-    elif options.language == 'objc':
-        if options.type == 'executable':
-            create_exe_objc_sample(options.name, options.version)
-        elif options.type == 'library':
-            create_lib_objc_sample(options.name, options.version)
-        else:
-            raise RuntimeError('Unreachable code')
+
+def create_sample(options: 'argparse.Namespace') -> None:
+    '''
+    Based on the passed arguments, look up the matching language, then the
+    project type, and create a new Meson sample project.
+    '''
+    sample_gen = sameple_generator(options)
+    if options.type == DEFAULT_TYPES['EXE'].value:
+        sample_gen.create_executable()
+    elif options.type == DEFAULT_TYPES['LIB'].value:
+        sample_gen.create_library()
     else:
         raise RuntimeError('Unreachable code')
-    print(info_message)
+    print(INFO_MESSAGE)
 
-def autodetect_options(options, sample: bool = False):
+def autodetect_options(options: 'argparse.Namespace', sample: bool = False) -> None:
+    '''
+    Autodetect options for any arguments that were not passed in, so the
+    user does not have to specify them.
+    '''
     if not options.name:
         options.name = Path().resolve().stem
         if not re.match('[a-zA-Z_][a-zA-Z0-9]*', options.name) and sample:
@@ -101,7 +89,7 @@
     if not options.srcfiles:
         srcfiles = []
         for f in (f for f in Path().iterdir() if f.is_file()):
-            if f.suffix in (['.cc', '.cpp', '.c', '.d', '.m', '.rs'] + FORTRAN_SUFFIXES):
+            if f.suffix in LANG_SUFFIXES:
                 srcfiles.append(f)
         if not srcfiles:
             raise SystemExit('No recognizable source files found.\n'
@@ -111,11 +99,17 @@
     options.srcfiles = [Path(f) for f in options.srcfiles]
     if not options.language:
         for f in options.srcfiles:
+            if f.suffix == '.c':
+                options.language = 'c'
+                break
             if f.suffix in ('.cc', '.cpp'):
                 options.language = 'cpp'
                 break
-            if f.suffix == '.c':
-                options.language = 'c'
+            if f.suffix == '.cs':
+                options.language = 'cs'
+                break
+            if f.suffix == '.cu':
+                options.language = 'cuda'
                 break
             if f.suffix == '.d':
                 options.language = 'd'
@@ -129,66 +123,41 @@
             if f.suffix == '.m':
                 options.language = 'objc'
                 break
+            if f.suffix == '.mm':
+                options.language = 'objcpp'
+                break
+            if f.suffix == '.java':
+                options.language = 'java'
+                break
         if not options.language:
             raise SystemExit("Can't autodetect language, please specify it with -l.")
         print("Detected language: " + options.language)
 
-
-meson_executable_template = '''project('{project_name}', '{language}',
-  version : '{version}',
-  default_options : [{default_options}])
-
-executable('{executable}',
-           {sourcespec},{depspec}
-           install : true)
-'''
-
-def create_meson_build(options):
-    if options.type != 'executable':
-        raise SystemExit('\nGenerating a meson.build file from existing sources is\n'
-                         'supported only for project type "executable".\n'
-                         'Run meson init in an empty directory to create a sample project.')
-    default_options = ['warning_level=3']
-    if options.language == 'cpp':
-        # This shows how to set this very common option.
-        default_options += ['cpp_std=c++14']
-    # If we get a meson.build autoformatter one day, this code could
-    # be simplified quite a bit.
-    formatted_default_options = ', '.join("'{}'".format(x) for x in default_options)
-    sourcespec = ',\n           '.join("'{}'".format(x) for x in options.srcfiles)
-    depspec = ''
-    if options.deps:
-        depspec = '\n           dependencies : [\n              '
-        depspec += ',\n              '.join("dependency('{}')".format(x)
-                                            for x in options.deps.split(','))
-        depspec += '],'
-    content = meson_executable_template.format(project_name=options.name,
-                                               language=options.language,
-                                               version=options.version,
-                                               executable=options.executable,
-                                               sourcespec=sourcespec,
-                                               depspec=depspec,
-                                               default_options=formatted_default_options)
-    open('meson.build', 'w').write(content)
-    print('Generated meson.build file:\n\n' + content)
-
-def add_arguments(parser):
-    parser.add_argument("srcfiles", metavar="sourcefile", nargs="*",
-                        help="source files. default: all recognized files in current directory")
+def add_arguments(parser: 'argparse.ArgumentParser') -> None:
+    '''
+    Add the arguments that the user can pass when creating a new
+    Meson project.
+    '''
+    parser.add_argument("srcfiles", metavar="sourcefile", nargs="*", help="source files. default: all recognized files in current directory")
+    parser.add_argument('-C', default='.', dest='wd', help='directory to cd into before running')
     parser.add_argument("-n", "--name", help="project name. default: name of current directory")
     parser.add_argument("-e", "--executable", help="executable name. default: project name")
     parser.add_argument("-d", "--deps", help="dependencies, comma-separated")
-    parser.add_argument("-l", "--language", choices=['c', 'cpp', 'd', 'fortran', 'rust', 'objc'],
-                        help="project language. default: autodetected based on source files")
-    parser.add_argument("-b", "--build", help="build after generation", action='store_true')
-    parser.add_argument("--builddir", help="directory for build", default='build')
-    parser.add_argument("-f", "--force", action="store_true",
-                        help="force overwrite of existing files and directories.")
-    parser.add_argument('--type', default='executable',
-                        choices=['executable', 'library'])
-    parser.add_argument('--version', default='0.1')
+    parser.add_argument("-l", "--language", choices=sorted(LANG_SUPPORTED), help="project language. default: autodetected based on source files")
+    parser.add_argument("-b", "--build", action='store_true', help="build after generation")
+    parser.add_argument("--builddir", default='build', help="directory for build")
+    parser.add_argument("-f", "--force", action="store_true", help="force overwrite of existing files and directories.")
+    parser.add_argument('--type', default=DEFAULT_PROJECT, choices=('executable', 'library'), help="project type. default: {} based project".format(DEFAULT_PROJECT))
+    parser.add_argument('--version', default=DEFAULT_VERSION, help="project version. default: {}".format(DEFAULT_VERSION))
+
+def run(options: 'argparse.Namespace') -> int:
+    '''
+    Generate the new Meson sample project.
+    '''
+    if not Path(options.wd).exists():
+        sys.exit('Project source root directory not found. Run this command in the source directory root.')
+    os.chdir(options.wd)
 
-def run(options) -> int:
     if not glob('*'):
         autodetect_options(options, sample=True)
         if not options.language:
@@ -209,7 +178,7 @@
         ret = subprocess.run(cmd)
         if ret.returncode:
             raise SystemExit
-        cmd = [detect_ninja(), '-C', options.builddir]
+        cmd = detect_ninja() + ['-C', options.builddir]
         ret = subprocess.run(cmd)
         if ret.returncode:
             raise SystemExit
diff -Nru meson-0.53.2/mesonbuild/minstall.py meson-0.57.0+really0.56.2/mesonbuild/minstall.py
--- meson-0.53.2/mesonbuild/minstall.py	2019-12-29 22:47:27.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/minstall.py	2021-01-06 10:39:48.000000000 +0000
@@ -13,12 +13,16 @@
 # limitations under the License.
 
 import sys, pickle, os, shutil, subprocess, errno
+import argparse
 import shlex
 from glob import glob
 from .scripts import depfixer
 from .scripts import destdir_join
 from .mesonlib import is_windows, Popen_safe
 from .mtest import rebuild_all
+from .backend.backends import InstallData
+from .coredata import major_versions_differ, MesonVersionMismatchException
+from .coredata import version as coredata_version
 try:
     from __main__ import __file__ as main_file
 except ImportError:
@@ -35,10 +39,14 @@
 def add_arguments(parser):
     parser.add_argument('-C', default='.', dest='wd',
                         help='directory to cd into before running')
+    parser.add_argument('--profile-self', action='store_true', dest='profile',
+                        help=argparse.SUPPRESS)
     parser.add_argument('--no-rebuild', default=False, action='store_true',
                         help='Do not rebuild before installing.')
     parser.add_argument('--only-changed', default=False, action='store_true',
                         help='Only overwrite files that are older than the copied file.')
+    parser.add_argument('--quiet', default=False, action='store_true',
+                        help='Do not print every file that was installed.')
 
 class DirMaker:
     def __init__(self, lf):
@@ -166,6 +174,10 @@
         # If we don't have restorecon, failure is ignored quietly.
         return
 
+    if not selinux_updates:
+        # If the list of files is empty, do not try to call restorecon.
+        return
+
     with subprocess.Popen(['restorecon', '-F', '-f-', '-0'],
                           stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc:
         out, err = proc.communicate(input=b'\0'.join(os.fsencode(f) for f in selinux_updates) + b'\0')
@@ -214,6 +226,11 @@
         self.did_install_something = False
         self.options = options
         self.lf = lf
+        self.preserved_file_count = 0
+
+    def log(self, msg):
+        if not self.options.quiet:
+            print(msg)
 
     def should_preserve_existing_file(self, from_file, to_file):
         if not self.options.only_changed:
@@ -225,7 +242,7 @@
         to_time = os.stat(to_file).st_mtime
         return from_time <= to_time
 
-    def do_copyfile(self, from_file, to_file):
+    def do_copyfile(self, from_file, to_file, makedirs=None):
         outdir = os.path.split(to_file)[0]
         if not os.path.isfile(from_file) and not os.path.islink(from_file):
             raise RuntimeError('Tried to install something that isn\'t a file:'
@@ -238,11 +255,16 @@
                 raise RuntimeError('Destination {!r} already exists and is not '
                                    'a file'.format(to_file))
             if self.should_preserve_existing_file(from_file, to_file):
-                append_to_log(self.lf, '# Preserving old file %s\n' % to_file)
-                print('Preserving existing file %s' % to_file)
+                append_to_log(self.lf, '# Preserving old file {}\n'.format(to_file))
+                self.preserved_file_count += 1
                 return False
             os.remove(to_file)
-        print('Installing %s to %s' % (from_file, outdir))
+        elif makedirs:
+            # Unpack tuple
+            dirmaker, outdir = makedirs
+            # Create dirs if needed
+            dirmaker.makedirs(outdir, exist_ok=True)
+        self.log('Installing {} to {}'.format(from_file, outdir))
         if os.path.islink(from_file):
             if not os.path.exists(from_file):
                 # Dangling symlink. Replicate as is.
@@ -251,11 +273,9 @@
                 # Remove this entire branch when changing the behaviour to duplicate
                 # symlinks rather than copying what they point to.
                 print(symlink_warning)
-                shutil.copyfile(from_file, to_file)
-                shutil.copystat(from_file, to_file)
+                shutil.copy2(from_file, to_file)
         else:
-            shutil.copyfile(from_file, to_file)
-            shutil.copystat(from_file, to_file)
+            shutil.copy2(from_file, to_file)
         selinux_updates.append(to_file)
         append_to_log(self.lf, to_file)
         return True
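
do_copyfile now takes an optional makedirs=(dirmaker, outdir) tuple so the destination directory is created only when a file will actually be copied; that is what lets install_data, install_man and install_headers below drop their unconditional makedirs calls. A simplified standalone sketch of the pattern using plain os/shutil (not meson's DirMaker):

    import os
    import shutil

    def copy_lazily(src: str, dst: str) -> bool:
        if os.path.exists(dst) and os.path.getmtime(src) <= os.path.getmtime(dst):
            return False                     # keep the existing file; no directories are created
        os.makedirs(os.path.dirname(dst) or '.', exist_ok=True)   # make dirs only when copying
        shutil.copy2(src, dst)               # copies contents and stat info, like the code above
        return True
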
@@ -284,9 +304,9 @@
                      each element of the set is a path relative to src_dir.
         '''
         if not os.path.isabs(src_dir):
-            raise ValueError('src_dir must be absolute, got %s' % src_dir)
+            raise ValueError('src_dir must be absolute, got {}'.format(src_dir))
         if not os.path.isabs(dst_dir):
-            raise ValueError('dst_dir must be absolute, got %s' % dst_dir)
+            raise ValueError('dst_dir must be absolute, got {}'.format(dst_dir))
         if exclude is not None:
             exclude_files, exclude_dirs = exclude
         else:
@@ -304,7 +324,7 @@
                 if os.path.isdir(abs_dst):
                     continue
                 if os.path.exists(abs_dst):
-                    print('Tried to copy directory %s but a file of that name already exists.' % abs_dst)
+                    print('Tried to copy directory {} but a file of that name already exists.'.format(abs_dst))
                     sys.exit(1)
                 data.dirmaker.makedirs(abs_dst)
                 shutil.copystat(abs_src, abs_dst)
@@ -316,7 +336,8 @@
                     continue
                 abs_dst = os.path.join(dst_dir, filepart)
                 if os.path.isdir(abs_dst):
-                    print('Tried to copy file %s but a directory of that name already exists.' % abs_dst)
+                    print('Tried to copy file {} but a directory of that name already exists.'.format(abs_dst))
+                    sys.exit(1)
                 parent_dir = os.path.dirname(abs_dst)
                 if not os.path.isdir(parent_dir):
                     os.mkdir(parent_dir)
@@ -325,9 +346,18 @@
                 self.do_copyfile(abs_src, abs_dst)
                 set_mode(abs_dst, install_mode, data.install_umask)
 
+    @staticmethod
+    def check_installdata(obj: InstallData) -> InstallData:
+        if not isinstance(obj, InstallData) or not hasattr(obj, 'version'):
+            raise MesonVersionMismatchException('', coredata_version)
+        if major_versions_differ(obj.version, coredata_version):
+            raise MesonVersionMismatchException(obj.version, coredata_version)
+        return obj
+
     def do_install(self, datafilename):
         with open(datafilename, 'rb') as ifile:
-            d = pickle.load(ifile)
+            d = self.check_installdata(pickle.load(ifile))
+
         d.destdir = os.environ.get('DESTDIR', '')
         d.fullprefix = destdir_join(d.destdir, d.prefix)
 
@@ -346,7 +376,10 @@
                 restore_selinux_contexts()
                 self.run_install_script(d)
                 if not self.did_install_something:
-                    print('Nothing to install.')
+                    self.log('Nothing to install.')
+                if not self.options.quiet and self.preserved_file_count > 0:
+                    self.log('Preserved {} unchanged files, see {} for the full list'
+                             .format(self.preserved_file_count, os.path.normpath(self.lf.name)))
         except PermissionError:
             if shutil.which('pkexec') is not None and 'PKEXEC_UID' not in os.environ:
                 print('Installation failed due to insufficient permissions.')
@@ -360,42 +393,39 @@
         for (src_dir, dst_dir, mode, exclude) in d.install_subdirs:
             self.did_install_something = True
             full_dst_dir = get_destdir_path(d, dst_dir)
-            print('Installing subdir %s to %s' % (src_dir, full_dst_dir))
+            self.log('Installing subdir {} to {}'.format(src_dir, full_dst_dir))
             d.dirmaker.makedirs(full_dst_dir, exist_ok=True)
             self.do_copydir(d, src_dir, full_dst_dir, exclude, mode)
 
     def install_data(self, d):
         for i in d.data:
-            self.did_install_something = True
             fullfilename = i[0]
             outfilename = get_destdir_path(d, i[1])
             mode = i[2]
             outdir = os.path.dirname(outfilename)
-            d.dirmaker.makedirs(outdir, exist_ok=True)
-            self.do_copyfile(fullfilename, outfilename)
+            if self.do_copyfile(fullfilename, outfilename, makedirs=(d.dirmaker, outdir)):
+                self.did_install_something = True
             set_mode(outfilename, mode, d.install_umask)
 
     def install_man(self, d):
         for m in d.man:
-            self.did_install_something = True
             full_source_filename = m[0]
             outfilename = get_destdir_path(d, m[1])
             outdir = os.path.dirname(outfilename)
-            d.dirmaker.makedirs(outdir, exist_ok=True)
             install_mode = m[2]
-            self.do_copyfile(full_source_filename, outfilename)
+            if self.do_copyfile(full_source_filename, outfilename, makedirs=(d.dirmaker, outdir)):
+                self.did_install_something = True
             set_mode(outfilename, install_mode, d.install_umask)
 
     def install_headers(self, d):
         for t in d.headers:
-            self.did_install_something = True
             fullfilename = t[0]
             fname = os.path.basename(fullfilename)
             outdir = get_destdir_path(d, t[1])
             outfilename = os.path.join(outdir, fname)
             install_mode = t[2]
-            d.dirmaker.makedirs(outdir, exist_ok=True)
-            self.do_copyfile(fullfilename, outfilename)
+            if self.do_copyfile(fullfilename, outfilename, makedirs=(d.dirmaker, outdir)):
+                self.did_install_something = True
             set_mode(outfilename, install_mode, d.install_umask)
 
     def run_install_script(self, d):
@@ -405,6 +435,8 @@
                'MESON_INSTALL_DESTDIR_PREFIX': d.fullprefix,
                'MESONINTROSPECT': ' '.join([shlex.quote(x) for x in d.mesonintrospect]),
                }
+        if self.options.quiet:
+            env['MESON_INSTALL_QUIET'] = '1'
 
         child_env = os.environ.copy()
         child_env.update(env)
@@ -414,25 +446,27 @@
             script = i['exe']
             args = i['args']
             name = ' '.join(script + args)
-            print('Running custom install script {!r}'.format(name))
+            self.log('Running custom install script {!r}'.format(name))
             try:
                 rc = subprocess.call(script + args, env=child_env)
-                if rc != 0:
-                    sys.exit(rc)
             except OSError:
-                print('Failed to run install script {!r}'.format(name))
-                sys.exit(1)
+                print('FAILED: install script \'{}\' could not be run, stopped'.format(name))
+                # POSIX shells return 127 when a command could not be found
+                sys.exit(127)
+            if rc != 0:
+                print('FAILED: install script \'{}\' exit code {}, stopped'.format(name, rc))
+                sys.exit(rc)
 
     def install_targets(self, d):
         for t in d.targets:
-            self.did_install_something = True
             if not os.path.exists(t.fname):
                 # For example, import libraries of shared modules are optional
                 if t.optional:
-                    print('File {!r} not found, skipping'.format(t.fname))
+                    self.log('File {!r} not found, skipping'.format(t.fname))
                     continue
                 else:
                     raise RuntimeError('File {!r} could not be found'.format(t.fname))
+            file_copied = False # not set when a directory is copied
             fname = check_for_stampfile(t.fname)
             outdir = get_destdir_path(d, t.outdir)
             outname = os.path.join(outdir, os.path.basename(fname))
@@ -442,22 +476,21 @@
             install_rpath = t.install_rpath
             install_name_mappings = t.install_name_mappings
             install_mode = t.install_mode
-            d.dirmaker.makedirs(outdir, exist_ok=True)
             if not os.path.exists(fname):
                 raise RuntimeError('File {!r} could not be found'.format(fname))
             elif os.path.isfile(fname):
-                self.do_copyfile(fname, outname)
+                file_copied = self.do_copyfile(fname, outname, makedirs=(d.dirmaker, outdir))
                 set_mode(outname, install_mode, d.install_umask)
                 if should_strip and d.strip_bin is not None:
                     if fname.endswith('.jar'):
-                        print('Not stripping jar target:', os.path.basename(fname))
+                        self.log('Not stripping jar target: {}'.format(os.path.basename(fname)))
                         continue
-                    print('Stripping target {!r} using {}.'.format(fname, d.strip_bin[0]))
+                    self.log('Stripping target {!r} using {}.'.format(fname, d.strip_bin[0]))
                     ps, stdo, stde = Popen_safe(d.strip_bin + [outname])
                     if ps.returncode != 0:
                         print('Could not strip file.\n')
-                        print('Stdout:\n%s\n' % stdo)
-                        print('Stderr:\n%s\n' % stde)
+                        print('Stdout:\n{}\n'.format(stdo))
+                        print('Stderr:\n{}\n'.format(stde))
                         sys.exit(1)
                 if fname.endswith('.js'):
                     # Emscripten outputs js files and optionally a wasm file.
@@ -465,10 +498,11 @@
                     wasm_source = os.path.splitext(fname)[0] + '.wasm'
                     if os.path.exists(wasm_source):
                         wasm_output = os.path.splitext(outname)[0] + '.wasm'
-                        self.do_copyfile(wasm_source, wasm_output)
+                        file_copied = self.do_copyfile(wasm_source, wasm_output)
             elif os.path.isdir(fname):
                 fname = os.path.join(d.build_dir, fname.rstrip('/'))
                 outname = os.path.join(outdir, os.path.basename(fname))
+                d.dirmaker.makedirs(outdir, exist_ok=True)
                 self.do_copydir(d, fname, outname, None, install_mode)
             else:
                 raise RuntimeError('Unknown file type for {!r}'.format(fname))
@@ -487,9 +521,10 @@
                         print("Symlink creation does not work on this platform. "
                               "Skipping all symlinking.")
                         printed_symlink_error = True
-            if os.path.isfile(outname):
+            if file_copied:
+                self.did_install_something = True
                 try:
-                    depfixer.fix_rpath(outname, install_rpath, final_path,
+                    depfixer.fix_rpath(outname, t.rpath_dirs_to_remove, install_rpath, final_path,
                                        install_name_mappings, verbose=False)
                 except SystemExit as e:
                     if isinstance(e.code, int) and e.code == 0:
@@ -511,5 +546,10 @@
         installer = Installer(opts, lf)
         append_to_log(lf, '# List of files installed by Meson')
         append_to_log(lf, '# Does not contain files installed by custom scripts.')
-        installer.do_install(datafilename)
+        if opts.profile:
+            import cProfile as profile
+            fname = os.path.join(private_dir, 'profile-installer.log')
+            profile.runctx('installer.do_install(datafilename)', globals(), locals(), filename=fname)
+        else:
+            installer.do_install(datafilename)
     return 0
diff -Nru meson-0.53.2/mesonbuild/mintro.py meson-0.57.0+really0.56.2/mesonbuild/mintro.py
--- meson-0.53.2/mesonbuild/mintro.py	2020-01-23 12:51:19.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/mintro.py	2021-01-06 10:39:48.000000000 +0000
@@ -19,16 +19,19 @@
 Currently only works for the Ninja backend. Others use generated
 project files and don't need this info."""
 
+import collections
 import json
 from . import build, coredata as cdata
 from . import mesonlib
-from .ast import IntrospectionInterpreter, build_target_functions, AstConditionLevel, AstIDGenerator, AstIndentationGenerator
+from .ast import IntrospectionInterpreter, build_target_functions, AstConditionLevel, AstIDGenerator, AstIndentationGenerator, AstJSONPrinter
 from . import mlog
 from .backend import backends
-from .mparser import FunctionNode, ArrayNode, ArgumentNode, StringNode
+from .mparser import BaseNode, FunctionNode, ArrayNode, ArgumentNode, StringNode
+from .interpreter import Interpreter
+from ._pathlib import Path, PurePath
 import typing as T
 import os
-import pathlib
+import argparse
 
 def get_meson_info_file(info_dir: str) -> str:
     return os.path.join(info_dir, 'meson-info.json')
@@ -51,33 +54,36 @@
 def get_meson_introspection_types(coredata: T.Optional[cdata.CoreData] = None,
                                   builddata: T.Optional[build.Build] = None,
                                   backend: T.Optional[backends.Backend] = None,
-                                  sourcedir: T.Optional[str] = None) -> T.Dict[str, IntroCommand]:
+                                  sourcedir: T.Optional[str] = None) -> 'T.Mapping[str, IntroCommand]':
     if backend and builddata:
         benchmarkdata = backend.create_test_serialisation(builddata.get_benchmarks())
         testdata = backend.create_test_serialisation(builddata.get_tests())
         installdata = backend.create_install_data()
+        interpreter = backend.interpreter
     else:
         benchmarkdata = testdata = installdata = None
 
-    return {
-        'benchmarks': IntroCommand('T.List all benchmarks', func=lambda: list_benchmarks(benchmarkdata)),
-        'buildoptions': IntroCommand('T.List all build options', func=lambda: list_buildoptions(coredata), no_bd=list_buildoptions_from_source),
-        'buildsystem_files': IntroCommand('T.List files that make up the build system', func=lambda: list_buildsystem_files(builddata)),
-        'dependencies': IntroCommand('T.List external dependencies', func=lambda: list_deps(coredata), no_bd=list_deps_from_source),
-        'scan_dependencies': IntroCommand('Scan for dependencies used in the meson.build file', no_bd=list_deps_from_source),
-        'installed': IntroCommand('T.List all installed files and directories', func=lambda: list_installed(installdata)),
-        'projectinfo': IntroCommand('Information about projects', func=lambda: list_projinfo(builddata), no_bd=list_projinfo_from_source),
-        'targets': IntroCommand('T.List top level targets', func=lambda: list_targets(builddata, installdata, backend), no_bd=list_targets_from_source),
-        'tests': IntroCommand('T.List all unit tests', func=lambda: list_tests(testdata)),
-    }
+    # Enforce key order for argparse
+    return collections.OrderedDict([
+        ('ast', IntroCommand('Dump the AST of the meson file', no_bd=dump_ast)),
+        ('benchmarks', IntroCommand('List all benchmarks', func=lambda: list_benchmarks(benchmarkdata))),
+        ('buildoptions', IntroCommand('List all build options', func=lambda: list_buildoptions(coredata), no_bd=list_buildoptions_from_source)),
+        ('buildsystem_files', IntroCommand('List files that make up the build system', func=lambda: list_buildsystem_files(builddata, interpreter))),
+        ('dependencies', IntroCommand('List external dependencies', func=lambda: list_deps(coredata), no_bd=list_deps_from_source)),
+        ('scan_dependencies', IntroCommand('Scan for dependencies used in the meson.build file', no_bd=list_deps_from_source)),
+        ('installed', IntroCommand('List all installed files and directories', func=lambda: list_installed(installdata))),
+        ('projectinfo', IntroCommand('Information about projects', func=lambda: list_projinfo(builddata), no_bd=list_projinfo_from_source)),
+        ('targets', IntroCommand('List top level targets', func=lambda: list_targets(builddata, installdata, backend), no_bd=list_targets_from_source)),
+        ('tests', IntroCommand('List all unit tests', func=lambda: list_tests(testdata))),
+    ])
 
-def add_arguments(parser):
+def add_arguments(parser: argparse.ArgumentParser) -> None:
     intro_types = get_meson_introspection_types()
     for key, val in intro_types.items():
         flag = '--' + key.replace('_', '-')
         parser.add_argument(flag, action='store_true', dest=key, default=False, help=val.desc)
 
-    parser.add_argument('--backend', choices=cdata.backendlist, dest='backend', default='ninja',
+    parser.add_argument('--backend', choices=sorted(cdata.backendlist), dest='backend', default='ninja',
                         help='The backend to use for the --buildoptions introspection.')
     parser.add_argument('-a', '--all', action='store_true', dest='all', default=False,
                         help='Print all available information.')
@@ -87,12 +93,20 @@
                         help='Always use the new JSON format for multiple entries (even for 0 and 1 introspection commands)')
     parser.add_argument('builddir', nargs='?', default='.', help='The build directory')
 
-def list_installed(installdata):
+def dump_ast(intr: IntrospectionInterpreter) -> T.Dict[str, T.Any]:
+    printer = AstJSONPrinter()
+    intr.ast.accept(printer)
+    return printer.result
+
+def list_installed(installdata: backends.InstallData) -> T.Dict[str, str]:
     res = {}
     if installdata is not None:
         for t in installdata.targets:
             res[os.path.join(installdata.build_dir, t.fname)] = \
                 os.path.join(installdata.prefix, t.outdir, os.path.basename(t.fname))
+            for alias in t.aliases.keys():
+                res[os.path.join(installdata.build_dir, alias)] = \
+                    os.path.join(installdata.prefix, t.outdir, os.path.basename(alias))
         for path, installpath, _ in installdata.data:
             res[path] = os.path.join(installdata.prefix, installpath)
         for path, installdir, _ in installdata.headers:
@@ -105,10 +119,11 @@
 
 def list_targets_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]:
     tlist = []  # type: T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]
-    for i in intr.targets:
-        sources = []  # type: T.List[str]
-        for n in i['sources']:
-            args = []  # type: T.List[T.Union[str, StringNode]]
+    root_dir = Path(intr.source_root)
+    def nodes_to_paths(node_list: T.List[BaseNode]) -> T.List[Path]:
+        res = []  # type: T.List[Path]
+        for n in node_list:
+            args = []  # type: T.List[BaseNode]
             if isinstance(n, FunctionNode):
                 args = list(n.args.arguments)
                 if n.func_name in build_target_functions:
@@ -119,9 +134,17 @@
                 args = n.arguments
             for j in args:
                 if isinstance(j, StringNode):
-                    sources += [j.value]
+                    assert isinstance(j.value, str)
+                    res += [Path(j.value)]
                 elif isinstance(j, str):
-                    sources += [j]
+                    res += [Path(j)]
+        res = [root_dir / i['subdir'] / x for x in res]
+        res = [x.resolve() for x in res]
+        return res
+
+    for i in intr.targets:
+        sources = nodes_to_paths(i['sources'])
+        extra_f = nodes_to_paths(i['extra_files'])
 
         tlist += [{
             'name': i['name'],
@@ -134,16 +157,17 @@
                 'language': 'unknown',
                 'compiler': [],
                 'parameters': [],
-                'sources': [os.path.normpath(os.path.join(os.path.abspath(intr.source_root), i['subdir'], x)) for x in sources],
+                'sources': [str(x) for x in sources],
                 'generated_sources': []
             }],
+            'extra_files': [str(x) for x in extra_f],
             'subproject': None, # Subprojects are not supported
             'installed': i['installed']
         }]
 
     return tlist
 
-def list_targets(builddata: build.Build, installdata, backend: backends.Backend) -> T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]:
+def list_targets(builddata: build.Build, installdata: backends.InstallData, backend: backends.Backend) -> T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]:
     tlist = []  # type: T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]
     build_dir = builddata.environment.get_build_dir()
     src_dir = builddata.environment.get_source_dir()
@@ -151,8 +175,9 @@
     # Fast lookup table for installation files
     install_lookuptable = {}
     for i in installdata.targets:
-        outname = os.path.join(installdata.prefix, i.outdir, os.path.basename(i.fname))
-        install_lookuptable[os.path.basename(i.fname)] = str(pathlib.PurePath(outname))
+        out = [os.path.join(installdata.prefix, i.outdir, os.path.basename(i.fname))]
+        out += [os.path.join(installdata.prefix, i.outdir, os.path.basename(x)) for x in i.aliases]
+        install_lookuptable[os.path.basename(i.fname)] = [str(PurePath(x)) for x in out]
 
     for (idname, target) in builddata.get_targets().items():
         if not isinstance(target, build.Target):
@@ -166,36 +191,27 @@
             'filename': [os.path.join(build_dir, target.subdir, x) for x in target.get_outputs()],
             'build_by_default': target.build_by_default,
             'target_sources': backend.get_introspection_data(idname, target),
+            'extra_files': [os.path.normpath(os.path.join(src_dir, x.subdir, x.fname)) for x in target.extra_files],
             'subproject': target.subproject or None
         }
 
         if installdata and target.should_install():
             t['installed'] = True
-            t['install_filename'] = [install_lookuptable.get(x, None) for x in target.get_outputs()]
+            t['install_filename'] = [install_lookuptable.get(x, [None]) for x in target.get_outputs()]
+            t['install_filename'] = [x for sublist in t['install_filename'] for x in sublist]  # flatten the list
         else:
             t['installed'] = False
         tlist.append(t)
     return tlist
 
 def list_buildoptions_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]]:
-    return list_buildoptions(intr.coredata)
+    subprojects = [i['name'] for i in intr.project_data['subprojects']]
+    return list_buildoptions(intr.coredata, subprojects)
 
-def list_buildoptions(coredata: cdata.CoreData) -> T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]]:
+def list_buildoptions(coredata: cdata.CoreData, subprojects: T.Optional[T.List[str]] = None) -> T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]]:
     optlist = []  # type: T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]]
 
-    dir_option_names = ['bindir',
-                        'datadir',
-                        'includedir',
-                        'infodir',
-                        'libdir',
-                        'libexecdir',
-                        'localedir',
-                        'localstatedir',
-                        'mandir',
-                        'prefix',
-                        'sbindir',
-                        'sharedstatedir',
-                        'sysconfdir']
+    dir_option_names = list(cdata.BUILTIN_DIR_OPTIONS)
     test_option_names = ['errorlogs',
                          'stdsplit']
     core_option_names = [k for k in coredata.builtins if k not in dir_option_names + test_option_names]
@@ -204,7 +220,17 @@
     test_options = {k: o for k, o in coredata.builtins.items() if k in test_option_names}
     core_options = {k: o for k, o in coredata.builtins.items() if k in core_option_names}
 
-    def add_keys(options: T.Dict[str, cdata.UserOption], section: str, machine: str = 'any') -> None:
+    if subprojects:
+        # Add per subproject built-in options
+        sub_core_options = {}
+        for sub in subprojects:
+            for k, o in core_options.items():
+                if o.yielding:
+                    continue
+                sub_core_options[sub + ':' + k] = o
+        core_options.update(sub_core_options)
+
+    def add_keys(options: 'cdata.OptionDictType', section: str, machine: str = 'any') -> None:
         for key in sorted(options.keys()):
             opt = options[key]
             optdict = {'name': key, 'value': opt.value, 'section': section, 'machine': machine}
@@ -234,9 +260,14 @@
     )
     add_keys(coredata.backend_options, 'backend')
     add_keys(coredata.base_options, 'base')
-    add_keys(coredata.compiler_options.host, 'compiler', machine='host')
     add_keys(
-        {'build.' + k: o for k, o in coredata.compiler_options.build.items()},
+        dict(coredata.flatten_lang_iterator(coredata.compiler_options.host.items())),
+        'compiler',
+        machine='host',
+    )
+    tmp_dict = dict(coredata.flatten_lang_iterator(coredata.compiler_options.build.items()))  # type: T.Dict[str, cdata.UserOption]
+    add_keys(
+        {'build.' + k: o for k, o in tmp_dict.items()},
         'compiler',
         machine='build',
     )
@@ -245,7 +276,7 @@
     add_keys(test_options, 'test')
     return optlist
 
-def find_buildsystem_files_list(src_dir) -> T.List[str]:
+def find_buildsystem_files_list(src_dir: str) -> T.List[str]:
     # I feel dirty about this. But only slightly.
     filelist = []  # type: T.List[str]
     for root, _, files in os.walk(src_dir):
@@ -254,10 +285,10 @@
                 filelist.append(os.path.relpath(os.path.join(root, f), src_dir))
     return filelist
 
-def list_buildsystem_files(builddata: build.Build) -> T.List[str]:
+def list_buildsystem_files(builddata: build.Build, interpreter: Interpreter) -> T.List[str]:
     src_dir = builddata.environment.get_source_dir()
-    filelist = find_buildsystem_files_list(src_dir)
-    filelist = [os.path.join(src_dir, x) for x in filelist]
+    filelist = interpreter.get_build_def_files()  # type: T.List[str]
+    filelist = [PurePath(src_dir, x).as_posix() for x in filelist]
     return filelist
 
 def list_deps_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, T.Union[str, bool]]]:
@@ -283,10 +314,10 @@
                         'link_args': d.get_link_args()}]
     return result
 
-def get_test_list(testdata) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
+def get_test_list(testdata: T.List[backends.TestSerialisation]) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
     result = []  # type: T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]
     for t in testdata:
-        to = {}
+        to = {}  # type: T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]
         if isinstance(t.fname, str):
             fname = [t.fname]
         else:
@@ -302,25 +333,26 @@
         to['suite'] = t.suite
         to['is_parallel'] = t.is_parallel
         to['priority'] = t.priority
-        to['protocol'] = t.protocol
+        to['protocol'] = str(t.protocol)
+        to['depends'] = t.depends
         result.append(to)
     return result
 
-def list_tests(testdata) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
+def list_tests(testdata: T.List[backends.TestSerialisation]) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
     return get_test_list(testdata)
 
-def list_benchmarks(benchdata) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
+def list_benchmarks(benchdata: T.List[backends.TestSerialisation]) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
     return get_test_list(benchdata)
 
 def list_projinfo(builddata: build.Build) -> T.Dict[str, T.Union[str, T.List[T.Dict[str, str]]]]:
     result = {'version': builddata.project_version,
               'descriptive_name': builddata.project_name,
-              'subproject_dir': builddata.subproject_dir}
+              'subproject_dir': builddata.subproject_dir}    # type: T.Dict[str, T.Union[str, T.List[T.Dict[str, str]]]]
     subprojects = []
     for k, v in builddata.subprojects.items():
         c = {'name': k,
              'version': v,
-             'descriptive_name': builddata.projects.get(k)}
+             'descriptive_name': builddata.projects.get(k)}  # type: T.Dict[str, str]
         subprojects.append(c)
     result['subprojects'] = subprojects
     return result
@@ -339,7 +371,7 @@
     intr.project_data['subproject_dir'] = intr.subproject_dir
     return intr.project_data
 
-def print_results(options, results: T.Sequence[T.Tuple[str, T.Union[dict, T.List[T.Any]]]], indent: int) -> int:
+def print_results(options: argparse.Namespace, results: T.Sequence[T.Tuple[str, T.Union[dict, T.List[T.Any]]]], indent: int) -> int:
     if not results and not options.force_dict:
         print('No command specified')
         return 1
@@ -353,7 +385,7 @@
         print(json.dumps(out, indent=indent))
     return 0
 
-def run(options) -> int:
+def run(options: argparse.Namespace) -> int:
     datadir = 'meson-private'
     infodir = 'meson-info'
     if options.builddir is not None:
@@ -367,7 +399,8 @@
     if 'meson.build' in [os.path.basename(options.builddir), options.builddir]:
         # Make sure that log entries in other parts of meson don't interfere with the JSON output
         mlog.disable()
-        backend = backends.get_backend_from_name(options.backend, None)
+        backend = backends.get_backend_from_name(options.backend)
+        assert backend is not None
         intr = IntrospectionInterpreter(sourcedir, '', backend.name, visitors = [AstIDGenerator(), AstIndentationGenerator(), AstConditionLevel()])
         intr.analyze()
         # Re-enable logging just in case
@@ -438,7 +471,7 @@
 
     write_intro_info(intro_info, builddata.environment.info_dir)
 
-def update_build_options(coredata: cdata.CoreData, info_dir) -> None:
+def update_build_options(coredata: cdata.CoreData, info_dir: str) -> None:
     intro_info = [
         ('buildoptions', list_buildoptions(coredata))
     ]
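
For orientation on the introspection changes above: the commands now live in an ordered mapping so the generated argparse flags keep a stable order, and each key becomes a '--flag'. A self-contained sketch of that pattern, using a namedtuple as a stand-in for Meson's real IntroCommand class:

    import argparse
    import collections

    IntroCommand = collections.namedtuple('IntroCommand', ['desc'])

    commands = collections.OrderedDict([
        ('ast', IntroCommand('Dump the AST of the meson file')),
        ('buildoptions', IntroCommand('List all build options')),
        ('scan_dependencies', IntroCommand('Scan for dependencies used in the meson.build file')),
    ])

    parser = argparse.ArgumentParser()
    for key, val in commands.items():
        # '--scan-dependencies' on the command line maps back to dest='scan_dependencies'.
        parser.add_argument('--' + key.replace('_', '-'), action='store_true',
                            dest=key, default=False, help=val.desc)

    args = parser.parse_args(['--ast', '--scan-dependencies'])
    print(args.ast, args.scan_dependencies)  # True True
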
diff -Nru meson-0.53.2/mesonbuild/mlog.py meson-0.57.0+really0.56.2/mesonbuild/mlog.py
--- meson-0.53.2/mesonbuild/mlog.py	2020-02-25 18:00:46.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/mlog.py	2021-01-06 10:39:48.000000000 +0000
@@ -19,7 +19,7 @@
 import platform
 import typing as T
 from contextlib import contextmanager
-from pathlib import Path
+from ._pathlib import Path
 
 """This is (mostly) a standalone module used to write logging
 information about Meson runs. Some output goes to screen,
@@ -40,15 +40,32 @@
     # original behavior
     return bool(kernel.SetConsoleMode(stdout, mode.value | 0x4) or os.environ.get('ANSICON'))
 
-def setup_console() -> bool:
+def colorize_console() -> bool:
+    _colorize_console = getattr(sys.stdout, 'colorize_console', None)  # type: bool
+    if _colorize_console is not None:
+        return _colorize_console
+
     try:
         if platform.system().lower() == 'windows':
-            return os.isatty(sys.stdout.fileno()) and _windows_ansi()
-        return os.isatty(sys.stdout.fileno()) and os.environ.get('TERM') != 'dumb'
+            _colorize_console = os.isatty(sys.stdout.fileno()) and _windows_ansi()
+        else:
+            _colorize_console = os.isatty(sys.stdout.fileno()) and os.environ.get('TERM', 'dumb') != 'dumb'
     except Exception:
-        return False
+        _colorize_console = False
+
+    sys.stdout.colorize_console = _colorize_console  # type: ignore[attr-defined]
+    return _colorize_console
+
+def setup_console() -> None:
+    # on Windows, a subprocess might call SetConsoleMode() on the console
+    # connected to stdout and turn off ANSI escape processing. Call this after
+    # running a subprocess to ensure we turn it on again.
+    if platform.system().lower() == 'windows':
+        try:
+            delattr(sys.stdout, 'colorize_console')
+        except AttributeError:
+            pass
 
-colorize_console = setup_console()
 log_dir = None               # type: T.Optional[str]
 log_file = None              # type: T.Optional[T.TextIO]
 log_fname = 'meson-log.txt'  # type: str
@@ -59,6 +76,7 @@
 log_errors_only = False      # type: bool
 _in_ci = 'CI' in os.environ  # type: bool
 _logged_once = set()         # type: T.Set[T.Tuple[str, ...]]
+log_warnings_counter = 0     # type: int
 
 def disable() -> None:
     global log_disable_stdout
@@ -203,7 +221,7 @@
     if log_file is not None:
         print(*arr, file=log_file, **kwargs)
         log_file.flush()
-    if colorize_console:
+    if colorize_console():
         arr = process_markup(args, True)
     if not log_errors_only or is_error:
         force_print(*arr, **kwargs)
@@ -212,7 +230,7 @@
              **kwargs: T.Any) -> None:
     """Log variant that only prints a given message one time per meson invocation.
 
-    This considers nasi decorated values by the values they wrap without
+    This considers ansi decorated values by the values they wrap without
     regard for the AnsiDecorator itself.
     """
     t = tuple(a.text if isinstance(a, AnsiDecorator) else a for a in args)
@@ -221,16 +239,26 @@
     _logged_once.add(t)
     log(*args, is_error=is_error, **kwargs)
 
+# This isn't strictly correct. What we really want here is something like:
+# class StringProtocol(typing_extensions.Protocol):
+#
+#      def __str__(self) -> str: ...
+#
+# This would more accurately embody what this function can handle, but we
+# don't have that yet, so instead we'll do some casting to work around it
+def get_error_location_string(fname: str, lineno: str) -> str:
+    return '{}:{}:'.format(fname, lineno)
+
 def _log_error(severity: str, *rargs: T.Union[str, AnsiDecorator],
-               once: bool = False, **kwargs: T.Any) -> None:
-    from .mesonlib import get_error_location_string
-    from .environment import build_filename
-    from .mesonlib import MesonException
+               once: bool = False, fatal: bool = True, **kwargs: T.Any) -> None:
+    from .mesonlib import MesonException, relpath
 
-    # The tping requirements here are non-obvious. Lists are invariant,
+    # The typing requirements here are non-obvious. Lists are invariant,
     # therefore T.List[A] and T.List[T.Union[A, B]] are not able to be joined
-    if severity == 'warning':
-        label = [yellow('WARNING:')]  # type: T.List[T.Union[str, AnsiDecorator]]
+    if severity == 'notice':
+        label = [bold('NOTICE:')]  # type: T.List[T.Union[str, AnsiDecorator]]
+    elif severity == 'warning':
+        label = [yellow('WARNING:')]
     elif severity == 'error':
         label = [red('ERROR:')]
     elif severity == 'deprecation':
@@ -242,7 +270,7 @@
 
     location = kwargs.pop('location', None)
     if location is not None:
-        location_file = os.path.join(location.subdir, build_filename)
+        location_file = relpath(location.filename, os.getcwd())
         location_str = get_error_location_string(location_file, location.lineno)
         # Unions are frankly awful, and we have to T.cast here to get mypy
         # to understand that the list concatenation is safe
@@ -254,17 +282,23 @@
     else:
         log(*args, **kwargs)
 
-    if log_fatal_warnings:
+    global log_warnings_counter
+    log_warnings_counter += 1
+
+    if log_fatal_warnings and fatal:
         raise MesonException("Fatal warnings enabled, aborting")
 
-def error(*args: T.Union[str, AnsiDecorator], once: bool = False, **kwargs: T.Any) -> None:
-    return _log_error('error', *args, **kwargs, is_error=True, once=once)
+def error(*args: T.Union[str, AnsiDecorator], **kwargs: T.Any) -> None:
+    return _log_error('error', *args, **kwargs, is_error=True)
+
+def warning(*args: T.Union[str, AnsiDecorator], **kwargs: T.Any) -> None:
+    return _log_error('warning', *args, **kwargs, is_error=True)
 
-def warning(*args: T.Union[str, AnsiDecorator], once: bool = False, **kwargs: T.Any) -> None:
-    return _log_error('warning', *args, **kwargs, is_error=True, once=once)
+def deprecation(*args: T.Union[str, AnsiDecorator], **kwargs: T.Any) -> None:
+    return _log_error('deprecation', *args, **kwargs, is_error=True)
 
-def deprecation(*args: T.Union[str, AnsiDecorator], once: bool = False, **kwargs: T.Any) -> None:
-    return _log_error('deprecation', *args, **kwargs, is_error=True, once=once)
+def notice(*args: T.Union[str, AnsiDecorator], **kwargs: T.Any) -> None:
+    return _log_error('notice', *args, **kwargs, is_error=False)
 
 def get_relative_path(target: Path, current: Path) -> Path:
     """Get the path to target from current"""
@@ -286,11 +320,11 @@
         prefix = red('ERROR:')
     log()
     args = []  # type: T.List[T.Union[AnsiDecorator, str]]
-    if hasattr(e, 'file') and hasattr(e, 'lineno') and hasattr(e, 'colno'):
-        # Mypy can't figure this out, and it's pretty easy to vidual inspect
+    if all(getattr(e, a, None) is not None for a in ['file', 'lineno', 'colno']):
+        # Mypy doesn't follow hasattr, and it's pretty easy to visually inspect
         # that this is correct, so we'll just ignore it.
-        path = get_relative_path(Path(e.file), Path(os.getcwd()))
-        args.append('%s:%d:%d:' % (path, e.lineno, e.colno))  # type: ignore
+        path = get_relative_path(Path(e.file), Path(os.getcwd()))  # type: ignore
+        args.append('{}:{}:{}:'.format(path, e.lineno, e.colno))  # type: ignore
     if prefix:
         args.append(prefix)
     args.append(str(e))
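
The mlog changes above replace the module-level colorize_console constant with a function that caches its probe result as an attribute on sys.stdout, plus a reset hook because a Windows subprocess may turn ANSI processing back off. A rough sketch of that caching pattern; the attribute and function names below are illustrative, not the ones Meson uses:

    import os
    import sys

    def colorize_console() -> bool:
        # Return the cached answer if the console was already probed.
        cached = getattr(sys.stdout, '_ansi_ok', None)
        if cached is not None:
            return cached
        try:
            value = os.isatty(sys.stdout.fileno()) and os.environ.get('TERM', 'dumb') != 'dumb'
        except Exception:
            value = False
        sys.stdout._ansi_ok = value  # cache on the stream object itself
        return value

    def reset_console_cache() -> None:
        # Analogue of setup_console(): forget the cached probe so the next
        # call re-detects the console state, e.g. after running a subprocess.
        try:
            delattr(sys.stdout, '_ansi_ok')
        except AttributeError:
            pass
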
diff -Nru meson-0.53.2/mesonbuild/modules/cmake.py meson-0.57.0+really0.56.2/mesonbuild/modules/cmake.py
--- meson-0.53.2/mesonbuild/modules/cmake.py	2020-02-25 18:00:46.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/modules/cmake.py	2021-01-06 10:39:48.000000000 +0000
@@ -14,12 +14,28 @@
 import re
 import os, os.path, pathlib
 import shutil
+import typing as T
 
 from . import ExtensionModule, ModuleReturnValue
 
 from .. import build, dependencies, mesonlib, mlog
-from ..interpreterbase import permittedKwargs, FeatureNew, stringArgs, InterpreterObject, ObjectHolder, noPosargs
-from ..interpreter import ConfigurationDataHolder, InterpreterException, SubprojectHolder
+from ..cmake import SingleTargetOptions, TargetOptions, cmake_defines_to_args
+from ..interpreter import ConfigurationDataHolder, InterpreterException, SubprojectHolder, DependencyHolder
+from ..interpreterbase import (
+    InterpreterObject,
+    ObjectHolder,
+
+    FeatureNew,
+    FeatureNewKwargs,
+    FeatureDeprecatedKwargs,
+
+    stringArgs,
+    permittedKwargs,
+    noPosargs,
+    noKwargs,
+
+    InvalidArguments,
+)
 
 
 COMPATIBILITIES = ['AnyNewerVersion', 'SameMajorVersion', 'SameMinorVersion', 'ExactVersion']
@@ -76,48 +92,122 @@
         tgt = args[0]
         res = self.held_object.cm_interpreter.target_info(tgt)
         if res is None:
-            raise InterpreterException('The CMake target {} does not exist'.format(tgt))
+            raise InterpreterException('The CMake target {} does not exist\n'.format(tgt) +
+                                       '  Use the following command in your meson.build to list all available targets:\n\n' +
+                                       '    message(\'CMake targets:\\n - \' + \'\\n - \'.join(<cmake_subproject object>.target_list()))')
 
         # Make sure that all keys are present (if not this is a bug)
         assert(all([x in res for x in ['inc', 'src', 'dep', 'tgt', 'func']]))
         return res
 
-    @permittedKwargs({})
+    @noKwargs
+    @stringArgs
     def get_variable(self, args, kwargs):
         return self.held_object.get_variable_method(args, kwargs)
 
-    @permittedKwargs({})
+    @FeatureNewKwargs('dependency', '0.56.0', ['include_type'])
+    @permittedKwargs({'include_type'})
+    @stringArgs
     def dependency(self, args, kwargs):
         info = self._args_to_info(args)
-        return self.get_variable([info['dep']], kwargs)
+        orig = self.get_variable([info['dep']], {})
+        assert isinstance(orig, DependencyHolder)
+        actual = orig.include_type_method([], {})
+        if 'include_type' in kwargs and kwargs['include_type'] != actual:
+            mlog.debug('Current include type is {}. Converting to requested {}'.format(actual, kwargs['include_type']))
+            return orig.as_system_method([kwargs['include_type']], {})
+        return orig
 
-    @permittedKwargs({})
+    @noKwargs
+    @stringArgs
     def include_directories(self, args, kwargs):
         info = self._args_to_info(args)
         return self.get_variable([info['inc']], kwargs)
 
-    @permittedKwargs({})
+    @noKwargs
+    @stringArgs
     def target(self, args, kwargs):
         info = self._args_to_info(args)
         return self.get_variable([info['tgt']], kwargs)
 
-    @permittedKwargs({})
+    @noKwargs
+    @stringArgs
     def target_type(self, args, kwargs):
         info = self._args_to_info(args)
         return info['func']
 
     @noPosargs
-    @permittedKwargs({})
+    @noKwargs
     def target_list(self, args, kwargs):
         return self.held_object.cm_interpreter.target_list()
 
     @noPosargs
-    @permittedKwargs({})
+    @noKwargs
     @FeatureNew('CMakeSubproject.found()', '0.53.2')
     def found_method(self, args, kwargs):
         return self.held_object is not None
 
 
+class CMakeSubprojectOptions(InterpreterObject):
+    def __init__(self) -> None:
+        super().__init__()
+        self.cmake_options = []  # type: T.List[str]
+        self.target_options = TargetOptions()
+
+        self.methods.update(
+            {
+                'add_cmake_defines': self.add_cmake_defines,
+                'set_override_option': self.set_override_option,
+                'set_install': self.set_install,
+                'append_compile_args': self.append_compile_args,
+                'append_link_args': self.append_link_args,
+                'clear': self.clear,
+            }
+        )
+
+    def _get_opts(self, kwargs: dict) -> SingleTargetOptions:
+        if 'target' in kwargs:
+            return self.target_options[kwargs['target']]
+        return self.target_options.global_options
+
+    @noKwargs
+    def add_cmake_defines(self, args, kwargs) -> None:
+        self.cmake_options += cmake_defines_to_args(args)
+
+    @stringArgs
+    @permittedKwargs({'target'})
+    def set_override_option(self, args, kwargs) -> None:
+        if len(args) != 2:
+            raise InvalidArguments('set_override_option takes exactly 2 positional arguments')
+        self._get_opts(kwargs).set_opt(args[0], args[1])
+
+    @permittedKwargs({'target'})
+    def set_install(self, args, kwargs) -> None:
+        if len(args) != 1 or not isinstance(args[0], bool):
+            raise InvalidArguments('set_install takes exactly 1 boolean argument')
+        self._get_opts(kwargs).set_install(args[0])
+
+    @stringArgs
+    @permittedKwargs({'target'})
+    def append_compile_args(self, args, kwargs) -> None:
+        if len(args) < 2:
+            raise InvalidArguments('append_compile_args takes at least 2 positional arguments')
+        self._get_opts(kwargs).append_args(args[0], args[1:])
+
+    @stringArgs
+    @permittedKwargs({'target'})
+    def append_link_args(self, args, kwargs) -> None:
+        if not args:
+            raise InvalidArguments('append_link_args takes at least 1 positional argument')
+        self._get_opts(kwargs).append_link_args(args)
+
+    @noPosargs
+    @noKwargs
+    def clear(self, args, kwargs) -> None:
+        self.cmake_options.clear()
+        self.target_options = TargetOptions()
+
+
 class CmakeModule(ExtensionModule):
     cmake_detected = False
     cmake_root = None
@@ -252,8 +342,7 @@
         (ofile_path, ofile_fname) = os.path.split(os.path.join(state.subdir, '{}Config.cmake'.format(name)))
         ofile_abs = os.path.join(state.environment.build_dir, ofile_path, ofile_fname)
 
-        if 'install_dir' not in kwargs:
-            install_dir = os.path.join(state.environment.coredata.get_builtin_option('libdir'), 'cmake', name)
+        install_dir = kwargs.get('install_dir', os.path.join(state.environment.coredata.get_builtin_option('libdir'), 'cmake', name))
         if not isinstance(install_dir, str):
             raise mesonlib.MesonException('"install_dir" must be a string.')
 
@@ -287,16 +376,27 @@
         return res
 
     @FeatureNew('subproject', '0.51.0')
-    @permittedKwargs({'cmake_options', 'required'})
+    @FeatureNewKwargs('subproject', '0.55.0', ['options'])
+    @FeatureDeprecatedKwargs('subproject', '0.55.0', ['cmake_options'])
+    @permittedKwargs({'cmake_options', 'required', 'options'})
     @stringArgs
     def subproject(self, interpreter, state, args, kwargs):
         if len(args) != 1:
             raise InterpreterException('Subproject takes exactly one argument')
+        if 'cmake_options' in kwargs and 'options' in kwargs:
+            raise InterpreterException('"options" cannot be used together with "cmake_options"')
         dirname = args[0]
         subp = interpreter.do_subproject(dirname, 'cmake', kwargs)
         if not subp.held_object:
             return subp
         return CMakeSubprojectHolder(subp, dirname)
 
+    @FeatureNew('subproject_options', '0.55.0')
+    @noKwargs
+    @noPosargs
+    def subproject_options(self, state, args, kwargs) -> ModuleReturnValue:
+        opts = CMakeSubprojectOptions()
+        return ModuleReturnValue(opts, [])
+
 def initialize(*args, **kwargs):
     return CmakeModule(*args, **kwargs)
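
The new CMakeSubprojectOptions object above routes every setter through _get_opts(): with a 'target' kwarg the per-target overrides are used, otherwise the global ones. A rough sketch of such a container, written here from scratch rather than taken from Meson's own cmake support code, assuming per-target entries are created lazily:

    import typing as T

    class SingleTargetOptions:
        def __init__(self) -> None:
            self.overrides = {}    # type: T.Dict[str, str]
            self.extra_args = {}   # type: T.Dict[str, T.List[str]]
            self.install = None    # type: T.Optional[bool]

        def set_opt(self, name: str, value: str) -> None:
            self.overrides[name] = value

        def set_install(self, install: bool) -> None:
            self.install = install

        def append_args(self, lang: str, args: T.List[str]) -> None:
            self.extra_args.setdefault(lang, []).extend(args)

    class TargetOptions:
        def __init__(self) -> None:
            self.global_options = SingleTargetOptions()
            self.per_target = {}   # type: T.Dict[str, SingleTargetOptions]

        def __getitem__(self, target: str) -> SingleTargetOptions:
            # Per-target settings are created on first access.
            return self.per_target.setdefault(target, SingleTargetOptions())

    opts = TargetOptions()
    opts.global_options.set_opt('CMAKE_CXX_STANDARD', '17')
    opts['mylib'].set_install(True)   # only affects the hypothetical 'mylib' target
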
diff -Nru meson-0.53.2/mesonbuild/modules/fs.py meson-0.57.0+really0.56.2/mesonbuild/modules/fs.py
--- meson-0.53.2/mesonbuild/modules/fs.py	2020-01-23 12:51:19.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/modules/fs.py	2021-01-06 10:39:48.000000000 +0000
@@ -14,35 +14,80 @@
 
 import typing as T
 import hashlib
-from pathlib import Path, PurePath
+from .._pathlib import Path, PurePath, PureWindowsPath
 
 from .. import mlog
 from . import ExtensionModule
 from . import ModuleReturnValue
 from ..mesonlib import MesonException
+from ..interpreterbase import FeatureNew
 
 from ..interpreterbase import stringArgs, noKwargs
 if T.TYPE_CHECKING:
-    from ..interpreter import ModuleState
+    from ..interpreter import Interpreter, ModuleState
 
 class FSModule(ExtensionModule):
 
-    def __init__(self, interpreter):
+    def __init__(self, interpreter: 'Interpreter') -> None:
         super().__init__(interpreter)
         self.snippets.add('generate_dub_file')
 
+    def _absolute_dir(self, state: 'ModuleState', arg: str) -> Path:
+        """
+        make an absolute path from a relative path, WITHOUT resolving symlinks
+        """
+        return Path(state.source_root) / Path(state.subdir) / Path(arg).expanduser()
+
     def _resolve_dir(self, state: 'ModuleState', arg: str) -> Path:
         """
-        resolves (makes absolute) a directory relative to calling meson.build,
+        resolves symlinks and makes absolute a directory relative to calling meson.build,
         if not already absolute
         """
-        return Path(state.source_root) / state.subdir / Path(arg).expanduser()
+        path = self._absolute_dir(state, arg)
+        try:
+            # accommodate unresolvable paths, e.g. symlink loops
+            path = path.resolve()
+        except Exception:
+            # return the best we could do
+            pass
+        return path
 
     def _check(self, check: str, state: 'ModuleState', args: T.Sequence[str]) -> ModuleReturnValue:
         if len(args) != 1:
             raise MesonException('fs.{} takes exactly one argument.'.format(check))
         test_file = self._resolve_dir(state, args[0])
-        return ModuleReturnValue(getattr(test_file, check)(), [])
+        val = getattr(test_file, check)()
+        if isinstance(val, Path):
+            val = str(val)
+        return ModuleReturnValue(val, [])
+
+    @stringArgs
+    @noKwargs
+    @FeatureNew('fs.expanduser', '0.54.0')
+    def expanduser(self, state: 'ModuleState', args: T.Sequence[str], kwargs: dict) -> ModuleReturnValue:
+        if len(args) != 1:
+            raise MesonException('fs.expanduser takes exactly one argument.')
+        return ModuleReturnValue(str(Path(args[0]).expanduser()), [])
+
+    @stringArgs
+    @noKwargs
+    @FeatureNew('fs.is_absolute', '0.54.0')
+    def is_absolute(self, state: 'ModuleState', args: T.Sequence[str], kwargs: dict) -> ModuleReturnValue:
+        if len(args) != 1:
+            raise MesonException('fs.is_absolute takes exactly one argument.')
+        return ModuleReturnValue(PurePath(args[0]).is_absolute(), [])
+
+    @stringArgs
+    @noKwargs
+    @FeatureNew('fs.as_posix', '0.54.0')
+    def as_posix(self, state: 'ModuleState', args: T.Sequence[str], kwargs: dict) -> ModuleReturnValue:
+        """
+        this function assumes you are passing a Windows path, even if on a Unix-like system
+        and so ALL '\' are turned to '/', even if you meant to escape a character
+        """
+        if len(args) != 1:
+            raise MesonException('fs.as_posix takes exactly one argument.')
+        return ModuleReturnValue(PureWindowsPath(args[0]).as_posix(), [])
 
     @stringArgs
     @noKwargs
@@ -52,7 +97,9 @@
     @stringArgs
     @noKwargs
     def is_symlink(self, state: 'ModuleState', args: T.Sequence[str], kwargs: dict) -> ModuleReturnValue:
-        return self._check('is_symlink', state, args)
+        if len(args) != 1:
+            raise MesonException('fs.is_symlink takes exactly one argument.')
+        return ModuleReturnValue(self._absolute_dir(state, args[0]).is_symlink(), [])
 
     @stringArgs
     @noKwargs
@@ -68,7 +115,7 @@
     @noKwargs
     def hash(self, state: 'ModuleState', args: T.Sequence[str], kwargs: dict) -> ModuleReturnValue:
         if len(args) != 2:
-            raise MesonException('method takes exactly two arguments.')
+            raise MesonException('fs.hash takes exactly two arguments.')
         file = self._resolve_dir(state, args[0])
         if not file.is_file():
             raise MesonException('{} is not a file and therefore cannot be hashed'.format(file))
@@ -84,7 +131,7 @@
     @noKwargs
     def size(self, state: 'ModuleState', args: T.Sequence[str], kwargs: dict) -> ModuleReturnValue:
         if len(args) != 1:
-            raise MesonException('method takes exactly one argument.')
+            raise MesonException('fs.size takes exactly one argument.')
         file = self._resolve_dir(state, args[0])
         if not file.is_file():
             raise MesonException('{} is not a file and therefore cannot be sized'.format(file))
@@ -113,7 +160,7 @@
     @noKwargs
     def replace_suffix(self, state: 'ModuleState', args: T.Sequence[str], kwargs: dict) -> ModuleReturnValue:
         if len(args) != 2:
-            raise MesonException('method takes exactly two arguments.')
+            raise MesonException('fs.replace_suffix takes exactly two arguments.')
         original = PurePath(args[0])
         new = original.with_suffix(args[1])
         return ModuleReturnValue(str(new), [])
@@ -122,7 +169,7 @@
     @noKwargs
     def parent(self, state: 'ModuleState', args: T.Sequence[str], kwargs: dict) -> ModuleReturnValue:
         if len(args) != 1:
-            raise MesonException('method takes exactly one argument.')
+            raise MesonException('fs.parent takes exactly one argument.')
         original = PurePath(args[0])
         new = original.parent
         return ModuleReturnValue(str(new), [])
@@ -131,10 +178,20 @@
     @noKwargs
     def name(self, state: 'ModuleState', args: T.Sequence[str], kwargs: dict) -> ModuleReturnValue:
         if len(args) != 1:
-            raise MesonException('method takes exactly one argument.')
+            raise MesonException('fs.name takes exactly one argument.')
         original = PurePath(args[0])
         new = original.name
         return ModuleReturnValue(str(new), [])
 
-def initialize(*args, **kwargs) -> FSModule:
+    @stringArgs
+    @noKwargs
+    @FeatureNew('fs.stem', '0.54.0')
+    def stem(self, state: 'ModuleState', args: T.Sequence[str], kwargs: dict) -> ModuleReturnValue:
+        if len(args) != 1:
+            raise MesonException('fs.stem takes exactly one argument.')
+        original = PurePath(args[0])
+        new = original.stem
+        return ModuleReturnValue(str(new), [])
+
+def initialize(*args: T.Any, **kwargs: T.Any) -> FSModule:
     return FSModule(*args, **kwargs)
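
The fs-module additions above (expanduser, is_absolute, as_posix, stem) are thin wrappers around pathlib, and is_symlink() now checks the un-resolved path so the link itself is inspected rather than its target. A short illustration of the underlying pathlib behaviour, with made-up example paths:

    from pathlib import Path, PurePath, PureWindowsPath

    print(Path('~/src').expanduser())                     # fs.expanduser
    print(PurePath('/abs/dir').is_absolute())             # fs.is_absolute -> True
    print(PureWindowsPath('foo\\bar\\baz.c').as_posix())  # fs.as_posix -> 'foo/bar/baz.c'
    print(PurePath('foo/bar.txt').stem)                   # fs.stem -> 'bar'

    # Why is_symlink() stopped resolving first: Path.resolve() follows the
    # link, so the resolved path describes the target, not the link itself.
    p = Path('some_link')                  # hypothetical symlink
    print(p.is_symlink())                  # inspects the link itself
    # p.resolve().is_symlink()             # would inspect the target instead
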
diff -Nru meson-0.53.2/mesonbuild/modules/gnome.py meson-0.57.0+really0.56.2/mesonbuild/modules/gnome.py
--- meson-0.53.2/mesonbuild/modules/gnome.py	2019-12-04 18:45:50.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/modules/gnome.py	2021-01-06 10:39:48.000000000 +0000
@@ -29,10 +29,11 @@
 from . import ExtensionModule
 from . import ModuleReturnValue
 from ..mesonlib import (
-    MachineChoice, MesonException, OrderedSet, Popen_safe, extract_as_list, join_args
+    MachineChoice, MesonException, OrderedSet, Popen_safe, extract_as_list,
+    join_args, unholder,
 )
-from ..dependencies import Dependency, PkgConfigDependency, InternalDependency
-from ..interpreterbase import noKwargs, permittedKwargs, FeatureNew, FeatureNewKwargs
+from ..dependencies import Dependency, PkgConfigDependency, InternalDependency, ExternalProgram
+from ..interpreterbase import noKwargs, permittedKwargs, FeatureNew, FeatureNewKwargs, FeatureDeprecatedKwargs
 
 # gresource compilation is broken due to the way
 # the resource compiler and Ninja clash about it
@@ -43,20 +44,6 @@
 
 native_glib_version = None
 
-@functools.lru_cache(maxsize=None)
-def gir_has_option(intr_obj, option):
-    try:
-        g_ir_scanner = intr_obj.find_program_impl('g-ir-scanner')
-        # Handle overridden g-ir-scanner
-        if isinstance(getattr(g_ir_scanner, "held_object", g_ir_scanner), interpreter.OverrideProgram):
-            assert option in ['--extra-library', '--sources-top-dirs']
-            return True
-
-        opts = Popen_safe(g_ir_scanner.get_command() + ['--help'], stderr=subprocess.STDOUT)[1]
-        return option in opts
-    except (MesonException, FileNotFoundError, subprocess.CalledProcessError):
-        return False
-
 class GnomeModule(ExtensionModule):
     gir_dep = None
 
@@ -83,11 +70,11 @@
                          mlog.bold('https://bugzilla.gnome.org/show_bug.cgi?id=774368'))
 
     @staticmethod
-    @mesonlib.run_once
     def _print_gdbus_warning():
         mlog.warning('Code generated with gdbus_codegen() requires the root directory be added to\n'
                      '  include_directories of targets with GLib < 2.51.3:',
-                     mlog.bold('https://github.com/mesonbuild/meson/issues/1387'))
+                     mlog.bold('https://github.com/mesonbuild/meson/issues/1387'),
+                     once=True)
 
     @FeatureNewKwargs('gnome.compile_resources', '0.37.0', ['gresource_bundle', 'export', 'install_header'])
     @permittedKwargs({'source_dir', 'c_name', 'dependencies', 'export', 'gresource_bundle', 'install_header',
@@ -96,54 +83,59 @@
         self.__print_gresources_warning(state)
         glib_version = self._get_native_glib_version(state)
 
-        cmd = ['glib-compile-resources', '@INPUT@']
+        glib_compile_resources = self.interpreter.find_program_impl('glib-compile-resources')
+        cmd = [glib_compile_resources, '@INPUT@']
 
-        source_dirs, dependencies = mesonlib.extract_as_list(kwargs, 'source_dir', 'dependencies', pop=True)
+        source_dirs, dependencies = [mesonlib.extract_as_list(kwargs, c, pop=True) for c in  ['source_dir', 'dependencies']]
 
         if len(args) < 2:
             raise MesonException('Not enough arguments; the name of the resource '
                                  'and the path to the XML file are required')
 
         # Validate dependencies
-        for (ii, dep) in enumerate(dependencies):
-            if hasattr(dep, 'held_object'):
-                dependencies[ii] = dep = dep.held_object
-            if not isinstance(dep, (mesonlib.File, build.CustomTarget, build.CustomTargetIndex)):
-                m = 'Unexpected dependency type {!r} for gnome.compile_resources() ' \
-                    '"dependencies" argument.\nPlease pass the return value of ' \
-                    'custom_target() or configure_file()'
-                raise MesonException(m.format(dep))
-            if isinstance(dep, (build.CustomTarget, build.CustomTargetIndex)):
+        subdirs = []
+        depends = []
+        for (ii, dep) in enumerate(unholder(dependencies)):
+            if isinstance(dep, mesonlib.File):
+                subdirs.append(dep.subdir)
+            elif isinstance(dep, (build.CustomTarget, build.CustomTargetIndex)):
+                depends.append(dep)
+                subdirs.append(dep.get_subdir())
                 if not mesonlib.version_compare(glib_version, gresource_dep_needed_version):
                     m = 'The "dependencies" argument of gnome.compile_resources() can not\n' \
                         'be used with the current version of glib-compile-resources due to\n' \
                         '<https://bugzilla.gnome.org/show_bug.cgi?id=774368>'
                     raise MesonException(m)
-
-        ifile = args[1]
-        if isinstance(ifile, mesonlib.File):
-            # glib-compile-resources will be run inside the source dir,
-            # so we need either 'src_to_build' or the absolute path.
-            # Absolute path is the easiest choice.
-            if ifile.is_built:
-                ifile = os.path.join(state.environment.get_build_dir(), ifile.subdir, ifile.fname)
             else:
-                ifile = os.path.join(ifile.subdir, ifile.fname)
-        elif isinstance(ifile, str):
-            ifile = os.path.join(state.subdir, ifile)
-        elif isinstance(ifile, (interpreter.CustomTargetHolder,
-                                interpreter.CustomTargetIndexHolder,
-                                interpreter.GeneratedObjectsHolder)):
-            m = 'Resource xml files generated at build-time cannot be used ' \
-                'with gnome.compile_resources() because we need to scan ' \
-                'the xml for dependencies. Use configure_file() instead ' \
-                'to generate it at configure-time.'
-            raise MesonException(m)
-        else:
-            raise MesonException('Invalid file argument: {!r}'.format(ifile))
+                m = 'Unexpected dependency type {!r} for gnome.compile_resources() ' \
+                    '"dependencies" argument.\nPlease pass the return value of ' \
+                    'custom_target() or configure_file()'
+                raise MesonException(m.format(dep))
 
-        depend_files, depends, subdirs = self._get_gresource_dependencies(
-            state, ifile, source_dirs, dependencies)
+        if not mesonlib.version_compare(glib_version, gresource_dep_needed_version):
+            ifile = args[1]
+            if isinstance(ifile, mesonlib.File):
+                # glib-compile-resources will be run inside the source dir,
+                # so we need either 'src_to_build' or the absolute path.
+                # Absolute path is the easiest choice.
+                if ifile.is_built:
+                    ifile = os.path.join(state.environment.get_build_dir(), ifile.subdir, ifile.fname)
+                else:
+                    ifile = os.path.join(ifile.subdir, ifile.fname)
+            elif isinstance(ifile, str):
+                ifile = os.path.join(state.subdir, ifile)
+            elif isinstance(ifile, (interpreter.CustomTargetHolder,
+                                    interpreter.CustomTargetIndexHolder,
+                                    interpreter.GeneratedObjectsHolder)):
+                m = 'Resource xml files generated at build-time cannot be used ' \
+                    'with gnome.compile_resources() because we need to scan ' \
+                    'the xml for dependencies. Use configure_file() instead ' \
+                    'to generate it at configure-time.'
+                raise MesonException(m)
+            else:
+                raise MesonException('Invalid file argument: {!r}'.format(ifile))
+            depend_files, depends, subdirs = self._get_gresource_dependencies(
+                state, ifile, source_dirs, dependencies)
 
         # Make source dirs relative to build dir now
         source_dirs = [os.path.join(state.build_to_src, state.subdir, d) for d in source_dirs]
@@ -170,8 +162,14 @@
             output = args[0] + '.gresource'
             name = args[0] + '_gresource'
         else:
-            output = args[0] + '.c'
-            name = args[0] + '_c'
+            if 'c' in state.environment.coredata.compilers.host.keys():
+                output = args[0] + '.c'
+                name = args[0] + '_c'
+            elif 'cpp' in state.environment.coredata.compilers.host.keys():
+                output = args[0] + '.cpp'
+                name = args[0] + '_cpp'
+            else:
+                raise MesonException('Compiling GResources into code is only supported in C and C++ projects')
 
         if kwargs.get('install', False) and not gresource:
             raise MesonException('The install kwarg only applies to gresource bundles, see install_header')
@@ -226,7 +224,10 @@
         for source_dir in source_dirs:
             cmd += ['--sourcedir', os.path.join(state.subdir, source_dir)]
 
-        pc, stdout, stderr = Popen_safe(cmd, cwd=state.environment.get_source_dir())
+        try:
+            pc, stdout, stderr = Popen_safe(cmd, cwd=state.environment.get_source_dir())
+        except (FileNotFoundError, PermissionError):
+            raise MesonException('Could not execute glib-compile-resources.')
         if pc.returncode != 0:
             m = 'glib-compile-resources failed to get dependencies for {}:\n{}'
             mlog.warning(m.format(cmd[1], stderr))
@@ -238,9 +239,7 @@
         subdirs = []
         for resfile in dep_files[:]:
             resbasename = os.path.basename(resfile)
-            for dep in dependencies:
-                if hasattr(dep, 'held_object'):
-                    dep = dep.held_object
+            for dep in unholder(dependencies):
                 if isinstance(dep, mesonlib.File):
                     if dep.fname != resbasename:
                         continue
@@ -301,7 +300,7 @@
                 link_command.append('-L' + d)
                 if include_rpath:
                     link_command.append('-Wl,-rpath,' + d)
-        if gir_has_option(self.interpreter, '--extra-library') and use_gir_args:
+        if use_gir_args and self._gir_has_option('--extra-library'):
             link_command.append('--extra-library=' + lib.name)
         else:
             link_command.append('-l' + lib.name)
@@ -316,15 +315,17 @@
         # require two args in order, such as -framework AVFoundation
         external_ldflags_nodedup = []
         gi_includes = OrderedSet()
-        deps = mesonlib.listify(deps, unholder=True)
+        deps = mesonlib.unholder(mesonlib.listify(deps))
 
         for dep in deps:
+            if isinstance(dep, Dependency):
+                girdir = dep.get_variable(pkgconfig='girdir', internal='girdir', default_value='')
+                if girdir:
+                    gi_includes.update([girdir])
             if isinstance(dep, InternalDependency):
                 cflags.update(dep.get_compile_args())
                 cflags.update(get_include_args(dep.include_directories))
-                for lib in dep.libraries:
-                    if hasattr(lib, 'held_object'):
-                        lib = lib.held_object
+                for lib in unholder(dep.libraries):
                     if isinstance(lib, build.SharedLibrary):
                         internal_ldflags.update(self._get_link_args(state, lib, depends, include_rpath))
                         libdepflags = self._get_dependencies_flags(lib.get_external_deps(), state, depends, include_rpath,
@@ -341,9 +342,7 @@
                 external_ldflags.update(extdepflags[2])
                 external_ldflags_nodedup += extdepflags[3]
                 gi_includes.update(extdepflags[4])
-                for source in dep.sources:
-                    if hasattr(source, 'held_object'):
-                        source = source.held_object
+                for source in unholder(dep.sources):
                     if isinstance(source, GirTarget):
                         gi_includes.update([os.path.join(state.environment.get_build_dir(),
                                             source.get_subdir())])
@@ -373,11 +372,6 @@
                         external_ldflags_nodedup += [lib, next(ldflags)]
                     else:
                         external_ldflags.update([lib])
-
-                if isinstance(dep, PkgConfigDependency):
-                    girdir = dep.get_pkgconfig_variable("girdir", {'default': ''})
-                    if girdir:
-                        gi_includes.update([girdir])
             elif isinstance(dep, (build.StaticLibrary, build.SharedLibrary)):
                 cflags.update(get_include_args(dep.get_include_dirs()))
                 depends.append(dep)
@@ -385,7 +379,7 @@
                 mlog.log('dependency {!r} not handled to build gir files'.format(dep))
                 continue
 
-        if gir_has_option(self.interpreter, '--extra-library') and use_gir_args:
+        if use_gir_args and self._gir_has_option('--extra-library'):
             def fix_ldflags(ldflags):
                 fixed_ldflags = OrderedSet()
                 for ldflag in ldflags:
@@ -419,15 +413,37 @@
         return girtarget
 
     def _get_gir_dep(self, state):
-        try:
-            gir_dep = self.gir_dep or PkgConfigDependency('gobject-introspection-1.0',
-                                                          state.environment,
-                                                          {'native': True})
-            pkgargs = gir_dep.get_compile_args()
-        except Exception:
-            raise MesonException('gobject-introspection dependency was not found, gir cannot be generated.')
+        if not self.gir_dep:
+            kwargs = {'native': True, 'required': True}
+            holder = self.interpreter.func_dependency(state.current_node, ['gobject-introspection-1.0'], kwargs)
+            self.gir_dep = holder.held_object
+            giscanner = state.environment.lookup_binary_entry(MachineChoice.HOST, 'g-ir-scanner')
+            if giscanner is not None:
+                self.giscanner = ExternalProgram.from_entry('g-ir-scanner', giscanner)
+            elif self.gir_dep.type_name == 'pkgconfig':
+                self.giscanner = ExternalProgram('g_ir_scanner', self.gir_dep.get_pkgconfig_variable('g_ir_scanner', {}))
+            else:
+                self.giscanner = self.interpreter.find_program_impl('g-ir-scanner')
+            gicompiler = state.environment.lookup_binary_entry(MachineChoice.HOST, 'g-ir-compiler')
+            if gicompiler is not None:
+                self.gicompiler = ExternalProgram.from_entry('g-ir-compiler', gicompiler)
+            elif self.gir_dep.type_name == 'pkgconfig':
+                self.gicompiler = ExternalProgram('g_ir_compiler', self.gir_dep.get_pkgconfig_variable('g_ir_compiler', {}))
+            else:
+                self.gicompiler = self.interpreter.find_program_impl('g-ir-compiler')
+        return self.gir_dep, self.giscanner, self.gicompiler
 
-        return gir_dep, pkgargs
+    @functools.lru_cache(maxsize=None)
+    def _gir_has_option(self, option):
+        exe = self.giscanner
+        if hasattr(exe, 'held_object'):
+            exe = exe.held_object
+        if isinstance(exe, interpreter.OverrideProgram):
+            # Handle overridden g-ir-scanner
+            assert option in ['--extra-library', '--sources-top-dirs']
+            return True
+        p, o, e = Popen_safe(exe.get_command() + ['--help'], stderr=subprocess.STDOUT)
+        return p.returncode == 0 and option in o
 
     def _scan_header(self, kwargs):
         ret = []
@@ -457,9 +473,7 @@
 
         if 'includes' in kwargs:
             includes = mesonlib.extract_as_list(kwargs, 'includes', pop = True)
-            for inc in includes:
-                if hasattr(inc, 'held_object'):
-                    inc = inc.held_object
+            for inc in unholder(includes):
                 if isinstance(inc, str):
                     ret += ['--include=%s' % (inc, )]
                 elif isinstance(inc, GirTarget):
@@ -616,9 +630,7 @@
         gir_filelist_filename = os.path.join(gir_filelist_dir, '%s_%s_gir_filelist' % (ns, nsversion))
 
         with open(gir_filelist_filename, 'w', encoding='utf-8') as gir_filelist:
-            for s in libsources:
-                if hasattr(s, 'held_object'):
-                    s = s.held_object
+            for s in unholder(libsources):
                 if isinstance(s, (build.CustomTarget, build.CustomTargetIndex)):
                     for custom_output in s.get_outputs():
                         gir_filelist.write(os.path.join(state.environment.get_build_dir(),
@@ -671,15 +683,11 @@
         # dependencies and also find the include directories needed for the
         # typelib generation custom target below.
         typelib_includes = []
-        for dep in deps:
-            if hasattr(dep, 'held_object'):
-                dep = dep.held_object
+        for dep in unholder(deps):
             # Add a dependency on each GirTarget listed in dependencies and add
             # the directory where it will be generated to the typelib includes
             if isinstance(dep, InternalDependency):
-                for source in dep.sources:
-                    if hasattr(source, 'held_object'):
-                        source = source.held_object
+                for source in unholder(dep.sources):
                     if isinstance(source, GirTarget) and source not in depends:
                         depends.append(source)
                         subdir = os.path.join(state.environment.get_build_dir(),
@@ -698,11 +706,10 @@
                                               source.get_subdir())
                         if subdir not in typelib_includes:
                             typelib_includes.append(subdir)
-            elif isinstance(dep, PkgConfigDependency):
-                girdir = dep.get_pkgconfig_variable("girdir", {'default': ''})
+            if isinstance(dep, Dependency):
+                girdir = dep.get_variable(pkgconfig='girdir', internal='girdir', default_value='')
                 if girdir and girdir not in typelib_includes:
                     typelib_includes.append(girdir)
-
         return typelib_includes
 
     def _get_external_args_for_langs(self, state, langs):
@@ -725,39 +732,42 @@
             if f.startswith(('-L', '-l', '--extra-library')):
                 yield f
 
-    @FeatureNewKwargs('build target', '0.40.0', ['build_by_default'])
+    @FeatureNewKwargs('generate_gir', '0.55.0', ['fatal_warnings'])
+    @FeatureNewKwargs('generate_gir', '0.40.0', ['build_by_default'])
     @permittedKwargs({'sources', 'nsversion', 'namespace', 'symbol_prefix', 'identifier_prefix',
                       'export_packages', 'includes', 'dependencies', 'link_with', 'include_directories',
                       'install', 'install_dir_gir', 'install_dir_typelib', 'extra_args',
-                      'packages', 'header', 'build_by_default'})
+                      'packages', 'header', 'build_by_default', 'fatal_warnings'})
     def generate_gir(self, state, args, kwargs):
         if not args:
             raise MesonException('generate_gir takes at least one argument')
         if kwargs.get('install_dir'):
             raise MesonException('install_dir is not supported with generate_gir(), see "install_dir_gir" and "install_dir_typelib"')
 
-        giscanner = self.interpreter.find_program_impl('g-ir-scanner')
-        gicompiler = self.interpreter.find_program_impl('g-ir-compiler')
-
         girtargets = [self._unwrap_gir_target(arg, state) for arg in args]
 
         if len(girtargets) > 1 and any([isinstance(el, build.Executable) for el in girtargets]):
             raise MesonException('generate_gir only accepts a single argument when one of the arguments is an executable')
 
-        self.gir_dep, pkgargs = self._get_gir_dep(state)
+        gir_dep, giscanner, gicompiler = self._get_gir_dep(state)
 
-        ns = kwargs.pop('namespace')
-        nsversion = kwargs.pop('nsversion')
+        ns = kwargs.get('namespace')
+        if not ns:
+            raise MesonException('Missing "namespace" keyword argument')
+        nsversion = kwargs.get('nsversion')
+        if not nsversion:
+            raise MesonException('Missing "nsversion" keyword argument')
         libsources = mesonlib.extract_as_list(kwargs, 'sources', pop=True)
         girfile = '%s-%s.gir' % (ns, nsversion)
         srcdir = os.path.join(state.environment.get_source_dir(), state.subdir)
         builddir = os.path.join(state.environment.get_build_dir(), state.subdir)
-        depends = [] + girtargets
+        depends = gir_dep.sources + girtargets
         gir_inc_dirs = []
         langs_compilers = self._get_girtargets_langs_compilers(girtargets)
         cflags, internal_ldflags, external_ldflags = self._get_langs_compilers_flags(state, langs_compilers)
         deps = self._get_gir_targets_deps(girtargets)
-        deps += extract_as_list(kwargs, 'dependencies', pop=True, unholder=True)
+        deps += mesonlib.unholder(extract_as_list(kwargs, 'dependencies', pop=True))
+        deps += [gir_dep]
         typelib_includes = self._gather_typelib_includes_and_update_depends(state, deps, depends)
         # ldflags will be misinterpreted by gir scanner (showing
         # spurious dependencies) but building GStreamer fails if they
@@ -772,7 +782,6 @@
         inc_dirs = self._scan_inc_dirs(kwargs)
 
         scan_command = [giscanner]
-        scan_command += pkgargs
         scan_command += ['--no-libtool']
         scan_command += ['--namespace=' + ns, '--nsversion=' + nsversion]
         scan_command += ['--warn-all']
@@ -797,10 +806,18 @@
         scan_command += self._scan_langs(state, [lc[0] for lc in langs_compilers])
         scan_command += list(external_ldflags)
 
-        if gir_has_option(self.interpreter, '--sources-top-dirs'):
+        if self._gir_has_option('--sources-top-dirs'):
             scan_command += ['--sources-top-dirs', os.path.join(state.environment.get_source_dir(), self.interpreter.subproject_dir, state.subproject)]
             scan_command += ['--sources-top-dirs', os.path.join(state.environment.get_build_dir(), self.interpreter.subproject_dir, state.subproject)]
 
+        if '--warn-error' in scan_command:
+            mlog.deprecation('Passing --warn-error is deprecated in favor of "fatal_warnings" keyword argument since v0.55')
+        fatal_warnings = kwargs.get('fatal_warnings', False)
+        if not isinstance(fatal_warnings, bool):
+            raise MesonException('fatal_warnings keyword argument must be a boolean')
+        if fatal_warnings:
+            scan_command.append('--warn-error')
+
         scan_target = self._make_gir_target(state, girfile, scan_command, depends, kwargs)
 
         typelib_output = '%s-%s.typelib' % (ns, nsversion)
@@ -837,6 +854,8 @@
         return ModuleReturnValue(target_g, [target_g])
 
     @permittedKwargs({'sources', 'media', 'symlink_media', 'languages'})
+    @FeatureDeprecatedKwargs('gnome.yelp', '0.43.0', ['languages'],
+                             'Use a LINGUAS file in the source directory instead')
     def yelp(self, state, args, kwargs):
         if len(args) < 1:
             raise MesonException('Yelp requires a project id')
@@ -851,11 +870,6 @@
         source_str = '@@'.join(sources)
 
         langs = mesonlib.stringlistify(kwargs.pop('languages', []))
-        if langs:
-            mlog.deprecation('''The "languages" argument of gnome.yelp() is deprecated.
-Use a LINGUAS file in the sources directory instead.
-This will become a hard error in the future.''')
-
         media = mesonlib.stringlistify(kwargs.pop('media', []))
         symlinks = kwargs.pop('symlink_media', True)
 
@@ -912,7 +926,7 @@
                       'fixxref_args', 'html_args', 'html_assets', 'content_files',
                       'mkdb_args', 'ignore_headers', 'include_directories',
                       'namespace', 'mode', 'expand_content_files', 'module_version',
-                      'c_args'})
+                      'c_args', 'check'})
     def gtkdoc(self, state, args, kwargs):
         if len(args) != 1:
             raise MesonException('Gtkdoc must have one positional argument.')
@@ -984,9 +998,7 @@
 
         depends = []
         content_files = []
-        for s in mesonlib.extract_as_list(kwargs, 'content_files'):
-            if hasattr(s, 'held_object'):
-                s = s.held_object
+        for s in unholder(mesonlib.extract_as_list(kwargs, 'content_files')):
             if isinstance(s, (build.CustomTarget, build.CustomTargetIndex)):
                 depends.append(s)
                 for o in s.get_outputs():
@@ -1038,7 +1050,7 @@
 
     def _get_build_args(self, kwargs, state, depends):
         args = []
-        deps = extract_as_list(kwargs, 'dependencies', unholder=True)
+        deps = mesonlib.unholder(extract_as_list(kwargs, 'dependencies'))
         cflags = []
         cflags.extend(mesonlib.stringlistify(kwargs.pop('c_args', [])))
         deps_cflags, internal_ldflags, external_ldflags, gi_includes = \
@@ -1582,9 +1594,7 @@
         vapi_includes = []
         ret = []
         remaining_args = []
-        for arg in arg_list:
-            if hasattr(arg, 'held_object'):
-                arg = arg.held_object
+        for arg in unholder(arg_list):
             if isinstance(arg, InternalDependency):
                 targets = [t for t in arg.sources if isinstance(t, VapiTarget)]
                 for target in targets:
@@ -1692,7 +1702,7 @@
         # - add relevant directories to include dirs
         incs = [build.IncludeDirs(state.subdir, ['.'] + vapi_includes, False)]
         sources = [vapi_target] + vapi_depends
-        rv = InternalDependency(None, incs, [], [], link_with, [], sources, [])
+        rv = InternalDependency(None, incs, [], [], link_with, [], sources, [], {})
         created_values.append(rv)
         return ModuleReturnValue(rv, created_values)
 
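The gnome.py hunks above replace the old module-level gir_has_option() helper with a
cached _gir_has_option() method that probes the scanner's --help output once per option.
A minimal standalone sketch of that probing idea in Python, assuming only that
g-ir-scanner is on PATH (the helper name below is made up, not part of the patch):

    import functools
    import subprocess

    @functools.lru_cache(maxsize=None)
    def tool_has_option(tool, option):
        # Run "<tool> --help" once per (tool, option) pair and cache whether
        # the option is mentioned in its output.
        try:
            p = subprocess.run([tool, '--help'], stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT, universal_newlines=True)
        except FileNotFoundError:
            return False
        return p.returncode == 0 and option in p.stdout

    if tool_has_option('g-ir-scanner', '--sources-top-dirs'):
        print('scanner supports --sources-top-dirs')
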
diff -Nru meson-0.53.2/mesonbuild/modules/hotdoc.py meson-0.57.0+really0.56.2/mesonbuild/modules/hotdoc.py
--- meson-0.53.2/mesonbuild/modules/hotdoc.py	2019-11-28 15:13:28.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/modules/hotdoc.py	2021-01-06 10:39:48.000000000 +0000
@@ -326,7 +326,7 @@
         for path in self.include_paths.keys():
             self.cmd.extend(['--include-path', path])
 
-        if self.state.environment.coredata.get_builtin_option('werror'):
+        if self.state.environment.coredata.get_builtin_option('werror', self.state.subproject):
             self.cmd.append('--fatal-warning')
         self.generate_hotdoc_config()
 
diff -Nru meson-0.53.2/mesonbuild/modules/i18n.py meson-0.57.0+really0.56.2/mesonbuild/modules/i18n.py
--- meson-0.53.2/mesonbuild/modules/i18n.py	2019-09-28 23:52:33.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/modules/i18n.py	2021-01-06 10:39:48.000000000 +0000
@@ -16,7 +16,7 @@
 
 from os import path
 from .. import coredata, mesonlib, build, mlog
-from ..mesonlib import MesonException, run_once
+from ..mesonlib import MesonException
 from . import ModuleReturnValue
 from . import ExtensionModule
 from ..interpreterbase import permittedKwargs, FeatureNew, FeatureNewKwargs
@@ -59,9 +59,8 @@
 class I18nModule(ExtensionModule):
 
     @staticmethod
-    @run_once
     def nogettext_warning():
-        mlog.warning('Gettext not found, all translation targets will be ignored.')
+        mlog.warning('Gettext not found, all translation targets will be ignored.', once=True)
         return ModuleReturnValue(None, [])
 
     @staticmethod
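
The i18n.py hunk above drops the @run_once decorator in favour of the newer
mlog.warning(..., once=True) form. A tiny sketch of what "warn only once" means in
practice (hypothetical helper, illustration only):

    _seen = set()

    def warn_once(message):
        # Emit a given warning text at most once per process.
        if message in _seen:
            return
        _seen.add(message)
        print('WARNING:', message)

    warn_once('Gettext not found, all translation targets will be ignored.')
    warn_once('Gettext not found, all translation targets will be ignored.')  # suppressed
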
diff -Nru meson-0.53.2/mesonbuild/modules/__init__.py meson-0.57.0+really0.56.2/mesonbuild/modules/__init__.py
--- meson-0.53.2/mesonbuild/modules/__init__.py	2019-12-04 18:45:50.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/modules/__init__.py	2020-09-17 22:00:44.000000000 +0000
@@ -18,14 +18,19 @@
 import os
 
 from .. import build
+from ..mesonlib import unholder
+import typing as T
 
+if T.TYPE_CHECKING:
+    from ..interpreter import Interpreter
+    from ..interpreterbase import TYPE_var
 
 class ExtensionModule:
-    def __init__(self, interpreter):
+    def __init__(self, interpreter: 'Interpreter') -> None:
         self.interpreter = interpreter
-        self.snippets = set() # List of methods that operate only on the interpreter.
+        self.snippets = set()  # type: T.Set[str] # List of methods that operate only on the interpreter.
 
-    def is_snippet(self, funcname):
+    def is_snippet(self, funcname: str) -> bool:
         return funcname in self.snippets
 
 
@@ -38,12 +43,7 @@
         return []
 
     dirs_str = []
-    for incdirs in include_dirs:
-        if hasattr(incdirs, "held_object"):
-            dirs = incdirs.held_object
-        else:
-            dirs = incdirs
-
+    for dirs in unholder(include_dirs):
         if isinstance(dirs, str):
             dirs_str += ['%s%s' % (prefix, dirs)]
             continue
@@ -61,8 +61,19 @@
 
     return dirs_str
 
+def is_module_library(fname):
+    '''
+    Check if the file is a library-like file generated by a module-specific
+    target, such as GirTarget or TypelibTarget
+    '''
+    if hasattr(fname, 'fname'):
+        fname = fname.fname
+    suffix = fname.split('.')[-1]
+    return suffix in ('gir', 'typelib')
+
+
 class ModuleReturnValue:
-    def __init__(self, return_value, new_objects):
+    def __init__(self, return_value: 'TYPE_var', new_objects: T.List['TYPE_var']) -> None:
         self.return_value = return_value
         assert(isinstance(new_objects, list))
         self.new_objects = new_objects
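
Several hunks in this patch (gnome.py, qt.py, modules/__init__.py) collapse the repeated
"if hasattr(x, 'held_object')" loops into a single mesonlib.unholder() call, imported
above. A simplified stand-in showing the behaviour those loops relied on (illustrative
only, not the real mesonlib implementation):

    def unholder(item):
        # Unwrap interpreter holder objects (or a list of them) so callers
        # only deal with the underlying build-level objects.
        if isinstance(item, list):
            return [i.held_object if hasattr(i, 'held_object') else i for i in item]
        if hasattr(item, 'held_object'):
            return item.held_object
        return item
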
diff -Nru meson-0.53.2/mesonbuild/modules/keyval.py meson-0.57.0+really0.56.2/mesonbuild/modules/keyval.py
--- meson-0.53.2/mesonbuild/modules/keyval.py	1970-01-01 00:00:00.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/modules/keyval.py	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,71 @@
+# Copyright 2017, 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from . import ExtensionModule
+
+from .. import mesonlib
+from ..mesonlib import typeslistify
+from ..interpreterbase import FeatureNew, noKwargs
+from ..interpreter import InvalidCode
+
+import os
+
+class KeyvalModule(ExtensionModule):
+
+    @FeatureNew('Keyval Module', '0.55.0')
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.snippets.add('load')
+
+    def _load_file(self, path_to_config):
+        result = dict()
+        try:
+            with open(path_to_config) as f:
+                for line in f:
+                    if '#' in line:
+                        comment_idx = line.index('#')
+                        line = line[:comment_idx]
+                    line = line.strip()
+                    try:
+                        name, val = line.split('=', 1)
+                    except ValueError:
+                        continue
+                    result[name.strip()] = val.strip()
+        except IOError as e:
+            raise mesonlib.MesonException('Failed to load {}: {}'.format(path_to_config, e))
+
+        return result
+
+    @noKwargs
+    def load(self, interpreter, state, args, kwargs):
+        sources = typeslistify(args, (str, mesonlib.File))
+        if len(sources) != 1:
+            raise InvalidCode('load takes only one file input.')
+
+        s = sources[0]
+        is_built = False
+        if isinstance(s, mesonlib.File):
+            is_built = is_built or s.is_built
+            s = s.absolute_path(interpreter.environment.source_dir, interpreter.environment.build_dir)
+        else:
+            s = os.path.join(interpreter.environment.source_dir, s)
+
+        if s not in interpreter.build_def_files and not is_built:
+            interpreter.build_def_files.append(s)
+
+        return self._load_file(s)
+
+
+def initialize(*args, **kwargs):
+    return KeyvalModule(*args, **kwargs)
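
The new keyval module parses simple "name = value" files: "#" starts a comment, and
lines without an assignment are ignored. A self-contained sketch of the same parsing
loop as _load_file(), run against an inline string instead of a file on disk (the
sample data is invented for illustration):

    sample = """
    # config fragment
    CONFIG_FOO=y
    CONFIG_BAR = "baz"   # trailing comment
    not_an_assignment
    """

    result = {}
    for line in sample.splitlines():
        if '#' in line:
            line = line[:line.index('#')]
        line = line.strip()
        try:
            name, val = line.split('=', 1)
        except ValueError:
            continue
        result[name.strip()] = val.strip()

    print(result)  # {'CONFIG_FOO': 'y', 'CONFIG_BAR': '"baz"'}
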
diff -Nru meson-0.53.2/mesonbuild/modules/pkgconfig.py meson-0.57.0+really0.56.2/mesonbuild/modules/pkgconfig.py
--- meson-0.53.2/mesonbuild/modules/pkgconfig.py	2020-01-23 22:29:05.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/modules/pkgconfig.py	2021-01-06 10:39:48.000000000 +0000
@@ -13,10 +13,11 @@
 # limitations under the License.
 
 import os, types
-from pathlib import PurePath
+from .._pathlib import PurePath
 
 from .. import build
 from .. import dependencies
+from ..dependencies.misc import ThreadDependency
 from .. import mesonlib
 from .. import mlog
 from . import ModuleReturnValue
@@ -26,7 +27,8 @@
 already_warned_objs = set()
 
 class DependenciesHelper:
-    def __init__(self, name):
+    def __init__(self, state, name):
+        self.state = state
         self.name = name
         self.pub_libs = []
         self.pub_reqs = []
@@ -34,6 +36,7 @@
         self.priv_reqs = []
         self.cflags = []
         self.version_reqs = {}
+        self.link_whole_targets = []
 
     def add_pub_libs(self, libs):
         libs, reqs, cflags = self._process_libs(libs, True)
@@ -72,7 +75,9 @@
     def _process_reqs(self, reqs):
         '''Returns string names of requirements'''
         processed_reqs = []
-        for obj in mesonlib.listify(reqs, unholder=True):
+        for obj in mesonlib.unholder(mesonlib.listify(reqs)):
+            if not isinstance(obj, str):
+                FeatureNew.single_use('pkgconfig.generate requirement from non-string object', '0.46.0', self.state.subproject)
             if hasattr(obj, 'generated_pc'):
                 self._check_generated_pc_deprecation(obj)
                 processed_reqs.append(obj.generated_pc)
@@ -91,7 +96,7 @@
                 self.add_version_reqs(name, version_req)
             elif isinstance(obj, dependencies.Dependency) and not obj.found():
                 pass
-            elif isinstance(obj, dependencies.ThreadDependency):
+            elif isinstance(obj, ThreadDependency):
                 pass
             else:
                 raise mesonlib.MesonException('requires argument not a string, '
@@ -104,7 +109,7 @@
         self.cflags += mesonlib.stringlistify(cflags)
 
     def _process_libs(self, libs, public):
-        libs = mesonlib.listify(libs, unholder=True)
+        libs = mesonlib.unholder(mesonlib.listify(libs))
         processed_libs = []
         processed_reqs = []
         processed_cflags = []
@@ -122,17 +127,11 @@
                 if obj.found():
                     processed_reqs.append(obj.name)
                     self.add_version_reqs(obj.name, obj.version_reqs)
-            elif isinstance(obj, dependencies.ThreadDependency):
-                processed_libs += obj.get_compiler().thread_link_flags(obj.env)
-                processed_cflags += obj.get_compiler().thread_flags(obj.env)
             elif isinstance(obj, dependencies.InternalDependency):
                 if obj.found():
                     processed_libs += obj.get_link_args()
                     processed_cflags += obj.get_compile_args()
-                    if public:
-                        self.add_pub_libs(obj.libraries)
-                    else:
-                        self.add_priv_libs(obj.libraries)
+                    self._add_lib_dependencies(obj.libraries, obj.whole_libraries, obj.ext_deps, public, private_external_deps=True)
             elif isinstance(obj, dependencies.Dependency):
                 if obj.found():
                     processed_libs += obj.get_link_args()
@@ -147,12 +146,13 @@
                 processed_libs.append(obj)
             elif isinstance(obj, (build.SharedLibrary, build.StaticLibrary)):
                 processed_libs.append(obj)
-                if isinstance(obj, build.StaticLibrary) and public:
-                    self.add_pub_libs(obj.get_dependencies(for_pkgconfig=True))
-                    self.add_pub_libs(obj.get_external_deps())
-                else:
-                    self.add_priv_libs(obj.get_dependencies(for_pkgconfig=True))
-                    self.add_priv_libs(obj.get_external_deps())
+                # If there is a static library in `Libs:` all its deps must be
+                # public too, otherwise the generated pc file will never be
+                # usable without --static.
+                self._add_lib_dependencies(obj.link_targets,
+                                           obj.link_whole_targets,
+                                           obj.external_deps,
+                                           isinstance(obj, build.StaticLibrary) and public)
             elif isinstance(obj, str):
                 processed_libs.append(obj)
             else:
@@ -160,6 +160,34 @@
 
         return processed_libs, processed_reqs, processed_cflags
 
+    def _add_lib_dependencies(self, link_targets, link_whole_targets, external_deps, public, private_external_deps=False):
+        add_libs = self.add_pub_libs if public else self.add_priv_libs
+        # Recursively add all linked libraries
+        for t in link_targets:
+            # Internal libraries (uninstalled static library) will be promoted
+            # to link_whole, treat them as such here.
+            if t.is_internal():
+                self._add_link_whole(t, public)
+            else:
+                add_libs([t])
+        for t in link_whole_targets:
+            self._add_link_whole(t, public)
+        # And finally its external dependencies
+        if private_external_deps:
+            self.add_priv_libs(external_deps)
+        else:
+            add_libs(external_deps)
+
+    def _add_link_whole(self, t, public):
+        # Don't include static libraries that we link_whole. But we still need to
+        # include their dependencies: a static library we link_whole
+        # could itself link to a shared library or an installed static library.
+        # Keep track of link_whole_targets so we can remove them from our
+        # lists in case a library is link_with and link_whole at the same time.
+        # See remove_dups() below.
+        self.link_whole_targets.append(t)
+        self._add_lib_dependencies(t.link_targets, t.link_whole_targets, t.external_deps, public)
+
     def add_version_reqs(self, name, version_reqs):
         if version_reqs:
             if name not in self.version_reqs:
@@ -195,6 +223,32 @@
         return ', '.join(result)
 
     def remove_dups(self):
+        # Set of ids that have already been handled and should not be added any more
+        exclude = set()
+
+        # We can't just check if 'x' is excluded because we could have copies of
+        # the same SharedLibrary object for example.
+        def _ids(x):
+            if hasattr(x, 'generated_pc'):
+                yield x.generated_pc
+            if isinstance(x, build.Target):
+                yield x.get_id()
+            yield x
+
+        # Exclude 'x' in all its forms and return if it was already excluded
+        def _add_exclude(x):
+            was_excluded = False
+            for i in _ids(x):
+                if i in exclude:
+                    was_excluded = True
+                else:
+                    exclude.add(i)
+            return was_excluded
+
+        # link_whole targets are already part of other targets, exclude them all.
+        for t in self.link_whole_targets:
+            _add_exclude(t)
+
         def _fn(xs, libs=False):
             # Remove duplicates whilst preserving original order
             result = []
@@ -205,19 +259,21 @@
                 cannot_dedup = libs and isinstance(x, str) and \
                     not x.startswith(('-l', '-L')) and \
                     x not in known_flags
-                if x not in result or cannot_dedup:
-                    result.append(x)
+                if not cannot_dedup and _add_exclude(x):
+                    continue
+                result.append(x)
             return result
-        self.pub_libs = _fn(self.pub_libs, True)
+
+        # Handle lists in priority order: public items can be excluded from
+        # private and Requires can be excluded from Libs.
         self.pub_reqs = _fn(self.pub_reqs)
-        self.priv_libs = _fn(self.priv_libs, True)
+        self.pub_libs = _fn(self.pub_libs, True)
         self.priv_reqs = _fn(self.priv_reqs)
+        self.priv_libs = _fn(self.priv_libs, True)
+        # Reset exclude list just in case some values can be both cflags and libs.
+        exclude = set()
         self.cflags = _fn(self.cflags)
 
-        # Remove from private libs/reqs if they are in public already
-        self.priv_libs = [i for i in self.priv_libs if i not in self.pub_libs]
-        self.priv_reqs = [i for i in self.priv_reqs if i not in self.pub_reqs]
-
 class PkgConfigModule(ExtensionModule):
 
     def _get_lname(self, l, msg, pcfile):
@@ -255,24 +311,38 @@
             prefix = prefix.as_posix()
         if isinstance(subdir, PurePath):
             subdir = subdir.as_posix()
-        if subdir.startswith(prefix):
-            subdir = subdir.replace(prefix, '')
+        try:
+            if os.path.commonpath([prefix, subdir]) == prefix:
+                skip = len(prefix) + 1
+                subdir = subdir[skip:]
+        except ValueError:
+            pass
         return subdir
 
     def generate_pkgconfig_file(self, state, deps, subdirs, name, description,
-                                url, version, pcfile, conflicts, variables):
-        deps.remove_dups()
+                                url, version, pcfile, conflicts, variables,
+                                uninstalled=False, dataonly=False):
         coredata = state.environment.get_coredata()
-        outdir = state.environment.scratch_dir
-        fname = os.path.join(outdir, pcfile)
-        prefix = PurePath(coredata.get_builtin_option('prefix'))
+        if uninstalled:
+            outdir = os.path.join(state.environment.build_dir, 'meson-uninstalled')
+            if not os.path.exists(outdir):
+                os.mkdir(outdir)
+            prefix = PurePath(state.environment.get_build_dir())
+            srcdir = PurePath(state.environment.get_source_dir())
+        else:
+            outdir = state.environment.scratch_dir
+            prefix = PurePath(coredata.get_builtin_option('prefix'))
         # These always return paths relative to prefix
         libdir = PurePath(coredata.get_builtin_option('libdir'))
         incdir = PurePath(coredata.get_builtin_option('includedir'))
+        fname = os.path.join(outdir, pcfile)
         with open(fname, 'w', encoding='utf-8') as ofile:
-            ofile.write('prefix={}\n'.format(self._escape(prefix)))
-            ofile.write('libdir={}\n'.format(self._escape('${prefix}' / libdir)))
-            ofile.write('includedir={}\n'.format(self._escape('${prefix}' / incdir)))
+            if not dataonly:
+                ofile.write('prefix={}\n'.format(self._escape(prefix)))
+                if uninstalled:
+                    ofile.write('srcdir={}\n'.format(self._escape(srcdir)))
+                ofile.write('libdir={}\n'.format(self._escape('${prefix}' / libdir)))
+                ofile.write('includedir={}\n'.format(self._escape('${prefix}' / incdir)))
             if variables:
                 ofile.write('\n')
             for k, v in variables:
@@ -302,17 +372,20 @@
                     if isinstance(l, str):
                         yield l
                     else:
-                        install_dir = l.get_custom_install_dir()[0]
+                        if uninstalled:
+                            install_dir = os.path.dirname(state.backend.get_target_filename_abs(l))
+                        else:
+                            install_dir = l.get_custom_install_dir()[0]
                         if install_dir is False:
                             continue
                         if 'cs' in l.compilers:
                             if isinstance(install_dir, str):
-                                Lflag = '-r${prefix}/%s/%s ' % (self._escape(self._make_relative(prefix, install_dir)), l.filename)
+                                Lflag = '-r${prefix}/%s/%s' % (self._escape(self._make_relative(prefix, install_dir)), l.filename)
                             else:  # install_dir is True
                                 Lflag = '-r${libdir}/%s' % l.filename
                         else:
                             if isinstance(install_dir, str):
-                                Lflag = '-L${prefix}/%s ' % self._escape(self._make_relative(prefix, install_dir))
+                                Lflag = '-L${prefix}/%s' % self._escape(self._make_relative(prefix, install_dir))
                             else:  # install_dir is True
                                 Lflag = '-L${libdir}'
                         if Lflag not in Lflags:
@@ -326,39 +399,64 @@
                         if 'cs' not in l.compilers:
                             yield '-l%s' % lname
 
+            def get_uninstalled_include_dirs(libs):
+                result = []
+                for l in libs:
+                    if isinstance(l, str):
+                        continue
+                    if l.get_subdir() not in result:
+                        result.append(l.get_subdir())
+                    for i in l.get_include_dirs():
+                        curdir = i.get_curdir()
+                        for d in i.get_incdirs():
+                            path = os.path.join(curdir, d)
+                            if path not in result:
+                                result.append(path)
+                return result
+
+            def generate_uninstalled_cflags(libs):
+                for d in get_uninstalled_include_dirs(libs):
+                    for basedir in ['${prefix}', '${srcdir}']:
+                        path = os.path.join(basedir, d)
+                        yield '-I%s' % self._escape(path)
+
             if len(deps.pub_libs) > 0:
                 ofile.write('Libs: {}\n'.format(' '.join(generate_libs_flags(deps.pub_libs))))
             if len(deps.priv_libs) > 0:
                 ofile.write('Libs.private: {}\n'.format(' '.join(generate_libs_flags(deps.priv_libs))))
-            ofile.write('Cflags:')
-            for h in subdirs:
-                ofile.write(' ')
-                if h == '.':
-                    ofile.write('-I${includedir}')
-                else:
-                    ofile.write(self._escape(PurePath('-I${includedir}') / h))
-            for f in deps.cflags:
-                ofile.write(' ')
-                ofile.write(self._escape(f))
-            ofile.write('\n')
 
+            cflags = []
+            if uninstalled:
+                cflags += generate_uninstalled_cflags(deps.pub_libs + deps.priv_libs)
+            else:
+                for d in subdirs:
+                    if d == '.':
+                        cflags.append('-I${includedir}')
+                    else:
+                        cflags.append(self._escape(PurePath('-I${includedir}') / d))
+            cflags += [self._escape(f) for f in deps.cflags]
+            if cflags and not dataonly:
+                ofile.write('Cflags: {}\n'.format(' '.join(cflags)))
+
+    @FeatureNewKwargs('pkgconfig.generate', '0.54.0', ['uninstalled_variables'])
     @FeatureNewKwargs('pkgconfig.generate', '0.42.0', ['extra_cflags'])
     @FeatureNewKwargs('pkgconfig.generate', '0.41.0', ['variables'])
+    @FeatureNewKwargs('pkgconfig.generate', '0.54.0', ['dataonly'])
     @permittedKwargs({'libraries', 'version', 'name', 'description', 'filebase',
                       'subdirs', 'requires', 'requires_private', 'libraries_private',
-                      'install_dir', 'extra_cflags', 'variables', 'url', 'd_module_versions'})
+                      'install_dir', 'extra_cflags', 'variables', 'url', 'd_module_versions',
+                      'dataonly', 'conflicts'})
     def generate(self, state, args, kwargs):
-        if 'variables' in kwargs:
-            FeatureNew('custom pkgconfig variables', '0.41.0').use(state.subproject)
         default_version = state.project_version['version']
         default_install_dir = None
         default_description = None
         default_name = None
         mainlib = None
+        default_subdirs = ['.']
         if not args and 'version' not in kwargs:
-            FeatureNew('pkgconfig.generate implicit version keyword', '0.46.0').use(state.subproject)
+            FeatureNew.single_use('pkgconfig.generate implicit version keyword', '0.46.0', state.subproject)
         elif len(args) == 1:
-            FeatureNew('pkgconfig.generate optional positional argument', '0.46.0').use(state.subproject)
+            FeatureNew.single_use('pkgconfig.generate optional positional argument', '0.46.0', state.subproject)
             mainlib = getattr(args[0], 'held_object', args[0])
             if not isinstance(mainlib, (build.StaticLibrary, build.SharedLibrary)):
                 raise mesonlib.MesonException('Pkgconfig_gen first positional argument must be a library object')
@@ -370,7 +468,14 @@
         elif len(args) > 1:
             raise mesonlib.MesonException('Too many positional arguments passed to Pkgconfig_gen.')
 
-        subdirs = mesonlib.stringlistify(kwargs.get('subdirs', ['.']))
+        dataonly = kwargs.get('dataonly', False)
+        if dataonly:
+            default_subdirs = []
+            blocked_vars = ['libraries', 'libraries_private', 'requires_private', 'extra_cflags', 'subdirs']
+            if len(set(kwargs) & set(blocked_vars)) > 0:
+                raise mesonlib.MesonException('Cannot combine dataonly with any of {}'.format(blocked_vars))
+
+        subdirs = mesonlib.stringlistify(kwargs.get('subdirs', default_subdirs))
         version = kwargs.get('version', default_version)
         if not isinstance(version, str):
             raise mesonlib.MesonException('Version must be specified.')
@@ -395,7 +500,7 @@
         if mainlib:
             libraries = [mainlib] + libraries
 
-        deps = DependenciesHelper(filebase)
+        deps = DependenciesHelper(state, filebase)
         deps.add_pub_libs(libraries)
         deps.add_priv_libs(kwargs.get('libraries_private', []))
         deps.add_pub_reqs(kwargs.get('requires', []))
@@ -408,31 +513,19 @@
             if compiler:
                 deps.add_cflags(compiler.get_feature_args({'versions': dversions}, None))
 
-        def parse_variable_list(stringlist):
+        deps.remove_dups()
+
+        def parse_variable_list(vardict):
             reserved = ['prefix', 'libdir', 'includedir']
             variables = []
-            for var in stringlist:
-                # foo=bar=baz is ('foo', 'bar=baz')
-                l = var.split('=', 1)
-                if len(l) < 2:
-                    raise mesonlib.MesonException('Invalid variable "{}". Variables must be in \'name=value\' format'.format(var))
-
-                name, value = l[0].strip(), l[1].strip()
-                if not name or not value:
-                    raise mesonlib.MesonException('Invalid variable "{}". Variables must be in \'name=value\' format'.format(var))
-
-                # Variable names must not contain whitespaces
-                if any(c.isspace() for c in name):
-                    raise mesonlib.MesonException('Invalid whitespace in assignment "{}"'.format(var))
-
+            for name, value in vardict.items():
                 if name in reserved:
                     raise mesonlib.MesonException('Variable "{}" is reserved'.format(name))
-
                 variables.append((name, value))
-
             return variables
 
-        variables = parse_variable_list(mesonlib.stringlistify(kwargs.get('variables', [])))
+        variables = self.interpreter.extract_variables(kwargs, dict_new=True)
+        variables = parse_variable_list(variables)
 
         pcfile = filebase + '.pc'
         pkgroot = kwargs.get('install_dir', default_install_dir)
@@ -444,8 +537,16 @@
         if not isinstance(pkgroot, str):
             raise mesonlib.MesonException('Install_dir must be a string.')
         self.generate_pkgconfig_file(state, deps, subdirs, name, description, url,
-                                     version, pcfile, conflicts, variables)
+                                     version, pcfile, conflicts, variables,
+                                     False, dataonly)
         res = build.Data(mesonlib.File(True, state.environment.get_scratch_dir(), pcfile), pkgroot)
+        variables = self.interpreter.extract_variables(kwargs, argname='uninstalled_variables', dict_new=True)
+        variables = parse_variable_list(variables)
+
+        pcfile = filebase + '-uninstalled.pc'
+        self.generate_pkgconfig_file(state, deps, subdirs, name, description, url,
+                                     version, pcfile, conflicts, variables,
+                                     uninstalled=True, dataonly=dataonly)
         # Associate the main library with this generated pc file. If the library
         # is used in any subsequent call to pkgconfig.generate(), it will be
         # emitted as a 'Requires:' or 'Requires.private:' entry.
@@ -461,8 +562,7 @@
             for lib in deps.pub_libs:
                 if not isinstance(lib, str) and not hasattr(lib, 'generated_pc'):
                     lib.generated_pc = filebase
-                    location = types.SimpleNamespace(subdir=state.subdir,
-                                                     lineno=state.current_lineno)
+                    location = state.current_node
                     lib.generated_pc_warn = [name, location]
         return ModuleReturnValue(res, [res])
 
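The rewritten remove_dups() in pkgconfig.py keeps one shared exclude set and registers
every form an entry is known under (its generated pc name, its target id, or the object
itself), processing the public lists before the private ones so a library already
exported publicly is dropped from the private lists. A stripped-down sketch of that
exclusion pattern using plain strings (purely illustrative, not the module's actual
data model):

    exclude = set()

    def add_exclude(x):
        # Record x and report whether it had been seen before.
        was_excluded = x in exclude
        exclude.add(x)
        return was_excluded

    def dedup(items):
        return [x for x in items if not add_exclude(x)]

    pub_libs = dedup(['glib-2.0', 'foo', 'glib-2.0'])  # ['glib-2.0', 'foo']
    priv_libs = dedup(['foo', 'bar'])                  # ['bar']: 'foo' is already public
    print(pub_libs, priv_libs)
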
diff -Nru meson-0.53.2/mesonbuild/modules/python3.py meson-0.57.0+really0.56.2/mesonbuild/modules/python3.py
--- meson-0.53.2/mesonbuild/modules/python3.py	2019-02-07 09:08:55.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/modules/python3.py	2020-08-15 16:27:05.000000000 +0000
@@ -48,7 +48,7 @@
 
     @noKwargs
     def find_python(self, state, args, kwargs):
-        command = state.environment.binaries.host.lookup_entry('python3')
+        command = state.environment.lookup_binary_entry(mesonlib.MachineChoice.HOST, 'python3')
         if command is not None:
             py3 = dependencies.ExternalProgram.from_entry('python3', command)
         else:
diff -Nru meson-0.53.2/mesonbuild/modules/python.py meson-0.57.0+really0.56.2/mesonbuild/modules/python.py
--- meson-0.53.2/mesonbuild/modules/python.py	2020-02-25 18:00:46.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/modules/python.py	2021-01-06 10:39:48.000000000 +0000
@@ -17,9 +17,9 @@
 import shutil
 import typing as T
 
-from pathlib import Path
+from .._pathlib import Path
 from .. import mesonlib
-from ..mesonlib import MesonException
+from ..mesonlib import MachineChoice, MesonException
 from . import ExtensionModule
 from mesonbuild.modules import ModuleReturnValue
 from ..interpreterbase import (
@@ -34,7 +34,7 @@
 from ..dependencies.base import (
     DependencyMethods, ExternalDependency,
     ExternalProgram, PkgConfigDependency,
-    NonExistingExternalProgram
+    NonExistingExternalProgram, NotFoundDependency
 )
 
 mod_kwargs = set(['subdir'])
@@ -44,7 +44,7 @@
 class PythonDependency(ExternalDependency):
 
     def __init__(self, python_holder, environment, kwargs):
-        super().__init__('python', environment, None, kwargs)
+        super().__init__('python', environment, kwargs)
         self.name = 'python'
         self.static = kwargs.get('static', False)
         self.embed = kwargs.get('embed', False)
@@ -285,7 +285,7 @@
 
 class PythonInstallation(ExternalProgramHolder):
     def __init__(self, interpreter, python, info):
-        ExternalProgramHolder.__init__(self, python)
+        ExternalProgramHolder.__init__(self, python, interpreter.subproject)
         self.interpreter = interpreter
         self.subproject = self.interpreter.subproject
         prefix = self.interpreter.environment.coredata.get_builtin_option('prefix')
@@ -356,12 +356,19 @@
                          'positional arguments. It always returns a Python '
                          'dependency. This will become an error in the future.',
                          location=self.interpreter.current_node)
-        dep = PythonDependency(self, self.interpreter.environment, kwargs)
+        disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
+        if disabled:
+            mlog.log('Dependency', mlog.bold('python'), 'skipped: feature', mlog.bold(feature), 'disabled')
+            dep = NotFoundDependency(self.interpreter.environment)
+        else:
+            dep = PythonDependency(self, self.interpreter.environment, kwargs)
+            if required and not dep.found():
+                raise mesonlib.MesonException('Python dependency not found')
         return self.interpreter.holderify(dep)
 
     @permittedKwargs(['pure', 'subdir'])
     def install_sources_method(self, args, kwargs):
-        pure = kwargs.pop('pure', False)
+        pure = kwargs.pop('pure', True)
         if not isinstance(pure, bool):
             raise InvalidArguments('"pure" argument must be a boolean.')
 
@@ -506,7 +513,7 @@
         if len(args) > 1:
             raise InvalidArguments('find_installation takes zero or one positional argument.')
 
-        name_or_path = state.environment.binaries.host.lookup_entry('python')
+        name_or_path = state.environment.lookup_binary_entry(MachineChoice.HOST, 'python')
         if name_or_path is None and args:
             name_or_path = args[0]
             if not isinstance(name_or_path, str):
@@ -514,7 +521,7 @@
 
         if disabled:
             mlog.log('Program', name_or_path or 'python', 'found:', mlog.red('NO'), '(disabled by:', mlog.bold(feature), ')')
-            return ExternalProgramHolder(NonExistingExternalProgram())
+            return ExternalProgramHolder(NonExistingExternalProgram(), state.subproject)
 
         if not name_or_path:
             python = ExternalProgram('python3', mesonlib.python_command, silent=True)
@@ -561,11 +568,11 @@
         if not python.found():
             if required:
                 raise mesonlib.MesonException('{} not found'.format(name_or_path or 'python'))
-            res = ExternalProgramHolder(NonExistingExternalProgram())
+            res = ExternalProgramHolder(NonExistingExternalProgram(), state.subproject)
         elif missing_modules:
             if required:
                 raise mesonlib.MesonException('{} is missing modules: {}'.format(name_or_path or 'python', ', '.join(missing_modules)))
-            res = ExternalProgramHolder(NonExistingExternalProgram())
+            res = ExternalProgramHolder(NonExistingExternalProgram(), state.subproject)
         else:
             # Sanity check, we expect to have something that at least quacks in tune
             try:
@@ -583,7 +590,7 @@
             if isinstance(info, dict) and 'version' in info and self._check_version(name_or_path, info['version']):
                 res = PythonInstallation(interpreter, python, info)
             else:
-                res = ExternalProgramHolder(NonExistingExternalProgram())
+                res = ExternalProgramHolder(NonExistingExternalProgram(), state.subproject)
                 if required:
                     raise mesonlib.MesonException('{} is not a valid python or it is missing setuptools'.format(python))
 
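The python.py change above routes dependency lookups through the standard feature-option
handling: a disabled feature short-circuits to a not-found dependency, and required
lookups raise if nothing is found. A hedged sketch of that tri-state "required" logic
with made-up names (Meson's real helper is extract_required_kwarg, whose details are not
reproduced here):

    def interpret_required(value):
        # Map a bool or an 'enabled'/'disabled'/'auto' feature value
        # to the pair (disabled, must_have).
        if isinstance(value, bool):
            return False, value
        if value == 'disabled':
            return True, False
        return False, value == 'enabled'

    def find_python_dep(found, required='auto'):
        disabled, must_have = interpret_required(required)
        if disabled:
            return None                      # skipped, no lookup performed
        if not found and must_have:
            raise RuntimeError('Python dependency not found')
        return 'python-dep' if found else None

    assert find_python_dep(False) is None                         # auto: soft failure
    assert find_python_dep(True, required='enabled') == 'python-dep'
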
diff -Nru meson-0.53.2/mesonbuild/modules/qt4.py meson-0.57.0+really0.56.2/mesonbuild/modules/qt4.py
--- meson-0.53.2/mesonbuild/modules/qt4.py	2018-12-09 14:27:23.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/modules/qt4.py	2020-08-15 16:27:05.000000000 +0000
@@ -12,7 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from .. import mlog
 from .qt import QtBaseModule
 
 
@@ -23,6 +22,4 @@
 
 
 def initialize(*args, **kwargs):
-    mlog.warning('rcc dependencies will not work properly until this upstream issue is fixed:',
-                 mlog.bold('https://bugreports.qt.io/browse/QTBUG-45460'))
     return Qt4Module(*args, **kwargs)
diff -Nru meson-0.53.2/mesonbuild/modules/qt5.py meson-0.57.0+really0.56.2/mesonbuild/modules/qt5.py
--- meson-0.53.2/mesonbuild/modules/qt5.py	2018-12-09 14:27:23.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/modules/qt5.py	2020-08-15 16:27:05.000000000 +0000
@@ -12,7 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from .. import mlog
 from .qt import QtBaseModule
 
 
@@ -23,6 +22,4 @@
 
 
 def initialize(*args, **kwargs):
-    mlog.warning('rcc dependencies will not work reliably until this upstream issue is fixed:',
-                 mlog.bold('https://bugreports.qt.io/browse/QTBUG-45460'))
     return Qt5Module(*args, **kwargs)
diff -Nru meson-0.53.2/mesonbuild/modules/qt.py meson-0.57.0+really0.56.2/mesonbuild/modules/qt.py
--- meson-0.53.2/mesonbuild/modules/qt.py	2019-12-04 18:45:50.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/modules/qt.py	2020-11-22 02:19:19.000000000 +0000
@@ -13,13 +13,15 @@
 # limitations under the License.
 
 import os
+import shutil
 from .. import mlog
 from .. import build
-from ..mesonlib import MesonException, Popen_safe, extract_as_list, File
-from ..dependencies import Dependency, Qt4Dependency, Qt5Dependency
+from ..mesonlib import MesonException, extract_as_list, File, unholder, version_compare
+from ..dependencies import Dependency, Qt4Dependency, Qt5Dependency, NonExistingExternalProgram
 import xml.etree.ElementTree as ET
 from . import ModuleReturnValue, get_include_args, ExtensionModule
-from ..interpreterbase import permittedKwargs, FeatureNew, FeatureNewKwargs
+from ..interpreterbase import noPosargs, permittedKwargs, FeatureNew, FeatureNewKwargs
+from ..interpreter import extract_required_kwarg
 
 _QT_DEPS_LUT = {
     4: Qt4Dependency,
@@ -29,50 +31,36 @@
 
 class QtBaseModule(ExtensionModule):
     tools_detected = False
+    rcc_supports_depfiles = False
 
     def __init__(self, interpreter, qt_version=5):
         ExtensionModule.__init__(self, interpreter)
+        self.snippets.add('has_tools')
         self.qt_version = qt_version
 
-    def _detect_tools(self, env, method):
+    def _detect_tools(self, env, method, required=True):
         if self.tools_detected:
             return
+        self.tools_detected = True
         mlog.log('Detecting Qt{version} tools'.format(version=self.qt_version))
-        # FIXME: We currently require QtX to exist while importing the module.
-        # We should make it gracefully degrade and not create any targets if
-        # the import is marked as 'optional' (not implemented yet)
-        kwargs = {'required': 'true', 'modules': 'Core', 'silent': 'true', 'method': method}
+        kwargs = {'required': required, 'modules': 'Core', 'method': method}
         qt = _QT_DEPS_LUT[self.qt_version](env, kwargs)
-        # Get all tools and then make sure that they are the right version
-        self.moc, self.uic, self.rcc, self.lrelease = qt.compilers_detect(self.interpreter)
-        # Moc, uic and rcc write their version strings to stderr.
-        # Moc and rcc return a non-zero result when doing so.
-        # What kind of an idiot thought that was a good idea?
-        for compiler, compiler_name in ((self.moc, "Moc"), (self.uic, "Uic"), (self.rcc, "Rcc"), (self.lrelease, "lrelease")):
-            if compiler.found():
-                # Workaround since there is no easy way to know which tool/version support which flag
-                for flag in ['-v', '-version']:
-                    p, stdout, stderr = Popen_safe(compiler.get_command() + [flag])[0:3]
-                    if p.returncode == 0:
-                        break
-                stdout = stdout.strip()
-                stderr = stderr.strip()
-                if 'Qt {}'.format(self.qt_version) in stderr:
-                    compiler_ver = stderr
-                elif 'version {}.'.format(self.qt_version) in stderr:
-                    compiler_ver = stderr
-                elif ' {}.'.format(self.qt_version) in stdout:
-                    compiler_ver = stdout
-                else:
-                    raise MesonException('{name} preprocessor is not for Qt {version}. Output:\n{stdo}\n{stderr}'.format(
-                        name=compiler_name, version=self.qt_version, stdo=stdout, stderr=stderr))
-                mlog.log(' {}:'.format(compiler_name.lower()), mlog.green('YES'), '({path}, {version})'.format(
-                    path=compiler.get_path(), version=compiler_ver.split()[-1]))
+        if qt.found():
+            # Get all tools and then make sure that they are the right version
+            self.moc, self.uic, self.rcc, self.lrelease = qt.compilers_detect(self.interpreter)
+            if version_compare(qt.version, '>=5.14.0'):
+                self.rcc_supports_depfiles = True
             else:
-                mlog.log(' {}:'.format(compiler_name.lower()), mlog.red('NO'))
-        self.tools_detected = True
+                mlog.warning('rcc dependencies will not work properly until you move to Qt >= 5.14:',
+                    mlog.bold('https://bugreports.qt.io/browse/QTBUG-45460'), fatal=False)
+        else:
+            suffix = '-qt{}'.format(self.qt_version)
+            self.moc = NonExistingExternalProgram(name='moc' + suffix)
+            self.uic = NonExistingExternalProgram(name='uic' + suffix)
+            self.rcc = NonExistingExternalProgram(name='rcc' + suffix)
+            self.lrelease = NonExistingExternalProgram(name='lrelease' + suffix)
 
-    def parse_qrc(self, state, rcc_file):
+    def qrc_nodes(self, state, rcc_file):
         if type(rcc_file) is str:
             abspath = os.path.join(state.environment.source_dir, state.subdir, rcc_file)
             rcc_dirname = os.path.dirname(abspath)
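
In the hunk above, rcc only gained depfile support in Qt 5.14, so the module now records
rcc_supports_depfiles from a version comparison instead of always warning about
QTBUG-45460. A small sketch of that gate; version_compare below is a simplified stand-in
for mesonlib.version_compare and the version string is made up:

    def version_compare(version, constraint):
        # Only handles '>=X.Y.Z' constraints on dotted numeric versions.
        assert constraint.startswith('>=')
        wanted = tuple(int(p) for p in constraint[2:].split('.'))
        have = tuple(int(p) for p in version.split('.'))
        return have >= wanted

    rcc_supports_depfiles = version_compare('5.15.2', '>=5.14.0')
    print(rcc_supports_depfiles)  # True
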
@@ -89,41 +77,65 @@
                     mlog.warning("malformed rcc file: ", os.path.join(state.subdir, rcc_file))
                     break
                 else:
-                    resource_path = child.text
-                    # We need to guess if the pointed resource is:
-                    #   a) in build directory -> implies a generated file
-                    #   b) in source directory
-                    #   c) somewhere else external dependency file to bundle
-                    #
-                    # Also from qrc documentation: relative path are always from qrc file
-                    # So relative path must always be computed from qrc file !
-                    if os.path.isabs(resource_path):
-                        # a)
-                        if resource_path.startswith(os.path.abspath(state.environment.build_dir)):
-                            resource_relpath = os.path.relpath(resource_path, state.environment.build_dir)
-                            result.append(File(is_built=True, subdir='', fname=resource_relpath))
-                        # either b) or c)
-                        else:
-                            result.append(File(is_built=False, subdir=state.subdir, fname=resource_path))
-                    else:
-                        path_from_rcc = os.path.normpath(os.path.join(rcc_dirname, resource_path))
-                        # a)
-                        if path_from_rcc.startswith(state.environment.build_dir):
-                            result.append(File(is_built=True, subdir=state.subdir, fname=resource_path))
-                        # b)
-                        else:
-                            result.append(File(is_built=False, subdir=state.subdir, fname=path_from_rcc))
-            return result
+                    result.append(child.text)
+
+            return rcc_dirname, result
         except Exception:
             return []
 
+    def parse_qrc_deps(self, state, rcc_file):
+        rcc_dirname, nodes = self.qrc_nodes(state, rcc_file)
+        result = []
+        for resource_path in nodes:
+            # We need to guess whether the referenced resource is:
+            #   a) in the build directory -> implies a generated file
+            #   b) in the source directory
+            #   c) somewhere else, an external dependency file to bundle
+            #
+            # Also, per the qrc documentation, relative paths are always relative
+            # to the qrc file, so they must be computed from the qrc file's location!
+            if os.path.isabs(resource_path):
+                # a)
+                if resource_path.startswith(os.path.abspath(state.environment.build_dir)):
+                    resource_relpath = os.path.relpath(resource_path, state.environment.build_dir)
+                    result.append(File(is_built=True, subdir='', fname=resource_relpath))
+                # either b) or c)
+                else:
+                    result.append(File(is_built=False, subdir=state.subdir, fname=resource_path))
+            else:
+                path_from_rcc = os.path.normpath(os.path.join(rcc_dirname, resource_path))
+                # a)
+                if path_from_rcc.startswith(state.environment.build_dir):
+                    result.append(File(is_built=True, subdir=state.subdir, fname=resource_path))
+                # b)
+                else:
+                    result.append(File(is_built=False, subdir=state.subdir, fname=path_from_rcc))
+        return result
+
+    @noPosargs
+    @permittedKwargs({'method', 'required'})
+    @FeatureNew('qt.has_tools', '0.54.0')
+    def has_tools(self, interpreter, state, args, kwargs):
+        method = kwargs.get('method', 'auto')
+        disabled, required, feature = extract_required_kwarg(kwargs, state.subproject, default=False)
+        if disabled:
+            mlog.log('qt.has_tools skipped: feature', mlog.bold(feature), 'disabled')
+            return False
+        self._detect_tools(state.environment, method, required=False)
+        for tool in (self.moc, self.uic, self.rcc, self.lrelease):
+            if not tool.found():
+                if required:
+                    raise MesonException('Qt tools not found')
+                return False
+        return True
+
     @FeatureNewKwargs('qt.preprocess', '0.49.0', ['uic_extra_arguments'])
     @FeatureNewKwargs('qt.preprocess', '0.44.0', ['moc_extra_arguments'])
     @FeatureNewKwargs('qt.preprocess', '0.49.0', ['rcc_extra_arguments'])
     @permittedKwargs({'moc_headers', 'moc_sources', 'uic_extra_arguments', 'moc_extra_arguments', 'rcc_extra_arguments', 'include_directories', 'dependencies', 'ui_files', 'qresources', 'method'})
     def preprocess(self, state, args, kwargs):
         rcc_files, ui_files, moc_headers, moc_sources, uic_extra_arguments, moc_extra_arguments, rcc_extra_arguments, sources, include_directories, dependencies \
-            = extract_as_list(kwargs, 'qresources', 'ui_files', 'moc_headers', 'moc_sources', 'uic_extra_arguments', 'moc_extra_arguments', 'rcc_extra_arguments', 'sources', 'include_directories', 'dependencies', pop = True)
+            = [extract_as_list(kwargs, c, pop=True) for c in ['qresources', 'ui_files', 'moc_headers', 'moc_sources', 'uic_extra_arguments', 'moc_extra_arguments', 'rcc_extra_arguments', 'sources', 'include_directories', 'dependencies']]
         sources += args[1:]
         method = kwargs.get('method', 'auto')
         self._detect_tools(state.environment, method)
@@ -138,7 +150,7 @@
             if args:
                 qrc_deps = []
                 for i in rcc_files:
-                    qrc_deps += self.parse_qrc(state, i)
+                    qrc_deps += self.parse_qrc_deps(state, i)
                 name = args[0]
                 rcc_kwargs = {'input': rcc_files,
                               'output': name + '.cpp',
@@ -148,7 +160,7 @@
                 sources.append(res_target)
             else:
                 for rcc_file in rcc_files:
-                    qrc_deps = self.parse_qrc(state, rcc_file)
+                    qrc_deps = self.parse_qrc_deps(state, rcc_file)
                     if type(rcc_file) is str:
                         basename = os.path.basename(rcc_file)
                     elif type(rcc_file) is File:
@@ -158,6 +170,9 @@
                                   'output': name + '.cpp',
                                   'command': [self.rcc, '-name', '@BASENAME@', '-o', '@OUTPUT@', rcc_extra_arguments, '@INPUT@'],
                                   'depend_files': qrc_deps}
+                    if self.rcc_supports_depfiles:
+                        rcc_kwargs['depfile'] = name + '.d'
+                        rcc_kwargs['command'] += ['--depfile', '@DEPFILE@']
                     res_target = build.CustomTarget(name, state.subdir, state.subproject, rcc_kwargs)
                     sources.append(res_target)
         if ui_files:
@@ -171,9 +186,7 @@
             sources.append(ui_output)
         inc = get_include_args(include_dirs=include_directories)
         compile_args = []
-        for dep in dependencies:
-            if hasattr(dep, 'held_object'):
-                dep = dep.held_object
+        for dep in unholder(dependencies):
             if isinstance(dep, Dependency):
                 for arg in dep.get_compile_args():
                     if arg.startswith('-I') or arg.startswith('-D'):
@@ -200,15 +213,42 @@
         return ModuleReturnValue(sources, sources)
 
     @FeatureNew('qt.compile_translations', '0.44.0')
-    @permittedKwargs({'ts_files', 'install', 'install_dir', 'build_by_default', 'method'})
+    @FeatureNewKwargs('qt.compile_translations', '0.56.0', ['qresource'])
+    @FeatureNewKwargs('qt.compile_translations', '0.56.0', ['rcc_extra_arguments'])
+    @permittedKwargs({'ts_files', 'qresource', 'rcc_extra_arguments', 'install', 'install_dir', 'build_by_default', 'method'})
     def compile_translations(self, state, args, kwargs):
-        ts_files, install_dir = extract_as_list(kwargs, 'ts_files', 'install_dir', pop=True)
+        ts_files, install_dir = [extract_as_list(kwargs, c, pop=True) for c in ['ts_files', 'install_dir']]
+        qresource = kwargs.get('qresource')
+        if qresource:
+            if ts_files:
+                raise MesonException('qt.compile_translations: Cannot specify both ts_files and qresource')
+            if os.path.dirname(qresource) != '':
+                raise MesonException('qt.compile_translations: qresource file name must not contain a subdirectory.')
+            qresource = File.from_built_file(state.subdir, qresource)
+            infile_abs = os.path.join(state.environment.source_dir, qresource.relative_name())
+            outfile_abs = os.path.join(state.environment.build_dir, qresource.relative_name())
+            os.makedirs(os.path.dirname(outfile_abs), exist_ok=True)
+            shutil.copy2(infile_abs, outfile_abs)
+            self.interpreter.add_build_def_file(infile_abs)
+
+            rcc_file, nodes = self.qrc_nodes(state, qresource)
+            for c in nodes:
+                if c.endswith('.qm'):
+                    ts_files.append(c.rstrip('.qm')+'.ts')
+                else:
+                    raise MesonException('qt.compile_translations: qresource can only contain qm files, found {}'.format(c))
+            results = self.preprocess(state, [], {'qresources': qresource, 'rcc_extra_arguments': kwargs.get('rcc_extra_arguments', [])})
         self._detect_tools(state.environment, kwargs.get('method', 'auto'))
         translations = []
         for ts in ts_files:
             if not self.lrelease.found():
                 raise MesonException('qt.compile_translations: ' +
                                      self.lrelease.name + ' not found')
+            if qresource:
+                outdir = os.path.dirname(os.path.normpath(os.path.join(state.subdir, ts)))
+                ts = os.path.basename(ts)
+            else:
+                outdir = state.subdir
             cmd = [self.lrelease, '@INPUT@', '-qm', '@OUTPUT@']
             lrelease_kwargs = {'output': '@BASENAME@.qm',
                                'input': ts,
@@ -217,6 +257,9 @@
                                'command': cmd}
             if install_dir is not None:
                 lrelease_kwargs['install_dir'] = install_dir
-            lrelease_target = build.CustomTarget('qt{}-compile-{}'.format(self.qt_version, ts), state.subdir, state.subproject, lrelease_kwargs)
+            lrelease_target = build.CustomTarget('qt{}-compile-{}'.format(self.qt_version, ts), outdir, state.subproject, lrelease_kwargs)
             translations.append(lrelease_target)
-        return ModuleReturnValue(translations, translations)
+        if qresource:
+            return ModuleReturnValue(results.return_value[0], [results.new_objects, translations])
+        else:
+            return ModuleReturnValue(translations, translations)
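For context, the qresource mode added to qt.compile_translations above could be driven from a meson.build roughly as follows; this is a minimal sketch, and the file names (a lang.qrc listing only .qm entries such as fr.qm, with fr.ts next to it) and the qt5 module variable are illustrative assumptions, not taken from this patch:

    qt5 = import('qt5')
    # fr.ts is derived from the fr.qm entry in lang.qrc; lrelease builds the
    # .qm files and the qrc is then compiled with rcc, so the returned sources
    # can be fed straight into an executable() or library().
    lang_cpp = qt5.compile_translations(qresource: 'lang.qrc',
                                        build_by_default: true)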
diff -Nru meson-0.53.2/mesonbuild/modules/sourceset.py meson-0.57.0+really0.56.2/mesonbuild/modules/sourceset.py
--- meson-0.53.2/mesonbuild/modules/sourceset.py	2019-08-28 17:15:39.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/modules/sourceset.py	2021-01-06 10:39:48.000000000 +0000
@@ -150,7 +150,7 @@
         if isinstance(config_data, dict):
             def _get_from_config_data(key):
                 if strict and key not in config_data:
-                    raise InterpreterException('Entry %s not in configuration dictionary.' % key)
+                    raise InterpreterException('Entry {} not in configuration dictionary.'.format(key))
                 return config_data.get(key, False)
         else:
             config_cache = dict()
diff -Nru meson-0.53.2/mesonbuild/modules/unstable_external_project.py meson-0.57.0+really0.56.2/mesonbuild/modules/unstable_external_project.py
--- meson-0.53.2/mesonbuild/modules/unstable_external_project.py	1970-01-01 00:00:00.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/modules/unstable_external_project.py	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,266 @@
+# Copyright 2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os, subprocess, shlex
+from .._pathlib import Path
+import typing as T
+
+from . import ExtensionModule, ModuleReturnValue
+from .. import mlog, build
+from ..mesonlib import (MesonException, Popen_safe, MachineChoice,
+                       get_variable_regex, do_replacement)
+from ..interpreterbase import InterpreterObject, InterpreterException, FeatureNew
+from ..interpreterbase import stringArgs, permittedKwargs
+from ..interpreter import Interpreter, DependencyHolder, InstallDir
+from ..compilers.compilers import cflags_mapping, cexe_mapping
+from ..dependencies.base import InternalDependency, PkgConfigDependency
+from ..environment import Environment
+
+class ExternalProject(InterpreterObject):
+    def __init__(self,
+                 interpreter: Interpreter,
+                 subdir: str,
+                 project_version: T.Dict[str, str],
+                 subproject: str,
+                 environment: Environment,
+                 build_machine: str,
+                 host_machine: str,
+                 configure_command: T.List[str],
+                 configure_options: T.List[str],
+                 cross_configure_options: T.List[str],
+                 env: build.EnvironmentVariables,
+                 verbose: bool):
+        InterpreterObject.__init__(self)
+        self.methods.update({'dependency': self.dependency_method,
+                             })
+
+        self.interpreter = interpreter
+        self.subdir = Path(subdir)
+        self.project_version = project_version
+        self.subproject = subproject
+        self.env = environment
+        self.build_machine = build_machine
+        self.host_machine = host_machine
+        self.configure_command = configure_command
+        self.configure_options = configure_options
+        self.cross_configure_options = cross_configure_options
+        self.verbose = verbose
+        self.user_env = env
+
+        self.name = self.subdir.name
+        self.src_dir = Path(self.env.get_source_dir(), self.subdir)
+        self.build_dir = Path(self.env.get_build_dir(), self.subdir, 'build')
+        self.install_dir = Path(self.env.get_build_dir(), self.subdir, 'dist')
+        self.prefix = Path(self.env.coredata.get_builtin_option('prefix'))
+        self.libdir = Path(self.env.coredata.get_builtin_option('libdir'))
+        self.includedir = Path(self.env.coredata.get_builtin_option('includedir'))
+
+        # On Windows if the prefix is "c:/foo" and DESTDIR is "c:/bar", `make`
+        # will install files into "c:/bar/c:/foo" which is an invalid path.
+        # Work around that issue by removing the drive from prefix.
+        if self.prefix.drive:
+            self.prefix = self.prefix.relative_to(self.prefix.drive)
+
+        # self.prefix is an absolute path, so we cannot append it to another path.
+        self.rel_prefix = self.prefix.relative_to(self.prefix.root)
+
+        self.make = self.interpreter.find_program_impl('make')
+        self.make = self.make.get_command()[0]
+
+        self._configure()
+
+        self.targets = self._create_targets()
+
+    def _configure(self):
+        # Assume it's the name of a script in the source dir, like 'configure',
+        # 'autogen.sh', etc.
+        configure_path = Path(self.src_dir, self.configure_command)
+        configure_prog = self.interpreter.find_program_impl(configure_path.as_posix())
+        configure_cmd = configure_prog.get_command()
+
+        d = {'PREFIX': self.prefix.as_posix(),
+             'LIBDIR': self.libdir.as_posix(),
+             'INCLUDEDIR': self.includedir.as_posix(),
+             }
+        self._validate_configure_options(d.keys())
+
+        configure_cmd += self._format_options(self.configure_options, d)
+
+        if self.env.is_cross_build():
+            host = '{}-{}-{}'.format(self.host_machine.cpu_family,
+                                     self.build_machine.system,
+                                     self.host_machine.system)
+            d = {'HOST': host}
+            configure_cmd += self._format_options(self.cross_configure_options, d)
+
+        # Set common env variables like CFLAGS, CC, etc.
+        link_exelist = []
+        link_args = []
+        self.run_env = os.environ.copy()
+        for lang, compiler in self.env.coredata.compilers[MachineChoice.HOST].items():
+            if any(lang not in i for i in (cexe_mapping, cflags_mapping)):
+                continue
+            cargs = self.env.coredata.get_external_args(MachineChoice.HOST, lang)
+            self.run_env[cexe_mapping[lang]] = self._quote_and_join(compiler.get_exelist())
+            self.run_env[cflags_mapping[lang]] = self._quote_and_join(cargs)
+            if not link_exelist:
+                link_exelist = compiler.get_linker_exelist()
+                link_args = self.env.coredata.get_external_link_args(MachineChoice.HOST, lang)
+        if link_exelist:
+            self.run_env['LD'] = self._quote_and_join(link_exelist)
+        self.run_env['LDFLAGS'] = self._quote_and_join(link_args)
+
+        self.run_env = self.user_env.get_env(self.run_env)
+
+        PkgConfigDependency.setup_env(self.run_env, self.env, MachineChoice.HOST,
+                                      Path(self.env.get_build_dir(), 'meson-uninstalled').as_posix())
+
+        self.build_dir.mkdir(parents=True, exist_ok=True)
+        self._run('configure', configure_cmd)
+
+    def _quote_and_join(self, array: T.List[str]) -> str:
+        return ' '.join([shlex.quote(i) for i in array])
+
+    def _validate_configure_options(self, required_keys: T.List[str]):
+        # Ensure the user at least tries to pass basic info to the build system,
+        # like the prefix, libdir, etc.
+        for key in required_keys:
+            key_format = '@{}@'.format(key)
+            for option in self.configure_options:
+                if key_format in option:
+                    break
+            else:
+                m = 'At least one configure option must contain "{}" key'
+                raise InterpreterException(m.format(key_format))
+
+    def _format_options(self, options: T.List[str], variables: T.Dict[str, str]) -> T.List[str]:
+        out = []
+        missing = set()
+        regex = get_variable_regex('meson')
+        confdata = {k: (v, None) for k, v in variables.items()}
+        for o in options:
+            arg, missing_vars = do_replacement(regex, o, 'meson', confdata)
+            missing.update(missing_vars)
+            out.append(arg)
+        if missing:
+            var_list = ", ".join(map(repr, sorted(missing)))
+            raise EnvironmentException(
+                "Variables {} in configure options are missing.".format(var_list))
+        return out
+
+    def _run(self, step: str, command: T.List[str]):
+        mlog.log('External project {}:'.format(self.name), mlog.bold(step))
+        output = None if self.verbose else subprocess.DEVNULL
+        p, o, e = Popen_safe(command, cwd=str(self.build_dir), env=self.run_env,
+                                      stderr=subprocess.STDOUT,
+                                      stdout=output)
+        if p.returncode != 0:
+            m = '{} step failed:\n{}'.format(step, e)
+            raise MesonException(m)
+
+    def _create_targets(self):
+        cmd = self.env.get_build_command()
+        cmd += ['--internal', 'externalproject',
+                '--name', self.name,
+                '--srcdir', self.src_dir.as_posix(),
+                '--builddir', self.build_dir.as_posix(),
+                '--installdir', self.install_dir.as_posix(),
+                '--make', self.make,
+                ]
+        if self.verbose:
+            cmd.append('--verbose')
+
+        target_kwargs = {'output': '{}.stamp'.format(self.name),
+                         'depfile': '{}.d'.format(self.name),
+                         'command': cmd + ['@OUTPUT@', '@DEPFILE@'],
+                         'console': True,
+                         }
+        self.target = build.CustomTarget(self.name,
+                                         self.subdir.as_posix(),
+                                         self.subproject,
+                                         target_kwargs)
+
+        idir = InstallDir(self.subdir.as_posix(),
+                          Path('dist', self.rel_prefix).as_posix(),
+                          install_dir='.',
+                          install_mode=None,
+                          exclude=None,
+                          strip_directory=True,
+                          from_source_dir=False)
+
+        return [self.target, idir]
+
+    @stringArgs
+    @permittedKwargs({'subdir'})
+    def dependency_method(self, args, kwargs):
+        if len(args) != 1:
+            m = 'ExternalProject.dependency takes exactly 1 positional argument'
+            raise InterpreterException(m)
+        libname = args[0]
+
+        subdir = kwargs.get('subdir', '')
+        if not isinstance(subdir, str):
+            m = 'ExternalProject.dependency subdir keyword argument must be string.'
+            raise InterpreterException(m)
+
+        abs_includedir = Path(self.install_dir, self.rel_prefix, self.includedir)
+        if subdir:
+            abs_includedir = Path(abs_includedir, subdir)
+        abs_libdir = Path(self.install_dir, self.rel_prefix, self.libdir)
+
+        version = self.project_version['version']
+        incdir = []
+        compile_args = ['-I{}'.format(abs_includedir)]
+        link_args = ['-L{}'.format(abs_libdir), '-l{}'.format(libname)]
+        libs = []
+        libs_whole = []
+        sources = self.target
+        final_deps = []
+        variables = []
+        dep = InternalDependency(version, incdir, compile_args, link_args, libs,
+                                 libs_whole, sources, final_deps, variables)
+        return DependencyHolder(dep, self.subproject)
+
+
+class ExternalProjectModule(ExtensionModule):
+    @FeatureNew('External build system Module', '0.56.0')
+    def __init__(self, interpreter):
+        super().__init__(interpreter)
+
+    @stringArgs
+    @permittedKwargs({'configure_options', 'cross_configure_options', 'verbose', 'env'})
+    def add_project(self, state, args, kwargs):
+        if len(args) != 1:
+            raise InterpreterException('add_project takes exactly one positional argument')
+        configure_command = args[0]
+        configure_options = kwargs.get('configure_options', [])
+        cross_configure_options = kwargs.get('cross_configure_options', ['--host={host}'])
+        verbose = kwargs.get('verbose', False)
+        env = self.interpreter.unpack_env_kwarg(kwargs)
+        project = ExternalProject(self.interpreter,
+                                  state.subdir,
+                                  state.project_version,
+                                  state.subproject,
+                                  state.environment,
+                                  state.build_machine,
+                                  state.host_machine,
+                                  configure_command,
+                                  configure_options,
+                                  cross_configure_options,
+                                  env, verbose)
+        return ModuleReturnValue(project, project.targets)
+
+
+def initialize(*args, **kwargs):
+    return ExternalProjectModule(*args, **kwargs)
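A minimal sketch of how the new module might be driven from a meson.build in the external project's subdirectory; the import spelling, the configure flags and the library name 'foo' are illustrative assumptions, not taken from this patch:

    ep_mod = import('unstable-external_project')
    # configure_options must reference @PREFIX@, @LIBDIR@ and @INCLUDEDIR@,
    # as enforced by _validate_configure_options() above.
    proj = ep_mod.add_project('configure',
      configure_options: ['--prefix=@PREFIX@',
                          '--libdir=@LIBDIR@',
                          '--includedir=@INCLUDEDIR@'],
      cross_configure_options: ['--host=@HOST@'])
    foo_dep = proj.dependency('foo')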
diff -Nru meson-0.53.2/mesonbuild/modules/unstable_kconfig.py meson-0.57.0+really0.56.2/mesonbuild/modules/unstable_kconfig.py
--- meson-0.53.2/mesonbuild/modules/unstable_kconfig.py	2019-08-28 17:15:39.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/modules/unstable_kconfig.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,73 +0,0 @@
-# Copyright 2017, 2019 The Meson development team
-
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-
-#     http://www.apache.org/licenses/LICENSE-2.0
-
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from . import ExtensionModule
-
-from .. import mesonlib
-from ..mesonlib import typeslistify
-from ..interpreterbase import FeatureNew, noKwargs
-from ..interpreter import InvalidCode
-
-import os
-
-class KconfigModule(ExtensionModule):
-
-    @FeatureNew('Kconfig Module', '0.51.0')
-    def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-        self.snippets.add('load')
-
-    def _load_file(self, path_to_config):
-        result = dict()
-        try:
-            with open(path_to_config) as f:
-                for line in f:
-                    if '#' in line:
-                        comment_idx = line.index('#')
-                        line = line[:comment_idx]
-                    line = line.strip()
-                    try:
-                        name, val = line.split('=', 1)
-                    except ValueError:
-                        continue
-                    result[name.strip()] = val.strip()
-        except IOError as e:
-            raise mesonlib.MesonException('Failed to load {}: {}'.format(path_to_config, e))
-
-        return result
-
-    @noKwargs
-    def load(self, interpreter, state, args, kwargs):
-        sources = typeslistify(args, (str, mesonlib.File))
-        if len(sources) != 1:
-            raise InvalidCode('load takes only one file input.')
-
-        s = sources[0]
-        is_built = False
-        if isinstance(s, mesonlib.File):
-            if s.is_built:
-                FeatureNew('kconfig.load() of built files', '0.52.0').use(state.subproject)
-                is_built = True
-            s = s.absolute_path(interpreter.environment.source_dir, interpreter.environment.build_dir)
-        else:
-            s = os.path.join(interpreter.environment.source_dir, s)
-
-        if s not in interpreter.build_def_files and not is_built:
-            interpreter.build_def_files.append(s)
-
-        return self._load_file(s)
-
-
-def initialize(*args, **kwargs):
-    return KconfigModule(*args, **kwargs)
diff -Nru meson-0.53.2/mesonbuild/modules/windows.py meson-0.57.0+really0.56.2/mesonbuild/modules/windows.py
--- meson-0.53.2/mesonbuild/modules/windows.py	2019-08-28 17:15:39.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/modules/windows.py	2020-10-18 21:29:13.000000000 +0000
@@ -18,7 +18,7 @@
 
 from .. import mlog
 from .. import mesonlib, build
-from ..mesonlib import MachineChoice, MesonException, extract_as_list
+from ..mesonlib import MachineChoice, MesonException, extract_as_list, unholder
 from . import get_include_args
 from . import ModuleReturnValue
 from . import ExtensionModule
@@ -48,7 +48,7 @@
             return self._rescomp
 
         # Will try cross / native file and then env var
-        rescomp = ExternalProgram.from_bin_list(state.environment.binaries[for_machine], 'windres')
+        rescomp = ExternalProgram.from_bin_list(state.environment, for_machine, 'windres')
 
         if not rescomp or not rescomp.found():
             comp = self.detect_compiler(state.environment.coredata.compilers[for_machine])
@@ -107,7 +107,7 @@
                 'a MinGW bug: https://sourceware.org/bugzilla/show_bug.cgi?id=4933'
             for arg in extra_args:
                 if ' ' in arg:
-                    mlog.warning(m.format(arg))
+                    mlog.warning(m.format(arg), fatal=False)
 
         res_targets = []
 
@@ -116,27 +116,28 @@
                 for subsrc in src:
                     add_target(subsrc)
                 return
-
-            if hasattr(src, 'held_object'):
-                src = src.held_object
+            src = unholder(src)
 
             if isinstance(src, str):
-                name_format = 'file {!r}'
+                name_formatted = src
                 name = os.path.join(state.subdir, src)
             elif isinstance(src, mesonlib.File):
-                name_format = 'file {!r}'
+                name_formatted = src.fname
                 name = src.relative_name()
             elif isinstance(src, build.CustomTarget):
                 if len(src.get_outputs()) > 1:
                     raise MesonException('windows.compile_resources does not accept custom targets with more than 1 output.')
 
-                name_format = 'target {!r}'
+                # Chances are that src.get_filename() is already the name of that
+                # target; add a prefix to avoid a name clash.
+                name_formatted = 'windows_compile_resources_' + src.get_filename()
                 name = src.get_id()
             else:
                 raise MesonException('Unexpected source type {!r}. windows.compile_resources accepts only strings, files, custom targets, and lists thereof.'.format(src))
 
             # Path separators are not allowed in target names
             name = name.replace('/', '_').replace('\\', '_')
+            name_formatted = name_formatted.replace('/', '_').replace('\\', '_')
 
             res_kwargs = {
                 'output': name + '_@BASENAME@.' + suffix,
@@ -151,7 +152,7 @@
                 res_kwargs['depfile'] = res_kwargs['output'] + '.d'
                 res_kwargs['command'] += ['--preprocessor-arg=-MD', '--preprocessor-arg=-MQ@OUTPUT@', '--preprocessor-arg=-MF@DEPFILE@']
 
-            res_targets.append(build.CustomTarget('Windows resource for ' + name_format.format(name), state.subdir, state.subproject, res_kwargs))
+            res_targets.append(build.CustomTarget(name_formatted, state.subdir, state.subproject, res_kwargs))
 
         add_target(args)
 
diff -Nru meson-0.53.2/mesonbuild/mparser.py meson-0.57.0+really0.56.2/mesonbuild/mparser.py
--- meson-0.53.2/mesonbuild/mparser.py	2019-12-29 22:47:27.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/mparser.py	2020-09-17 22:00:44.000000000 +0000
@@ -14,10 +14,15 @@
 
 import re
 import codecs
+import textwrap
 import types
+import typing as T
 from .mesonlib import MesonException
 from . import mlog
 
+if T.TYPE_CHECKING:
+    from .ast import AstVisitor
+
 # This is the regex for the supported escape sequences of a regular string
 # literal, like 'abc\x00'
 ESCAPE_SEQUENCE_SINGLE_RE = re.compile(r'''
@@ -30,25 +35,34 @@
     )''', re.UNICODE | re.VERBOSE)
 
 class MesonUnicodeDecodeError(MesonException):
-    def __init__(self, match):
-        super().__init__("%s" % match)
+    def __init__(self, match: str) -> None:
+        super().__init__(match)
         self.match = match
 
-def decode_match(match):
+def decode_match(match: T.Match[str]) -> str:
     try:
-        return codecs.decode(match.group(0), 'unicode_escape')
+        return codecs.decode(match.group(0).encode(), 'unicode_escape')
     except UnicodeDecodeError:
         raise MesonUnicodeDecodeError(match.group(0))
 
 class ParseException(MesonException):
-    def __init__(self, text, line, lineno, colno):
+    def __init__(self, text: str, line: str, lineno: int, colno: int) -> None:
         # Format as error message, followed by the line with the error, followed by a caret to show the error column.
-        super().__init__("%s\n%s\n%s" % (text, line, '%s^' % (' ' * colno)))
+        super().__init__("{}\n{}\n{}".format(text, line, '{}^'.format(' ' * colno)))
         self.lineno = lineno
         self.colno = colno
 
 class BlockParseException(MesonException):
-    def __init__(self, text, line, lineno, colno, start_line, start_lineno, start_colno):
+    def __init__(
+                self,
+                text: str,
+                line: str,
+                lineno: int,
+                colno: int,
+                start_line: str,
+                start_lineno: int,
+                start_colno: int,
+            ) -> None:
         # This can be formatted in two ways - one if the block start and end are on the same line, and a different way if they are on different lines.
 
         if lineno == start_lineno:
@@ -71,23 +85,27 @@
         self.lineno = lineno
         self.colno = colno
 
-class Token:
-    def __init__(self, tid, subdir, line_start, lineno, colno, bytespan, value):
-        self.tid = tid
-        self.subdir = subdir
-        self.line_start = line_start
-        self.lineno = lineno
-        self.colno = colno
-        self.bytespan = bytespan
-        self.value = value
+TV_TokenTypes = T.TypeVar('TV_TokenTypes', int, str, bool)
+
+class Token(T.Generic[TV_TokenTypes]):
+    def __init__(self, tid: str, filename: str, line_start: int, lineno: int, colno: int, bytespan: T.Tuple[int, int], value: TV_TokenTypes):
+        self.tid = tid                # type: str
+        self.filename = filename      # type: str
+        self.line_start = line_start  # type: int
+        self.lineno = lineno          # type: int
+        self.colno = colno            # type: int
+        self.bytespan = bytespan      # type: T.Tuple[int, int]
+        self.value = value            # type: TV_TokenTypes
 
-    def __eq__(self, other):
+    def __eq__(self, other: object) -> bool:
         if isinstance(other, str):
             return self.tid == other
-        return self.tid == other.tid
+        elif isinstance(other, Token):
+            return self.tid == other.tid
+        return NotImplemented
 
 class Lexer:
-    def __init__(self, code):
+    def __init__(self, code: str):
         self.code = code
         self.keywords = {'true', 'false', 'if', 'else', 'elif',
                          'endif', 'and', 'or', 'not', 'foreach', 'endforeach',
@@ -129,10 +147,10 @@
             ('questionmark', re.compile(r'\?')),
         ]
 
-    def getline(self, line_start):
+    def getline(self, line_start: int) -> str:
         return self.code[line_start:self.code.find('\n', line_start)]
 
-    def lex(self, subdir):
+    def lex(self, filename: str) -> T.Generator[Token, None, None]:
         line_start = 0
         lineno = 1
         loc = 0
@@ -142,7 +160,7 @@
         col = 0
         while loc < len(self.code):
             matched = False
-            value = None
+            value = None  # type: T.Union[str, bool, int]
             for (tid, reg) in self.token_specification:
                 mo = reg.match(self.code, loc)
                 if mo:
@@ -174,8 +192,14 @@
                     elif tid == 'string':
                         # Handle here and not on the regexp to give a better error message.
                         if match_text.find("\n") != -1:
-                            mlog.warning("""Newline character in a string detected, use ''' (three single quotes) for multiline strings instead.
-This will become a hard error in a future Meson release.""", self.getline(line_start), lineno, col)
+                            mlog.warning(textwrap.dedent("""\
+                                    Newline character in a string detected, use ''' (three single quotes) for multiline strings instead.
+                                    This will become a hard error in a future Meson release.\
+                                """),
+                                self.getline(line_start),
+                                str(lineno),
+                                str(col)
+                            )
                         value = match_text[1:-1]
                         try:
                             value = ESCAPE_SEQUENCE_SINGLE_RE.sub(decode_match, value)
@@ -205,54 +229,63 @@
                         else:
                             if match_text in self.future_keywords:
                                 mlog.warning("Identifier '{}' will become a reserved keyword in a future release. Please rename it.".format(match_text),
-                                             location=types.SimpleNamespace(subdir=subdir, lineno=lineno))
+                                             location=types.SimpleNamespace(filename=filename, lineno=lineno))
                             value = match_text
-                    yield Token(tid, subdir, curline_start, curline, col, bytespan, value)
+                    yield Token(tid, filename, curline_start, curline, col, bytespan, value)
                     break
             if not matched:
                 raise ParseException('lexer', self.getline(line_start), lineno, col)
 
 class BaseNode:
-    def accept(self, visitor):
+    def __init__(self, lineno: int, colno: int, filename: str, end_lineno: T.Optional[int] = None, end_colno: T.Optional[int] = None):
+        self.lineno = lineno      # type: int
+        self.colno = colno        # type: int
+        self.filename = filename  # type: str
+        self.end_lineno = end_lineno if end_lineno is not None else self.lineno
+        self.end_colno = end_colno if end_colno is not None else self.colno
+
+        # Attributes for the visitors
+        self.level = 0            # type: int
+        self.ast_id = ''          # type: str
+        self.condition_level = 0  # type: int
+
+    def accept(self, visitor: 'AstVisitor') -> None:
         fname = 'visit_{}'.format(type(self).__name__)
         if hasattr(visitor, fname):
             func = getattr(visitor, fname)
             if callable(func):
                 func(self)
 
-class ElementaryNode(BaseNode):
-    def __init__(self, token):
-        self.lineno = token.lineno
-        self.subdir = token.subdir
-        self.colno = token.colno
-        self.value = token.value
-        self.bytespan = token.bytespan
+class ElementaryNode(T.Generic[TV_TokenTypes], BaseNode):
+    def __init__(self, token: Token[TV_TokenTypes]):
+        super().__init__(token.lineno, token.colno, token.filename)
+        self.value = token.value        # type: TV_TokenTypes
+        self.bytespan = token.bytespan  # type: T.Tuple[int, int]
 
-class BooleanNode(ElementaryNode):
-    def __init__(self, token, value):
+class BooleanNode(ElementaryNode[bool]):
+    def __init__(self, token: Token[bool]):
         super().__init__(token)
-        self.value = value
-        assert(isinstance(self.value, bool))
+        assert isinstance(self.value, bool)
 
-class IdNode(ElementaryNode):
-    def __init__(self, token):
+class IdNode(ElementaryNode[str]):
+    def __init__(self, token: Token[str]):
         super().__init__(token)
-        assert(isinstance(self.value, str))
+        assert isinstance(self.value, str)
 
-    def __str__(self):
+    def __str__(self) -> str:
         return "Id node: '%s' (%d, %d)." % (self.value, self.lineno, self.colno)
 
-class NumberNode(ElementaryNode):
-    def __init__(self, token):
+class NumberNode(ElementaryNode[int]):
+    def __init__(self, token: Token[int]):
         super().__init__(token)
-        assert(isinstance(self.value, int))
+        assert isinstance(self.value, int)
 
-class StringNode(ElementaryNode):
-    def __init__(self, token):
+class StringNode(ElementaryNode[str]):
+    def __init__(self, token: Token[str]):
         super().__init__(token)
-        assert(isinstance(self.value, str))
+        assert isinstance(self.value, str)
 
-    def __str__(self):
+    def __str__(self) -> str:
         return "String node: '%s' (%d, %d)." % (self.value, self.lineno, self.colno)
 
 class ContinueNode(ElementaryNode):
@@ -261,203 +294,163 @@
 class BreakNode(ElementaryNode):
     pass
 
+class ArgumentNode(BaseNode):
+    def __init__(self, token: Token[TV_TokenTypes]):
+        super().__init__(token.lineno, token.colno, token.filename)
+        self.arguments = []  # type: T.List[BaseNode]
+        self.commas = []     # type: T.List[Token[TV_TokenTypes]]
+        self.kwargs = {}     # type: T.Dict[BaseNode, BaseNode]
+        self.order_error = False
+
+    def prepend(self, statement: BaseNode) -> None:
+        if self.num_kwargs() > 0:
+            self.order_error = True
+        if not isinstance(statement, EmptyNode):
+            self.arguments = [statement] + self.arguments
+
+    def append(self, statement: BaseNode) -> None:
+        if self.num_kwargs() > 0:
+            self.order_error = True
+        if not isinstance(statement, EmptyNode):
+            self.arguments += [statement]
+
+    def set_kwarg(self, name: IdNode, value: BaseNode) -> None:
+        if name.value in [x.value for x in self.kwargs.keys() if isinstance(x, IdNode)]:
+            mlog.warning('Keyword argument "{}" defined multiple times.'.format(name.value), location=self)
+            mlog.warning('This will be an error in future Meson releases.')
+        self.kwargs[name] = value
+
+    def set_kwarg_no_check(self, name: BaseNode, value: BaseNode) -> None:
+        self.kwargs[name] = value
+
+    def num_args(self) -> int:
+        return len(self.arguments)
+
+    def num_kwargs(self) -> int:
+        return len(self.kwargs)
+
+    def incorrect_order(self) -> bool:
+        return self.order_error
+
+    def __len__(self) -> int:
+        return self.num_args() # Fixme
+
 class ArrayNode(BaseNode):
-    def __init__(self, args, lineno, colno, end_lineno, end_colno):
-        self.subdir = args.subdir
-        self.lineno = lineno
-        self.colno = colno
-        self.end_lineno = end_lineno
-        self.end_colno = end_colno
-        self.args = args
+    def __init__(self, args: ArgumentNode, lineno: int, colno: int, end_lineno: int, end_colno: int):
+        super().__init__(lineno, colno, args.filename, end_lineno=end_lineno, end_colno=end_colno)
+        self.args = args              # type: ArgumentNode
 
 class DictNode(BaseNode):
-    def __init__(self, args, lineno, colno, end_lineno, end_colno):
-        self.subdir = args.subdir
-        self.lineno = lineno
-        self.colno = colno
-        self.end_lineno = end_lineno
-        self.end_colno = end_colno
+    def __init__(self, args: ArgumentNode, lineno: int, colno: int, end_lineno: int, end_colno: int):
+        super().__init__(lineno, colno, args.filename, end_lineno=end_lineno, end_colno=end_colno)
         self.args = args
 
 class EmptyNode(BaseNode):
-    def __init__(self, lineno, colno):
-        self.subdir = ''
-        self.lineno = lineno
-        self.colno = colno
+    def __init__(self, lineno: int, colno: int, filename: str):
+        super().__init__(lineno, colno, filename)
         self.value = None
 
 class OrNode(BaseNode):
-    def __init__(self, left, right):
-        self.subdir = left.subdir
-        self.lineno = left.lineno
-        self.colno = left.colno
-        self.left = left
-        self.right = right
+    def __init__(self, left: BaseNode, right: BaseNode):
+        super().__init__(left.lineno, left.colno, left.filename)
+        self.left = left    # type: BaseNode
+        self.right = right  # type: BaseNode
 
 class AndNode(BaseNode):
-    def __init__(self, left, right):
-        self.subdir = left.subdir
-        self.lineno = left.lineno
-        self.colno = left.colno
-        self.left = left
-        self.right = right
+    def __init__(self, left: BaseNode, right: BaseNode):
+        super().__init__(left.lineno, left.colno, left.filename)
+        self.left = left    # type: BaseNode
+        self.right = right  # type: BaseNode
 
 class ComparisonNode(BaseNode):
-    def __init__(self, ctype, left, right):
-        self.lineno = left.lineno
-        self.colno = left.colno
-        self.subdir = left.subdir
-        self.left = left
-        self.right = right
-        self.ctype = ctype
+    def __init__(self, ctype: str, left: BaseNode, right: BaseNode):
+        super().__init__(left.lineno, left.colno, left.filename)
+        self.left = left    # type: BaseNode
+        self.right = right  # type: BaseNode
+        self.ctype = ctype  # type: str
 
 class ArithmeticNode(BaseNode):
-    def __init__(self, operation, left, right):
-        self.subdir = left.subdir
-        self.lineno = left.lineno
-        self.colno = left.colno
-        self.left = left
-        self.right = right
-        self.operation = operation
+    def __init__(self, operation: str, left: BaseNode, right: BaseNode):
+        super().__init__(left.lineno, left.colno, left.filename)
+        self.left = left            # type: BaseNode
+        self.right = right          # type: BaseNode
+        self.operation = operation  # type: str
 
 class NotNode(BaseNode):
-    def __init__(self, location_node, value):
-        self.subdir = location_node.subdir
-        self.lineno = location_node.lineno
-        self.colno = location_node.colno
-        self.value = value
+    def __init__(self, token: Token[TV_TokenTypes], value: BaseNode):
+        super().__init__(token.lineno, token.colno, token.filename)
+        self.value = value  # type: BaseNode
 
 class CodeBlockNode(BaseNode):
-    def __init__(self, location_node):
-        self.subdir = location_node.subdir
-        self.lineno = location_node.lineno
-        self.colno = location_node.colno
-        self.lines = []
+    def __init__(self, token: Token[TV_TokenTypes]):
+        super().__init__(token.lineno, token.colno, token.filename)
+        self.lines = []  # type: T.List[BaseNode]
 
 class IndexNode(BaseNode):
-    def __init__(self, iobject, index):
-        self.iobject = iobject
-        self.index = index
-        self.subdir = iobject.subdir
-        self.lineno = iobject.lineno
-        self.colno = iobject.colno
+    def __init__(self, iobject: BaseNode, index: BaseNode):
+        super().__init__(iobject.lineno, iobject.colno, iobject.filename)
+        self.iobject = iobject  # type: BaseNode
+        self.index = index      # type: BaseNode
 
 class MethodNode(BaseNode):
-    def __init__(self, subdir, lineno, colno, source_object, name, args):
-        self.subdir = subdir
-        self.lineno = lineno
-        self.colno = colno
-        self.source_object = source_object
-        self.name = name
+    def __init__(self, filename: str, lineno: int, colno: int, source_object: BaseNode, name: str, args: ArgumentNode):
+        super().__init__(lineno, colno, filename)
+        self.source_object = source_object  # type: BaseNode
+        self.name = name                    # type: str
         assert(isinstance(self.name, str))
-        self.args = args
+        self.args = args                    # type: ArgumentNode
 
 class FunctionNode(BaseNode):
-    def __init__(self, subdir, lineno, colno, end_lineno, end_colno, func_name, args):
-        self.subdir = subdir
-        self.lineno = lineno
-        self.colno = colno
-        self.end_lineno = end_lineno
-        self.end_colno = end_colno
-        self.func_name = func_name
+    def __init__(self, filename: str, lineno: int, colno: int, end_lineno: int, end_colno: int, func_name: str, args: ArgumentNode):
+        super().__init__(lineno, colno, filename, end_lineno=end_lineno, end_colno=end_colno)
+        self.func_name = func_name  # type: str
         assert(isinstance(func_name, str))
-        self.args = args
+        self.args = args  # type: ArgumentNode
 
 class AssignmentNode(BaseNode):
-    def __init__(self, subdir, lineno, colno, var_name, value):
-        self.subdir = subdir
-        self.lineno = lineno
-        self.colno = colno
-        self.var_name = var_name
+    def __init__(self, filename: str, lineno: int, colno: int, var_name: str, value: BaseNode):
+        super().__init__(lineno, colno, filename)
+        self.var_name = var_name  # type: str
         assert(isinstance(var_name, str))
-        self.value = value
+        self.value = value  # type: BaseNode
 
 class PlusAssignmentNode(BaseNode):
-    def __init__(self, subdir, lineno, colno, var_name, value):
-        self.subdir = subdir
-        self.lineno = lineno
-        self.colno = colno
-        self.var_name = var_name
+    def __init__(self, filename: str, lineno: int, colno: int, var_name: str, value: BaseNode):
+        super().__init__(lineno, colno, filename)
+        self.var_name = var_name  # type: str
         assert(isinstance(var_name, str))
-        self.value = value
+        self.value = value  # type: BaseNode
 
 class ForeachClauseNode(BaseNode):
-    def __init__(self, lineno, colno, varnames, items, block):
-        self.lineno = lineno
-        self.colno = colno
-        self.varnames = varnames
-        self.items = items
-        self.block = block
+    def __init__(self, token: Token, varnames: T.List[str], items: BaseNode, block: CodeBlockNode):
+        super().__init__(token.lineno, token.colno, token.filename)
+        self.varnames = varnames  # type: T.List[str]
+        self.items = items        # type: BaseNode
+        self.block = block        # type: CodeBlockNode
+
+class IfNode(BaseNode):
+    def __init__(self, linenode: BaseNode, condition: BaseNode, block: CodeBlockNode):
+        super().__init__(linenode.lineno, linenode.colno, linenode.filename)
+        self.condition = condition  # type: BaseNode
+        self.block = block          # type: CodeBlockNode
 
 class IfClauseNode(BaseNode):
-    def __init__(self, lineno, colno):
-        self.lineno = lineno
-        self.colno = colno
-        self.ifs = []
-        self.elseblock = EmptyNode(lineno, colno)
+    def __init__(self, linenode: BaseNode):
+        super().__init__(linenode.lineno, linenode.colno, linenode.filename)
+        self.ifs = []          # type: T.List[IfNode]
+        self.elseblock = None  # type: T.Union[EmptyNode, CodeBlockNode]
 
 class UMinusNode(BaseNode):
-    def __init__(self, current_location, value):
-        self.subdir = current_location.subdir
-        self.lineno = current_location.lineno
-        self.colno = current_location.colno
-        self.value = value
-
-class IfNode(BaseNode):
-    def __init__(self, lineno, colno, condition, block):
-        self.lineno = lineno
-        self.colno = colno
-        self.condition = condition
-        self.block = block
+    def __init__(self, current_location: Token, value: BaseNode):
+        super().__init__(current_location.lineno, current_location.colno, current_location.filename)
+        self.value = value  # type: BaseNode
 
 class TernaryNode(BaseNode):
-    def __init__(self, subdir, lineno, colno, condition, trueblock, falseblock):
-        self.subdir = subdir
-        self.lineno = lineno
-        self.colno = colno
-        self.condition = condition
-        self.trueblock = trueblock
-        self.falseblock = falseblock
-
-class ArgumentNode(BaseNode):
-    def __init__(self, token):
-        self.lineno = token.lineno
-        self.colno = token.colno
-        self.subdir = token.subdir
-        self.arguments = []
-        self.commas = []
-        self.kwargs = {}
-        self.order_error = False
-
-    def prepend(self, statement):
-        if self.num_kwargs() > 0:
-            self.order_error = True
-        if not isinstance(statement, EmptyNode):
-            self.arguments = [statement] + self.arguments
-
-    def append(self, statement):
-        if self.num_kwargs() > 0:
-            self.order_error = True
-        if not isinstance(statement, EmptyNode):
-            self.arguments += [statement]
-
-    def set_kwarg(self, name, value):
-        if name in self.kwargs:
-            mlog.warning('Keyword argument "{}" defined multiple times.'.format(name), location=self)
-            mlog.warning('This will be an error in future Meson releases.')
-        self.kwargs[name] = value
-
-    def num_args(self):
-        return len(self.arguments)
-
-    def num_kwargs(self):
-        return len(self.kwargs)
-
-    def incorrect_order(self):
-        return self.order_error
-
-    def __len__(self):
-        return self.num_args() # Fixme
+    def __init__(self, condition: BaseNode, trueblock: BaseNode, falseblock: BaseNode):
+        super().__init__(condition.lineno, condition.colno, condition.filename)
+        self.condition = condition    # type: BaseNode
+        self.trueblock = trueblock    # type: BaseNode
+        self.falseblock = falseblock  # type: BaseNode
 
 comparison_map = {'equal': '==',
                   'nequal': '!=',
@@ -485,59 +478,68 @@
 # 9 plain token
 
 class Parser:
-    def __init__(self, code, subdir):
+    def __init__(self, code: str, filename: str):
         self.lexer = Lexer(code)
-        self.stream = self.lexer.lex(subdir)
-        self.current = Token('eof', '', 0, 0, 0, (0, 0), None)
+        self.stream = self.lexer.lex(filename)
+        self.current = Token('eof', '', 0, 0, 0, (0, 0), None)  # type: Token
         self.getsym()
         self.in_ternary = False
 
-    def getsym(self):
+    def getsym(self) -> None:
         try:
             self.current = next(self.stream)
         except StopIteration:
             self.current = Token('eof', '', self.current.line_start, self.current.lineno, self.current.colno + self.current.bytespan[1] - self.current.bytespan[0], (0, 0), None)
 
-    def getline(self):
+    def getline(self) -> str:
         return self.lexer.getline(self.current.line_start)
 
-    def accept(self, s):
+    def accept(self, s: str) -> bool:
         if self.current.tid == s:
             self.getsym()
             return True
         return False
 
-    def expect(self, s):
+    def accept_any(self, tids: T.Sequence[str]) -> str:
+        tid = self.current.tid
+        if tid in tids:
+            self.getsym()
+            return tid
+        return ''
+
+    def expect(self, s: str) -> bool:
         if self.accept(s):
             return True
         raise ParseException('Expecting %s got %s.' % (s, self.current.tid), self.getline(), self.current.lineno, self.current.colno)
 
-    def block_expect(self, s, block_start):
+    def block_expect(self, s: str, block_start: Token) -> bool:
         if self.accept(s):
             return True
         raise BlockParseException('Expecting %s got %s.' % (s, self.current.tid), self.getline(), self.current.lineno, self.current.colno, self.lexer.getline(block_start.line_start), block_start.lineno, block_start.colno)
 
-    def parse(self):
+    def parse(self) -> CodeBlockNode:
         block = self.codeblock()
         self.expect('eof')
         return block
 
-    def statement(self):
+    def statement(self) -> BaseNode:
         return self.e1()
 
-    def e1(self):
+    def e1(self) -> BaseNode:
         left = self.e2()
         if self.accept('plusassign'):
             value = self.e1()
             if not isinstance(left, IdNode):
                 raise ParseException('Plusassignment target must be an id.', self.getline(), left.lineno, left.colno)
-            return PlusAssignmentNode(left.subdir, left.lineno, left.colno, left.value, value)
+            assert isinstance(left.value, str)
+            return PlusAssignmentNode(left.filename, left.lineno, left.colno, left.value, value)
         elif self.accept('assign'):
             value = self.e1()
             if not isinstance(left, IdNode):
                 raise ParseException('Assignment target must be an id.',
                                      self.getline(), left.lineno, left.colno)
-            return AssignmentNode(left.subdir, left.lineno, left.colno, left.value, value)
+            assert isinstance(left.value, str)
+            return AssignmentNode(left.filename, left.lineno, left.colno, left.value, value)
         elif self.accept('questionmark'):
             if self.in_ternary:
                 raise ParseException('Nested ternary operators are not allowed.',
@@ -547,10 +549,10 @@
             self.expect('colon')
             falseblock = self.e1()
             self.in_ternary = False
-            return TernaryNode(left.subdir, left.lineno, left.colno, left, trueblock, falseblock)
+            return TernaryNode(left, trueblock, falseblock)
         return left
 
-    def e2(self):
+    def e2(self) -> BaseNode:
         left = self.e3()
         while self.accept('or'):
             if isinstance(left, EmptyNode):
@@ -559,7 +561,7 @@
             left = OrNode(left, self.e3())
         return left
 
-    def e3(self):
+    def e3(self) -> BaseNode:
         left = self.e4()
         while self.accept('and'):
             if isinstance(left, EmptyNode):
@@ -568,7 +570,7 @@
             left = AndNode(left, self.e4())
         return left
 
-    def e4(self):
+    def e4(self) -> BaseNode:
         left = self.e5()
         for nodename, operator_type in comparison_map.items():
             if self.accept(nodename):
@@ -577,47 +579,46 @@
             return ComparisonNode('notin', left, self.e5())
         return left
 
-    def e5(self):
-        return self.e5add()
-
-    def e5add(self):
-        left = self.e5sub()
-        if self.accept('plus'):
-            return ArithmeticNode('add', left, self.e5add())
-        return left
-
-    def e5sub(self):
-        left = self.e5mod()
-        if self.accept('dash'):
-            return ArithmeticNode('sub', left, self.e5sub())
-        return left
-
-    def e5mod(self):
-        left = self.e5mul()
-        if self.accept('percent'):
-            return ArithmeticNode('mod', left, self.e5mod())
-        return left
+    def e5(self) -> BaseNode:
+        return self.e5addsub()
 
-    def e5mul(self):
-        left = self.e5div()
-        if self.accept('star'):
-            return ArithmeticNode('mul', left, self.e5mul())
+    def e5addsub(self) -> BaseNode:
+        op_map = {
+            'plus': 'add',
+            'dash': 'sub',
+        }
+        left = self.e5muldiv()
+        while True:
+            op = self.accept_any(tuple(op_map.keys()))
+            if op:
+                left = ArithmeticNode(op_map[op], left, self.e5muldiv())
+            else:
+                break
         return left
 
-    def e5div(self):
+    def e5muldiv(self) -> BaseNode:
+        op_map = {
+            'percent': 'mod',
+            'star': 'mul',
+            'fslash': 'div',
+        }
         left = self.e6()
-        if self.accept('fslash'):
-            return ArithmeticNode('div', left, self.e5div())
+        while True:
+            op = self.accept_any(tuple(op_map.keys()))
+            if op:
+                left = ArithmeticNode(op_map[op], left, self.e6())
+            else:
+                break
         return left
 
-    def e6(self):
+    def e6(self) -> BaseNode:
         if self.accept('not'):
             return NotNode(self.current, self.e7())
         if self.accept('dash'):
             return UMinusNode(self.current, self.e7())
         return self.e7()
 
-    def e7(self):
+    def e7(self) -> BaseNode:
         left = self.e8()
         block_start = self.current
         if self.accept('lparen'):
@@ -626,7 +627,8 @@
             if not isinstance(left, IdNode):
                 raise ParseException('Function call must be applied to plain id',
                                      self.getline(), left.lineno, left.colno)
-            left = FunctionNode(left.subdir, left.lineno, left.colno, self.current.lineno, self.current.colno, left.value, args)
+            assert isinstance(left.value, str)
+            left = FunctionNode(left.filename, left.lineno, left.colno, self.current.lineno, self.current.colno, left.value, args)
         go_again = True
         while go_again:
             go_again = False
@@ -638,7 +640,7 @@
                 left = self.index_call(left)
         return left
 
-    def e8(self):
+    def e8(self) -> BaseNode:
         block_start = self.current
         if self.accept('lparen'):
             e = self.statement()
@@ -655,27 +657,29 @@
         else:
             return self.e9()
 
-    def e9(self):
+    def e9(self) -> BaseNode:
         t = self.current
         if self.accept('true'):
-            return BooleanNode(t, True)
+            t.value = True
+            return BooleanNode(t)
         if self.accept('false'):
-            return BooleanNode(t, False)
+            t.value = False
+            return BooleanNode(t)
         if self.accept('id'):
             return IdNode(t)
         if self.accept('number'):
             return NumberNode(t)
         if self.accept('string'):
             return StringNode(t)
-        return EmptyNode(self.current.lineno, self.current.colno)
+        return EmptyNode(self.current.lineno, self.current.colno, self.current.filename)
 
-    def key_values(self):
-        s = self.statement()
-        a = ArgumentNode(s)
+    def key_values(self) -> ArgumentNode:
+        s = self.statement()  # type: BaseNode
+        a = ArgumentNode(self.current)
 
         while not isinstance(s, EmptyNode):
             if self.accept('colon'):
-                a.set_kwarg(s, self.statement())
+                a.set_kwarg_no_check(s, self.statement())
                 potential = self.current
                 if not self.accept('comma'):
                     return a
@@ -686,9 +690,9 @@
             s = self.statement()
         return a
 
-    def args(self):
-        s = self.statement()
-        a = ArgumentNode(s)
+    def args(self) -> ArgumentNode:
+        s = self.statement()  # type: BaseNode
+        a = ArgumentNode(self.current)
 
         while not isinstance(s, EmptyNode):
             potential = self.current
@@ -699,7 +703,7 @@
                 if not isinstance(s, IdNode):
                     raise ParseException('Dictionary key must be a plain identifier.',
                                          self.getline(), s.lineno, s.colno)
-                a.set_kwarg(s.value, self.statement())
+                a.set_kwarg(s, self.statement())
                 potential = self.current
                 if not self.accept('comma'):
                     return a
@@ -710,81 +714,85 @@
             s = self.statement()
         return a
 
-    def method_call(self, source_object):
+    def method_call(self, source_object: BaseNode) -> MethodNode:
         methodname = self.e9()
         if not(isinstance(methodname, IdNode)):
             raise ParseException('Method name must be plain id',
                                  self.getline(), self.current.lineno, self.current.colno)
+        assert isinstance(methodname.value, str)
         self.expect('lparen')
         args = self.args()
         self.expect('rparen')
-        method = MethodNode(methodname.subdir, methodname.lineno, methodname.colno, source_object, methodname.value, args)
+        method = MethodNode(methodname.filename, methodname.lineno, methodname.colno, source_object, methodname.value, args)
         if self.accept('dot'):
             return self.method_call(method)
         return method
 
-    def index_call(self, source_object):
+    def index_call(self, source_object: BaseNode) -> IndexNode:
         index_statement = self.statement()
         self.expect('rbracket')
         return IndexNode(source_object, index_statement)
 
-    def foreachblock(self):
+    def foreachblock(self) -> ForeachClauseNode:
         t = self.current
         self.expect('id')
+        assert isinstance(t.value, str)
         varname = t
-        varnames = [t]
+        varnames = [t.value]  # type: T.List[str]
 
         if self.accept('comma'):
             t = self.current
             self.expect('id')
-            varnames.append(t)
+            assert isinstance(t.value, str)
+            varnames.append(t.value)
 
         self.expect('colon')
         items = self.statement()
         block = self.codeblock()
-        return ForeachClauseNode(varname.lineno, varname.colno, varnames, items, block)
+        return ForeachClauseNode(varname, varnames, items, block)
 
-    def ifblock(self):
+    def ifblock(self) -> IfClauseNode:
         condition = self.statement()
-        clause = IfClauseNode(condition.lineno, condition.colno)
+        clause = IfClauseNode(condition)
         self.expect('eol')
         block = self.codeblock()
-        clause.ifs.append(IfNode(clause.lineno, clause.colno, condition, block))
+        clause.ifs.append(IfNode(clause, condition, block))
         self.elseifblock(clause)
         clause.elseblock = self.elseblock()
         return clause
 
-    def elseifblock(self, clause):
+    def elseifblock(self, clause: IfClauseNode) -> None:
         while self.accept('elif'):
             s = self.statement()
             self.expect('eol')
             b = self.codeblock()
-            clause.ifs.append(IfNode(s.lineno, s.colno, s, b))
+            clause.ifs.append(IfNode(s, s, b))
 
-    def elseblock(self):
+    def elseblock(self) -> T.Union[CodeBlockNode, EmptyNode]:
         if self.accept('else'):
             self.expect('eol')
             return self.codeblock()
+        return EmptyNode(self.current.lineno, self.current.colno, self.current.filename)
 
-    def line(self):
+    def line(self) -> BaseNode:
         block_start = self.current
         if self.current == 'eol':
-            return EmptyNode(self.current.lineno, self.current.colno)
+            return EmptyNode(self.current.lineno, self.current.colno, self.current.filename)
         if self.accept('if'):
-            block = self.ifblock()
+            ifblock = self.ifblock()
             self.block_expect('endif', block_start)
-            return block
+            return ifblock
         if self.accept('foreach'):
-            block = self.foreachblock()
+            forblock = self.foreachblock()
             self.block_expect('endforeach', block_start)
-            return block
+            return forblock
         if self.accept('continue'):
             return ContinueNode(self.current)
         if self.accept('break'):
             return BreakNode(self.current)
         return self.statement()
 
-    def codeblock(self):
+    def codeblock(self) -> CodeBlockNode:
         block = CodeBlockNode(self.current)
         cond = True
         while cond:
diff -Nru meson-0.53.2/mesonbuild/msetup.py meson-0.57.0+really0.56.2/mesonbuild/msetup.py
--- meson-0.53.2/mesonbuild/msetup.py	2020-01-23 12:51:19.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/msetup.py	2021-01-06 10:39:48.000000000 +0000
@@ -31,7 +31,7 @@
 from .mconf import make_lower_case
 from .mesonlib import MesonException
 
-def add_arguments(parser):
+def add_arguments(parser: argparse.ArgumentParser) -> None:
     coredata.register_builtin_arguments(parser)
     parser.add_argument('--native-file',
                         default=[],
@@ -53,13 +53,13 @@
                              'is not working.')
     parser.add_argument('--wipe', action='store_true',
                         help='Wipe build directory and reconfigure using previous command line options. ' +
-                             'Userful when build directory got corrupted, or when rebuilding with a ' +
+                             'Useful when build directory got corrupted, or when rebuilding with a ' +
                              'newer version of meson.')
     parser.add_argument('builddir', nargs='?', default=None)
     parser.add_argument('sourcedir', nargs='?', default=None)
 
 class MesonApp:
-    def __init__(self, options):
+    def __init__(self, options: argparse.Namespace) -> None:
         (self.source_dir, self.build_dir) = self.validate_dirs(options.builddir,
                                                                options.sourcedir,
                                                                options.reconfigure,
@@ -86,7 +86,7 @@
                     # will cause a crash
                     for l in os.listdir(self.build_dir):
                         l = os.path.join(self.build_dir, l)
-                        if os.path.isdir(l):
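+                        # Do not recurse into symlinked directories; the symlink
+                        # itself is removed as a plain file by the else branch.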
+                        if os.path.isdir(l) and not os.path.islink(l):
                             mesonlib.windows_proof_rmtree(l)
                         else:
                             mesonlib.windows_proof_rm(l)
@@ -118,18 +118,18 @@
         if not os.path.exists(ndir2):
             os.makedirs(ndir2)
         if not stat.S_ISDIR(os.stat(ndir1).st_mode):
-            raise MesonException('%s is not a directory' % dir1)
+            raise MesonException('{} is not a directory'.format(dir1))
         if not stat.S_ISDIR(os.stat(ndir2).st_mode):
-            raise MesonException('%s is not a directory' % dir2)
+            raise MesonException('{} is not a directory'.format(dir2))
         if os.path.samefile(dir1, dir2):
             raise MesonException('Source and build directories must not be the same. Create a pristine build directory.')
         if self.has_build_file(ndir1):
             if self.has_build_file(ndir2):
-                raise MesonException('Both directories contain a build file %s.' % environment.build_filename)
+                raise MesonException('Both directories contain a build file {}.'.format(environment.build_filename))
             return ndir1, ndir2
         if self.has_build_file(ndir2):
             return ndir2, ndir1
-        raise MesonException('Neither directory contains a build file %s.' % environment.build_filename)
+        raise MesonException('Neither directory contains a build file {}.'.format(environment.build_filename))
 
     def validate_dirs(self, dir1: str, dir2: str, reconfigure: bool, wipe: bool) -> T.Tuple[str, str]:
         (src_dir, build_dir) = self.validate_core_dirs(dir1, dir2)
@@ -150,15 +150,17 @@
                 raise SystemExit('Directory does not contain a valid build tree:\n{}'.format(build_dir))
         return src_dir, build_dir
 
-    def generate(self):
+    def generate(self) -> None:
         env = environment.Environment(self.source_dir, self.build_dir, self.options)
         mlog.initialize(env.get_log_dir(), self.options.fatal_warnings)
         if self.options.profile:
             mlog.set_timestamp_start(time.monotonic())
+        if env.coredata.builtins['backend'].value == 'xcode':
+            mlog.warning('xcode backend is currently unmaintained, patches welcome')
         with mesonlib.BuildDirLock(self.build_dir):
             self._generate(env)
 
-    def _generate(self, env):
+    def _generate(self, env: environment.Environment) -> None:
         mlog.debug('Build started at', datetime.datetime.now().isoformat())
         mlog.debug('Main binary:', sys.executable)
         mlog.debug('Build Options:', coredata.get_cmd_line_options(self.build_dir, self.options))
@@ -178,12 +180,18 @@
             logger_fun = mlog.log
         else:
             logger_fun = mlog.debug
-        logger_fun('Build machine cpu family:', mlog.bold(intr.builtin['build_machine'].cpu_family_method([], {})))
-        logger_fun('Build machine cpu:', mlog.bold(intr.builtin['build_machine'].cpu_method([], {})))
-        mlog.log('Host machine cpu family:', mlog.bold(intr.builtin['host_machine'].cpu_family_method([], {})))
-        mlog.log('Host machine cpu:', mlog.bold(intr.builtin['host_machine'].cpu_method([], {})))
-        logger_fun('Target machine cpu family:', mlog.bold(intr.builtin['target_machine'].cpu_family_method([], {})))
-        logger_fun('Target machine cpu:', mlog.bold(intr.builtin['target_machine'].cpu_method([], {})))
+        build_machine = intr.builtin['build_machine']
+        host_machine = intr.builtin['host_machine']
+        target_machine = intr.builtin['target_machine']
+        assert isinstance(build_machine, interpreter.MachineHolder)
+        assert isinstance(host_machine, interpreter.MachineHolder)
+        assert isinstance(target_machine, interpreter.MachineHolder)
+        logger_fun('Build machine cpu family:', mlog.bold(build_machine.cpu_family_method([], {})))
+        logger_fun('Build machine cpu:', mlog.bold(build_machine.cpu_method([], {})))
+        mlog.log('Host machine cpu family:', mlog.bold(host_machine.cpu_family_method([], {})))
+        mlog.log('Host machine cpu:', mlog.bold(host_machine.cpu_method([], {})))
+        logger_fun('Target machine cpu family:', mlog.bold(target_machine.cpu_family_method([], {})))
+        logger_fun('Target machine cpu:', mlog.bold(target_machine.cpu_method([], {})))
         try:
             if self.options.profile:
                 fname = os.path.join(self.build_dir, 'meson-private', 'profile-interpreter.log')
@@ -210,9 +218,9 @@
             if self.options.profile:
                 fname = 'profile-{}-backend.log'.format(intr.backend.name)
                 fname = os.path.join(self.build_dir, 'meson-private', fname)
-                profile.runctx('intr.backend.generate(intr)', globals(), locals(), filename=fname)
+                profile.runctx('intr.backend.generate()', globals(), locals(), filename=fname)
             else:
-                intr.backend.generate(intr)
+                intr.backend.generate()
             build.save(b, dumpfile)
             if env.first_invocation:
                 coredata.write_cmd_line_file(self.build_dir, self.options)
@@ -239,7 +247,7 @@
                     os.unlink(cdf)
             raise
 
-def run(options) -> int:
+def run(options: argparse.Namespace) -> int:
     coredata.parse_cmd_line_options(options)
     app = MesonApp(options)
     app.generate()
diff -Nru meson-0.53.2/mesonbuild/msubprojects.py meson-0.57.0+really0.56.2/mesonbuild/msubprojects.py
--- meson-0.53.2/mesonbuild/msubprojects.py	2020-01-23 12:51:19.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/msubprojects.py	2021-01-06 10:39:48.000000000 +0000
@@ -1,18 +1,21 @@
 import os, subprocess
 import argparse
+from ._pathlib import Path
 
 from . import mlog
-from .mesonlib import git, Popen_safe
-from .wrap.wrap import API_ROOT, PackageDefinition, Resolver, WrapException
+from .mesonlib import quiet_git, verbose_git, GitException, Popen_safe, MesonException, windows_proof_rmtree
+from .wrap.wrap import API_ROOT, Resolver, WrapException, ALL_TYPES
 from .wrap import wraptool
 
+ALL_TYPES_STRING = ', '.join(ALL_TYPES)
+
 def update_wrapdb_file(wrap, repo_dir, options):
     patch_url = wrap.get('patch_url')
     branch, revision = wraptool.parse_patch_url(patch_url)
     new_branch, new_revision = wraptool.get_latest_version(wrap.name)
     if new_branch == branch and new_revision == revision:
         mlog.log('  -> Up to date.')
-        return
+        return True
     wraptool.update_wrap_file(wrap.filename, wrap.name, new_branch, new_revision)
     msg = ['  -> New wrap file downloaded.']
     # Meson reconfigure won't use the new wrap file as long as the source
@@ -21,11 +24,12 @@
     if os.path.isdir(repo_dir):
         msg += ['To use it, delete', mlog.bold(repo_dir), 'and run', mlog.bold('meson --reconfigure')]
     mlog.log(*msg)
+    return True
 
-def update_file(wrap, repo_dir, options):
+def update_file(r, wrap, repo_dir, options):
     patch_url = wrap.values.get('patch_url', '')
     if patch_url.startswith(API_ROOT):
-        update_wrapdb_file(wrap, repo_dir, options)
+        return update_wrapdb_file(wrap, repo_dir, options)
     elif not os.path.isdir(repo_dir):
         # The subproject is not needed, or it is a tarball extracted in
         # 'libfoo-1.0' directory and the version has been bumped and the new
@@ -38,68 +42,184 @@
         # version.
         mlog.log('  -> Subproject has not changed, or the new source/patch needs to be extracted on the same location.\n' +
                  '     In that case, delete', mlog.bold(repo_dir), 'and run', mlog.bold('meson --reconfigure'))
+    return True
 
 def git_output(cmd, workingdir):
-    return git(cmd, workingdir, check=True, universal_newlines=True,
-               stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout
+    return quiet_git(cmd, workingdir, check=True)[1]
+
+def git_stash(workingdir):
+    # The git command below returns 1 (failure) when there is something to stash.
+    # We don't want to stash when there is nothing to stash because that would
+    # print spurious "No local changes to save".
+    if not quiet_git(['diff', '--quiet', 'HEAD'], workingdir)[0]:
+        # Don't pipe stdout here because we want the user to see their changes have
+        # been saved.
+        verbose_git(['stash'], workingdir, check=True)
 
 def git_show(repo_dir):
     commit_message = git_output(['show', '--quiet', '--pretty=format:%h%n%d%n%s%n[%an]'], repo_dir)
     parts = [s.strip() for s in commit_message.split('\n')]
     mlog.log('  ->', mlog.yellow(parts[0]), mlog.red(parts[1]), parts[2], mlog.blue(parts[3]))
 
-def update_git(wrap, repo_dir, options):
+def git_rebase(repo_dir, revision):
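+    # rebase.autoStash=true stashes any uncommitted changes before rebasing and
+    # restores them afterwards, so local edits are not silently lost.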
+    try:
+        git_output(['-c', 'rebase.autoStash=true', 'rebase', 'FETCH_HEAD'], repo_dir)
+    except GitException as e:
+        mlog.log('  -> Could not rebase', mlog.bold(repo_dir), 'onto', mlog.bold(revision))
+        mlog.log(mlog.red(e.output))
+        mlog.log(mlog.red(str(e)))
+        return False
+    return True
+
+def git_reset(repo_dir, revision):
+    try:
+        # Stash local changes; commits can always be found back in the reflog,
+        # to avoid any data loss by mistake.
+        git_stash(repo_dir)
+        git_output(['reset', '--hard', 'FETCH_HEAD'], repo_dir)
+    except GitException as e:
+        mlog.log('  -> Could not reset', mlog.bold(repo_dir), 'to', mlog.bold(revision))
+        mlog.log(mlog.red(e.output))
+        mlog.log(mlog.red(str(e)))
+        return False
+    return True
+
+def git_checkout(repo_dir, revision, create=False):
+    cmd = ['checkout', '--ignore-other-worktrees', revision, '--']
+    if create:
+        cmd.insert(1, '-b')
+    try:
+        # Stash local changes; commits can always be found back in the reflog,
+        # to avoid any data loss by mistake.
+        git_stash(repo_dir)
+        git_output(cmd, repo_dir)
+    except GitException as e:
+        mlog.log('  -> Could not checkout', mlog.bold(revision), 'in', mlog.bold(repo_dir))
+        mlog.log(mlog.red(e.output))
+        mlog.log(mlog.red(str(e)))
+        return False
+    return True
+
+def git_checkout_and_reset(repo_dir, revision):
+    # revision could be a branch that already exists but is outdated, so we still
+    # have to reset after the checkout.
+    success = git_checkout(repo_dir, revision)
+    if success:
+        success = git_reset(repo_dir, revision)
+    return success
+
+def git_checkout_and_rebase(repo_dir, revision):
+    # revision could be a branch that already exists but is outdated, so we still
+    # have to rebase after the checkout.
+    success = git_checkout(repo_dir, revision)
+    if success:
+        success = git_rebase(repo_dir, revision)
+    return success
+
+def update_git(r, wrap, repo_dir, options):
     if not os.path.isdir(repo_dir):
         mlog.log('  -> Not used.')
-        return
-    revision = wrap.get('revision')
-    ret = git_output(['rev-parse', '--abbrev-ref', 'HEAD'], repo_dir).strip()
-    if ret == 'HEAD':
-        try:
-            # We are currently in detached mode, just checkout the new revision
-            git_output(['fetch'], repo_dir)
-            git_output(['checkout', revision], repo_dir)
-        except subprocess.CalledProcessError as e:
-            out = e.output.decode().strip()
-            mlog.log('  -> Could not checkout revision', mlog.cyan(revision))
-            mlog.log(mlog.red(out))
-            mlog.log(mlog.red(str(e)))
-            return
-    elif ret == revision:
+        return True
+    if not os.path.exists(os.path.join(repo_dir, '.git')):
+        if options.reset:
+            # Delete existing directory and redownload
+            windows_proof_rmtree(repo_dir)
+            try:
+                r.resolve(wrap.name, 'meson')
+                update_git_done(repo_dir)
+                return True
+            except WrapException as e:
+                mlog.log('  ->', mlog.red(str(e)))
+                return False
+        else:
+            mlog.log('  -> Not a git repository.')
+            mlog.log('Pass --reset option to delete directory and redownload.')
+            return False
+    revision = wrap.values.get('revision')
+    url = wrap.values.get('url')
+    push_url = wrap.values.get('push-url')
+    if not revision or not url:
+        # It could be a detached git submodule for example.
+        mlog.log('  -> No revision or URL specified.')
+        return True
+    try:
+        origin_url = git_output(['remote', 'get-url', 'origin'], repo_dir).strip()
+    except GitException as e:
+        mlog.log('  -> Failed to determine current origin URL in', mlog.bold(repo_dir))
+        mlog.log(mlog.red(e.output))
+        mlog.log(mlog.red(str(e)))
+        return False
+    if options.reset:
         try:
-            # We are in the same branch, pull latest commits
-            git_output(['-c', 'rebase.autoStash=true', 'pull', '--rebase'], repo_dir)
-        except subprocess.CalledProcessError as e:
-            out = e.output.decode().strip()
-            mlog.log('  -> Could not rebase', mlog.bold(repo_dir), 'please fix and try again.')
-            mlog.log(mlog.red(out))
+            git_output(['remote', 'set-url', 'origin', url], repo_dir)
+            if push_url:
+                git_output(['remote', 'set-url', '--push', 'origin', push_url], repo_dir)
+        except GitException as e:
+            mlog.log('  -> Failed to reset origin URL in', mlog.bold(repo_dir))
+            mlog.log(mlog.red(e.output))
             mlog.log(mlog.red(str(e)))
-            return
+            return False
+    elif url != origin_url:
+        mlog.log('  -> URL changed from {!r} to {!r}'.format(origin_url, url))
+        return False
+    try:
+        # Same as `git branch --show-current` but compatible with older git version
+        branch = git_output(['rev-parse', '--abbrev-ref', 'HEAD'], repo_dir).strip()
+        branch = branch if branch != 'HEAD' else ''
+    except GitException as e:
+        mlog.log('  -> Failed to determine current branch in', mlog.bold(repo_dir))
+        mlog.log(mlog.red(e.output))
+        mlog.log(mlog.red(str(e)))
+        return False
+    try:
+        # Fetch only the revision we need, this avoids fetching useless branches.
+        # revision can be either a branch, tag or commit id. In all cases we want
+        # FETCH_HEAD to be set to the desired commit and "git checkout <revision>"
+        # to either switch to an existing/new branch, or detach to a tag/commit.
+        # It is more complicated than it first appears, see the discussion here:
+        # https://github.com/mesonbuild/meson/pull/7723#discussion_r488816189.
+        heads_refmap = '+refs/heads/*:refs/remotes/origin/*'
+        tags_refmap = '+refs/tags/*:refs/tags/*'
+        git_output(['fetch', '--refmap', heads_refmap, '--refmap', tags_refmap, 'origin', revision], repo_dir)
+    except GitException as e:
+        mlog.log('  -> Could not fetch revision', mlog.bold(revision), 'in', mlog.bold(repo_dir))
+        mlog.log(mlog.red(e.output))
+        mlog.log(mlog.red(str(e)))
+        return False
+
+    if branch == '':
+        # We are currently in detached mode
+        if options.reset:
+            success = git_checkout_and_reset(repo_dir, revision)
+        else:
+            success = git_checkout_and_rebase(repo_dir, revision)
+    elif branch == revision:
+        # We are on the same branch. A reset could still be needed in case a
+        # force push happened on the remote repository.
+        if options.reset:
+            success = git_reset(repo_dir, revision)
+        else:
+            success = git_rebase(repo_dir, revision)
     else:
-        # We are in another branch, probably user created their own branch and
-        # we should rebase it on top of wrap's branch.
-        if options.rebase:
-            try:
-                git_output(['fetch'], repo_dir)
-                git_output(['-c', 'rebase.autoStash=true', 'rebase', revision], repo_dir)
-            except subprocess.CalledProcessError as e:
-                out = e.output.decode().strip()
-                mlog.log('  -> Could not rebase', mlog.bold(repo_dir), 'please fix and try again.')
-                mlog.log(mlog.red(out))
-                mlog.log(mlog.red(str(e)))
-                return
+        # We are on another branch: either the user created their own branch and
+        # we should rebase it, or the revision changed in the wrap file and we
+        # need to check out the new branch.
+        if options.reset:
+            success = git_checkout_and_reset(repo_dir, revision)
         else:
-            mlog.log('  -> Target revision is', mlog.bold(revision), 'but currently in branch is', mlog.bold(ret), '\n' +
-                     '     To rebase your branch on top of', mlog.bold(revision), 'use', mlog.bold('--rebase'), 'option.')
-            return
+            success = git_rebase(repo_dir, revision)
+    if success:
+        update_git_done(repo_dir)
+    return success
 
+def update_git_done(repo_dir):
     git_output(['submodule', 'update', '--checkout', '--recursive'], repo_dir)
     git_show(repo_dir)
 
-def update_hg(wrap, repo_dir, options):
+def update_hg(r, wrap, repo_dir, options):
     if not os.path.isdir(repo_dir):
         mlog.log('  -> Not used.')
-        return
+        return True
     revno = wrap.get('revision')
     if revno.lower() == 'tip':
         # Failure to do pull is not a fatal error,
@@ -110,16 +230,17 @@
         if subprocess.call(['hg', 'checkout', revno], cwd=repo_dir) != 0:
             subprocess.check_call(['hg', 'pull'], cwd=repo_dir)
             subprocess.check_call(['hg', 'checkout', revno], cwd=repo_dir)
+    return True
 
-def update_svn(wrap, repo_dir, options):
+def update_svn(r, wrap, repo_dir, options):
     if not os.path.isdir(repo_dir):
         mlog.log('  -> Not used.')
-        return
+        return True
     revno = wrap.get('revision')
     p, out, _ = Popen_safe(['svn', 'info', '--show-item', 'revision', repo_dir])
     current_revno = out
     if current_revno == revno:
-        return
+        return True
     if revno.lower() == 'head':
         # Failure to do pull is not a fatal error,
         # because otherwise you can't develop without
@@ -127,68 +248,71 @@
         subprocess.call(['svn', 'update'], cwd=repo_dir)
     else:
         subprocess.check_call(['svn', 'update', '-r', revno], cwd=repo_dir)
+    return True
 
-def update(wrap, repo_dir, options):
-    mlog.log('Updating %s...' % wrap.name)
+def update(r, wrap, repo_dir, options):
+    mlog.log('Updating {}...'.format(wrap.name))
     if wrap.type == 'file':
-        update_file(wrap, repo_dir, options)
+        return update_file(r, wrap, repo_dir, options)
     elif wrap.type == 'git':
-        update_git(wrap, repo_dir, options)
+        return update_git(r, wrap, repo_dir, options)
     elif wrap.type == 'hg':
-        update_hg(wrap, repo_dir, options)
+        return update_hg(r, wrap, repo_dir, options)
     elif wrap.type == 'svn':
-        update_svn(wrap, repo_dir, options)
+        return update_svn(r, wrap, repo_dir, options)
     else:
         mlog.log('  -> Cannot update', wrap.type, 'subproject')
+    return True
 
-def checkout(wrap, repo_dir, options):
+def checkout(r, wrap, repo_dir, options):
     if wrap.type != 'git' or not os.path.isdir(repo_dir):
-        return
+        return True
     branch_name = options.branch_name if options.branch_name else wrap.get('revision')
-    cmd = ['checkout', branch_name, '--']
-    if options.b:
-        cmd.insert(1, '-b')
-    mlog.log('Checkout %s in %s...' % (branch_name, wrap.name))
-    try:
-        git_output(cmd, repo_dir)
+    if not branch_name:
+        # It could be a detached git submodule for example.
+        return True
+    mlog.log('Checkout {} in {}...'.format(branch_name, wrap.name))
+    if git_checkout(repo_dir, branch_name, create=options.b):
         git_show(repo_dir)
-    except subprocess.CalledProcessError as e:
-        out = e.output.decode().strip()
-        mlog.log('  -> ', mlog.red(out))
+        return True
+    return False
 
-def download(wrap, repo_dir, options):
-    mlog.log('Download %s...' % wrap.name)
+def download(r, wrap, repo_dir, options):
+    mlog.log('Download {}...'.format(wrap.name))
     if os.path.isdir(repo_dir):
         mlog.log('  -> Already downloaded')
-        return
+        return True
     try:
-        r = Resolver(os.path.dirname(repo_dir))
         r.resolve(wrap.name, 'meson')
         mlog.log('  -> done')
     except WrapException as e:
         mlog.log('  ->', mlog.red(str(e)))
+        return False
+    return True
 
-def foreach(wrap, repo_dir, options):
-    mlog.log('Executing command in %s' % repo_dir)
+def foreach(r, wrap, repo_dir, options):
+    mlog.log('Executing command in {}'.format(repo_dir))
     if not os.path.isdir(repo_dir):
         mlog.log('  -> Not downloaded yet')
-        return
-    try:
-        out = subprocess.check_output([options.command] + options.args,
-                                      stderr=subprocess.STDOUT,
-                                      cwd=repo_dir).decode()
-        mlog.log(out, end='')
-    except subprocess.CalledProcessError as e:
-        err_message = "Command '%s' returned non-zero exit status %d." % (" ".join(e.cmd), e.returncode)
-        out = e.output.decode()
+        return True
+    cmd = [options.command] + options.args
+    p, out, _ = Popen_safe(cmd, stderr=subprocess.STDOUT, cwd=repo_dir)
+    if p.returncode != 0:
+        err_message = "Command '{}' returned non-zero exit status {}.".format(" ".join(cmd), p.returncode)
         mlog.log('  -> ', mlog.red(err_message))
         mlog.log(out, end='')
-    except Exception as e:
-        mlog.log('  -> ', mlog.red(str(e)))
+        return False
+
+    mlog.log(out, end='')
+    return True
 
 def add_common_arguments(p):
     p.add_argument('--sourcedir', default='.',
                    help='Path to source directory')
+    p.add_argument('--types', default='',
+                   help='Comma-separated list of subproject types. Supported types are: {} (default: all)'.format(ALL_TYPES_STRING))
+
+def add_subprojects_argument(p):
     p.add_argument('subprojects', nargs='*',
                    help='List of subprojects (default: all)')
 
@@ -197,9 +321,13 @@
     subparsers.required = True
 
     p = subparsers.add_parser('update', help='Update all subprojects from wrap files')
-    p.add_argument('--rebase', default=False, action='store_true',
-                   help='Rebase your branch on top of wrap\'s revision (git only)')
+    p.add_argument('--rebase', default=True, action='store_true',
+                   help='Rebase your branch on top of wrap\'s revision. ' + \
+                        'Deprecated, it is now the default behaviour. (git only)')
+    p.add_argument('--reset', default=False, action='store_true',
+                   help='Checkout wrap\'s revision and hard reset to that commit. (git only)')
     add_common_arguments(p)
+    add_subprojects_argument(p)
     p.set_defaults(subprojects_func=update)
 
     p = subparsers.add_parser('checkout', help='Checkout a branch (git only)')
@@ -208,12 +336,14 @@
     p.add_argument('branch_name', nargs='?',
                    help='Name of the branch to checkout or create (default: revision set in wrap file)')
     add_common_arguments(p)
+    add_subprojects_argument(p)
     p.set_defaults(subprojects_func=checkout)
 
     p = subparsers.add_parser('download', help='Ensure subprojects are fetched, even if not in use. ' +
                                                'Already downloaded subprojects are not modified. ' +
                                                'This can be used to pre-fetch all subprojects and avoid downloads during configure.')
     add_common_arguments(p)
+    add_subprojects_argument(p)
     p.set_defaults(subprojects_func=download)
 
     p = subparsers.add_parser('foreach', help='Execute a command in each subproject directory.')
@@ -221,8 +351,8 @@
                    help='Command to execute in each subproject directory')
     p.add_argument('args', nargs=argparse.REMAINDER,
                    help=argparse.SUPPRESS)
-    p.add_argument('--sourcedir', default='.',
-                   help='Path to source directory')
+    add_common_arguments(p)
+    p.set_defaults(subprojects=[])
     p.set_defaults(subprojects_func=foreach)
 
 def run(options):
@@ -234,22 +364,24 @@
     if not os.path.isdir(subprojects_dir):
         mlog.log('Directory', mlog.bold(src_dir), 'does not seem to have subprojects.')
         return 0
-    files = []
-    if hasattr(options, 'subprojects'):
-        for name in options.subprojects:
-            f = os.path.join(subprojects_dir, name + '.wrap')
-            if not os.path.isfile(f):
-                mlog.error('Subproject', mlog.bold(name), 'not found.')
-                return 1
-            else:
-                files.append(f)
-    if not files:
-        for f in os.listdir(subprojects_dir):
-            if f.endswith('.wrap'):
-                files.append(os.path.join(subprojects_dir, f))
-    for f in files:
-        wrap = PackageDefinition(f)
-        directory = wrap.values.get('directory', wrap.name)
-        repo_dir = os.path.join(subprojects_dir, directory)
-        options.subprojects_func(wrap, repo_dir, options)
-    return 0
+    r = Resolver(src_dir, 'subprojects')
+    if options.subprojects:
+        wraps = [wrap for name, wrap in r.wraps.items() if name in options.subprojects]
+    else:
+        wraps = r.wraps.values()
+    types = [t.strip() for t in options.types.split(',')] if options.types else []
+    for t in types:
+        if t not in ALL_TYPES:
+            raise MesonException('Unknown subproject type {!r}, supported types are: {}'.format(t, ALL_TYPES_STRING))
+    failures = []
+    for wrap in wraps:
+        if types and wrap.type not in types:
+            continue
+        dirname = Path(subprojects_dir, wrap.directory).as_posix()
+        if not options.subprojects_func(r, wrap, dirname, options):
+            failures.append(wrap.name)
+    if failures:
+        m = 'Please check logs above as the command failed in some subprojects, which could have been left in a conflicted state: '
+        m += ', '.join(failures)
+        mlog.warning(m)
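+    # The number of failed subprojects becomes the exit status of the command.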
+    return len(failures)
diff -Nru meson-0.53.2/mesonbuild/mtest.py meson-0.57.0+really0.56.2/mesonbuild/mtest.py
--- meson-0.53.2/mesonbuild/mtest.py	2020-01-23 12:51:19.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/mtest.py	2021-01-06 10:39:48.000000000 +0000
@@ -14,7 +14,7 @@
 
 # A tool to run tests in many different ways.
 
-from pathlib import Path
+from ._pathlib import Path
 from collections import namedtuple
 from copy import deepcopy
 import argparse
@@ -33,17 +33,19 @@
 import subprocess
 import sys
 import tempfile
+import textwrap
 import time
 import typing as T
+import xml.etree.ElementTree as et
 
 from . import build
 from . import environment
 from . import mlog
+from .coredata import major_versions_differ, MesonVersionMismatchException
+from .coredata import version as coredata_version
 from .dependencies import ExternalProgram
-from .mesonlib import MesonException, get_wine_shortpath, split_args
-
-if T.TYPE_CHECKING:
-    from .backend.backends import TestSerialisation
+from .mesonlib import MesonException, get_wine_shortpath, split_args, join_args
+from .backend.backends import TestProtocol, TestSerialisation
 
 # GNU autotools interprets a return code of 77 from tests it executes to
 # mean that the test should be skipped.
@@ -55,11 +57,10 @@
 
 def is_windows() -> bool:
     platname = platform.system().lower()
-    return platname == 'windows' or 'mingw' in platname
+    return platname == 'windows'
 
 def is_cygwin() -> bool:
-    platname = platform.system().lower()
-    return 'cygwin' in platname
+    return sys.platform == 'cygwin'
 
 def determine_worker_count() -> int:
     varname = 'MESON_TESTTHREADS'
@@ -67,7 +68,7 @@
         try:
             num_workers = int(os.environ[varname])
         except ValueError:
-            print('Invalid value in %s, using 1 thread.' % varname)
+            print('Invalid value in {}, using 1 thread.'.format(varname))
             num_workers = 1
     else:
         try:
@@ -92,6 +93,9 @@
     parser.add_argument('--wrapper', default=None, dest='wrapper', type=split_args,
                         help='wrapper to run tests with (e.g. Valgrind)')
     parser.add_argument('-C', default='.', dest='wd',
+                        # https://github.com/python/typeshed/issues/3107
+                        # https://github.com/python/mypy/issues/7177
+                        type=os.path.abspath,  # type: ignore
                         help='directory to cd into before running')
     parser.add_argument('--suite', default=[], dest='include_suites', action='append', metavar='SUITE',
                         help='Only run tests belonging to the given suite.')
@@ -120,7 +124,9 @@
     parser.add_argument('--test-args', default=[], type=split_args,
                         help='Arguments to pass to the specified test(s) or all tests')
     parser.add_argument('args', nargs='*',
-                        help='Optional list of tests to run')
+                        help='Optional list of test names to run. "testname" to run all tests with that name, '
+                        '"subprojname:testname" to specifically run "testname" from "subprojname", '
+                        '"subprojname:" to run all tests defined by "subprojname".')
 
 
 def returncode_to_status(retcode: int) -> str:
@@ -136,20 +142,20 @@
             signame = signal.Signals(signum).name
         except ValueError:
             signame = 'SIGinvalid'
-        return '(killed by signal %d %s)' % (signum, signame)
+        return '(killed by signal {} {})'.format(signum, signame)
 
     if retcode <= 128:
-        return '(exit status %d)' % (retcode,)
+        return '(exit status {})'.format(retcode)
 
     signum = retcode - 128
     try:
         signame = signal.Signals(signum).name
     except ValueError:
         signame = 'SIGinvalid'
-    return '(exit status %d or signal %d %s)' % (retcode, signum, signame)
+    return '(exit status {} or signal {} {})'.format(retcode, signum, signame)
 
 def env_tuple_to_str(env: T.Iterable[T.Tuple[str, str]]) -> str:
-    return ''.join(["%s='%s' " % (k, v) for k, v in env])
+    return ''.join(["{}='{}' ".format(k, v) for k, v in env])
 
 
 class TestException(MesonException):
@@ -167,6 +173,10 @@
     UNEXPECTEDPASS = 'UNEXPECTEDPASS'
     ERROR = 'ERROR'
 
+    @staticmethod
+    def maxlen() -> int:
+        return 14 # len(UNEXPECTEDPASS)
+
 
 class TAPParser:
     Plan = namedtuple('Plan', ['count', 'late', 'skipped', 'explanation'])
@@ -196,7 +206,7 @@
         explanation = explanation.strip() if explanation else None
         if directive is not None:
             directive = directive.upper()
-            if directive == 'SKIP':
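+            # The TAP directive may be spelled e.g. "SKIPPED", so match any
+            # directive beginning with SKIP.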
+            if directive.startswith('SKIP'):
                 if ok:
                     yield self.Test(num, name, TestResult.SKIP, explanation)
                     return
@@ -204,7 +214,7 @@
                 yield self.Test(num, name, TestResult.UNEXPECTEDPASS if ok else TestResult.EXPECTEDFAIL, explanation)
                 return
             else:
-                yield self.Error('invalid directive "%s"' % (directive,))
+                yield self.Error('invalid directive "{}"'.format(directive,))
 
         yield self.Test(num, name, TestResult.OK if ok else TestResult.FAIL, explanation)
 
@@ -301,28 +311,159 @@
                     yield self.Version(version=version)
                 continue
 
-            if len(line) == 0:
+            if not line:
                 continue
 
-            yield self.Error('unexpected input at line %d' % (lineno,))
+            yield self.Error('unexpected input at line {}'.format(lineno))
 
         if state == self._YAML:
             yield self.Error('YAML block not terminated (started on line {})'.format(yaml_lineno))
 
         if not bailed_out and plan and num_tests != plan.count:
             if num_tests < plan.count:
-                yield self.Error('Too few tests run (expected %d, got %d)' % (plan.count, num_tests))
+                yield self.Error('Too few tests run (expected {}, got {})'.format(plan.count, num_tests))
             else:
-                yield self.Error('Too many tests run (expected %d, got %d)' % (plan.count, num_tests))
+                yield self.Error('Too many tests run (expected {}, got {})'.format(plan.count, num_tests))
+
+
+
+class JunitBuilder:
+
+    """Builder for Junit test results.
+
+    Junit is impossible to stream out, as it requires attributes counting the
+    total number of tests, failures, skips, and errors in the root element
+    and in each test suite. As such, we use a builder class to track each
+    test case, and calculate all metadata before writing it out.
+
+    For tests with multiple results (like from a TAP test), we record the
+    test as a suite with the project_name.test_name. This allows us to track
+    each result separately. For tests with only one result (such as exit-code
+    tests) we record each one into a suite with the name project_name. The use
+    of the project_name allows us to sort subproject tests separately from
+    the root project.
+    """
+
+    def __init__(self, filename: str) -> None:
+        self.filename = filename
+        self.root = et.Element(
+            'testsuites', tests='0', errors='0', failures='0')
+        self.suites = {}  # type: T.Dict[str, et.Element]
+
+    def log(self, name: str, test: 'TestRun') -> None:
+        """Log a single test case."""
+        if test.junit is not None:
+            for suite in test.junit.findall('.//testsuite'):
+                # Assume that we don't need to merge anything here...
+                suite.attrib['name'] = '{}.{}.{}'.format(test.project, name, suite.attrib['name'])
+
+                # GTest can inject invalid attributes
+                for case in suite.findall('.//testcase[@result]'):
+                    del case.attrib['result']
+                for case in suite.findall('.//testcase[@timestamp]'):
+                    del case.attrib['timestamp']
+                self.root.append(suite)
+            return
+
+        # In this case we have a test binary with multiple results.
+        # We want to record this so that each result is recorded
+        # separately
+        if test.results:
+            suitename = '{}.{}'.format(test.project, name)
+            assert suitename not in self.suites, 'duplicate suite'
+
+            suite = self.suites[suitename] = et.Element(
+                'testsuite',
+                name=suitename,
+                tests=str(len(test.results)),
+                errors=str(sum(1 for r in test.results if r is TestResult.ERROR)),
+                failures=str(sum(1 for r in test.results if r in
+                                 {TestResult.FAIL, TestResult.UNEXPECTEDPASS, TestResult.TIMEOUT})),
+                skipped=str(sum(1 for r in test.results if r is TestResult.SKIP)),
+            )
+
+            for i, result in enumerate(test.results):
+                # Both name and classname are required. Set them both to the
+                # number of the test in a TAP test, as TAP doesn't give names.
+                testcase = et.SubElement(suite, 'testcase', name=str(i), classname=str(i))
+                if result is TestResult.SKIP:
+                    et.SubElement(testcase, 'skipped')
+                elif result is TestResult.ERROR:
+                    et.SubElement(testcase, 'error')
+                elif result is TestResult.FAIL:
+                    et.SubElement(testcase, 'failure')
+                elif result is TestResult.UNEXPECTEDPASS:
+                    fail = et.SubElement(testcase, 'failure')
+                    fail.text = 'Test unexpectedly passed.'
+                elif result is TestResult.TIMEOUT:
+                    fail = et.SubElement(testcase, 'failure')
+                    fail.text = 'Test did not finish before configured timeout.'
+            if test.stdo:
+                out = et.SubElement(suite, 'system-out')
+                out.text = test.stdo.rstrip()
+            if test.stde:
+                err = et.SubElement(suite, 'system-err')
+                err.text = test.stde.rstrip()
+        else:
+            if test.project not in self.suites:
+                suite = self.suites[test.project] = et.Element(
+                    'testsuite', name=test.project, tests='1', errors='0',
+                    failures='0', skipped='0')
+            else:
+                suite = self.suites[test.project]
+                suite.attrib['tests'] = str(int(suite.attrib['tests']) + 1)
+
+            testcase = et.SubElement(suite, 'testcase', name=name, classname=name)
+            if test.res is TestResult.SKIP:
+                et.SubElement(testcase, 'skipped')
+                suite.attrib['skipped'] = str(int(suite.attrib['skipped']) + 1)
+            elif test.res is TestResult.ERROR:
+                et.SubElement(testcase, 'error')
+                suite.attrib['errors'] = str(int(suite.attrib['errors']) + 1)
+            elif test.res is TestResult.FAIL:
+                et.SubElement(testcase, 'failure')
+                suite.attrib['failures'] = str(int(suite.attrib['failures']) + 1)
+            if test.stdo:
+                out = et.SubElement(testcase, 'system-out')
+                out.text = test.stdo.rstrip()
+            if test.stde:
+                err = et.SubElement(testcase, 'system-err')
+                err.text = test.stde.rstrip()
+
+    def write(self) -> None:
+        """Calculate total test counts and write out the xml result."""
+        for suite in self.suites.values():
+            self.root.append(suite)
+            # Skipped is really not allowed in the "testsuites" element
+            for attr in ['tests', 'errors', 'failures']:
+                self.root.attrib[attr] = str(int(self.root.attrib[attr]) + int(suite.attrib[attr]))
+
+        tree = et.ElementTree(self.root)
+        with open(self.filename, 'wb') as f:
+            tree.write(f, encoding='utf-8', xml_declaration=True)
 
 
 class TestRun:
 
     @classmethod
-    def make_exitcode(cls, test: 'TestSerialisation', test_env: T.Dict[str, str],
+    def make_gtest(cls, test: TestSerialisation, test_env: T.Dict[str, str],
+                   returncode: int, starttime: float, duration: float,
+                   stdo: T.Optional[str], stde: T.Optional[str],
+                   cmd: T.Optional[T.List[str]]) -> 'TestRun':
+        filename = '{}.xml'.format(test.name)
+        if test.workdir:
+            filename = os.path.join(test.workdir, filename)
+        tree = et.parse(filename)
+
+        return cls.make_exitcode(
+            test, test_env, returncode, starttime, duration, stdo, stde, cmd,
+            junit=tree)
+
+    @classmethod
+    def make_exitcode(cls, test: TestSerialisation, test_env: T.Dict[str, str],
                       returncode: int, starttime: float, duration: float,
                       stdo: T.Optional[str], stde: T.Optional[str],
-                      cmd: T.Optional[T.List[str]]) -> 'TestRun':
+                      cmd: T.Optional[T.List[str]], **kwargs: T.Any) -> 'TestRun':
         if returncode == GNU_SKIP_RETURNCODE:
             res = TestResult.SKIP
         elif returncode == GNU_ERROR_RETURNCODE:
@@ -331,38 +472,37 @@
             res = TestResult.EXPECTEDFAIL if bool(returncode) else TestResult.UNEXPECTEDPASS
         else:
             res = TestResult.FAIL if bool(returncode) else TestResult.OK
-        return cls(test, test_env, res, returncode, starttime, duration, stdo, stde, cmd)
+        return cls(test, test_env, res, [], returncode, starttime, duration, stdo, stde, cmd, **kwargs)
 
     @classmethod
-    def make_tap(cls, test: 'TestSerialisation', test_env: T.Dict[str, str],
+    def make_tap(cls, test: TestSerialisation, test_env: T.Dict[str, str],
                  returncode: int, starttime: float, duration: float,
                  stdo: str, stde: str,
                  cmd: T.Optional[T.List[str]]) -> 'TestRun':
-        res = None
-        num_tests = 0
+        res = None    # type: T.Optional[TestResult]
+        results = []  # type: T.List[TestResult]
         failed = False
-        num_skipped = 0
 
         for i in TAPParser(io.StringIO(stdo)).parse():
             if isinstance(i, TAPParser.Bailout):
-                res = TestResult.ERROR
+                results.append(TestResult.ERROR)
+                failed = True
             elif isinstance(i, TAPParser.Test):
-                if i.result == TestResult.SKIP:
-                    num_skipped += 1
-                elif i.result in (TestResult.FAIL, TestResult.UNEXPECTEDPASS):
+                results.append(i.result)
+                if i.result not in {TestResult.OK, TestResult.EXPECTEDFAIL, TestResult.SKIP}:
                     failed = True
-                num_tests += 1
             elif isinstance(i, TAPParser.Error):
-                res = TestResult.ERROR
+                results.append(TestResult.ERROR)
                 stde += '\nTAP parsing error: ' + i.message
+                failed = True
 
         if returncode != 0:
             res = TestResult.ERROR
-            stde += '\n(test program exited with status code %d)' % (returncode,)
+            stde += '\n(test program exited with status code {})'.format(returncode,)
 
         if res is None:
             # Now determine the overall result of the test based on the outcome of the subcases
-            if num_skipped == num_tests:
+            if all(t is TestResult.SKIP for t in results):
                 # This includes the case where num_tests is zero
                 res = TestResult.SKIP
             elif test.should_fail:
@@ -370,14 +510,16 @@
             else:
                 res = TestResult.FAIL if failed else TestResult.OK
 
-        return cls(test, test_env, res, returncode, starttime, duration, stdo, stde, cmd)
+        return cls(test, test_env, res, results, returncode, starttime, duration, stdo, stde, cmd)
 
-    def __init__(self, test: 'TestSerialisation', test_env: T.Dict[str, str],
-                 res: TestResult, returncode: int, starttime: float, duration: float,
+    def __init__(self, test: TestSerialisation, test_env: T.Dict[str, str],
+                 res: TestResult, results: T.List[TestResult], returncode:
+                 int, starttime: float, duration: float,
                  stdo: T.Optional[str], stde: T.Optional[str],
-                 cmd: T.Optional[T.List[str]]):
+                 cmd: T.Optional[T.List[str]], *, junit: T.Optional[et.ElementTree] = None):
         assert isinstance(res, TestResult)
         self.res = res
+        self.results = results  # May be an empty list
         self.returncode = returncode
         self.starttime = starttime
         self.duration = duration
@@ -386,6 +528,8 @@
         self.cmd = cmd
         self.env = test_env
         self.should_fail = test.should_fail
+        self.project = test.project_name
+        self.junit = junit
 
     def get_log(self) -> str:
         res = '--- command ---\n'
@@ -432,30 +576,40 @@
     jsonlogfile.write(json.dumps(jresult) + '\n')
 
 def run_with_mono(fname: str) -> bool:
-    if fname.endswith('.exe') and not (is_windows() or is_cygwin()):
-        return True
-    return False
+    return fname.endswith('.exe') and not (is_windows() or is_cygwin())
 
-def load_benchmarks(build_dir: str) -> T.List['TestSerialisation']:
+def check_testdata(objs: T.List[TestSerialisation]) -> T.List[TestSerialisation]:
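+    # Reject pickled test data written by an incompatible Meson version so we
+    # fail with a clear error instead of crashing on mismatched classes.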
+    if not isinstance(objs, list):
+        raise MesonVersionMismatchException('', coredata_version)
+    for obj in objs:
+        if not isinstance(obj, TestSerialisation):
+            raise MesonVersionMismatchException('', coredata_version)
+        if not hasattr(obj, 'version'):
+            raise MesonVersionMismatchException('', coredata_version)
+        if major_versions_differ(obj.version, coredata_version):
+            raise MesonVersionMismatchException(obj.version, coredata_version)
+    return objs
+
+def load_benchmarks(build_dir: str) -> T.List[TestSerialisation]:
     datafile = Path(build_dir) / 'meson-private' / 'meson_benchmark_setup.dat'
     if not datafile.is_file():
         raise TestException('Directory {!r} does not seem to be a Meson build directory.'.format(build_dir))
     with datafile.open('rb') as f:
-        obj = T.cast(T.List['TestSerialisation'], pickle.load(f))
-    return obj
+        objs = check_testdata(pickle.load(f))
+    return objs
 
-def load_tests(build_dir: str) -> T.List['TestSerialisation']:
+def load_tests(build_dir: str) -> T.List[TestSerialisation]:
     datafile = Path(build_dir) / 'meson-private' / 'meson_test_setup.dat'
     if not datafile.is_file():
         raise TestException('Directory {!r} does not seem to be a Meson build directory.'.format(build_dir))
     with datafile.open('rb') as f:
-        obj = T.cast(T.List['TestSerialisation'], pickle.load(f))
-    return obj
+        objs = check_testdata(pickle.load(f))
+    return objs
 
 
 class SingleTestRunner:
 
-    def __init__(self, test: 'TestSerialisation', test_env: T.Dict[str, str],
+    def __init__(self, test: TestSerialisation, test_env: T.Dict[str, str],
                  env: T.Dict[str, str], options: argparse.Namespace):
         self.test = test
         self.test_env = test_env
@@ -467,26 +621,26 @@
             return ['java', '-jar'] + self.test.fname
         elif not self.test.is_cross_built and run_with_mono(self.test.fname[0]):
             return ['mono'] + self.test.fname
-        else:
-            if self.test.is_cross_built and self.test.needs_exe_wrapper:
-                if self.test.exe_runner is None:
-                    # Can not run test on cross compiled executable
-                    # because there is no execute wrapper.
-                    return None
-                else:
-                    if not self.test.exe_runner.found():
-                        msg = 'The exe_wrapper defined in the cross file {!r} was not ' \
-                              'found. Please check the command and/or add it to PATH.'
-                        raise TestException(msg.format(self.test.exe_runner.name))
-                    return self.test.exe_runner.get_command() + self.test.fname
-            else:
-                return self.test.fname
+        elif self.test.cmd_is_built and self.test.needs_exe_wrapper:
+            if self.test.exe_runner is None:
+                # Can not run test on cross compiled executable
+                # because there is no execute wrapper.
+                return None
+            elif self.test.cmd_is_built:
+                # If the command is not built (i.e., it is a python script),
+                # then we don't check for the exe-wrapper
+                if not self.test.exe_runner.found():
+                    msg = ('The exe_wrapper defined in the cross file {!r} was not '
+                           'found. Please check the command and/or add it to PATH.')
+                    raise TestException(msg.format(self.test.exe_runner.name))
+                return self.test.exe_runner.get_command() + self.test.fname
+        return self.test.fname
 
     def run(self) -> TestRun:
         cmd = self._get_cmd()
         if cmd is None:
             skip_stdout = 'Not run because can not execute cross compiled binaries.'
-            return TestRun(self.test, self.test_env, TestResult.SKIP, GNU_SKIP_RETURNCODE, time.time(), 0.0, skip_stdout, None, None)
+            return TestRun(self.test, self.test_env, TestResult.SKIP, [], GNU_SKIP_RETURNCODE, time.time(), 0.0, skip_stdout, None, None)
         else:
             wrap = TestHarness.get_wrapper(self.options)
             if self.options.gdb:
@@ -496,7 +650,7 @@
     def _run_cmd(self, cmd: T.List[str]) -> TestRun:
         starttime = time.time()
 
-        if len(self.test.extra_paths) > 0:
+        if self.test.extra_paths:
             self.env['PATH'] = os.pathsep.join(self.test.extra_paths + ['']) + self.env['PATH']
             winecmd = []
             for c in cmd:
@@ -521,7 +675,7 @@
         if not self.options.verbose:
             stdout = tempfile.TemporaryFile("wb+")
             stderr = tempfile.TemporaryFile("wb+") if self.options.split else stdout
-        if self.test.protocol == 'tap' and stderr is stdout:
+        if self.test.protocol is TestProtocol.TAP and stderr is stdout:
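+            # TAP results are parsed from stdout, so give it a file of its own
+            # even when stdout and stderr are not being split.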
             stdout = tempfile.TemporaryFile("wb+")
 
         # Let gdb handle ^C instead of us
@@ -539,9 +693,16 @@
                 # We don't want setsid() in gdb because gdb needs the
                 # terminal in order to handle ^C and not show tcsetpgrp()
                 # errors avoid not being able to use the terminal.
-                os.setsid()  # type: ignore
+                os.setsid()
+
+        extra_cmd = []  # type: T.List[str]
+        if self.test.protocol is TestProtocol.GTEST:
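+            # Ask GoogleTest to also write a JUnit-style XML report, which
+            # TestRun.make_gtest() parses afterwards.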
+            gtestname = self.test.name
+            if self.test.workdir:
+                gtestname = os.path.join(self.test.workdir, self.test.name)
+            extra_cmd.append('--gtest_output=xml:{}.xml'.format(gtestname))
 
-        p = subprocess.Popen(cmd,
+        p = subprocess.Popen(cmd + extra_cmd,
                              stdout=stdout,
                              stderr=stderr,
                              env=self.env,
@@ -562,7 +723,7 @@
                 print('{} time out (After {} seconds)'.format(self.test.name, timeout))
             timed_out = True
         except KeyboardInterrupt:
-            mlog.warning('CTRL-C detected while running %s' % (self.test.name))
+            mlog.warning('CTRL-C detected while running {}'.format(self.test.name))
             kill_test = True
         finally:
             if self.options.gdb:
@@ -578,14 +739,27 @@
             if is_windows():
                 subprocess.run(['taskkill', '/F', '/T', '/PID', str(p.pid)])
             else:
+
+                def _send_signal_to_process_group(pgid : int, signum : int) -> None:
+                    """ sends a signal to a process group """
+                    try:
+                        os.killpg(pgid, signum)
+                    except ProcessLookupError:
+                        # Sometimes (e.g. with Wine) this happens.
+                        # There's nothing we can do (maybe the process
+                        # already died) so carry on.
+                        pass
+
+                # Send a termination signal to the process group that setsid()
+                # created - giving it a chance to perform any cleanup.
+                _send_signal_to_process_group(p.pid, signal.SIGTERM)
+
+                # Make sure the termination signal actually kills the process
+                # group, otherwise retry with a SIGKILL.
                 try:
-                    # Kill the process group that setsid() created.
-                    os.killpg(p.pid, signal.SIGKILL)  # type: ignore
-                except ProcessLookupError:
-                    # Sometimes (e.g. with Wine) this happens.
-                    # There's nothing we can do (maybe the process
-                    # already died) so carry on.
-                    pass
+                    p.communicate(timeout=0.5)
+                except subprocess.TimeoutExpired:
+                    _send_signal_to_process_group(p.pid, signal.SIGKILL)
             try:
                 p.communicate(timeout=1)
             except subprocess.TimeoutExpired:
@@ -616,10 +790,12 @@
             stdo = ""
             stde = additional_error
         if timed_out:
-            return TestRun(self.test, self.test_env, TestResult.TIMEOUT, p.returncode, starttime, duration, stdo, stde, cmd)
+            return TestRun(self.test, self.test_env, TestResult.TIMEOUT, [], p.returncode, starttime, duration, stdo, stde, cmd)
         else:
-            if self.test.protocol == 'exitcode':
+            if self.test.protocol is TestProtocol.EXITCODE:
                 return TestRun.make_exitcode(self.test, self.test_env, p.returncode, starttime, duration, stdo, stde, cmd)
+            elif self.test.protocol is TestProtocol.GTEST:
+                return TestRun.make_gtest(self.test, self.test_env, p.returncode, starttime, duration, stdo, stde, cmd)
             else:
                 if self.options.verbose:
                     print(stdo, end='')
@@ -630,6 +806,7 @@
     def __init__(self, options: argparse.Namespace):
         self.options = options
         self.collected_logs = []  # type: T.List[str]
+        self.collected_failures = []  # type: T.List[str]
         self.fail_count = 0
         self.expectedfail_count = 0
         self.unexpectedpass_count = 0
@@ -638,9 +815,11 @@
         self.timeout_count = 0
         self.is_run = False
         self.tests = None
+        self.results = []         # type: T.List[TestRun]
         self.logfilename = None   # type: T.Optional[str]
         self.logfile = None       # type: T.Optional[T.TextIO]
         self.jsonlogfile = None   # type: T.Optional[T.TextIO]
+        self.junit = None         # type: T.Optional[JunitBuilder]
         if self.options.benchmark:
             self.tests = load_benchmarks(options.wd)
         else:
@@ -654,29 +833,28 @@
     def __del__(self) -> None:
         self.close_logfiles()
 
-    def __enter__(self):
+    def __enter__(self) -> 'TestHarness':
         return self
 
-    def __exit__(self, exc_type, exc_value, traceback) -> None:
+    def __exit__(self, exc_type: T.Any, exc_value: T.Any, traceback: T.Any) -> None:
         self.close_logfiles()
 
     def close_logfiles(self) -> None:
-        if self.logfile:
-            self.logfile.close()
-            self.logfile = None
-        if self.jsonlogfile:
-            self.jsonlogfile.close()
-            self.jsonlogfile = None
+        for f in ['logfile', 'jsonlogfile']:
+            lfile = getattr(self, f)
+            if lfile:
+                lfile.close()
+                setattr(self, f, None)
 
-    def merge_suite_options(self, options: argparse.Namespace, test: 'TestSerialisation') -> T.Dict[str, str]:
+    def merge_suite_options(self, options: argparse.Namespace, test: TestSerialisation) -> T.Dict[str, str]:
         if ':' in options.setup:
             if options.setup not in self.build_data.test_setups:
-                sys.exit("Unknown test setup '%s'." % options.setup)
+                sys.exit("Unknown test setup '{}'.".format(options.setup))
             current = self.build_data.test_setups[options.setup]
         else:
             full_name = test.project_name + ":" + options.setup
             if full_name not in self.build_data.test_setups:
-                sys.exit("Test setup '%s' not found from project '%s'." % (options.setup, test.project_name))
+                sys.exit("Test setup '{}' not found from project '{}'.".format(options.setup, test.project_name))
             current = self.build_data.test_setups[full_name]
         if not options.gdb:
             options.gdb = current.gdb
@@ -692,7 +870,7 @@
             options.wrapper = current.exe_wrapper
         return current.env.get_env(os.environ.copy())
 
-    def get_test_runner(self, test: 'TestSerialisation') -> SingleTestRunner:
+    def get_test_runner(self, test: TestSerialisation) -> SingleTestRunner:
         options = deepcopy(self.options)
         if not options.setup:
             options.setup = self.build_data.test_setup_default_name
@@ -702,6 +880,9 @@
             env = os.environ.copy()
         test_env = test.env.get_env(env)
         env.update(test_env)
+        if (test.is_cross_built and test.needs_exe_wrapper and
+                test.exe_runner and test.exe_runner.found()):
+            env['MESON_EXE_WRAPPER'] = join_args(test.exe_runner.get_command())
         return SingleTestRunner(test, test_env, env, options)
 
     def process_test_result(self, result: TestRun) -> None:
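MESON_EXE_WRAPPER is exported here as the exe wrapper command joined into a single shell-quoted string by mesonlib's join_args. A rough stand-in for that joining (not meson's implementation; the wrapper command below is hypothetical):

import shlex

def join_args(args):
    # One shell-quoted string, so the consumer can split it back with shlex.
    return ' '.join(shlex.quote(a) for a in args)

print(join_args(['qemu-arm-static', '-L', '/usr/arm-linux-gnueabihf']))
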
@@ -720,31 +901,35 @@
         else:
             sys.exit('Unknown test result encountered: {}'.format(result.res))
 
-    def print_stats(self, numlen: int, tests: T.List['TestSerialisation'],
+    def print_stats(self, test_count: int, name_max_len: int,
+                    tests: T.List[TestSerialisation],
                     name: str, result: TestRun, i: int) -> None:
-        startpad = ' ' * (numlen - len('%d' % (i + 1)))
-        num = '%s%d/%d' % (startpad, i + 1, len(tests))
-        padding1 = ' ' * (38 - len(name))
-        padding2 = ' ' * (8 - len(result.res.value))
-        status = ''
-
-        if result.res is TestResult.FAIL:
-            status = returncode_to_status(result.returncode)
-        result_str = '%s %s  %s%s%s%5.2f s %s' % \
-            (num, name, padding1, result.res.value, padding2, result.duration,
-             status)
         ok_statuses = (TestResult.OK, TestResult.EXPECTEDFAIL)
-        bad_statuses = (TestResult.FAIL, TestResult.TIMEOUT, TestResult.UNEXPECTEDPASS,
-                        TestResult.ERROR)
+        bad_statuses = (TestResult.FAIL, TestResult.TIMEOUT,
+                        TestResult.UNEXPECTEDPASS, TestResult.ERROR)
+        result_str = '{num:{numlen}}/{testcount} {name:{name_max_len}} {res:{reslen}} {dur:.2f}s'.format(
+            numlen=len(str(test_count)),
+            num=i,
+            testcount=test_count,
+            name_max_len=name_max_len,
+            name=name,
+            reslen=TestResult.maxlen(),
+            res=result.res.value,
+            dur=result.duration)
+        if result.res is TestResult.FAIL:
+            result_str += ' ' + returncode_to_status(result.returncode)
         if not self.options.quiet or result.res not in ok_statuses:
-            if result.res not in ok_statuses and mlog.colorize_console:
-                if result.res in bad_statuses:
-                    decorator = mlog.red
-                elif result.res is TestResult.SKIP:
-                    decorator = mlog.yellow
-                else:
-                    sys.exit('Unreachable code was ... well ... reached.')
-                print(decorator(result_str).get_text(True))
+            if result.res not in ok_statuses:
+                self.collected_failures.append(result_str)
+                if mlog.colorize_console():
+                    if result.res in bad_statuses:
+                        decorator = mlog.red
+                    elif result.res is TestResult.SKIP:
+                        decorator = mlog.yellow
+                    else:
+                        sys.exit('Unreachable code was ... well ... reached.')
+                    print(decorator(result_str).get_text(True))
             else:
                 print(result_str)
         result_str += "\n\n" + result.get_log()
@@ -755,23 +940,31 @@
             self.logfile.write(result_str)
         if self.jsonlogfile:
             write_json_log(self.jsonlogfile, name, result)
+        if self.junit:
+            self.junit.log(name, result)
 
     def print_summary(self) -> None:
-        msg = '''
-Ok:                 %4d
-Expected Fail:      %4d
-Fail:               %4d
-Unexpected Pass:    %4d
-Skipped:            %4d
-Timeout:            %4d
-''' % (self.success_count, self.expectedfail_count, self.fail_count,
-            self.unexpectedpass_count, self.skip_count, self.timeout_count)
+        # Prepend a list of failures
+        msg = '' if len(self.collected_failures) < 1 else "\nSummary of Failures:\n\n"
+        msg += '\n'.join(self.collected_failures)
+        msg += textwrap.dedent('''
+
+            Ok:                 {:<4}
+            Expected Fail:      {:<4}
+            Fail:               {:<4}
+            Unexpected Pass:    {:<4}
+            Skipped:            {:<4}
+            Timeout:            {:<4}
+            ''').format(self.success_count, self.expectedfail_count, self.fail_count,
+           self.unexpectedpass_count, self.skip_count, self.timeout_count)
         print(msg)
         if self.logfile:
             self.logfile.write(msg)
+        if self.junit:
+            self.junit.write()
 
     def print_collected_logs(self) -> None:
-        if len(self.collected_logs) > 0:
+        if self.collected_logs:
             if len(self.collected_logs) > 10:
                 print('\nThe output from 10 first failed tests:\n')
             else:
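self.junit used above is a JunitBuilder defined earlier in mtest.py (outside this hunk). As a rough idea of what such a log writer does, a much-reduced sketch with xml.etree.ElementTree; the element names follow common JUnit conventions and are not necessarily meson's exact output:

import xml.etree.ElementTree as ET

class TinyJunit:
    def __init__(self, filename):
        self.filename = filename
        self.root = ET.Element('testsuites')

    def log(self, suite, name, passed, duration):
        ts = ET.SubElement(self.root, 'testsuite', name=suite, tests='1')
        case = ET.SubElement(ts, 'testcase', name=name, time=str(duration))
        if not passed:
            ET.SubElement(case, 'failure')

    def write(self):
        ET.ElementTree(self.root).write(self.filename, encoding='utf-8', xml_declaration=True)
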
@@ -805,14 +998,14 @@
     @staticmethod
     def split_suite_string(suite: str) -> T.Tuple[str, str]:
         if ':' in suite:
-            # mypy can't figure out that str.split(n, 1) will return a list of
-            # length 2, so we have to help it.
-            return T.cast(T.Tuple[str, str], tuple(suite.split(':', 1)))
+            split = suite.split(':', 1)
+            assert len(split) == 2
+            return split[0], split[1]
         else:
             return suite, ""
 
     @staticmethod
-    def test_in_suites(test: 'TestSerialisation', suites: T.List[str]) -> bool:
+    def test_in_suites(test: TestSerialisation, suites: T.List[str]) -> bool:
         for suite in suites:
             (prj_match, st_match) = TestHarness.split_suite_string(suite)
             for prjst in test.suite:
@@ -843,17 +1036,39 @@
                 return True
         return False
 
-    def test_suitable(self, test: 'TestSerialisation') -> bool:
+    def test_suitable(self, test: TestSerialisation) -> bool:
         return ((not self.options.include_suites or
                 TestHarness.test_in_suites(test, self.options.include_suites)) and not
                 TestHarness.test_in_suites(test, self.options.exclude_suites))
 
-    def get_tests(self) -> T.List['TestSerialisation']:
+    def tests_from_args(self, tests: T.List[TestSerialisation]) -> T.Generator[TestSerialisation, None, None]:
+        '''
+        Allow specifying test names like "meson test foo1 foo2", where test('foo1', ...) is defined in the build files.
+
+        Also support specifying the subproject to run tests from like
+        "meson test subproj:" (all tests inside subproj) or "meson test subproj:foo1"
+        to run foo1 inside subproj. Coincidentally also "meson test :foo1" to
+        run all tests with that name across all subprojects, which is
+        identical to "meson test foo1"
+        '''
+        for arg in self.options.args:
+            if ':' in arg:
+                subproj, name = arg.split(':', maxsplit=1)
+            else:
+                subproj, name = '', arg
+            for t in tests:
+                if subproj and t.project_name != subproj:
+                    continue
+                if name and t.name != name:
+                    continue
+                yield t
+
+    def get_tests(self) -> T.List[TestSerialisation]:
         if not self.tests:
             print('No tests defined.')
             return []
 
-        if len(self.options.include_suites) or len(self.options.exclude_suites):
+        if self.options.include_suites or self.options.exclude_suites:
             tests = []
             for tst in self.tests:
                 if self.test_suitable(tst):
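tests_from_args above implements the 'subproj:test' selection syntax described in its docstring. A standalone sketch of the same matching rule, using plain (project, name) tuples instead of TestSerialisation objects:

def matches(arg, project, name):
    if ':' in arg:
        subproj, _, test = arg.partition(':')
    else:
        subproj, test = '', arg
    return (not subproj or subproj == project) and (not test or test == name)

tests = [('proj', 'foo1'), ('subproj', 'foo1'), ('subproj', 'bar')]
print([t for t in tests if matches('subproj:', *t)])   # everything in subproj
print([t for t in tests if matches(':foo1', *t)])      # every test named foo1
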
@@ -861,9 +1076,8 @@
         else:
             tests = self.tests
 
-        # allow specifying test names like "meson test foo1 foo2", where test('foo1', ...)
         if self.options.args:
-            tests = [t for t in tests if t.name in self.options.args]
+            tests = list(self.tests_from_args(tests))
 
         if not tests:
             print('No suitable tests defined.')
@@ -885,14 +1099,16 @@
 
         if namebase:
             logfile_base += '-' + namebase.replace(' ', '_')
+
+        self.junit = JunitBuilder(logfile_base + '.junit.xml')
+
         self.logfilename = logfile_base + '.txt'
         self.jsonlogfilename = logfile_base + '.json'
 
         self.jsonlogfile = open(self.jsonlogfilename, 'w', encoding='utf-8', errors='replace')
         self.logfile = open(self.logfilename, 'w', encoding='utf-8', errors='surrogateescape')
 
-        self.logfile.write('Log of Meson test suite run on %s\n\n'
-                           % datetime.datetime.now().isoformat())
+        self.logfile.write('Log of Meson test suite run on {}\n\n'.format(datetime.datetime.now().isoformat()))
         inherit_env = env_tuple_to_str(os.environ.items())
         self.logfile.write('Inherited environment: {}\n\n'.format(inherit_env))
 
@@ -909,20 +1125,21 @@
             wrap += options.wrapper
         return wrap
 
-    def get_pretty_suite(self, test: 'TestSerialisation') -> str:
+    def get_pretty_suite(self, test: TestSerialisation) -> str:
         if len(self.suites) > 1 and test.suite:
             rv = TestHarness.split_suite_string(test.suite[0])[0]
             s = "+".join(TestHarness.split_suite_string(s)[1] for s in test.suite)
-            if len(s):
+            if s:
                 rv += ":"
             return rv + s + " / " + test.name
         else:
             return test.name
 
-    def run_tests(self, tests: T.List['TestSerialisation']) -> None:
+    def run_tests(self, tests: T.List[TestSerialisation]) -> None:
         executor = None
-        futures = []  # type: T.List[T.Tuple[conc.Future[TestRun], int, T.List[TestSerialisation], str, int]]
-        numlen = len('%d' % len(tests))
+        futures = []  # type: T.List[T.Tuple[conc.Future[TestRun], int, int, T.List[TestSerialisation], str, int]]
+        test_count = len(tests)
+        name_max_len = max([len(self.get_pretty_suite(test)) for test in tests])
         self.open_log_files()
         startdir = os.getcwd()
         if self.options.wd:
@@ -931,7 +1148,7 @@
 
         try:
             for _ in range(self.options.repeat):
-                for i, test in enumerate(tests):
+                for i, test in enumerate(tests, 1):
                     visible_name = self.get_pretty_suite(test)
                     single_test = self.get_test_runner(test)
 
@@ -940,35 +1157,35 @@
                         futures = []
                         res = single_test.run()
                         self.process_test_result(res)
-                        self.print_stats(numlen, tests, visible_name, res, i)
+                        self.print_stats(test_count, name_max_len, tests, visible_name, res, i)
                     else:
                         if not executor:
                             executor = conc.ThreadPoolExecutor(max_workers=self.options.num_processes)
                         f = executor.submit(single_test.run)
-                        futures.append((f, numlen, tests, visible_name, i))
+                        futures.append((f, test_count, name_max_len, tests, visible_name, i))
                     if self.options.repeat > 1 and self.fail_count:
                         break
                 if self.options.repeat > 1 and self.fail_count:
                     break
 
             self.drain_futures(futures)
-            self.print_summary()
             self.print_collected_logs()
+            self.print_summary()
 
             if self.logfilename:
-                print('Full log written to %s' % self.logfilename)
+                print('Full log written to {}'.format(self.logfilename))
         finally:
             os.chdir(startdir)
 
-    def drain_futures(self, futures: T.List[T.Tuple['conc.Future[TestRun]', int, T.List['TestSerialisation'], str, int]]) -> None:
+    def drain_futures(self, futures: T.List[T.Tuple['conc.Future[TestRun]', int, int, T.List[TestSerialisation], str, int]]) -> None:
         for x in futures:
-            (result, numlen, tests, name, i) = x
+            (result, test_count, name_max_len, tests, name, i) = x
             if self.options.repeat > 1 and self.fail_count:
                 result.cancel()
             if self.options.verbose:
                 result.result()
             self.process_test_result(result.result())
-            self.print_stats(numlen, tests, name, result.result(), i)
+            self.print_stats(test_count, name_max_len, tests, name, result.result(), i)
 
     def run_special(self) -> int:
         '''Tests run by the user, usually something like "under gdb 1000 times".'''
@@ -997,7 +1214,7 @@
         print("Can't find ninja, can't rebuild test.")
         return False
 
-    ret = subprocess.run([ninja, '-C', wd]).returncode
+    ret = subprocess.run(ninja + ['-C', wd]).returncode
     if ret != 0:
         print('Could not rebuild {}'.format(wd))
         return False
@@ -1028,7 +1245,6 @@
         if not exe.found():
             print('Could not find requested program: {!r}'.format(check_bin))
             return 1
-    options.wd = os.path.abspath(options.wd)
 
     if not options.list and not options.no_rebuild:
         if not rebuild_all(options.wd):
diff -Nru meson-0.53.2/mesonbuild/optinterpreter.py meson-0.57.0+really0.56.2/mesonbuild/optinterpreter.py
--- meson-0.53.2/mesonbuild/optinterpreter.py	2020-01-23 12:51:19.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/optinterpreter.py	2021-01-06 10:39:48.000000000 +0000
@@ -12,16 +12,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import os, re
+import re
 import functools
 import typing as T
 
-from . import mparser
+from . import compilers
 from . import coredata
 from . import mesonlib
-from . import compilers
+from . import mparser
+from .interpreterbase import FeatureNew
 
-forbidden_option_names = set(coredata.builtin_options.keys())
+forbidden_option_names = set(coredata.BUILTIN_OPTIONS.keys())
 forbidden_prefixes = [lang + '_' for lang in compilers.all_languages] + ['b_', 'backend_']
 reserved_prefixes = ['cross_']
 
@@ -61,7 +62,6 @@
 def StringParser(description, kwargs):
     return coredata.UserStringOption(description,
                                      kwargs.get('value', ''),
-                                     kwargs.get('choices', []),
                                      kwargs.get('yield', coredata.default_yielding))
 
 @permitted_kwargs({'value', 'yield'})
@@ -90,10 +90,9 @@
 def IntegerParser(description, kwargs):
     if 'value' not in kwargs:
         raise OptionException('Integer option must contain value argument.')
+    inttuple = (kwargs.get('min', None), kwargs.get('max', None), kwargs['value'])
     return coredata.UserIntegerOption(description,
-                                      kwargs.get('min', None),
-                                      kwargs.get('max', None),
-                                      kwargs['value'],
+                                      inttuple,
                                       kwargs.get('yield', coredata.default_yielding))
 
 # FIXME: Cannot use FeatureNew while parsing options because we parse it before
@@ -134,20 +133,21 @@
                 } # type: T.Dict[str, T.Callable[[str, T.Dict], coredata.UserOption]]
 
 class OptionInterpreter:
-    def __init__(self, subproject):
+    def __init__(self, subproject: str) -> None:
         self.options = {}
         self.subproject = subproject
 
-    def process(self, option_file):
+    def process(self, option_file: str) -> None:
         try:
             with open(option_file, 'r', encoding='utf8') as f:
-                ast = mparser.Parser(f.read(), '').parse()
+                ast = mparser.Parser(f.read(), option_file).parse()
         except mesonlib.MesonException as me:
             me.file = option_file
             raise me
         if not isinstance(ast, mparser.CodeBlockNode):
             e = OptionException('Option file is malformed.')
             e.lineno = ast.lineno()
+            e.file = option_file
             raise e
         for cur in ast.lines:
             try:
@@ -155,10 +155,10 @@
             except Exception as e:
                 e.lineno = cur.lineno
                 e.colno = cur.colno
-                e.file = os.path.join('meson_options.txt')
+                e.file = option_file
                 raise e
 
-    def reduce_single(self, arg):
+    def reduce_single(self, arg: T.Union[str, mparser.BaseNode]) -> T.Union[str, int, bool]:
         if isinstance(arg, str):
             return arg
         elif isinstance(arg, (mparser.StringNode, mparser.BooleanNode,
@@ -166,23 +166,42 @@
             return arg.value
         elif isinstance(arg, mparser.ArrayNode):
             return [self.reduce_single(curarg) for curarg in arg.args.arguments]
+        elif isinstance(arg, mparser.UMinusNode):
+            res = self.reduce_single(arg.value)
+            if not isinstance(res, (int, float)):
+                raise OptionException('Token after "-" is not a number')
+            FeatureNew.single_use('negative numbers in meson_options.txt', '0.54.1', self.subproject)
+            return -res
+        elif isinstance(arg, mparser.NotNode):
+            res = self.reduce_single(arg.value)
+            if not isinstance(res, bool):
+                raise OptionException('Token after "not" is not a a boolean')
+            FeatureNew.single_use('negation ("not") in meson_options.txt', '0.54.1', self.subproject)
+            return not res
+        elif isinstance(arg, mparser.ArithmeticNode):
+            l = self.reduce_single(arg.left)
+            r = self.reduce_single(arg.right)
+            if not (arg.operation == 'add' and isinstance(l, str) and isinstance(r, str)):
+                raise OptionException('Only string concatenation with the "+" operator is allowed')
+            FeatureNew.single_use('string concatenation in meson_options.txt', '0.55.0', self.subproject)
+            return l + r
         else:
             raise OptionException('Arguments may only be string, int, bool, or array of those.')
 
-    def reduce_arguments(self, args):
+    def reduce_arguments(self, args: mparser.ArgumentNode) -> T.Tuple[T.List[T.Union[str, int, bool]], T.Dict[str, T.Union[str, int, bool]]]:
         assert(isinstance(args, mparser.ArgumentNode))
         if args.incorrect_order():
             raise OptionException('All keyword arguments must be after positional arguments.')
         reduced_pos = [self.reduce_single(arg) for arg in args.arguments]
         reduced_kw = {}
         for key in args.kwargs.keys():
-            if not isinstance(key, str):
+            if not isinstance(key, mparser.IdNode):
                 raise OptionException('Keyword argument name is not a string.')
             a = args.kwargs[key]
-            reduced_kw[key] = self.reduce_single(a)
+            reduced_kw[key.value] = self.reduce_single(a)
         return reduced_pos, reduced_kw
 
-    def evaluate_statement(self, node):
+    def evaluate_statement(self, node: mparser.BaseNode) -> None:
         if not isinstance(node, mparser.FunctionNode):
             raise OptionException('Option file may only contain option definitions')
         func_name = node.func_name
@@ -190,11 +209,8 @@
             raise OptionException('Only calls to option() are allowed in option files.')
         (posargs, kwargs) = self.reduce_arguments(node.args)
 
-        # FIXME: Cannot use FeatureNew while parsing options because we parse
-        # it before reading options in project(). See func_project() in
-        # interpreter.py
-        #if 'yield' in kwargs:
-        #    FeatureNew('option yield', '0.45.0').use(self.subproject)
+        if 'yield' in kwargs:
+            FeatureNew.single_use('option yield', '0.45.0', self.subproject)
 
         if 'type' not in kwargs:
             raise OptionException('Option call missing mandatory "type" keyword argument')
diff -Nru meson-0.53.2/mesonbuild/_pathlib.py meson-0.57.0+really0.56.2/mesonbuild/_pathlib.py
--- meson-0.53.2/mesonbuild/_pathlib.py	1970-01-01 00:00:00.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/_pathlib.py	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,49 @@
+# Copyright 2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import typing as T
+
+# Python 3.5 does not have the strict kwarg for resolve and always
+# behaves like calling resolve with strict=True in Python 3.6+
+#
+# This module emulates the behavior of Python 3.6+ in Python 3.5 by
+# overriding the resolve method with a bit of custom logic
+#
+# TODO: Drop this module as soon as Python 3.5 support is dropped
+
+if T.TYPE_CHECKING:
+    from pathlib import Path
+else:
+    if sys.version_info.major <= 3 and sys.version_info.minor <= 5:
+
+        # Inspired by https://codereview.stackexchange.com/questions/162426/subclassing-pathlib-path
+        import pathlib
+        import os
+
+        # Can not directly inherit from pathlib.Path because the __new__
+        # operator of pathlib.Path() returns a {Posix,Windows}Path object.
+        class Path(type(pathlib.Path())):
+            def resolve(self, strict: bool = False) -> 'Path':
+                try:
+                    return super().resolve()
+                except FileNotFoundError:
+                    if strict:
+                        raise
+                    return Path(os.path.normpath(str(self)))
+
+    else:
+        from pathlib import Path
+
+from pathlib import PurePath, PureWindowsPath, PurePosixPath
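Usage sketch of the shim above (assuming it is importable as mesonbuild._pathlib): the point is that resolve() on a missing path no longer raises on Python 3.5, matching the 3.6+ default of strict=False.

from mesonbuild._pathlib import Path

p = Path('does', 'not', 'exist')
print(p.resolve())            # no exception even though the path is missing
try:
    p.resolve(strict=True)    # opting back in to strict resolution still raises
except FileNotFoundError:
    print('strict resolve raised as expected')
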
diff -Nru meson-0.53.2/mesonbuild/rewriter.py meson-0.57.0+really0.56.2/mesonbuild/rewriter.py
--- meson-0.53.2/mesonbuild/rewriter.py	2020-01-23 12:51:19.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/rewriter.py	2021-01-06 10:39:48.000000000 +0000
@@ -113,7 +113,7 @@
 
     def _new_node(self):
         # Overwrite in derived class
-        return BaseNode()
+        raise RewriterException('Internal error: _new_node of MTypeBase was called')
 
     def can_modify(self):
         return self.node_type is not None
@@ -159,7 +159,7 @@
         super().__init__(node)
 
     def _new_node(self):
-        return StringNode(Token('', '', 0, 0, 0, None, False))
+        return BooleanNode(Token('', '', 0, 0, 0, None, False))
 
     def supported_nodes(self):
         return [BooleanNode]
@@ -172,7 +172,7 @@
         super().__init__(node)
 
     def _new_node(self):
-        return StringNode(Token('', '', 0, 0, 0, None, ''))
+        return IdNode(Token('', '', 0, 0, 0, None, ''))
 
     def supported_nodes(self):
         return [IdNode]
@@ -189,7 +189,7 @@
 
     def _new_element_node(self, value):
         # Overwrite in derived class
-        return BaseNode()
+        raise RewriterException('Internal error: _new_element_node of MTypeList was called')
 
     def _ensure_array_node(self):
         if not isinstance(self.node, ArrayNode):
@@ -414,10 +414,10 @@
         # Check the assignments
         tgt = None
         if target in self.interpreter.assignments:
-            node = self.interpreter.assignments[target][0]
+            node = self.interpreter.assignments[target]
             if isinstance(node, FunctionNode):
                 if node.func_name in ['executable', 'jar', 'library', 'shared_library', 'shared_module', 'static_library', 'both_libraries']:
-                    tgt = self.interpreter.assign_vals[target][0]
+                    tgt = self.interpreter.assign_vals[target]
 
         return tgt
 
@@ -434,7 +434,7 @@
 
         # Check the assignments
         if dependency in self.interpreter.assignments:
-            node = self.interpreter.assignments[dependency][0]
+            node = self.interpreter.assignments[dependency]
             if isinstance(node, FunctionNode):
                 if node.func_name in ['dependency']:
                     name = self.interpreter.flatten_args(node.args)[0]
@@ -469,8 +469,8 @@
             **{'build.' + k: o for k, o in cdata.builtins_per_machine.build.items()},
             **cdata.backend_options,
             **cdata.base_options,
-            **cdata.compiler_options.host,
-            **{'build.' + k: o for k, o in cdata.compiler_options.build.items()},
+            **(dict(cdata.flatten_lang_iterator(cdata.compiler_options.host.items()))),
+            **{'build.' + k: o for k, o in cdata.flatten_lang_iterator(cdata.compiler_options.build.items())},
             **cdata.user_options,
         }
 
@@ -522,6 +522,8 @@
             mlog.error('Unable to find the function node')
         assert(isinstance(node, FunctionNode))
         assert(isinstance(arg_node, ArgumentNode))
+        # Transform the key nodes to plain strings
+        arg_node.kwargs = {k.value: v for k, v in arg_node.kwargs.items()}
 
         # Print kwargs info
         if cmd['operation'] == 'info':
@@ -585,11 +587,13 @@
             arg_node.kwargs[key] = modifyer.get_node()
             num_changed += 1
 
+        # Convert the keys back to IdNode's
+        arg_node.kwargs = {IdNode(Token('', '', 0, 0, 0, None, k)): v for k, v in arg_node.kwargs.items()}
         if num_changed > 0 and node not in self.modefied_nodes:
             self.modefied_nodes += [node]
 
     def find_assignment_node(self, node: BaseNode) -> AssignmentNode:
-        if hasattr(node, 'ast_id') and node.ast_id in self.interpreter.reverse_assignment:
+        if node.ast_id and node.ast_id in self.interpreter.reverse_assignment:
             return self.interpreter.reverse_assignment[node.ast_id]
         return None
 
@@ -651,8 +655,8 @@
                     mlog.log('  -- Source', mlog.green(i), 'is already defined for the target --> skipping')
                     continue
                 mlog.log('  -- Adding source', mlog.green(i), 'at',
-                         mlog.yellow('{}:{}'.format(os.path.join(node.subdir, environment.build_filename), node.lineno)))
-                token = Token('string', node.subdir, 0, 0, 0, None, i)
+                         mlog.yellow('{}:{}'.format(node.filename, node.lineno)))
+                token = Token('string', node.filename, 0, 0, 0, None, i)
                 to_append += [StringNode(token)]
 
             # Append to the AST at the right place
@@ -695,7 +699,7 @@
                     arg_node = root
                 assert(arg_node is not None)
                 mlog.log('  -- Removing source', mlog.green(i), 'from',
-                         mlog.yellow('{}:{}'.format(os.path.join(string_node.subdir, environment.build_filename), string_node.lineno)))
+                         mlog.yellow('{}:{}'.format(string_node.filename, string_node.lineno)))
                 arg_node.arguments.remove(string_node)
 
                 # Mark the node as modified
@@ -712,23 +716,24 @@
             id_base = re.sub(r'[- ]', '_', cmd['target'])
             target_id = id_base + ('_exe' if cmd['target_type'] == 'executable' else '_lib')
             source_id = id_base + '_sources'
+            filename = os.path.join(cmd['subdir'], environment.build_filename)
 
             # Build src list
-            src_arg_node = ArgumentNode(Token('string', cmd['subdir'], 0, 0, 0, None, ''))
+            src_arg_node = ArgumentNode(Token('string', filename, 0, 0, 0, None, ''))
             src_arr_node = ArrayNode(src_arg_node, 0, 0, 0, 0)
-            src_far_node = ArgumentNode(Token('string', cmd['subdir'], 0, 0, 0, None, ''))
-            src_fun_node = FunctionNode(cmd['subdir'], 0, 0, 0, 0, 'files', src_far_node)
-            src_ass_node = AssignmentNode(cmd['subdir'], 0, 0, source_id, src_fun_node)
-            src_arg_node.arguments = [StringNode(Token('string', cmd['subdir'], 0, 0, 0, None, x)) for x in cmd['sources']]
+            src_far_node = ArgumentNode(Token('string', filename, 0, 0, 0, None, ''))
+            src_fun_node = FunctionNode(filename, 0, 0, 0, 0, 'files', src_far_node)
+            src_ass_node = AssignmentNode(filename, 0, 0, source_id, src_fun_node)
+            src_arg_node.arguments = [StringNode(Token('string', filename, 0, 0, 0, None, x)) for x in cmd['sources']]
             src_far_node.arguments = [src_arr_node]
 
             # Build target
-            tgt_arg_node = ArgumentNode(Token('string', cmd['subdir'], 0, 0, 0, None, ''))
-            tgt_fun_node = FunctionNode(cmd['subdir'], 0, 0, 0, 0, cmd['target_type'], tgt_arg_node)
-            tgt_ass_node = AssignmentNode(cmd['subdir'], 0, 0, target_id, tgt_fun_node)
+            tgt_arg_node = ArgumentNode(Token('string', filename, 0, 0, 0, None, ''))
+            tgt_fun_node = FunctionNode(filename, 0, 0, 0, 0, cmd['target_type'], tgt_arg_node)
+            tgt_ass_node = AssignmentNode(filename, 0, 0, target_id, tgt_fun_node)
             tgt_arg_node.arguments = [
-                StringNode(Token('string', cmd['subdir'], 0, 0, 0, None, cmd['target'])),
-                IdNode(Token('string', cmd['subdir'], 0, 0, 0, None, source_id))
+                StringNode(Token('string', filename, 0, 0, 0, None, cmd['target'])),
+                IdNode(Token('string', filename, 0, 0, 0, None, source_id))
             ]
 
             src_ass_node.accept(AstIndentationGenerator())
@@ -741,7 +746,7 @@
                 to_remove = target['node']
             self.to_remove_nodes += [to_remove]
             mlog.log('  -- Removing target', mlog.green(cmd['target']), 'at',
-                     mlog.yellow('{}:{}'.format(os.path.join(to_remove.subdir, environment.build_filename), to_remove.lineno)))
+                     mlog.yellow('{}:{}'.format(to_remove.filename, to_remove.lineno)))
 
         elif cmd['operation'] == 'info':
             # T.List all sources in the target
@@ -776,8 +781,8 @@
         self.functions[cmd['type']](cmd)
 
     def apply_changes(self):
-        assert(all(hasattr(x, 'lineno') and hasattr(x, 'colno') and hasattr(x, 'subdir') for x in self.modefied_nodes))
-        assert(all(hasattr(x, 'lineno') and hasattr(x, 'colno') and hasattr(x, 'subdir') for x in self.to_remove_nodes))
+        assert(all(hasattr(x, 'lineno') and hasattr(x, 'colno') and hasattr(x, 'filename') for x in self.modefied_nodes))
+        assert(all(hasattr(x, 'lineno') and hasattr(x, 'colno') and hasattr(x, 'filename') for x in self.to_remove_nodes))
         assert(all(isinstance(x, (ArrayNode, FunctionNode)) for x in self.modefied_nodes))
         assert(all(isinstance(x, (ArrayNode, AssignmentNode, FunctionNode)) for x in self.to_remove_nodes))
         # Sort based on line and column in reversed order
@@ -796,7 +801,7 @@
                 printer.post_process()
                 new_data = printer.result.strip()
             data = {
-                'file': os.path.join(i['node'].subdir, environment.build_filename),
+                'file': i['node'].filename,
                 'str': new_data,
                 'node': i['node'],
                 'action': i['action']
diff -Nru meson-0.53.2/mesonbuild/scripts/clangformat.py meson-0.57.0+really0.56.2/mesonbuild/scripts/clangformat.py
--- meson-0.53.2/mesonbuild/scripts/clangformat.py	2019-10-06 17:01:35.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/scripts/clangformat.py	2021-01-06 10:39:48.000000000 +0000
@@ -18,14 +18,17 @@
 
 from ..environment import detect_clangformat
 from ..compilers import lang_suffixes
+import typing as T
 
-def clangformat(exelist, srcdir_name, builddir_name):
+def clangformat(exelist: T.List[str], srcdir_name: str, builddir_name: str) -> int:
     srcdir = pathlib.Path(srcdir_name)
     suffixes = set(lang_suffixes['c']).union(set(lang_suffixes['cpp']))
     suffixes.add('h')
     futures = []
     with ThreadPoolExecutor() as e:
         for f in (x for suff in suffixes for x in srcdir.glob('**/*.' + suff)):
+            if f.is_dir():
+                continue
             strf = str(f)
             if strf.startswith(builddir_name):
                 continue
@@ -33,7 +36,7 @@
         [x.result() for x in futures]
     return 0
 
-def run(args):
+def run(args: T.List[str]) -> int:
     srcdir_name = args[0]
     builddir_name = args[1]
 
diff -Nru meson-0.53.2/mesonbuild/scripts/clangtidy.py meson-0.57.0+really0.56.2/mesonbuild/scripts/clangtidy.py
--- meson-0.53.2/mesonbuild/scripts/clangtidy.py	2019-12-04 18:45:50.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/scripts/clangtidy.py	2021-01-06 10:39:48.000000000 +0000
@@ -15,11 +15,14 @@
 import pathlib
 import subprocess
 import shutil
+import os
+import re
 from concurrent.futures import ThreadPoolExecutor
+import typing as T
 
 from ..compilers import lang_suffixes
 
-def manual_clangformat(srcdir_name, builddir_name):
+def manual_clangformat(srcdir_name: str, builddir_name: str) -> int:
     srcdir = pathlib.Path(srcdir_name)
     suffixes = set(lang_suffixes['c']).union(set(lang_suffixes['cpp']))
     suffixes.add('h')
@@ -27,6 +30,8 @@
     returncode = 0
     with ThreadPoolExecutor() as e:
         for f in (x for suff in suffixes for x in srcdir.glob('**/*.' + suff)):
+            if f.is_dir():
+                continue
             strf = str(f)
             if strf.startswith(builddir_name):
                 continue
@@ -34,19 +39,20 @@
         [max(returncode, x.result().returncode) for x in futures]
     return returncode
 
-def clangformat(srcdir_name, builddir_name):
+def clangformat(srcdir_name: str, builddir_name: str) -> int:
     run_clang_tidy = None
     for rct in ('run-clang-tidy', 'run-clang-tidy.py'):
         if shutil.which(rct):
             run_clang_tidy = rct
             break
     if run_clang_tidy:
-        return subprocess.run([run_clang_tidy, '-p', builddir_name]).returncode
+        return subprocess.run([run_clang_tidy, '-p', builddir_name, '^(?!' + re.escape(builddir_name + os.path.sep) +').*$']).returncode
     else:
         print('Could not find run-clang-tidy, running checks manually.')
         manual_clangformat(srcdir_name, builddir_name)
+    return 0
 
-def run(args):
+def run(args: T.List[str]) -> int:
     srcdir_name = args[0]
     builddir_name = args[1]
     return clangformat(srcdir_name, builddir_name)
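The extra positional argument handed to run-clang-tidy above is a file-selection regex: a negative lookahead that matches any path not under the build directory, so generated files are skipped. A standalone check of what it accepts (the paths are made up):

import os
import re

builddir_name = '/home/user/project/builddir'   # example value
pattern = re.compile('^(?!' + re.escape(builddir_name + os.path.sep) + ').*$')

print(bool(pattern.match('/home/user/project/src/main.cpp')))      # True: outside builddir
print(bool(pattern.match('/home/user/project/builddir/config.h'))) # False: inside builddir
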
diff -Nru meson-0.53.2/mesonbuild/scripts/cleantrees.py meson-0.57.0+really0.56.2/mesonbuild/scripts/cleantrees.py
--- meson-0.53.2/mesonbuild/scripts/cleantrees.py	2016-12-20 19:35:10.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/scripts/cleantrees.py	2020-09-17 22:00:44.000000000 +0000
@@ -16,8 +16,9 @@
 import sys
 import shutil
 import pickle
+import typing as T
 
-def rmtrees(build_dir, trees):
+def rmtrees(build_dir: str, trees: T.List[str]) -> None:
     for t in trees:
         # Never delete trees outside of the builddir
         if os.path.isabs(t):
@@ -28,7 +29,7 @@
         if os.path.isdir(bt):
             shutil.rmtree(bt, ignore_errors=True)
 
-def run(args):
+def run(args: T.List[str]) -> int:
     if len(args) != 1:
         print('Cleaner script for Meson. Do not run on your own please.')
         print('cleantrees.py <data-file>')
diff -Nru meson-0.53.2/mesonbuild/scripts/cmake_run_ctgt.py meson-0.57.0+really0.56.2/mesonbuild/scripts/cmake_run_ctgt.py
--- meson-0.53.2/mesonbuild/scripts/cmake_run_ctgt.py	1970-01-01 00:00:00.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/scripts/cmake_run_ctgt.py	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,102 @@
+#!/usr/bin/env python3
+
+import argparse
+import subprocess
+import shutil
+import sys
+from pathlib import Path
+import typing as T
+
+def run(argsv: T.List[str]) -> int:
+    commands = [[]]  # type: T.List[T.List[str]]
+    SEPARATOR = ';;;'
+
+    # Generate CMD parameters
+    parser = argparse.ArgumentParser(description='Wrapper for add_custom_command')
+    parser.add_argument('-d', '--directory', type=str, metavar='D', required=True, help='Working directory to change into before running the commands')
+    parser.add_argument('-o', '--outputs', nargs='+', metavar='O', required=True, help='Expected output files')
+    parser.add_argument('-O', '--original-outputs', nargs='*', metavar='O', default=[], help='Output files expected by CMake')
+    parser.add_argument('commands', nargs=argparse.REMAINDER, help='A "{}" separated list of commands'.format(SEPARATOR))
+
+    # Parse
+    args = parser.parse_args(argsv)
+    directory = Path(args.directory)
+
+    dummy_target = None
+    if len(args.outputs) == 1 and len(args.original_outputs) == 0:
+        dummy_target = Path(args.outputs[0])
+    elif len(args.outputs) != len(args.original_outputs):
+        print('Length of output list and original output list differ')
+        return 1
+
+    for i in args.commands:
+        if i == SEPARATOR:
+            commands += [[]]
+            continue
+
+        i = i.replace('"', '')  # Remove leftover quotes
+        commands[-1] += [i]
+
+    # Execute
+    for i in commands:
+        # Skip empty lists
+        if not i:
+            continue
+
+        cmd = []
+        stdout = None
+        stderr = None
+        capture_file = ''
+
+        for j in i:
+            if j in ['>', '>>']:
+                stdout = subprocess.PIPE
+                continue
+            elif j in ['&>', '&>>']:
+                stdout = subprocess.PIPE
+                stderr = subprocess.STDOUT
+                continue
+
+            if stdout is not None or stderr is not None:
+                capture_file += j
+            else:
+                cmd += [j]
+
+        try:
+            directory.mkdir(parents=True, exist_ok=True)
+
+            res = subprocess.run(cmd, stdout=stdout, stderr=stderr, cwd=str(directory), check=True)
+            if capture_file:
+                out_file = directory / capture_file
+                out_file.write_bytes(res.stdout)
+        except subprocess.CalledProcessError:
+            return 1
+
+    if dummy_target:
+        dummy_target.touch()
+        return 0
+
+    # Copy outputs
+    zipped_outputs = zip([Path(x) for x in args.outputs], [Path(x) for x in args.original_outputs])
+    for expected, generated in zipped_outputs:
+        do_copy = False
+        if not expected.exists():
+            if not generated.exists():
+                print('Unable to find generated file. This can cause the build to fail:')
+                print(generated)
+                do_copy = False
+            else:
+                do_copy = True
+        elif generated.exists():
+            if generated.stat().st_mtime > expected.stat().st_mtime:
+                do_copy = True
+
+        if do_copy:
+            if expected.exists():
+                expected.unlink()
+            shutil.copyfile(str(generated), str(expected))
+
+    return 0
+
+if __name__ == '__main__':
+    sys.exit(run(sys.argv[1:]))
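A hypothetical invocation of the wrapper above (paths and commands are made up): two ';;;'-separated commands run inside the working directory, the second one captured to a file, and the single -o output touched as a dummy stamp afterwards.

from mesonbuild.scripts.cmake_run_ctgt import run

ret = run([
    '-o', 'build/gen/custom.stamp',   # single output with no -O list -> touched as a dummy stamp
    '-d', 'build/gen',                # cwd for the wrapped commands (created if missing)
    'sh', '-c', 'echo generating', ';;;',
    'ls', '>', 'listing.txt',         # stdout captured into build/gen/listing.txt
])
print(ret)
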
diff -Nru meson-0.53.2/mesonbuild/scripts/commandrunner.py meson-0.57.0+really0.56.2/mesonbuild/scripts/commandrunner.py
--- meson-0.53.2/mesonbuild/scripts/commandrunner.py	2019-12-04 18:45:50.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/scripts/commandrunner.py	2020-09-17 22:00:44.000000000 +0000
@@ -17,8 +17,9 @@
 
 import sys, os, subprocess, shutil, shlex
 import re
+import typing as T
 
-def run_command(source_dir, build_dir, subdir, meson_command, command, arguments):
+def run_command(source_dir: str, build_dir: str, subdir: str, meson_command: T.List[str], command: str, arguments: T.List[str]) -> subprocess.Popen:
     env = {'MESON_SOURCE_ROOT': source_dir,
            'MESON_BUILD_ROOT': build_dir,
            'MESON_SUBDIR': subdir,
@@ -50,24 +51,24 @@
         print('Could not execute command "{}": {}'.format(command, err))
         sys.exit(1)
 
-def is_python_command(cmdname):
+def is_python_command(cmdname: str) -> bool:
     end_py_regex = r'python(3|3\.\d+)?(\.exe)?$'
     return re.search(end_py_regex, cmdname) is not None
 
-def run(args):
+def run(args: T.List[str]) -> int:
     if len(args) < 4:
         print('commandrunner.py <source dir> <build dir> <subdir> <command> [arguments]')
         return 1
     src_dir = args[0]
     build_dir = args[1]
     subdir = args[2]
-    meson_command = args[3]
-    if is_python_command(meson_command):
-        meson_command = [meson_command, args[4]]
+    meson_bin = args[3]
+    if is_python_command(meson_bin):
+        meson_command = [meson_bin, args[4]]
         command = args[5]
         arguments = args[6:]
     else:
-        meson_command = [meson_command]
+        meson_command = [meson_bin]
         command = args[4]
         arguments = args[5:]
     pc = run_command(src_dir, build_dir, subdir, meson_command, command, arguments)
diff -Nru meson-0.53.2/mesonbuild/scripts/coverage.py meson-0.57.0+really0.56.2/mesonbuild/scripts/coverage.py
--- meson-0.53.2/mesonbuild/scripts/coverage.py	2019-05-02 18:59:50.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/scripts/coverage.py	2020-09-17 22:00:44.000000000 +0000
@@ -12,15 +12,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from mesonbuild import environment
+from mesonbuild import environment, mesonlib
 
-import argparse, sys, os, subprocess, pathlib
+import argparse, sys, os, subprocess, pathlib, stat
+import typing as T
 
-def coverage(outputs, source_root, subproject_root, build_root, log_dir):
+def coverage(outputs: T.List[str], source_root: str, subproject_root: str, build_root: str, log_dir: str, use_llvm_cov: bool) -> int:
     outfiles = []
     exitcode = 0
 
-    (gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe) = environment.find_coverage_tools()
+    (gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe, llvm_cov_exe) = environment.find_coverage_tools()
 
     # gcovr >= 4.2 requires a different syntax for out of source builds
     if gcovr_new_rootdir:
@@ -28,13 +29,18 @@
     else:
         gcovr_base_cmd = [gcovr_exe, '-r', build_root]
 
+    if use_llvm_cov:
+        gcov_exe_args = ['--gcov-executable', llvm_cov_exe + ' gcov']
+    else:
+        gcov_exe_args = []
+
     if not outputs or 'xml' in outputs:
         if gcovr_exe:
             subprocess.check_call(gcovr_base_cmd +
                                   ['-x',
                                    '-e', subproject_root,
-                                   '-o', os.path.join(log_dir, 'coverage.xml'),
-                                   ])
+                                   '-o', os.path.join(log_dir, 'coverage.xml')
+                                   ] + gcov_exe_args)
             outfiles.append(('Xml', pathlib.Path(log_dir, 'coverage.xml')))
         elif outputs:
             print('gcovr >= 3.3 needed to generate Xml coverage report')
@@ -44,8 +50,8 @@
         if gcovr_exe:
             subprocess.check_call(gcovr_base_cmd +
                                   ['-e', subproject_root,
-                                   '-o', os.path.join(log_dir, 'coverage.txt'),
-                                   ])
+                                   '-o', os.path.join(log_dir, 'coverage.txt')
+                                   ] + gcov_exe_args)
             outfiles.append(('Text', pathlib.Path(log_dir, 'coverage.txt')))
         elif outputs:
             print('gcovr >= 3.3 needed to generate text coverage report')
@@ -58,19 +64,34 @@
             initial_tracefile = covinfo + '.initial'
             run_tracefile = covinfo + '.run'
             raw_tracefile = covinfo + '.raw'
+            if use_llvm_cov:
+                # Create a shim to allow using llvm-cov as a gcov tool.
+                if mesonlib.is_windows():
+                    llvm_cov_shim_path = os.path.join(log_dir, 'llvm-cov.bat')
+                    with open(llvm_cov_shim_path, 'w') as llvm_cov_bat:
+                        llvm_cov_bat.write('@"{}" gcov %*'.format(llvm_cov_exe))
+                else:
+                    llvm_cov_shim_path = os.path.join(log_dir, 'llvm-cov.sh')
+                    with open(llvm_cov_shim_path, 'w') as llvm_cov_sh:
+                        llvm_cov_sh.write('#!/usr/bin/env sh\nexec "{}" gcov $@'.format(llvm_cov_exe))
+                    os.chmod(llvm_cov_shim_path, os.stat(llvm_cov_shim_path).st_mode | stat.S_IEXEC)
+                gcov_tool_args = ['--gcov-tool', llvm_cov_shim_path]
+            else:
+                gcov_tool_args = []
             subprocess.check_call([lcov_exe,
                                    '--directory', build_root,
                                    '--capture',
                                    '--initial',
                                    '--output-file',
-                                   initial_tracefile])
+                                   initial_tracefile] +
+                                  gcov_tool_args)
             subprocess.check_call([lcov_exe,
                                    '--directory', build_root,
                                    '--capture',
                                    '--output-file', run_tracefile,
                                    '--no-checksum',
-                                   '--rc', 'lcov_branch_coverage=1',
-                                   ])
+                                   '--rc', 'lcov_branch_coverage=1'] +
+                                  gcov_tool_args)
             # Join initial and test results.
             subprocess.check_call([lcov_exe,
                                    '-a', initial_tracefile,
@@ -126,7 +147,7 @@
 
     return exitcode
 
-def run(args):
+def run(args: T.List[str]) -> int:
     if not os.path.isfile('build.ninja'):
         print('Coverage currently only works with the Ninja backend.')
         return 1
@@ -137,6 +158,8 @@
                         const='xml', help='generate Xml report')
     parser.add_argument('--html', dest='outputs', action='append_const',
                         const='html', help='generate Html report')
+    parser.add_argument('--use_llvm_cov', action='store_true',
+                        help='use llvm-cov')
     parser.add_argument('source_root')
     parser.add_argument('subproject_root')
     parser.add_argument('build_root')
@@ -144,7 +167,7 @@
     options = parser.parse_args(args)
     return coverage(options.outputs, options.source_root,
                     options.subproject_root, options.build_root,
-                    options.log_dir)
+                    options.log_dir, options.use_llvm_cov)
 
 if __name__ == '__main__':
     sys.exit(run(sys.argv[1:]))
diff -Nru meson-0.53.2/mesonbuild/scripts/delwithsuffix.py meson-0.57.0+really0.56.2/mesonbuild/scripts/delwithsuffix.py
--- meson-0.53.2/mesonbuild/scripts/delwithsuffix.py	2017-05-20 09:00:34.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/scripts/delwithsuffix.py	2020-09-17 22:00:44.000000000 +0000
@@ -13,8 +13,9 @@
 # limitations under the License.
 
 import os, sys
+import typing as T
 
-def run(args):
+def run(args: T.List[str]) -> int:
     if len(args) != 2:
         print('delwithsuffix.py <root of subdir to process> <suffix to delete>')
         sys.exit(1)
diff -Nru meson-0.53.2/mesonbuild/scripts/depfixer.py meson-0.57.0+really0.56.2/mesonbuild/scripts/depfixer.py
--- meson-0.53.2/mesonbuild/scripts/depfixer.py	2019-12-04 18:45:50.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/scripts/depfixer.py	2020-12-28 21:59:09.000000000 +0000
@@ -15,6 +15,7 @@
 
 import sys, struct
 import shutil, subprocess
+import typing as T
 
 from ..mesonlib import OrderedSet
 
@@ -26,8 +27,11 @@
 DT_SONAME = 14
 DT_MIPS_RLD_MAP_REL = 1879048245
 
+# Global cache for tools
+INSTALL_NAME_TOOL = False
+
 class DataSizes:
-    def __init__(self, ptrsize, is_le):
+    def __init__(self, ptrsize: int, is_le: bool) -> None:
         if is_le:
             p = '<'
         else:
@@ -54,7 +58,7 @@
             self.OffSize = 4
 
 class DynamicEntry(DataSizes):
-    def __init__(self, ifile, ptrsize, is_le):
+    def __init__(self, ifile: T.BinaryIO, ptrsize: int, is_le: bool) -> None:
         super().__init__(ptrsize, is_le)
         self.ptrsize = ptrsize
         if ptrsize == 64:
@@ -64,7 +68,7 @@
             self.d_tag = struct.unpack(self.Sword, ifile.read(self.SwordSize))[0]
             self.val = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
 
-    def write(self, ofile):
+    def write(self, ofile: T.BinaryIO) -> None:
         if self.ptrsize == 64:
             ofile.write(struct.pack(self.Sxword, self.d_tag))
             ofile.write(struct.pack(self.XWord, self.val))
@@ -73,7 +77,7 @@
             ofile.write(struct.pack(self.Word, self.val))
 
 class SectionHeader(DataSizes):
-    def __init__(self, ifile, ptrsize, is_le):
+    def __init__(self, ifile: T.BinaryIO, ptrsize: int, is_le: bool) -> None:
         super().__init__(ptrsize, is_le)
         if ptrsize == 64:
             is_64 = True
@@ -113,10 +117,12 @@
             self.sh_entsize = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
 
 class Elf(DataSizes):
-    def __init__(self, bfile, verbose=True):
+    def __init__(self, bfile: str, verbose: bool = True) -> None:
         self.bfile = bfile
         self.verbose = verbose
         self.bf = open(bfile, 'r+b')
+        self.sections = []  # type: T.List[SectionHeader]
+        self.dynamic = []   # type: T.List[DynamicEntry]
         try:
             (self.ptrsize, self.is_le) = self.detect_elf_type()
             super().__init__(self.ptrsize, self.is_le)
@@ -127,18 +133,18 @@
             self.bf.close()
             raise
 
-    def __enter__(self):
+    def __enter__(self) -> 'Elf':
         return self
 
-    def __del__(self):
+    def __del__(self) -> None:
         if self.bf:
             self.bf.close()
 
-    def __exit__(self, exc_type, exc_value, traceback):
+    def __exit__(self, exc_type: T.Any, exc_value: T.Any, traceback: T.Any) -> None:
         self.bf.close()
         self.bf = None
 
-    def detect_elf_type(self):
+    def detect_elf_type(self) -> T.Tuple[int, bool]:
         data = self.bf.read(6)
         if data[1:4] != b'ELF':
             # This script gets called to non-elf targets too
@@ -160,7 +166,7 @@
             sys.exit('File "%s" has unknown ELF endianness.' % self.bfile)
         return ptrsize, is_le
 
-    def parse_header(self):
+    def parse_header(self) -> None:
         self.bf.seek(0)
         self.e_ident = struct.unpack('16s', self.bf.read(16))[0]
         self.e_type = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
@@ -177,13 +183,12 @@
         self.e_shnum = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
         self.e_shstrndx = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
 
-    def parse_sections(self):
+    def parse_sections(self) -> None:
         self.bf.seek(self.e_shoff)
-        self.sections = []
         for _ in range(self.e_shnum):
             self.sections.append(SectionHeader(self.bf, self.ptrsize, self.is_le))
 
-    def read_str(self):
+    def read_str(self) -> bytes:
         arr = []
         x = self.bf.read(1)
         while x != b'\0':
@@ -193,17 +198,17 @@
                 raise RuntimeError('Tried to read past the end of the file')
         return b''.join(arr)
 
-    def find_section(self, target_name):
+    def find_section(self, target_name: bytes) -> T.Optional[SectionHeader]:
         section_names = self.sections[self.e_shstrndx]
         for i in self.sections:
             self.bf.seek(section_names.sh_offset + i.sh_name)
             name = self.read_str()
             if name == target_name:
                 return i
+        return None
 
-    def parse_dynamic(self):
+    def parse_dynamic(self) -> None:
         sec = self.find_section(b'.dynamic')
-        self.dynamic = []
         if sec is None:
             return
         self.bf.seek(sec.sh_offset)
@@ -213,14 +218,14 @@
             if e.d_tag == 0:
                 break
 
-    def print_section_names(self):
+    def print_section_names(self) -> None:
         section_names = self.sections[self.e_shstrndx]
         for i in self.sections:
             self.bf.seek(section_names.sh_offset + i.sh_name)
             name = self.read_str()
             print(name.decode())
 
-    def print_soname(self):
+    def print_soname(self) -> None:
         soname = None
         strtab = None
         for i in self.dynamic:
@@ -234,14 +239,16 @@
         self.bf.seek(strtab.val + soname.val)
         print(self.read_str())
 
-    def get_entry_offset(self, entrynum):
+    def get_entry_offset(self, entrynum: int) -> T.Optional[int]:
         sec = self.find_section(b'.dynstr')
         for i in self.dynamic:
             if i.d_tag == entrynum:
-                return sec.sh_offset + i.val
+                res = sec.sh_offset + i.val
+                assert isinstance(res, int)
+                return res
         return None
 
-    def print_rpath(self):
+    def print_rpath(self) -> None:
         offset = self.get_entry_offset(DT_RPATH)
         if offset is None:
             print("This file does not have an rpath.")
@@ -249,7 +256,7 @@
             self.bf.seek(offset)
             print(self.read_str())
 
-    def print_runpath(self):
+    def print_runpath(self) -> None:
         offset = self.get_entry_offset(DT_RUNPATH)
         if offset is None:
             print("This file does not have a runpath.")
@@ -257,7 +264,7 @@
             self.bf.seek(offset)
             print(self.read_str())
 
-    def print_deps(self):
+    def print_deps(self) -> None:
         sec = self.find_section(b'.dynstr')
         deps = []
         for i in self.dynamic:
@@ -269,7 +276,7 @@
             name = self.read_str()
             print(name)
 
-    def fix_deps(self, prefix):
+    def fix_deps(self, prefix: bytes) -> None:
         sec = self.find_section(b'.dynstr')
         deps = []
         for i in self.dynamic:
@@ -287,24 +294,42 @@
                 self.bf.seek(offset)
                 self.bf.write(newname)
 
-    def fix_rpath(self, new_rpath):
+    def fix_rpath(self, rpath_dirs_to_remove: T.List[bytes], new_rpath: bytes) -> None:
         # The path to search for can be either rpath or runpath.
         # Fix both of them to be sure.
-        self.fix_rpathtype_entry(new_rpath, DT_RPATH)
-        self.fix_rpathtype_entry(new_rpath, DT_RUNPATH)
+        self.fix_rpathtype_entry(rpath_dirs_to_remove, new_rpath, DT_RPATH)
+        self.fix_rpathtype_entry(rpath_dirs_to_remove, new_rpath, DT_RUNPATH)
 
-    def fix_rpathtype_entry(self, new_rpath, entrynum):
-        if isinstance(new_rpath, str):
-            new_rpath = new_rpath.encode('utf8')
+    def fix_rpathtype_entry(self, rpath_dirs_to_remove: T.List[bytes], new_rpath: bytes, entrynum: int) -> None:
         rp_off = self.get_entry_offset(entrynum)
         if rp_off is None:
             if self.verbose:
                 print('File does not have rpath. It should be a fully static executable.')
             return
         self.bf.seek(rp_off)
+
         old_rpath = self.read_str()
+        # Some rpath entries may come from multiple sources.
+        # Only add each one once.
+        new_rpaths = OrderedSet()  # type: OrderedSet[bytes]
+        if new_rpath:
+            new_rpaths.update(new_rpath.split(b':'))
+        if old_rpath:
+            # Filter out build-only rpath entries
+            # added by get_link_dep_subdirs() or
+            # specified by user with build_rpath.
+            for rpath_dir in old_rpath.split(b':'):
+                if not (rpath_dir in rpath_dirs_to_remove or
+                        rpath_dir == (b'X' * len(rpath_dir))):
+                    if rpath_dir:
+                        new_rpaths.add(rpath_dir)
+
+        # Prepend user-specified new entries while preserving the ones that came from pkgconfig etc.
+        new_rpath = b':'.join(new_rpaths)
+
         if len(old_rpath) < len(new_rpath):
-            sys.exit("New rpath must not be longer than the old one.")
+            msg = "New rpath must not be longer than the old one.\n Old: {}\n New: {}".format(old_rpath.decode('utf-8'), new_rpath.decode('utf-8'))
+            sys.exit(msg)
         # The linker does read-only string deduplication. If there is a
         # string that shares a suffix with the rpath, they might get
         # dedupped. This means changing the rpath string might break something
@@ -320,7 +345,7 @@
             self.bf.write(new_rpath)
             self.bf.write(b'\0')
 
-    def remove_rpath_entry(self, entrynum):
+    def remove_rpath_entry(self, entrynum: int) -> None:
         sec = self.find_section(b'.dynamic')
         if sec is None:
             return None
@@ -340,15 +365,15 @@
             entry.write(self.bf)
         return None
 
-def fix_elf(fname, new_rpath, verbose=True):
+def fix_elf(fname: str, rpath_dirs_to_remove: T.List[bytes], new_rpath: T.Optional[bytes], verbose: bool = True) -> None:
     with Elf(fname, verbose) as e:
         if new_rpath is None:
             e.print_rpath()
             e.print_runpath()
         else:
-            e.fix_rpath(new_rpath)
+            e.fix_rpath(rpath_dirs_to_remove, new_rpath)
 
-def get_darwin_rpaths_to_remove(fname):
+def get_darwin_rpaths_to_remove(fname: str) -> T.List[str]:
     out = subprocess.check_output(['otool', '-l', fname],
                                   universal_newlines=True,
                                   stderr=subprocess.DEVNULL)
@@ -366,7 +391,7 @@
             result.append(rp)
     return result
 
-def fix_darwin(fname, new_rpath, final_path, install_name_mappings):
+def fix_darwin(fname: str, new_rpath: str, final_path: str, install_name_mappings: T.Dict[str, str]) -> None:
     try:
         rpaths = get_darwin_rpaths_to_remove(fname)
     except subprocess.CalledProcessError:
@@ -416,7 +441,7 @@
     except Exception as err:
         raise SystemExit(err)
 
-def fix_jar(fname):
+def fix_jar(fname: str) -> None:
     subprocess.check_call(['jar', 'xfv', fname, 'META-INF/MANIFEST.MF'])
     with open('META-INF/MANIFEST.MF', 'r+') as f:
         lines = f.readlines()
@@ -427,23 +452,33 @@
         f.truncate()
     subprocess.check_call(['jar', 'ufm', fname, 'META-INF/MANIFEST.MF'])
 
-def fix_rpath(fname, new_rpath, final_path, install_name_mappings, verbose=True):
-    # Static libraries never have rpaths
-    if fname.endswith('.a'):
-        return
-    # DLLs and EXE never have rpaths
-    if fname.endswith('.dll') or fname.endswith('.exe'):
+def fix_rpath(fname: str, rpath_dirs_to_remove: T.List[bytes], new_rpath: T.Union[str, bytes], final_path: str, install_name_mappings: T.Dict[str, str], verbose: bool = True) -> None:
+    global INSTALL_NAME_TOOL
+    # Static libraries, import libraries, debug information, headers, etc.
+    # never have rpaths.
+    # DLLs and EXEs currently do not need runtime path fixing.
+    if fname.endswith(('.a', '.lib', '.pdb', '.h', '.hpp', '.dll', '.exe')):
         return
     try:
         if fname.endswith('.jar'):
             fix_jar(fname)
             return
-        fix_elf(fname, new_rpath, verbose)
+        if isinstance(new_rpath, str):
+            new_rpath = new_rpath.encode('utf8')
+        fix_elf(fname, rpath_dirs_to_remove, new_rpath, verbose)
         return
     except SystemExit as e:
         if isinstance(e.code, int) and e.code == 0:
             pass
         else:
             raise
-    if shutil.which('install_name_tool'):
+    # We don't look for this on import because it will do a useless PATH lookup
+    # on non-mac platforms. That can be expensive on some Windows machines
+    # (up to 30ms), which is significant with --only-changed. For details, see:
+    # https://github.com/mesonbuild/meson/pull/6612#discussion_r378581401
+    if INSTALL_NAME_TOOL is False:
+        INSTALL_NAME_TOOL = bool(shutil.which('install_name_tool'))
+    if INSTALL_NAME_TOOL:
+        if isinstance(new_rpath, bytes):
+            new_rpath = new_rpath.decode('utf8')
         fix_darwin(fname, new_rpath, final_path, install_name_mappings)
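Note on the rpath handling above: the rewritten fix_rpathtype_entry() merges the caller-supplied rpath with whatever survives from the old one (dropping build-only directories and the all-'X' padding) and then patches the string in place, which is why the merged value may not be longer than the original. A minimal sketch of that merge step, using a plain dict for ordered de-duplication instead of Meson's OrderedSet (the function and variable names here are illustrative only):

    import typing as T

    def merge_rpaths(old_rpath: bytes, new_rpath: bytes,
                     dirs_to_remove: T.List[bytes]) -> bytes:
        # dict keys keep insertion order (Python 3.7+), giving ordered de-duplication.
        merged = {}  # type: T.Dict[bytes, None]
        for d in new_rpath.split(b':') if new_rpath else []:
            merged[d] = None
        for d in old_rpath.split(b':') if old_rpath else []:
            # Skip build-only entries and the all-'X' padding used to reserve space.
            if d and d not in dirs_to_remove and d != b'X' * len(d):
                merged[d] = None
        return b':'.join(merged)

    # e.g. merge_rpaths(b'/build/sub:$ORIGIN', b'$ORIGIN/../lib', [b'/build/sub'])
    #      returns b'$ORIGIN/../lib:$ORIGIN'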
diff -Nru meson-0.53.2/mesonbuild/scripts/dirchanger.py meson-0.57.0+really0.56.2/mesonbuild/scripts/dirchanger.py
--- meson-0.53.2/mesonbuild/scripts/dirchanger.py	2016-12-18 18:47:50.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/scripts/dirchanger.py	2020-09-17 22:00:44.000000000 +0000
@@ -16,8 +16,9 @@
 the command given in the rest of the arguments.'''
 
 import os, subprocess, sys
+import typing as T
 
-def run(args):
+def run(args: T.List[str]) -> int:
     dirname = args[0]
     command = args[1:]
 
diff -Nru meson-0.53.2/mesonbuild/scripts/externalproject.py meson-0.57.0+really0.56.2/mesonbuild/scripts/externalproject.py
--- meson-0.53.2/mesonbuild/scripts/externalproject.py	1970-01-01 00:00:00.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/scripts/externalproject.py	2020-09-17 22:00:51.000000000 +0000
@@ -0,0 +1,96 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import argparse
+import multiprocessing
+import subprocess
+from pathlib import Path
+import typing as T
+
+from ..mesonlib import Popen_safe
+
+class ExternalProject:
+    def __init__(self, options: argparse.Namespace):
+        self.name = options.name
+        self.src_dir = options.srcdir
+        self.build_dir = options.builddir
+        self.install_dir = options.installdir
+        self.verbose = options.verbose
+        self.stampfile = options.stampfile
+        self.depfile = options.depfile
+        self.make = options.make
+
+    def write_depfile(self) -> None:
+        with open(self.depfile, 'w') as f:
+            f.write('{}: \\\n'.format(self.stampfile))
+            for dirpath, dirnames, filenames in os.walk(self.src_dir):
+                dirnames[:] = [d for d in dirnames if not d.startswith('.')]
+                for fname in filenames:
+                    if fname.startswith('.'):
+                        continue
+                    path = Path(dirpath, fname)
+                    f.write('  {} \\\n'.format(path.as_posix().replace(' ', '\\ ')))
+
+    def write_stampfile(self) -> None:
+        with open(self.stampfile, 'w') as f:
+            pass
+
+    def gnu_make(self) -> bool:
+        p, o, e = Popen_safe([self.make, '--version'])
+        if p.returncode == 0 and 'GNU Make' in o:
+            return True
+        return False
+
+    def build(self) -> int:
+        make_cmd = [self.make]
+        if not self.verbose:
+            make_cmd.append('--quiet')
+        if self.gnu_make():
+            make_cmd.append('-j' + str(multiprocessing.cpu_count()))
+
+        rc = self._run(make_cmd)
+        if rc != 0:
+            return rc
+
+        install_cmd = make_cmd + ['DESTDIR= ' + self.install_dir, 'install']
+        rc = self._run(install_cmd)
+        if rc != 0:
+            return rc
+
+        self.write_depfile()
+        self.write_stampfile()
+
+        return 0
+
+    def _run(self, command: T.List[str]) -> int:
+        output = None if self.verbose else subprocess.DEVNULL
+        p, o, e = Popen_safe(command, stderr=subprocess.STDOUT, stdout=output,
+                             cwd=self.build_dir)
+        return p.returncode
+
+def run(args: T.List[str]) -> int:
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--name')
+    parser.add_argument('--srcdir')
+    parser.add_argument('--builddir')
+    parser.add_argument('--installdir')
+    parser.add_argument('--make')
+    parser.add_argument('--verbose', action='store_true')
+    parser.add_argument('stampfile')
+    parser.add_argument('depfile')
+
+    options = parser.parse_args(args)
+    ep = ExternalProject(options)
+    return ep.build()
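A note on write_depfile() above: it records every non-hidden file in the external project's source tree as a prerequisite of the stamp file, escaping spaces so the depfile parser accepts the paths, which is what marks the stamp target dirty whenever any source changes. A rough sketch of the emitted format (the stamp name and file list are invented for the example):

    from pathlib import Path

    stamp = 'libfoo.stamp'                                      # invented example
    sources = [Path('src/Makefile.am'), Path('src/my file.c')]  # invented examples
    depfile_text = '{}: \\\n'.format(stamp) + ''.join(
        '  {} \\\n'.format(p.as_posix().replace(' ', '\\ ')) for p in sources)
    # libfoo.stamp: \
    #   src/Makefile.am \
    #   src/my\ file.c \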
diff -Nru meson-0.53.2/mesonbuild/scripts/gettext.py meson-0.57.0+really0.56.2/mesonbuild/scripts/gettext.py
--- meson-0.53.2/mesonbuild/scripts/gettext.py	2019-04-17 08:08:43.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/scripts/gettext.py	2021-01-06 10:39:48.000000000 +0000
@@ -17,6 +17,7 @@
 import argparse
 import subprocess
 from . import destdir_join
+import typing as T
 
 parser = argparse.ArgumentParser()
 parser.add_argument('command')
@@ -27,7 +28,7 @@
 parser.add_argument('--subdir', default='')
 parser.add_argument('--extra-args', default='')
 
-def read_linguas(src_sub):
+def read_linguas(src_sub: str) -> T.List[str]:
     # Syntax of this file is documented here:
     # https://www.gnu.org/software/gettext/manual/html_node/po_002fLINGUAS.html
     linguas = os.path.join(src_sub, 'LINGUAS')
@@ -43,7 +44,7 @@
         print('Could not find file LINGUAS in {}'.format(src_sub))
         return []
 
-def run_potgen(src_sub, pkgname, datadirs, args):
+def run_potgen(src_sub: str, pkgname: str, datadirs: str, args: T.List[str]) -> int:
     listfile = os.path.join(src_sub, 'POTFILES.in')
     if not os.path.exists(listfile):
         listfile = os.path.join(src_sub, 'POTFILES')
@@ -60,13 +61,13 @@
                             '-D', os.environ['MESON_SOURCE_ROOT'], '-k_', '-o', ofile] + args,
                            env=child_env)
 
-def gen_gmo(src_sub, bld_sub, langs):
+def gen_gmo(src_sub: str, bld_sub: str, langs: T.List[str]) -> int:
     for l in langs:
         subprocess.check_call(['msgfmt', os.path.join(src_sub, l + '.po'),
                                '-o', os.path.join(bld_sub, l + '.gmo')])
     return 0
 
-def update_po(src_sub, pkgname, langs):
+def update_po(src_sub: str, pkgname: str, langs: T.List[str]) -> int:
     potfile = os.path.join(src_sub, pkgname + '.pot')
     for l in langs:
         pofile = os.path.join(src_sub, l + '.po')
@@ -76,20 +77,19 @@
             subprocess.check_call(['msginit', '--input', potfile, '--output-file', pofile, '--locale', l, '--no-translator'])
     return 0
 
-def do_install(src_sub, bld_sub, dest, pkgname, langs):
+def do_install(src_sub: str, bld_sub: str, dest: str, pkgname: str, langs: T.List[str]) -> int:
     for l in langs:
         srcfile = os.path.join(bld_sub, l + '.gmo')
         outfile = os.path.join(dest, l, 'LC_MESSAGES',
                                pkgname + '.mo')
         tempfile = outfile + '.tmp'
         os.makedirs(os.path.dirname(outfile), exist_ok=True)
-        shutil.copyfile(srcfile, tempfile)
-        shutil.copystat(srcfile, tempfile)
+        shutil.copy2(srcfile, tempfile)
         os.replace(tempfile, outfile)
         print('Installing %s to %s' % (srcfile, outfile))
     return 0
 
-def run(args):
+def run(args: T.List[str]) -> int:
     options = parser.parse_args(args)
     subcmd = options.command
     langs = options.langs.split('@@') if options.langs else None
@@ -121,3 +121,4 @@
     else:
         print('Unknown subcommand.')
         return 1
+    return 0
diff -Nru meson-0.53.2/mesonbuild/scripts/gtkdochelper.py meson-0.57.0+really0.56.2/mesonbuild/scripts/gtkdochelper.py
--- meson-0.53.2/mesonbuild/scripts/gtkdochelper.py	2019-09-16 21:20:45.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/scripts/gtkdochelper.py	2020-09-17 22:00:44.000000000 +0000
@@ -16,8 +16,9 @@
 import subprocess
 import shutil
 import argparse
-from ..mesonlib import MesonException, Popen_safe, is_windows, split_args
+from ..mesonlib import MesonException, Popen_safe, is_windows, is_cygwin, split_args
 from . import destdir_join
+import typing as T
 
 parser = argparse.ArgumentParser()
 
@@ -50,12 +51,12 @@
     program_name = 'gtkdoc-' + tool
     parser.add_argument('--' + program_name, dest=program_name.replace('-', '_'))
 
-def gtkdoc_run_check(cmd, cwd, library_paths=None):
+def gtkdoc_run_check(cmd: T.List[str], cwd: str, library_paths: T.Optional[T.List[str]] = None) -> None:
     if library_paths is None:
         library_paths = []
 
     env = dict(os.environ)
-    if is_windows():
+    if is_windows() or is_cygwin():
         if 'PATH' in env:
             library_paths.extend(env['PATH'].split(os.pathsep))
         env['PATH'] = os.pathsep.join(library_paths)
@@ -64,6 +65,9 @@
             library_paths.extend(env['LD_LIBRARY_PATH'].split(os.pathsep))
         env['LD_LIBRARY_PATH'] = os.pathsep.join(library_paths)
 
+    if is_windows():
+        cmd.insert(0, sys.executable)
+
     # Put stderr into stdout since we want to print it out anyway.
     # This preserves the order of messages.
     p, out = Popen_safe(cmd, cwd=cwd, env=env, stderr=subprocess.STDOUT)[0:2]
@@ -73,14 +77,21 @@
             err_msg.append(out)
         raise MesonException('\n'.join(err_msg))
     elif out:
-        print(out)
-
-def build_gtkdoc(source_root, build_root, doc_subdir, src_subdirs,
-                 main_file, module, module_version,
-                 html_args, scan_args, fixxref_args, mkdb_args,
-                 gobject_typesfile, scanobjs_args, run, ld, cc, ldflags, cflags,
-                 html_assets, content_files, ignore_headers, namespace,
-                 expand_content_files, mode, options):
+        # Unfortunately Windows cmd.exe consoles may be using a codepage
+        # that makes print() raise a UnicodeEncodeError, so ignore such
+        # errors for now as a compromise, since we are only echoing
+        # console output here anyway.
+        try:
+            print(out)
+        except UnicodeEncodeError:
+            pass
+
+def build_gtkdoc(source_root: str, build_root: str, doc_subdir: str, src_subdirs: T.List[str],
+                 main_file: str, module: str, module_version: str,
+                 html_args: T.List[str], scan_args: T.List[str], fixxref_args: T.List[str], mkdb_args: T.List[str],
+                 gobject_typesfile: str, scanobjs_args: T.List[str], run: str, ld: str, cc: str, ldflags: str, cflags: str,
+                 html_assets: T.List[str], content_files: T.List[str], ignore_headers: T.List[str], namespace: str,
+                 expand_content_files: T.List[str], mode: str, options: argparse.Namespace) -> None:
     print("Building documentation for %s" % module)
 
     src_dir_args = []
@@ -207,13 +218,13 @@
         shutil.move(os.path.join(htmldir, '{}.devhelp2'.format(module)),
                     os.path.join(htmldir, '{}-{}.devhelp2'.format(module, module_version)))
 
-def install_gtkdoc(build_root, doc_subdir, install_prefix, datadir, module):
+def install_gtkdoc(build_root: str, doc_subdir: str, install_prefix: str, datadir: str, module: str) -> None:
     source = os.path.join(build_root, doc_subdir, 'html')
     final_destination = os.path.join(install_prefix, datadir, module)
     shutil.rmtree(final_destination, ignore_errors=True)
     shutil.copytree(source, final_destination)
 
-def run(args):
+def run(args: T.List[str]) -> int:
     options = parser.parse_args(args)
     if options.htmlargs:
         htmlargs = options.htmlargs.split('@@')
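The environment handling in gtkdoc_run_check() above boils down to prepending the freshly built libraries' directories to the loader search path: PATH on Windows and Cygwin (where DLLs are resolved via PATH) and LD_LIBRARY_PATH elsewhere. A condensed sketch of that step under those assumptions (not the actual helper):

    import os

    def _env_with_library_paths(library_paths, on_windows):
        # Prepend the build-tree library dirs, keeping any existing entries after them.
        env = dict(os.environ)
        var = 'PATH' if on_windows else 'LD_LIBRARY_PATH'
        paths = list(library_paths)
        if env.get(var):
            paths.extend(env[var].split(os.pathsep))
        env[var] = os.pathsep.join(paths)
        return env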
diff -Nru meson-0.53.2/mesonbuild/scripts/hotdochelper.py meson-0.57.0+really0.56.2/mesonbuild/scripts/hotdochelper.py
--- meson-0.53.2/mesonbuild/scripts/hotdochelper.py	2018-10-31 09:31:20.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/scripts/hotdochelper.py	2020-09-17 22:00:44.000000000 +0000
@@ -5,6 +5,7 @@
 from . import destdir_join
 
 import argparse
+import typing as T
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--install')
@@ -14,7 +15,7 @@
 parser.add_argument('--project-version')
 
 
-def run(argv):
+def run(argv: T.List[str]) -> int:
     options, args = parser.parse_known_args(argv)
     subenv = os.environ.copy()
 
@@ -23,7 +24,7 @@
 
     res = subprocess.call(args, cwd=options.builddir, env=subenv)
     if res != 0:
-        exit(res)
+        return res
 
     if options.install:
         source_dir = os.path.join(options.builddir, options.install)
@@ -34,3 +35,4 @@
 
         shutil.rmtree(installdir, ignore_errors=True)
         shutil.copytree(source_dir, installdir)
+    return 0
diff -Nru meson-0.53.2/mesonbuild/scripts/__init__.py meson-0.57.0+really0.56.2/mesonbuild/scripts/__init__.py
--- meson-0.53.2/mesonbuild/scripts/__init__.py	2017-01-12 20:52:44.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/scripts/__init__.py	2020-09-17 22:00:44.000000000 +0000
@@ -12,7 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-def destdir_join(d1, d2):
+# TODO: consider switching to pathlib for this
+def destdir_join(d1: str, d2: str) -> str:
     # c:\destdir + c:\prefix must produce c:\destdir\prefix
     if len(d1) > 1 and d1[1] == ':' \
             and len(d2) > 1 and d2[1] == ':':
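The drive-letter special case in destdir_join() above is easiest to see with concrete inputs; assuming the (not shown) body simply concatenates and drops the second drive letter as the comment describes, it behaves like this:

    # destdir_join('c:\\destdir', 'c:\\prefix')     -> 'c:\\destdir\\prefix'
    #   (the second drive letter is dropped so DESTDIR staging works on Windows)
    # destdir_join('/staging', '/usr/local/share')  -> '/staging/usr/local/share'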
diff -Nru meson-0.53.2/mesonbuild/scripts/meson_exe.py meson-0.57.0+really0.56.2/mesonbuild/scripts/meson_exe.py
--- meson-0.53.2/mesonbuild/scripts/meson_exe.py	2019-09-16 21:20:45.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/scripts/meson_exe.py	2020-10-18 21:29:13.000000000 +0000
@@ -16,29 +16,21 @@
 import sys
 import argparse
 import pickle
-import platform
 import subprocess
+import typing as T
 
 from .. import mesonlib
 from ..backend.backends import ExecutableSerialisation
 
 options = None
 
-def buildparser():
+def buildparser() -> argparse.ArgumentParser:
     parser = argparse.ArgumentParser(description='Custom executable wrapper for Meson. Do not run on your own, mmm\'kay?')
     parser.add_argument('--unpickle')
     parser.add_argument('--capture')
     return parser
 
-def is_windows():
-    platname = platform.system().lower()
-    return platname == 'windows' or 'mingw' in platname
-
-def is_cygwin():
-    platname = platform.system().lower()
-    return 'cygwin' in platname
-
-def run_exe(exe):
+def run_exe(exe: ExecutableSerialisation) -> int:
     if exe.exe_runner:
         if not exe.exe_runner.found():
             raise AssertionError('BUG: Can\'t run cross-compiled exe {!r} with not-found '
@@ -63,9 +55,12 @@
                          stderr=subprocess.PIPE)
     stdout, stderr = p.communicate()
 
+    if exe.pickled and p.returncode != 0:
+        print('while executing {!r}'.format(cmd_args))
+
     if p.returncode == 0xc0000135:
         # STATUS_DLL_NOT_FOUND on Windows indicating a common problem that is otherwise hard to diagnose
-        raise FileNotFoundError('Missing DLLs on calling {!r}'.format(exe.name))
+        raise FileNotFoundError('due to missing DLLs')
 
     if exe.capture and p.returncode == 0:
         skip_write = False
@@ -83,7 +78,7 @@
         sys.stderr.buffer.write(stderr)
     return p.returncode
 
-def run(args):
+def run(args: T.List[str]) -> int:
     global options
     parser = buildparser()
     options, cmd_args = parser.parse_known_args(args)
@@ -98,6 +93,7 @@
             parser.error('no other arguments can be used with --unpickle')
         with open(options.unpickle, 'rb') as f:
             exe = pickle.load(f)
+            exe.pickled = True
     else:
         exe = ExecutableSerialisation(cmd_args, capture=options.capture)
 
diff -Nru meson-0.53.2/mesonbuild/scripts/msgfmthelper.py meson-0.57.0+really0.56.2/mesonbuild/scripts/msgfmthelper.py
--- meson-0.53.2/mesonbuild/scripts/msgfmthelper.py	2019-05-02 18:59:50.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/scripts/msgfmthelper.py	2020-09-17 22:00:44.000000000 +0000
@@ -15,6 +15,7 @@
 import argparse
 import subprocess
 import os
+import typing as T
 
 parser = argparse.ArgumentParser()
 parser.add_argument('input')
@@ -25,7 +26,7 @@
 parser.add_argument('args', default=[], metavar='extra msgfmt argument', nargs='*')
 
 
-def run(args):
+def run(args: T.List[str]) -> int:
     options = parser.parse_args(args)
     env = None
     if options.datadirs:
diff -Nru meson-0.53.2/mesonbuild/scripts/regen_checker.py meson-0.57.0+really0.56.2/mesonbuild/scripts/regen_checker.py
--- meson-0.53.2/mesonbuild/scripts/regen_checker.py	2018-08-25 08:05:43.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/scripts/regen_checker.py	2021-01-06 10:39:48.000000000 +0000
@@ -14,10 +14,13 @@
 
 import sys, os
 import pickle, subprocess
+import typing as T
+from ..coredata import CoreData
+from ..backend.vs2010backend import RegenInfo
 
 # This could also be used for XCode.
 
-def need_regen(regeninfo, regen_timestamp):
+def need_regen(regeninfo: RegenInfo, regen_timestamp: float) -> bool:
     for i in regeninfo.depfiles:
         curfile = os.path.join(regeninfo.build_dir, i)
         curtime = os.stat(curfile).st_mtime
@@ -31,7 +34,7 @@
     Vs2010Backend.touch_regen_timestamp(regeninfo.build_dir)
     return False
 
-def regen(regeninfo, meson_command, backend):
+def regen(regeninfo: RegenInfo, meson_command: T.List[str], backend: str) -> None:
     cmd = meson_command + ['--internal',
                            'regenerate',
                            regeninfo.build_dir,
@@ -39,19 +42,22 @@
                            '--backend=' + backend]
     subprocess.check_call(cmd)
 
-def run(args):
+def run(args: T.List[str]) -> int:
     private_dir = args[0]
     dumpfile = os.path.join(private_dir, 'regeninfo.dump')
-    coredata = os.path.join(private_dir, 'coredata.dat')
+    coredata_file = os.path.join(private_dir, 'coredata.dat')
     with open(dumpfile, 'rb') as f:
         regeninfo = pickle.load(f)
-    with open(coredata, 'rb') as f:
+        assert isinstance(regeninfo, RegenInfo)
+    with open(coredata_file, 'rb') as f:
         coredata = pickle.load(f)
+        assert isinstance(coredata, CoreData)
     backend = coredata.get_builtin_option('backend')
+    assert isinstance(backend, str)
     regen_timestamp = os.stat(dumpfile).st_mtime
     if need_regen(regeninfo, regen_timestamp):
         regen(regeninfo, coredata.meson_command, backend)
-    sys.exit(0)
+    return 0
 
 if __name__ == '__main__':
-    run(sys.argv[1:])
+    sys.exit(run(sys.argv[1:]))
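For context on need_regen() above: it compares the mtime of every file listed in regeninfo.depfiles against the timestamp of regeninfo.dump and requests regeneration as soon as any dependency is newer. A condensed sketch of that check (illustrative only; the real function also refreshes the timestamp when nothing has changed):

    import os

    def any_dep_newer(depfiles, build_dir, regen_timestamp):
        # True if any tracked build definition changed after the last regeneration.
        return any(os.stat(os.path.join(build_dir, f)).st_mtime > regen_timestamp
                   for f in depfiles)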
diff -Nru meson-0.53.2/mesonbuild/scripts/scanbuild.py meson-0.57.0+really0.56.2/mesonbuild/scripts/scanbuild.py
--- meson-0.53.2/mesonbuild/scripts/scanbuild.py	2019-12-04 18:45:50.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/scripts/scanbuild.py	2021-01-06 10:39:48.000000000 +0000
@@ -12,30 +12,31 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import os
 import subprocess
 import shutil
 import tempfile
 from ..environment import detect_ninja, detect_scanbuild
+from pathlib import Path
+import typing as T
 
 
-def scanbuild(exelist, srcdir, blddir, privdir, logdir, args):
-    with tempfile.TemporaryDirectory(dir=privdir) as scandir:
+def scanbuild(exelist: T.List[str], srcdir: Path, blddir: Path, privdir: Path, logdir: Path, args: T.List[str]) -> int:
+    with tempfile.TemporaryDirectory(dir=str(privdir)) as scandir:
         meson_cmd = exelist + args
-        build_cmd = exelist + ['-o', logdir, detect_ninja(), '-C', scandir]
-        rc = subprocess.call(meson_cmd + [srcdir, scandir])
+        build_cmd = exelist + ['-o', str(logdir)] + detect_ninja() + ['-C', scandir]
+        rc = subprocess.call(meson_cmd + [str(srcdir), scandir])
         if rc != 0:
             return rc
         return subprocess.call(build_cmd)
 
 
-def run(args):
-    srcdir = args[0]
-    blddir = args[1]
+def run(args: T.List[str]) -> int:
+    srcdir = Path(args[0])
+    blddir = Path(args[1])
     meson_cmd = args[2:]
-    privdir = os.path.join(blddir, 'meson-private')
-    logdir = os.path.join(blddir, 'meson-logs/scanbuild')
-    shutil.rmtree(logdir, ignore_errors=True)
+    privdir = blddir / 'meson-private'
+    logdir = blddir / 'meson-logs' / 'scanbuild'
+    shutil.rmtree(str(logdir), ignore_errors=True)
 
     exelist = detect_scanbuild()
     if not exelist:
diff -Nru meson-0.53.2/mesonbuild/scripts/symbolextractor.py meson-0.57.0+really0.56.2/mesonbuild/scripts/symbolextractor.py
--- meson-0.53.2/mesonbuild/scripts/symbolextractor.py	2019-05-02 18:59:50.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/scripts/symbolextractor.py	2021-01-06 10:39:48.000000000 +0000
@@ -20,8 +20,10 @@
 # This file is basically a reimplementation of
 # http://cgit.freedesktop.org/libreoffice/core/commit/?id=3213cd54b76bc80a6f0516aac75a48ff3b2ad67c
 
+import typing as T
 import os, sys
 from .. import mesonlib
+from .. import mlog
 from ..mesonlib import Popen_safe
 import argparse
 
@@ -31,12 +33,15 @@
                     help='cross compilation host platform')
 parser.add_argument('args', nargs='+')
 
-def dummy_syms(outfilename):
+TOOL_WARNING_FILE = None
+RELINKING_WARNING = 'Relinking will always happen on source changes.'
+
+def dummy_syms(outfilename: str) -> None:
     """Just touch it so relinking happens always."""
     with open(outfilename, 'w'):
         pass
 
-def write_if_changed(text, outfilename):
+def write_if_changed(text: str, outfilename: str) -> None:
     try:
         with open(outfilename, 'r') as f:
             oldtext = f.read()
@@ -47,75 +52,259 @@
     with open(outfilename, 'w') as f:
         f.write(text)
 
-def linux_syms(libfilename, outfilename):
-    evar = 'READELF'
-    if evar in os.environ:
-        readelfbin = os.environ[evar].strip()
-    else:
-        readelfbin = 'readelf'
-    evar = 'NM'
+def print_tool_warning(tools: T.List[str], msg: str, stderr: T.Optional[str] = None) -> None:
+    global TOOL_WARNING_FILE
+    if os.path.exists(TOOL_WARNING_FILE):
+        return
+    m = '{!r} {}. {}'.format(tools, msg, RELINKING_WARNING)
+    if stderr:
+        m += '\n' + stderr
+    mlog.warning(m)
+    # Write it out so we don't warn again
+    with open(TOOL_WARNING_FILE, 'w'):
+        pass
+
+def get_tool(name: str) -> T.List[str]:
+    evar = name.upper()
     if evar in os.environ:
-        nmbin = os.environ[evar].strip()
-    else:
-        nmbin = 'nm'
-    pe, output = Popen_safe([readelfbin, '-d', libfilename])[0:2]
-    if pe.returncode != 0:
-        raise RuntimeError('Readelf does not work')
+        import shlex
+        return shlex.split(os.environ[evar])
+    return [name]
+
+def call_tool(name: str, args: T.List[str], **kwargs: T.Any) -> str:
+    tool = get_tool(name)
+    try:
+        p, output, e = Popen_safe(tool + args, **kwargs)
+    except FileNotFoundError:
+        print_tool_warning(tool, 'not found')
+        return None
+    except PermissionError:
+        print_tool_warning(tool, 'not usable')
+        return None
+    if p.returncode != 0:
+        print_tool_warning(tool, 'does not work', e)
+        return None
+    return output
+
+def call_tool_nowarn(tool: T.List[str], **kwargs: T.Any) -> T.Tuple[str, str]:
+    try:
+        p, output, e = Popen_safe(tool, **kwargs)
+    except FileNotFoundError:
+        return None, '{!r} not found\n'.format(tool[0])
+    except PermissionError:
+        return None, '{!r} not usable\n'.format(tool[0])
+    if p.returncode != 0:
+        return None, e
+    return output, None
+
+def gnu_syms(libfilename: str, outfilename: str) -> None:
+    # Get the name of the library
+    output = call_tool('readelf', ['-d', libfilename])
+    if not output:
+        dummy_syms(outfilename)
+        return
     result = [x for x in output.split('\n') if 'SONAME' in x]
     assert(len(result) <= 1)
-    pnm, output = Popen_safe([nmbin, '--dynamic', '--extern-only',
-                              '--defined-only', '--format=posix',
-                              libfilename])[0:2]
-    if pnm.returncode != 0:
-        raise RuntimeError('nm does not work.')
+    # Get a list of all symbols exported
+    output = call_tool('nm', ['--dynamic', '--extern-only', '--defined-only',
+                              '--format=posix', libfilename])
+    if not output:
+        dummy_syms(outfilename)
+        return
     for line in output.split('\n'):
         if not line:
             continue
         line_split = line.split()
         entry = line_split[0:2]
-        if len(line_split) >= 4:
+        # Store the size of symbols pointing to data objects so we relink
+        # when those change, which is needed because of copy relocations
+        # https://github.com/mesonbuild/meson/pull/7132#issuecomment-628353702
+        if line_split[1].upper() in ('B', 'G', 'D') and len(line_split) >= 4:
             entry += [line_split[3]]
         result += [' '.join(entry)]
     write_if_changed('\n'.join(result) + '\n', outfilename)
 
-def osx_syms(libfilename, outfilename):
-    pe, output = Popen_safe(['otool', '-l', libfilename])[0:2]
-    if pe.returncode != 0:
-        raise RuntimeError('Otool does not work.')
+def solaris_syms(libfilename: str, outfilename: str) -> None:
+    # gnu_syms() works with GNU nm & readelf, not Solaris nm & elfdump
+    origpath = os.environ['PATH']
+    try:
+        os.environ['PATH'] = '/usr/gnu/bin:' + origpath
+        gnu_syms(libfilename, outfilename)
+    finally:
+        os.environ['PATH'] = origpath
+
+def osx_syms(libfilename: str, outfilename: str) -> None:
+    # Get the name of the library
+    output = call_tool('otool', ['-l', libfilename])
+    if not output:
+        dummy_syms(outfilename)
+        return
     arr = output.split('\n')
     for (i, val) in enumerate(arr):
         if 'LC_ID_DYLIB' in val:
             match = i
             break
     result = [arr[match + 2], arr[match + 5]] # Libreoffice stores all 5 lines but the others seem irrelevant.
-    pnm, output = Popen_safe(['nm', '-g', '-P', libfilename])[0:2]
-    if pnm.returncode != 0:
-        raise RuntimeError('nm does not work.')
-    result += [' '.join(x.split()[0:2]) for x in output.split('\n') if x and not x.endswith('U')]
+    # Get a list of all symbols exported
+    output = call_tool('nm', ['--extern-only', '--defined-only',
+                              '--format=posix', libfilename])
+    if not output:
+        dummy_syms(outfilename)
+        return
+    result += [' '.join(x.split()[0:2]) for x in output.split('\n')]
+    write_if_changed('\n'.join(result) + '\n', outfilename)
+
+def openbsd_syms(libfilename: str, outfilename: str) -> None:
+    # Get the name of the library
+    output = call_tool('readelf', ['-d', libfilename])
+    if not output:
+        dummy_syms(outfilename)
+        return
+    result = [x for x in output.split('\n') if 'SONAME' in x]
+    assert(len(result) <= 1)
+    # Get a list of all symbols exported
+    output = call_tool('nm', ['-D', '-P', '-g', libfilename])
+    if not output:
+        dummy_syms(outfilename)
+        return
+    # U = undefined (cope with the lack of --defined-only option)
+    result += [' '.join(x.split()[0:2]) for x in output.split('\n') if x and not x.endswith('U ')]
+    write_if_changed('\n'.join(result) + '\n', outfilename)
+
+def cygwin_syms(impfilename: str, outfilename: str) -> None:
+    # Get the name of the library
+    output = call_tool('dlltool', ['-I', impfilename])
+    if not output:
+        dummy_syms(outfilename)
+        return
+    result = [output]
+    # Get the list of all symbols exported
+    output = call_tool('nm', ['--extern-only', '--defined-only',
+                              '--format=posix', impfilename])
+    if not output:
+        dummy_syms(outfilename)
+        return
+    for line in output.split('\n'):
+        if ' T ' not in line:
+            continue
+        result.append(line.split(maxsplit=1)[0])
+    write_if_changed('\n'.join(result) + '\n', outfilename)
+
+def _get_implib_dllname(impfilename: str) -> T.Tuple[T.List[str], str]:
+    all_stderr = ''
+    # First try lib.exe, which is provided by MSVC. Then llvm-lib.exe, by LLVM
+    # for clang-cl.
+    #
+    # We cannot call get_tool on `lib` because it will look at the `LIB` env
+    # var which is the list of library paths MSVC will search for import
+    # libraries while linking.
+    for lib in (['lib'], get_tool('llvm-lib')):
+        output, e = call_tool_nowarn(lib + ['-list', impfilename])
+        if output:
+            # The output is a list of DLLs that each symbol exported by the import
+            # library is available in. We only build import libraries that point to
+            # a single DLL, so we can pick any of these. Pick the last one for
+            # simplicity. Also skip the last line, which is empty.
+            return output.split('\n')[-2:-1], None
+        all_stderr += e
+    # Next, try dlltool.exe which is provided by MinGW
+    output, e = call_tool_nowarn(get_tool('dlltool') + ['-I', impfilename])
+    if output:
+        return [output], None
+    all_stderr += e
+    return ([], all_stderr)
+
+def _get_implib_exports(impfilename: str) -> T.Tuple[T.List[str], str]:
+    all_stderr = ''
+    # Force dumpbin.exe to use en-US so we can parse its output
+    env = os.environ.copy()
+    env['VSLANG'] = '1033'
+    output, e = call_tool_nowarn(get_tool('dumpbin') + ['-exports', impfilename], env=env)
+    if output:
+        lines = output.split('\n')
+        start = lines.index('File Type: LIBRARY')
+        end = lines.index('  Summary')
+        return lines[start:end], None
+    all_stderr += e
+    # Next, try llvm-nm.exe provided by LLVM, then nm.exe provided by MinGW
+    for nm in ('llvm-nm', 'nm'):
+        output, e = call_tool_nowarn(get_tool(nm) + ['--extern-only', '--defined-only',
+                                                     '--format=posix', impfilename])
+        if output:
+            result = []
+            for line in output.split('\n'):
+                if ' T ' not in line or line.startswith('.text'):
+                    continue
+                result.append(line.split(maxsplit=1)[0])
+            return result, None
+        all_stderr += e
+    return ([], all_stderr)
+
+def windows_syms(impfilename: str, outfilename: str) -> None:
+    # Get the name of the library
+    result, e = _get_implib_dllname(impfilename)
+    if not result:
+        print_tool_warning(['lib', 'llvm-lib', 'dlltool'], 'do not work or were not found', e)
+        dummy_syms(outfilename)
+        return
+    # Get a list of all symbols exported
+    symbols, e = _get_implib_exports(impfilename)
+    if not symbols:
+        print_tool_warning(['dumpbin', 'llvm-nm', 'nm'], 'do not work or were not found', e)
+        dummy_syms(outfilename)
+        return
+    result += symbols
     write_if_changed('\n'.join(result) + '\n', outfilename)
 
-def gen_symbols(libfilename, outfilename, cross_host):
+def gen_symbols(libfilename: str, impfilename: str, outfilename: str, cross_host: str) -> None:
     if cross_host is not None:
-        # In case of cross builds just always relink.
-        # In theory we could determine the correct
-        # toolset but there are more important things
-        # to do.
+        # In case of cross builds just always relink. In theory we could
+        # determine the correct toolset, but we would need to use the correct
+        # `nm`, `readelf`, etc, from the cross info which requires refactoring.
         dummy_syms(outfilename)
-    elif mesonlib.is_linux():
-        linux_syms(libfilename, outfilename)
+    elif mesonlib.is_linux() or mesonlib.is_hurd():
+        gnu_syms(libfilename, outfilename)
     elif mesonlib.is_osx():
         osx_syms(libfilename, outfilename)
+    elif mesonlib.is_openbsd():
+        openbsd_syms(libfilename, outfilename)
+    elif mesonlib.is_windows():
+        if os.path.isfile(impfilename):
+            windows_syms(impfilename, outfilename)
+        else:
+            # No import library. Not sure how the DLL is being used, so just
+            # rebuild everything that links to it every time.
+            dummy_syms(outfilename)
+    elif mesonlib.is_cygwin():
+        if os.path.isfile(impfilename):
+            cygwin_syms(impfilename, outfilename)
+        else:
+            # No import library. Not sure how the DLL is being used, so just
+            # rebuild everything that links to it every time.
+            dummy_syms(outfilename)
+    elif mesonlib.is_sunos():
+        solaris_syms(libfilename, outfilename)
     else:
+        if not os.path.exists(TOOL_WARNING_FILE):
+            mlog.warning('Symbol extracting has not been implemented for this '
+                         'platform. ' + RELINKING_WARNING)
+            # Write it out so we don't warn again
+            with open(TOOL_WARNING_FILE, 'w'):
+                pass
         dummy_syms(outfilename)
 
-def run(args):
+def run(args: T.List[str]) -> int:
+    global TOOL_WARNING_FILE
     options = parser.parse_args(args)
-    if len(options.args) != 2:
-        print('symbolextractor.py <shared library file> <output file>')
+    if len(options.args) != 4:
+        print('symbolextractor.py <shared library file> <import library> <output file>')
         sys.exit(1)
-    libfile = options.args[0]
-    outfile = options.args[1]
-    gen_symbols(libfile, outfile, options.cross_host)
+    privdir = os.path.join(options.args[0], 'meson-private')
+    TOOL_WARNING_FILE = os.path.join(privdir, 'symbolextractor_tool_warning_printed')
+    libfile = options.args[1]
+    impfile = options.args[2] # Only used on Windows
+    outfile = options.args[3]
+    gen_symbols(libfile, impfile, outfile, options.cross_host)
     return 0
 
 if __name__ == '__main__':
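A note on the nm parsing in gnu_syms() above: every exported symbol contributes its name and type, and data symbols (nm types B, G and D) additionally contribute their size, because copy relocations mean a size change in such objects requires dependants to be relinked. A tiny worked example of that filtering on fabricated nm --format=posix output:

    lines = [
        'my_func T 0000000000001139 000000000000000b',   # fabricated nm output
        'my_table D 0000000000004020 0000000000000100',  # fabricated nm output
    ]
    result = []
    for line in lines:
        parts = line.split()
        entry = parts[0:2]
        if parts[1].upper() in ('B', 'G', 'D') and len(parts) >= 4:
            entry += [parts[3]]   # keep the size only for data objects
        result.append(' '.join(entry))
    # result == ['my_func T', 'my_table D 0000000000000100']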
diff -Nru meson-0.53.2/mesonbuild/scripts/tags.py meson-0.57.0+really0.56.2/mesonbuild/scripts/tags.py
--- meson-0.53.2/mesonbuild/scripts/tags.py	2019-12-04 18:45:50.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/scripts/tags.py	2020-09-17 22:00:44.000000000 +0000
@@ -15,9 +15,9 @@
 import os
 import subprocess
 from pathlib import Path
+import typing as T
 
-
-def ls_as_bytestream():
+def ls_as_bytestream() -> bytes:
     if os.path.exists('.git'):
         return subprocess.run(['git', 'ls-tree', '-r', '--name-only', 'HEAD'],
                               stdout=subprocess.PIPE).stdout
@@ -28,24 +28,26 @@
     return '\n'.join(files).encode()
 
 
-def cscope():
+def cscope() -> int:
     ls = b'\n'.join([b'"%s"' % f for f in ls_as_bytestream().split()])
     return subprocess.run(['cscope', '-v', '-b', '-i-'], input=ls).returncode
 
 
-def ctags():
+def ctags() -> int:
     ls = ls_as_bytestream()
     return subprocess.run(['ctags', '-L-'], input=ls).returncode
 
 
-def etags():
+def etags() -> int:
     ls = ls_as_bytestream()
     return subprocess.run(['etags', '-'], input=ls).returncode
 
 
-def run(args):
+def run(args: T.List[str]) -> int:
     tool_name = args[0]
     srcdir_name = args[1]
     os.chdir(srcdir_name)
     assert tool_name in ['cscope', 'ctags', 'etags']
-    return globals()[tool_name]()
+    res = globals()[tool_name]()
+    assert isinstance(res, int)
+    return res
diff -Nru meson-0.53.2/mesonbuild/scripts/uninstall.py meson-0.57.0+really0.56.2/mesonbuild/scripts/uninstall.py
--- meson-0.53.2/mesonbuild/scripts/uninstall.py	2017-08-02 19:21:35.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/scripts/uninstall.py	2020-09-17 22:00:44.000000000 +0000
@@ -13,10 +13,11 @@
 # limitations under the License.
 
 import os
+import typing as T
 
 logfile = 'meson-logs/install-log.txt'
 
-def do_uninstall(log):
+def do_uninstall(log: str) -> None:
     failures = 0
     successes = 0
     for line in open(log):
@@ -38,7 +39,7 @@
     print('Failed:', failures)
     print('\nRemember that files created by custom scripts have not been removed.')
 
-def run(args):
+def run(args: T.List[str]) -> int:
     if args:
         print('Weird error.')
         return 1
diff -Nru meson-0.53.2/mesonbuild/scripts/vcstagger.py meson-0.57.0+really0.56.2/mesonbuild/scripts/vcstagger.py
--- meson-0.53.2/mesonbuild/scripts/vcstagger.py	2019-04-17 08:08:43.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/scripts/vcstagger.py	2020-09-17 22:00:44.000000000 +0000
@@ -13,9 +13,9 @@
 # limitations under the License.
 
 import sys, os, subprocess, re
+import typing as T
 
-
-def config_vcs_tag(infile, outfile, fallback, source_dir, replace_string, regex_selector, cmd):
+def config_vcs_tag(infile: str, outfile: str, fallback: str, source_dir: str, replace_string: str, regex_selector: str, cmd: T.List[str]) -> None:
     try:
         output = subprocess.check_output(cmd, cwd=source_dir)
         new_string = re.search(regex_selector, output.decode()).group(1).strip()
@@ -34,7 +34,7 @@
             f.write(new_data)
 
 
-def run(args):
+def run(args: T.List[str]) -> int:
     infile, outfile, fallback, source_dir, replace_string, regex_selector = args[0:6]
     command = args[6:]
     config_vcs_tag(infile, outfile, fallback, source_dir, replace_string, regex_selector, command)
diff -Nru meson-0.53.2/mesonbuild/scripts/yelphelper.py meson-0.57.0+really0.56.2/mesonbuild/scripts/yelphelper.py
--- meson-0.53.2/mesonbuild/scripts/yelphelper.py	2019-06-16 18:54:18.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/scripts/yelphelper.py	2020-09-17 22:00:44.000000000 +0000
@@ -20,6 +20,7 @@
 from ..mesonlib import has_path_sep
 from . import destdir_join
 from .gettext import read_linguas
+import typing as T
 
 parser = argparse.ArgumentParser()
 parser.add_argument('command')
@@ -31,19 +32,19 @@
 parser.add_argument('--langs', dest='langs', default='')
 parser.add_argument('--symlinks', type=bool, dest='symlinks', default=False)
 
-def build_pot(srcdir, project_id, sources):
+def build_pot(srcdir: str, project_id: str, sources: T.List[str]) -> None:
     # Must be relative paths
     sources = [os.path.join('C', source) for source in sources]
     outfile = os.path.join(srcdir, project_id + '.pot')
     subprocess.call(['itstool', '-o', outfile] + sources)
 
-def update_po(srcdir, project_id, langs):
+def update_po(srcdir: str, project_id: str, langs: T.List[str]) -> None:
     potfile = os.path.join(srcdir, project_id + '.pot')
     for lang in langs:
         pofile = os.path.join(srcdir, lang, lang + '.po')
         subprocess.call(['msgmerge', '-q', '-o', pofile, pofile, potfile])
 
-def build_translations(srcdir, blddir, langs):
+def build_translations(srcdir: str, blddir: str, langs: T.List[str]) -> None:
     for lang in langs:
         outdir = os.path.join(blddir, lang)
         os.makedirs(outdir, exist_ok=True)
@@ -52,14 +53,14 @@
             '-o', os.path.join(outdir, lang + '.gmo')
         ])
 
-def merge_translations(blddir, sources, langs):
+def merge_translations(blddir: str, sources: T.List[str], langs: T.List[str]) -> None:
     for lang in langs:
         subprocess.call([
             'itstool', '-m', os.path.join(blddir, lang, lang + '.gmo'),
             '-o', os.path.join(blddir, lang)
         ] + sources)
 
-def install_help(srcdir, blddir, sources, media, langs, install_dir, destdir, project_id, symlinks):
+def install_help(srcdir: str, blddir: str, sources: T.List[str], media: T.List[str], langs: T.List[str], install_dir: str, destdir: str, project_id: str, symlinks: bool) -> None:
     c_install_dir = os.path.join(install_dir, 'C', project_id)
     for lang in langs + ['C']:
         indir = destdir_join(destdir, os.path.join(install_dir, lang, project_id))
@@ -68,8 +69,7 @@
             infile = os.path.join(srcdir if lang == 'C' else blddir, lang, source)
             outfile = os.path.join(indir, source)
             mlog.log('Installing %s to %s' % (infile, outfile))
-            shutil.copyfile(infile, outfile)
-            shutil.copystat(infile, outfile)
+            shutil.copy2(infile, outfile)
         for m in media:
             infile = os.path.join(srcdir, lang, m)
             outfile = os.path.join(indir, m)
@@ -102,7 +102,7 @@
             shutil.copyfile(infile, outfile)
             shutil.copystat(infile, outfile)
 
-def run(args):
+def run(args: T.List[str]) -> int:
     options = parser.parse_args(args)
     langs = options.langs.split('@@') if options.langs else []
     media = options.media.split('@@') if options.media else []
@@ -130,3 +130,4 @@
             merge_translations(build_subdir, abs_sources, langs)
         install_help(src_subdir, build_subdir, sources, media, langs, install_dir,
                      destdir, options.project_id, options.symlinks)
+    return 0
diff -Nru meson-0.53.2/mesonbuild/templates/cpptemplates.py meson-0.57.0+really0.56.2/mesonbuild/templates/cpptemplates.py
--- meson-0.53.2/mesonbuild/templates/cpptemplates.py	2019-12-04 18:45:50.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/templates/cpptemplates.py	2020-09-17 22:00:44.000000000 +0000
@@ -11,6 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
 import re
 
 
@@ -139,39 +140,45 @@
 '''
 
 
-def create_exe_cpp_sample(project_name, project_version):
-    lowercase_token = re.sub(r'[^a-z0-9]', '_', project_name.lower())
-    source_name = lowercase_token + '.cpp'
-    open(source_name, 'w').write(hello_cpp_template.format(project_name=project_name))
-    open('meson.build', 'w').write(hello_cpp_meson_template.format(project_name=project_name,
-                                                                   exe_name=lowercase_token,
-                                                                   source_name=source_name,
-                                                                   version=project_version))
-
-
-def create_lib_cpp_sample(project_name, version):
-    lowercase_token = re.sub(r'[^a-z0-9]', '_', project_name.lower())
-    uppercase_token = lowercase_token.upper()
-    class_name = uppercase_token[0] + lowercase_token[1:]
-    namespace = lowercase_token
-    lib_hpp_name = lowercase_token + '.hpp'
-    lib_cpp_name = lowercase_token + '.cpp'
-    test_cpp_name = lowercase_token + '_test.cpp'
-    kwargs = {'utoken': uppercase_token,
-              'ltoken': lowercase_token,
-              'header_dir': lowercase_token,
-              'class_name': class_name,
-              'namespace': namespace,
-              'header_file': lib_hpp_name,
-              'source_file': lib_cpp_name,
-              'test_source_file': test_cpp_name,
-              'test_exe_name': lowercase_token,
-              'project_name': project_name,
-              'lib_name': lowercase_token,
-              'test_name': lowercase_token,
-              'version': version,
-              }
-    open(lib_hpp_name, 'w').write(lib_hpp_template.format(**kwargs))
-    open(lib_cpp_name, 'w').write(lib_cpp_template.format(**kwargs))
-    open(test_cpp_name, 'w').write(lib_cpp_test_template.format(**kwargs))
-    open('meson.build', 'w').write(lib_cpp_meson_template.format(**kwargs))
+class CppProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        source_name = lowercase_token + '.cpp'
+        open(source_name, 'w').write(hello_cpp_template.format(project_name=self.name))
+        open('meson.build', 'w').write(hello_cpp_meson_template.format(project_name=self.name,
+                                                                       exe_name=lowercase_token,
+                                                                       source_name=source_name,
+                                                                       version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        class_name = uppercase_token[0] + lowercase_token[1:]
+        test_exe_name = lowercase_token + '_test'
+        namespace = lowercase_token
+        lib_hpp_name = lowercase_token + '.hpp'
+        lib_cpp_name = lowercase_token + '.cpp'
+        test_cpp_name = lowercase_token + '_test.cpp'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'header_dir': lowercase_token,
+                  'class_name': class_name,
+                  'namespace': namespace,
+                  'header_file': lib_hpp_name,
+                  'source_file': lib_cpp_name,
+                  'test_source_file': test_cpp_name,
+                  'test_exe_name': test_exe_name,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_hpp_name, 'w').write(lib_hpp_template.format(**kwargs))
+        open(lib_cpp_name, 'w').write(lib_cpp_template.format(**kwargs))
+        open(test_cpp_name, 'w').write(lib_cpp_test_template.format(**kwargs))
+        open('meson.build', 'w').write(lib_cpp_meson_template.format(**kwargs))
diff -Nru meson-0.53.2/mesonbuild/templates/cstemplates.py meson-0.57.0+really0.56.2/mesonbuild/templates/cstemplates.py
--- meson-0.53.2/mesonbuild/templates/cstemplates.py	1970-01-01 00:00:00.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/templates/cstemplates.py	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,132 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+hello_cs_template = '''using System;
+
+public class {class_name} {{
+    const String PROJECT_NAME = "{project_name}";
+
+    static int Main(String[] args) {{
+      if (args.Length > 0) {{
+          System.Console.WriteLine(String.Format("{project_name} takes no arguments.."));
+          return 1;
+      }}
+      Console.WriteLine(String.Format("This is project {{0}}.", PROJECT_NAME));
+      return 0;
+    }}
+}}
+
+'''
+
+hello_cs_meson_template = '''project('{project_name}', 'cs',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+exe = executable('{exe_name}', '{source_name}',
+  install : true)
+
+test('basic', exe)
+'''
+
+lib_cs_template = '''
+public class {class_name} {{
+    private const int number = 6;
+
+    public int get_number() {{
+      return number;
+    }}
+}}
+
+'''
+
+lib_cs_test_template = '''using System;
+
+public class {class_test} {{
+    static int Main(String[] args) {{
+      if (args.Length > 0) {{
+          System.Console.WriteLine("{project_name} takes no arguments..");
+          return 1;
+      }}
+      {class_name} c = new {class_name}();
+      Boolean result = true;
+      return result.CompareTo(c.get_number() != 6);
+    }}
+}}
+
+'''
+
+lib_cs_meson_template = '''project('{project_name}', 'cs',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+stlib = shared_library('{lib_name}', '{source_file}',
+  install : true,
+)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+  link_with : stlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+  include_directories: include_directories('.'),
+  link_with : stlib)
+
+'''
+
+
+class CSharpProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        class_name = uppercase_token[0] + lowercase_token[1:]
+        source_name = uppercase_token[0] + lowercase_token[1:] + '.cs'
+        open(source_name, 'w').write(hello_cs_template.format(project_name=self.name,
+                                                              class_name=class_name))
+        open('meson.build', 'w').write(hello_cs_meson_template.format(project_name=self.name,
+                                                                      exe_name=self.name,
+                                                                      source_name=source_name,
+                                                                      version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        class_name = uppercase_token[0] + lowercase_token[1:]
+        class_test = uppercase_token[0] + lowercase_token[1:] + '_test'
+        project_test = lowercase_token + '_test'
+        lib_cs_name = uppercase_token[0] + lowercase_token[1:] + '.cs'
+        test_cs_name = uppercase_token[0] + lowercase_token[1:] + '_test.cs'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'class_test': class_test,
+                  'class_name': class_name,
+                  'source_file': lib_cs_name,
+                  'test_source_file': test_cs_name,
+                  'test_exe_name': project_test,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_cs_name, 'w').write(lib_cs_template.format(**kwargs))
+        open(test_cs_name, 'w').write(lib_cs_test_template.format(**kwargs))
+        open('meson.build', 'w').write(lib_cs_meson_template.format(**kwargs))
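
The name mangling above is shared by all of the sample implementations in this refactoring: the project name is lowercased, every character that is not a letter or digit becomes an underscore, and the first character is re-capitalised for class and file names. A minimal sketch, using a hypothetical project name purely for illustration:

import re

# Hypothetical project name, for illustration only.
name = 'my demo'

# Same derivation as CSharpProject.create_library above.
lowercase_token = re.sub(r'[^a-z0-9]', '_', name.lower())  # 'my_demo'
uppercase_token = lowercase_token.upper()                   # 'MY_DEMO'
class_name = uppercase_token[0] + lowercase_token[1:]       # 'My_demo'
lib_cs_name = class_name + '.cs'                            # 'My_demo.cs'
test_cs_name = class_name + '_test.cs'                      # 'My_demo_test.cs'

print(lowercase_token, class_name, lib_cs_name, test_cs_name)
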
diff -Nru meson-0.53.2/mesonbuild/templates/ctemplates.py meson-0.57.0+really0.56.2/mesonbuild/templates/ctemplates.py
--- meson-0.53.2/mesonbuild/templates/ctemplates.py	2019-12-04 18:45:50.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/templates/ctemplates.py	2020-09-17 22:00:44.000000000 +0000
@@ -11,6 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
 import re
 
 
@@ -121,36 +122,44 @@
 test('basic', exe)
 '''
 
-def create_exe_c_sample(project_name, project_version):
-    lowercase_token = re.sub(r'[^a-z0-9]', '_', project_name.lower())
-    source_name = lowercase_token + '.c'
-    open(source_name, 'w').write(hello_c_template.format(project_name=project_name))
-    open('meson.build', 'w').write(hello_c_meson_template.format(project_name=project_name,
-                                                                 exe_name=lowercase_token,
-                                                                 source_name=source_name,
-                                                                 version=project_version))
-
-def create_lib_c_sample(project_name, version):
-    lowercase_token = re.sub(r'[^a-z0-9]', '_', project_name.lower())
-    uppercase_token = lowercase_token.upper()
-    function_name = lowercase_token[0:3] + '_func'
-    lib_h_name = lowercase_token + '.h'
-    lib_c_name = lowercase_token + '.c'
-    test_c_name = lowercase_token + '_test.c'
-    kwargs = {'utoken': uppercase_token,
-              'ltoken': lowercase_token,
-              'header_dir': lowercase_token,
-              'function_name': function_name,
-              'header_file': lib_h_name,
-              'source_file': lib_c_name,
-              'test_source_file': test_c_name,
-              'test_exe_name': lowercase_token,
-              'project_name': project_name,
-              'lib_name': lowercase_token,
-              'test_name': lowercase_token,
-              'version': version,
-              }
-    open(lib_h_name, 'w').write(lib_h_template.format(**kwargs))
-    open(lib_c_name, 'w').write(lib_c_template.format(**kwargs))
-    open(test_c_name, 'w').write(lib_c_test_template.format(**kwargs))
-    open('meson.build', 'w').write(lib_c_meson_template.format(**kwargs))
+
+class CProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        source_name = lowercase_token + '.c'
+        open(source_name, 'w').write(hello_c_template.format(project_name=self.name))
+        open('meson.build', 'w').write(hello_c_meson_template.format(project_name=self.name,
+                                                                     exe_name=lowercase_token,
+                                                                     source_name=source_name,
+                                                                     version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        function_name = lowercase_token[0:3] + '_func'
+        test_exe_name = lowercase_token + '_test'
+        lib_h_name = lowercase_token + '.h'
+        lib_c_name = lowercase_token + '.c'
+        test_c_name = lowercase_token + '_test.c'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'header_dir': lowercase_token,
+                  'function_name': function_name,
+                  'header_file': lib_h_name,
+                  'source_file': lib_c_name,
+                  'test_source_file': test_c_name,
+                  'test_exe_name': test_exe_name,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_h_name, 'w').write(lib_h_template.format(**kwargs))
+        open(lib_c_name, 'w').write(lib_c_template.format(**kwargs))
+        open(test_c_name, 'w').write(lib_c_test_template.format(**kwargs))
+        open('meson.build', 'w').write(lib_c_meson_template.format(**kwargs))
diff -Nru meson-0.53.2/mesonbuild/templates/cudatemplates.py meson-0.57.0+really0.56.2/mesonbuild/templates/cudatemplates.py
--- meson-0.53.2/mesonbuild/templates/cudatemplates.py	1970-01-01 00:00:00.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/templates/cudatemplates.py	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,184 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+hello_cuda_template = '''#include <iostream>
+
+#define PROJECT_NAME "{project_name}"
+
+int main(int argc, char **argv) {{
+    if(argc != 1) {{
+        std::cout << argv[0] << " takes no arguments.\\n";
+        return 1;
+    }}
+    std::cout << "This is project " << PROJECT_NAME << ".\\n";
+    return 0;
+}}
+'''
+
+hello_cuda_meson_template = '''project('{project_name}', ['cuda', 'cpp'],
+  version : '{version}',
+  default_options : ['warning_level=3',
+                     'cpp_std=c++14'])
+
+exe = executable('{exe_name}', '{source_name}',
+  install : true)
+
+test('basic', exe)
+'''
+
+lib_h_template = '''#pragma once
+#if defined _WIN32 || defined __CYGWIN__
+  #ifdef BUILDING_{utoken}
+    #define {utoken}_PUBLIC __declspec(dllexport)
+  #else
+    #define {utoken}_PUBLIC __declspec(dllimport)
+  #endif
+#else
+  #ifdef BUILDING_{utoken}
+      #define {utoken}_PUBLIC __attribute__ ((visibility ("default")))
+  #else
+      #define {utoken}_PUBLIC
+  #endif
+#endif
+
+namespace {namespace} {{
+
+class {utoken}_PUBLIC {class_name} {{
+
+public:
+  {class_name}();
+  int get_number() const;
+
+private:
+
+  int number;
+
+}};
+
+}}
+
+'''
+
+lib_cuda_template = '''#include <{header_file}>
+
+namespace {namespace} {{
+
+{class_name}::{class_name}() {{
+    number = 6;
+}}
+
+int {class_name}::get_number() const {{
+  return number;
+}}
+
+}}
+'''
+
+lib_cuda_test_template = '''#include <{header_file}>
+#include <iostream>
+
+int main(int argc, char **argv) {{
+    if(argc != 1) {{
+        std::cout << argv[0] << " takes no arguments.\\n";
+        return 1;
+    }}
+    {namespace}::{class_name} c;
+    return c.get_number() != 6;
+}}
+'''
+
+lib_cuda_meson_template = '''project('{project_name}', ['cuda', 'cpp'],
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+# These arguments are only used to build the shared library
+# not the executables that use the library.
+lib_args = ['-DBUILDING_{utoken}']
+
+shlib = shared_library('{lib_name}', '{source_file}',
+  install : true,
+  cpp_args : lib_args,
+  gnu_symbol_visibility : 'hidden',
+)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+  link_with : shlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+  include_directories: include_directories('.'),
+  link_with : shlib)
+
+# Make this library usable from the system's
+# package manager.
+install_headers('{header_file}', subdir : '{header_dir}')
+
+pkg_mod = import('pkgconfig')
+pkg_mod.generate(
+  name : '{project_name}',
+  filebase : '{ltoken}',
+  description : 'Meson sample project.',
+  subdirs : '{header_dir}',
+  libraries : shlib,
+  version : '{version}',
+)
+'''
+
+
+class CudaProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        source_name = lowercase_token + '.cu'
+        open(source_name, 'w').write(hello_cuda_template.format(project_name=self.name))
+        open('meson.build', 'w').write(hello_cuda_meson_template.format(project_name=self.name,
+                                                                        exe_name=lowercase_token,
+                                                                        source_name=source_name,
+                                                                        version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        class_name = uppercase_token[0] + lowercase_token[1:]
+        test_exe_name = lowercase_token + '_test'
+        namespace = lowercase_token
+        lib_h_name = lowercase_token + '.h'
+        lib_cuda_name = lowercase_token + '.cu'
+        test_cuda_name = lowercase_token + '_test.cu'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'header_dir': lowercase_token,
+                  'class_name': class_name,
+                  'namespace': namespace,
+                  'header_file': lib_h_name,
+                  'source_file': lib_cuda_name,
+                  'test_source_file': test_cuda_name,
+                  'test_exe_name': test_exe_name,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_h_name, 'w').write(lib_h_template.format(**kwargs))
+        open(lib_cuda_name, 'w').write(lib_cuda_template.format(**kwargs))
+        open(test_cuda_name, 'w').write(lib_cuda_test_template.format(**kwargs))
+        open('meson.build', 'w').write(lib_cuda_meson_template.format(**kwargs))
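
lib_h_template above is the usual cross-platform export-macro pattern: BUILDING_{utoken} selects __declspec(dllexport) versus dllimport on Windows and Cygwin, and default symbol visibility elsewhere. Assuming the mesonbuild package is importable from a source checkout, the rendered header can be inspected by formatting the template with the same keys CudaProject.create_library fills in; the values below are illustrative only:

from mesonbuild.templates.cudatemplates import lib_h_template

rendered = lib_h_template.format(utoken='DEMO',      # macro prefix
                                 namespace='demo',   # C++ namespace
                                 class_name='Demo')  # exported class
print(rendered)  # shows the DEMO_PUBLIC macro and the demo::Demo declaration
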
diff -Nru meson-0.53.2/mesonbuild/templates/dlangtemplates.py meson-0.57.0+really0.56.2/mesonbuild/templates/dlangtemplates.py
--- meson-0.53.2/mesonbuild/templates/dlangtemplates.py	2020-01-07 19:29:59.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/templates/dlangtemplates.py	2020-09-17 22:00:44.000000000 +0000
@@ -11,6 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
 import re
 
 
@@ -99,36 +100,43 @@
 endif
 '''
 
-def create_exe_d_sample(project_name, project_version):
-    lowercase_token = re.sub(r'[^a-z0-9]', '_', project_name.lower())
-    source_name = lowercase_token + '.d'
-    open(source_name, 'w').write(hello_d_template.format(project_name=project_name))
-    open('meson.build', 'w').write(hello_d_meson_template.format(project_name=project_name,
-                                                                 exe_name=lowercase_token,
-                                                                 source_name=source_name,
-                                                                 version=project_version))
-
-
-def create_lib_d_sample(project_name, version):
-    lowercase_token = re.sub(r'[^a-z0-9]', '_', project_name.lower())
-    uppercase_token = lowercase_token.upper()
-    function_name = lowercase_token[0:3] + '_func'
-    lib_m_name = lowercase_token
-    lib_d_name = lowercase_token + '.d'
-    test_d_name = lowercase_token + '_test.d'
-    kwargs = {'utoken': uppercase_token,
-              'ltoken': lowercase_token,
-              'header_dir': lowercase_token,
-              'function_name': function_name,
-              'module_file': lib_m_name,
-              'source_file': lib_d_name,
-              'test_source_file': test_d_name,
-              'test_exe_name': lowercase_token,
-              'project_name': project_name,
-              'lib_name': lowercase_token,
-              'test_name': lowercase_token,
-              'version': version,
-              }
-    open(lib_d_name, 'w').write(lib_d_template.format(**kwargs))
-    open(test_d_name, 'w').write(lib_d_test_template.format(**kwargs))
-    open('meson.build', 'w').write(lib_d_meson_template.format(**kwargs))
+
+class DlangProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        source_name = lowercase_token + '.d'
+        open(source_name, 'w').write(hello_d_template.format(project_name=self.name))
+        open('meson.build', 'w').write(hello_d_meson_template.format(project_name=self.name,
+                                                                     exe_name=lowercase_token,
+                                                                     source_name=source_name,
+                                                                     version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        function_name = lowercase_token[0:3] + '_func'
+        test_exe_name = lowercase_token + '_test'
+        lib_m_name = lowercase_token
+        lib_d_name = lowercase_token + '.d'
+        test_d_name = lowercase_token + '_test.d'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'header_dir': lowercase_token,
+                  'function_name': function_name,
+                  'module_file': lib_m_name,
+                  'source_file': lib_d_name,
+                  'test_source_file': test_d_name,
+                  'test_exe_name': test_exe_name,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_d_name, 'w').write(lib_d_template.format(**kwargs))
+        open(test_d_name, 'w').write(lib_d_test_template.format(**kwargs))
+        open('meson.build', 'w').write(lib_d_meson_template.format(**kwargs))
diff -Nru meson-0.53.2/mesonbuild/templates/fortrantemplates.py meson-0.57.0+really0.56.2/mesonbuild/templates/fortrantemplates.py
--- meson-0.53.2/mesonbuild/templates/fortrantemplates.py	2019-12-04 18:45:50.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/templates/fortrantemplates.py	2020-09-17 22:00:44.000000000 +0000
@@ -11,6 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
 import re
 
 lib_fortran_template = '''
@@ -98,33 +99,41 @@
 test('basic', exe)
 '''
 
-def create_exe_fortran_sample(project_name, project_version):
-    lowercase_token = re.sub(r'[^a-z0-9]', '_', project_name.lower())
-    source_name = lowercase_token + '.f90'
-    open(source_name, 'w').write(hello_fortran_template.format(project_name=project_name))
-    open('meson.build', 'w').write(hello_fortran_meson_template.format(project_name=project_name,
-                                                                       exe_name=lowercase_token,
-                                                                       source_name=source_name,
-                                                                       version=project_version))
-
-def create_lib_fortran_sample(project_name, version):
-    lowercase_token = re.sub(r'[^a-z0-9]', '_', project_name.lower())
-    uppercase_token = lowercase_token.upper()
-    function_name = lowercase_token[0:3] + '_func'
-    lib_fortran_name = lowercase_token + '.f90'
-    test_fortran_name = lowercase_token + '_test.f90'
-    kwargs = {'utoken': uppercase_token,
-              'ltoken': lowercase_token,
-              'header_dir': lowercase_token,
-              'function_name': function_name,
-              'source_file': lib_fortran_name,
-              'test_source_file': test_fortran_name,
-              'test_exe_name': lowercase_token,
-              'project_name': project_name,
-              'lib_name': lowercase_token,
-              'test_name': lowercase_token,
-              'version': version,
-              }
-    open(lib_fortran_name, 'w').write(lib_fortran_template.format(**kwargs))
-    open(test_fortran_name, 'w').write(lib_fortran_test_template.format(**kwargs))
-    open('meson.build', 'w').write(lib_fortran_meson_template.format(**kwargs))
+
+class FortranProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        source_name = lowercase_token + '.f90'
+        open(source_name, 'w').write(hello_fortran_template.format(project_name=self.name))
+        open('meson.build', 'w').write(hello_fortran_meson_template.format(project_name=self.name,
+                                                                           exe_name=lowercase_token,
+                                                                           source_name=source_name,
+                                                                           version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        function_name = lowercase_token[0:3] + '_func'
+        test_exe_name = lowercase_token + '_test'
+        lib_fortran_name = lowercase_token + '.f90'
+        test_fortran_name = lowercase_token + '_test.f90'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'header_dir': lowercase_token,
+                  'function_name': function_name,
+                  'source_file': lib_fortran_name,
+                  'test_source_file': test_fortran_name,
+                  'test_exe_name': test_exe_name,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_fortran_name, 'w').write(lib_fortran_template.format(**kwargs))
+        open(test_fortran_name, 'w').write(lib_fortran_test_template.format(**kwargs))
+        open('meson.build', 'w').write(lib_fortran_meson_template.format(**kwargs))
diff -Nru meson-0.53.2/mesonbuild/templates/javatemplates.py meson-0.57.0+really0.56.2/mesonbuild/templates/javatemplates.py
--- meson-0.53.2/mesonbuild/templates/javatemplates.py	1970-01-01 00:00:00.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/templates/javatemplates.py	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,134 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+hello_java_template = '''
+
+public class {class_name} {{
+    final static String PROJECT_NAME = "{project_name}";
+
+    public static void main (String args[]) {{
+        if(args.length != 0) {{
+            System.out.println(args + " takes no arguments.");
+            System.exit(0);
+        }}
+        System.out.println("This is project " + PROJECT_NAME + ".");
+        System.exit(0);
+    }}
+}}
+
+'''
+
+hello_java_meson_template = '''project('{project_name}', 'java',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+exe = jar('{exe_name}', '{source_name}',
+  main_class : '{exe_name}',
+  install : true)
+
+test('basic', exe)
+'''
+
+lib_java_template = '''
+
+public class {class_name} {{
+    final static int number = 6;
+
+    public final int get_number() {{
+      return number;
+    }}
+}}
+
+'''
+
+lib_java_test_template = '''
+
+public class {class_test} {{
+    public static void main (String args[]) {{
+        if(args.length != 0) {{
+            System.out.println(args + " takes no arguments.");
+            System.exit(1);
+        }}
+
+        {class_name} c = new {class_name}();
+        Boolean result = true;
+        System.exit(result.compareTo(c.get_number() != 6));
+    }}
+}}
+
+'''
+
+lib_java_meson_template = '''project('{project_name}', 'java',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+jarlib = jar('{class_name}', '{source_file}',
+  main_class : '{class_name}',
+  install : true,
+)
+
+test_jar = jar('{class_test}', '{test_source_file}',
+  main_class : '{class_test}',
+  link_with : jarlib)
+test('{test_name}', test_jar)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+  include_directories: include_directories('.'),
+  link_with : jarlib)
+'''
+
+
+class JavaProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        class_name = uppercase_token[0] + lowercase_token[1:]
+        source_name = uppercase_token[0] + lowercase_token[1:] + '.java'
+        open(source_name, 'w').write(hello_java_template.format(project_name=self.name,
+                                                                class_name=class_name))
+        open('meson.build', 'w').write(hello_java_meson_template.format(project_name=self.name,
+                                                                        exe_name=class_name,
+                                                                        source_name=source_name,
+                                                                        version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        class_name = uppercase_token[0] + lowercase_token[1:]
+        class_test = uppercase_token[0] + lowercase_token[1:] + '_test'
+        lib_java_name = uppercase_token[0] + lowercase_token[1:] + '.java'
+        test_java_name = uppercase_token[0] + lowercase_token[1:] + '_test.java'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'class_test': class_test,
+                  'class_name': class_name,
+                  'source_file': lib_java_name,
+                  'test_source_file': test_java_name,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_java_name, 'w').write(lib_java_template.format(**kwargs))
+        open(test_java_name, 'w').write(lib_java_test_template.format(**kwargs))
+        open('meson.build', 'w').write(lib_java_meson_template.format(**kwargs))
diff -Nru meson-0.53.2/mesonbuild/templates/mesontemplates.py meson-0.57.0+really0.56.2/mesonbuild/templates/mesontemplates.py
--- meson-0.53.2/mesonbuild/templates/mesontemplates.py	1970-01-01 00:00:00.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/templates/mesontemplates.py	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,75 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+
+meson_executable_template = '''project('{project_name}', '{language}',
+  version : '{version}',
+  default_options : [{default_options}])
+
+executable('{executable}',
+           {sourcespec},{depspec}
+           install : true)
+'''
+
+
+meson_jar_template = '''project('{project_name}', '{language}',
+  version : '{version}',
+  default_options : [{default_options}])
+
+jar('{executable}',
+    {sourcespec},{depspec}
+    main_class: '{main_class}',
+    install : true)
+'''
+
+
+def create_meson_build(options: argparse.Namespace) -> None:
+    if options.type != 'executable':
+        raise SystemExit('\nGenerating a meson.build file from existing sources is\n'
+                         'supported only for project type "executable".\n'
+                         'Run meson init in an empty directory to create a sample project.')
+    default_options = ['warning_level=3']
+    if options.language == 'cpp':
+        # This shows how to set this very common option.
+        default_options += ['cpp_std=c++14']
+    # If we get a meson.build autoformatter one day, this code could
+    # be simplified quite a bit.
+    formatted_default_options = ', '.join("'{}'".format(x) for x in default_options)
+    sourcespec = ',\n           '.join("'{}'".format(x) for x in options.srcfiles)
+    depspec = ''
+    if options.deps:
+        depspec = '\n           dependencies : [\n              '
+        depspec += ',\n              '.join("dependency('{}')".format(x)
+                                            for x in options.deps.split(','))
+        depspec += '],'
+    if options.language != 'java':
+        content = meson_executable_template.format(project_name=options.name,
+                                                   language=options.language,
+                                                   version=options.version,
+                                                   executable=options.executable,
+                                                   sourcespec=sourcespec,
+                                                   depspec=depspec,
+                                                   default_options=formatted_default_options)
+    else:
+        content = meson_jar_template.format(project_name=options.name,
+                                            language=options.language,
+                                            version=options.version,
+                                            executable=options.executable,
+                                            main_class=options.name,
+                                            sourcespec=sourcespec,
+                                            depspec=depspec,
+                                            default_options=formatted_default_options)
+    open('meson.build', 'w').write(content)
+    print('Generated meson.build file:\n\n' + content)
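
create_meson_build() backs meson init when it is run in a directory that already contains sources: it supports only the executable project type and splices the source list and any dependencies into meson_executable_template, or into meson_jar_template for Java. A minimal sketch of driving it directly, with a hypothetical argparse.Namespace standing in for the parsed command-line options:

import argparse
from mesonbuild.templates.mesontemplates import create_meson_build

# Hypothetical options; meson init normally constructs this Namespace itself.
options = argparse.Namespace(type='executable',
                             language='c',
                             name='demo',
                             version='0.1',
                             executable='demo',
                             srcfiles=['main.c', 'util.c'],
                             deps='zlib')

create_meson_build(options)  # writes ./meson.build and prints its contents
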
diff -Nru meson-0.53.2/mesonbuild/templates/objcpptemplates.py meson-0.57.0+really0.56.2/mesonbuild/templates/objcpptemplates.py
--- meson-0.53.2/mesonbuild/templates/objcpptemplates.py	1970-01-01 00:00:00.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/templates/objcpptemplates.py	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,166 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+lib_h_template = '''#pragma once
+#if defined _WIN32 || defined __CYGWIN__
+  #ifdef BUILDING_{utoken}
+    #define {utoken}_PUBLIC __declspec(dllexport)
+  #else
+    #define {utoken}_PUBLIC __declspec(dllimport)
+  #endif
+#else
+  #ifdef BUILDING_{utoken}
+      #define {utoken}_PUBLIC __attribute__ ((visibility ("default")))
+  #else
+      #define {utoken}_PUBLIC
+  #endif
+#endif
+
+int {utoken}_PUBLIC {function_name}();
+
+'''
+
+lib_objcpp_template = '''#import <{header_file}>
+
+/* This function will not be exported and is not
+ * directly callable by users of this library.
+ */
+int internal_function() {{
+    return 0;
+}}
+
+int {function_name}() {{
+    return internal_function();
+}}
+'''
+
+lib_objcpp_test_template = '''#import <{header_file}>
+#import <iostream>
+
+int main(int argc, char **argv) {{
+    if(argc != 1) {{
+        std::cout << argv[0] << " takes no arguments." << std::endl;
+        return 1;
+    }}
+    return {function_name}();
+}}
+'''
+
+lib_objcpp_meson_template = '''project('{project_name}', 'objcpp',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+# These arguments are only used to build the shared library
+# not the executables that use the library.
+lib_args = ['-DBUILDING_{utoken}']
+
+shlib = shared_library('{lib_name}', '{source_file}',
+  install : true,
+  objcpp_args : lib_args,
+  gnu_symbol_visibility : 'hidden',
+)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+  link_with : shlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+  include_directories: include_directories('.'),
+  link_with : shlib)
+
+# Make this library usable from the system's
+# package manager.
+install_headers('{header_file}', subdir : '{header_dir}')
+
+pkg_mod = import('pkgconfig')
+pkg_mod.generate(
+  name : '{project_name}',
+  filebase : '{ltoken}',
+  description : 'Meson sample project.',
+  subdirs : '{header_dir}',
+  libraries : shlib,
+  version : '{version}',
+)
+'''
+
+hello_objcpp_template = '''#import <iostream>
+
+#define PROJECT_NAME "{project_name}"
+
+int main(int argc, char **argv) {{
+    if(argc != 1) {{
+        std::cout << argv[0] << " takes no arguments." << std::endl;
+        return 1;
+    }}
+    std::cout << "This is project " << PROJECT_NAME << "." << std::endl;
+    return 0;
+}}
+'''
+
+hello_objcpp_meson_template = '''project('{project_name}', 'objcpp',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+exe = executable('{exe_name}', '{source_name}',
+  install : true)
+
+test('basic', exe)
+'''
+
+
+class ObjCppProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        source_name = lowercase_token + '.mm'
+        open(source_name, 'w').write(hello_objcpp_template.format(project_name=self.name))
+        open('meson.build', 'w').write(hello_objcpp_meson_template.format(project_name=self.name,
+                                                                          exe_name=lowercase_token,
+                                                                          source_name=source_name,
+                                                                          version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        function_name = lowercase_token[0:3] + '_func'
+        test_exe_name = lowercase_token + '_test'
+        lib_h_name = lowercase_token + '.h'
+        lib_objcpp_name = lowercase_token + '.mm'
+        test_objcpp_name = lowercase_token + '_test.mm'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'header_dir': lowercase_token,
+                  'function_name': function_name,
+                  'header_file': lib_h_name,
+                  'source_file': lib_objcpp_name,
+                  'test_source_file': test_objcpp_name,
+                  'test_exe_name': test_exe_name,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_h_name, 'w').write(lib_h_template.format(**kwargs))
+        open(lib_objcpp_name, 'w').write(lib_objcpp_template.format(**kwargs))
+        open(test_objcpp_name, 'w').write(lib_objcpp_test_template.format(**kwargs))
+        open('meson.build', 'w').write(lib_objcpp_meson_template.format(**kwargs))
+
diff -Nru meson-0.53.2/mesonbuild/templates/objctemplates.py meson-0.57.0+really0.56.2/mesonbuild/templates/objctemplates.py
--- meson-0.53.2/mesonbuild/templates/objctemplates.py	2019-12-04 18:45:50.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/templates/objctemplates.py	2020-09-17 22:00:44.000000000 +0000
@@ -11,6 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
 import re
 
 
@@ -121,36 +122,44 @@
 test('basic', exe)
 '''
 
-def create_exe_objc_sample(project_name, project_version):
-    lowercase_token = re.sub(r'[^a-z0-9]', '_', project_name.lower())
-    source_name = lowercase_token + '.m'
-    open(source_name, 'w').write(hello_objc_template.format(project_name=project_name))
-    open('meson.build', 'w').write(hello_objc_meson_template.format(project_name=project_name,
-                                                                    exe_name=lowercase_token,
-                                                                    source_name=source_name,
-                                                                    version=project_version))
-
-def create_lib_objc_sample(project_name, version):
-    lowercase_token = re.sub(r'[^a-z0-9]', '_', project_name.lower())
-    uppercase_token = lowercase_token.upper()
-    function_name = lowercase_token[0:3] + '_func'
-    lib_h_name = lowercase_token + '.h'
-    lib_objc_name = lowercase_token + '.m'
-    test_objc_name = lowercase_token + '_test.m'
-    kwargs = {'utoken': uppercase_token,
-              'ltoken': lowercase_token,
-              'header_dir': lowercase_token,
-              'function_name': function_name,
-              'header_file': lib_h_name,
-              'source_file': lib_objc_name,
-              'test_source_file': test_objc_name,
-              'test_exe_name': lowercase_token,
-              'project_name': project_name,
-              'lib_name': lowercase_token,
-              'test_name': lowercase_token,
-              'version': version,
-              }
-    open(lib_h_name, 'w').write(lib_h_template.format(**kwargs))
-    open(lib_objc_name, 'w').write(lib_objc_template.format(**kwargs))
-    open(test_objc_name, 'w').write(lib_objc_test_template.format(**kwargs))
-    open('meson.build', 'w').write(lib_objc_meson_template.format(**kwargs))
+
+class ObjCProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        source_name = lowercase_token + '.m'
+        open(source_name, 'w').write(hello_objc_template.format(project_name=self.name))
+        open('meson.build', 'w').write(hello_objc_meson_template.format(project_name=self.name,
+                                                                        exe_name=lowercase_token,
+                                                                        source_name=source_name,
+                                                                        version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        function_name = lowercase_token[0:3] + '_func'
+        test_exe_name = lowercase_token + '_test'
+        lib_h_name = lowercase_token + '.h'
+        lib_objc_name = lowercase_token + '.m'
+        test_objc_name = lowercase_token + '_test.m'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'header_dir': lowercase_token,
+                  'function_name': function_name,
+                  'header_file': lib_h_name,
+                  'source_file': lib_objc_name,
+                  'test_source_file': test_objc_name,
+                  'test_exe_name': test_exe_name,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_h_name, 'w').write(lib_h_template.format(**kwargs))
+        open(lib_objc_name, 'w').write(lib_objc_template.format(**kwargs))
+        open(test_objc_name, 'w').write(lib_objc_test_template.format(**kwargs))
+        open('meson.build', 'w').write(lib_objc_meson_template.format(**kwargs))
diff -Nru meson-0.53.2/mesonbuild/templates/rusttemplates.py meson-0.57.0+really0.56.2/mesonbuild/templates/rusttemplates.py
--- meson-0.53.2/mesonbuild/templates/rusttemplates.py	2019-12-04 18:45:50.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/templates/rusttemplates.py	2020-09-17 22:00:44.000000000 +0000
@@ -11,6 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
 import re
 
 
@@ -69,35 +70,43 @@
 test('basic', exe)
 '''
 
-def create_exe_rust_sample(project_name, project_version):
-    lowercase_token = re.sub(r'[^a-z0-9]', '_', project_name.lower())
-    source_name = lowercase_token + '.rs'
-    open(source_name, 'w').write(hello_rust_template.format(project_name=project_name))
-    open('meson.build', 'w').write(hello_rust_meson_template.format(project_name=project_name,
-                                                                    exe_name=lowercase_token,
-                                                                    source_name=source_name,
-                                                                    version=project_version))
-
-def create_lib_rust_sample(project_name, version):
-    lowercase_token = re.sub(r'[^a-z0-9]', '_', project_name.lower())
-    uppercase_token = lowercase_token.upper()
-    function_name = lowercase_token[0:3] + '_func'
-    lib_crate_name = lowercase_token
-    lib_rs_name = lowercase_token + '.rs'
-    test_rs_name = lowercase_token + '_test.rs'
-    kwargs = {'utoken': uppercase_token,
-              'ltoken': lowercase_token,
-              'header_dir': lowercase_token,
-              'function_name': function_name,
-              'crate_file': lib_crate_name,
-              'source_file': lib_rs_name,
-              'test_source_file': test_rs_name,
-              'test_exe_name': lowercase_token,
-              'project_name': project_name,
-              'lib_name': lowercase_token,
-              'test_name': lowercase_token,
-              'version': version,
-              }
-    open(lib_rs_name, 'w').write(lib_rust_template.format(**kwargs))
-    open(test_rs_name, 'w').write(lib_rust_test_template.format(**kwargs))
-    open('meson.build', 'w').write(lib_rust_meson_template.format(**kwargs))
+
+class RustProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        source_name = lowercase_token + '.rs'
+        open(source_name, 'w').write(hello_rust_template.format(project_name=self.name))
+        open('meson.build', 'w').write(hello_rust_meson_template.format(project_name=self.name,
+                                                                        exe_name=lowercase_token,
+                                                                        source_name=source_name,
+                                                                        version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        function_name = lowercase_token[0:3] + '_func'
+        test_exe_name = lowercase_token + '_test'
+        lib_crate_name = lowercase_token
+        lib_rs_name = lowercase_token + '.rs'
+        test_rs_name = lowercase_token + '_test.rs'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'header_dir': lowercase_token,
+                  'function_name': function_name,
+                  'crate_file': lib_crate_name,
+                  'source_file': lib_rs_name,
+                  'test_source_file': test_rs_name,
+                  'test_exe_name': test_exe_name,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_rs_name, 'w').write(lib_rust_template.format(**kwargs))
+        open(test_rs_name, 'w').write(lib_rust_test_template.format(**kwargs))
+        open('meson.build', 'w').write(lib_rust_meson_template.format(**kwargs))
diff -Nru meson-0.53.2/mesonbuild/templates/samplefactory.py meson-0.57.0+really0.56.2/mesonbuild/templates/samplefactory.py
--- meson-0.53.2/mesonbuild/templates/samplefactory.py	1970-01-01 00:00:00.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/templates/samplefactory.py	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,40 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from mesonbuild.templates.fortrantemplates import FortranProject
+from mesonbuild.templates.objcpptemplates import ObjCppProject
+from mesonbuild.templates.dlangtemplates import DlangProject
+from mesonbuild.templates.rusttemplates import RustProject
+from mesonbuild.templates.javatemplates import JavaProject
+from mesonbuild.templates.cudatemplates import CudaProject
+from mesonbuild.templates.objctemplates import ObjCProject
+from mesonbuild.templates.cpptemplates import CppProject
+from mesonbuild.templates.cstemplates import CSharpProject
+from mesonbuild.templates.ctemplates import CProject
+from mesonbuild.templates.sampleimpl import SampleImpl
+
+import argparse
+
+def sameple_generator(options: argparse.Namespace) -> SampleImpl:
+    return {
+        'c': CProject,
+        'cpp': CppProject,
+        'cs': CSharpProject,
+        'cuda': CudaProject,
+        'objc': ObjCProject,
+        'objcpp': ObjCppProject,
+        'java': JavaProject,
+        'd': DlangProject,
+        'rust': RustProject,
+        'fortran': FortranProject
+    }[options.language](options)
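
The factory keeps meson init language-agnostic: it looks the requested language up in this mapping and instantiates the matching SampleImpl subclass. Note that the function really is spelled sameple_generator in this release. A sketch of the dispatch, again with a hypothetical Namespace:

import argparse
from mesonbuild.templates.samplefactory import sameple_generator

# Hypothetical options for a new Java sample project.
options = argparse.Namespace(language='java', name='demo', version='0.1')

project = sameple_generator(options)  # returns a JavaProject instance
project.create_executable()           # writes Demo.java and meson.build
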
diff -Nru meson-0.53.2/mesonbuild/templates/sampleimpl.py meson-0.57.0+really0.56.2/mesonbuild/templates/sampleimpl.py
--- meson-0.53.2/mesonbuild/templates/sampleimpl.py	1970-01-01 00:00:00.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/templates/sampleimpl.py	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,21 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+class SampleImpl:
+    def create_executable(self) -> None:
+        raise NotImplementedError('Sample implementation for "executable" not implemented!')
+
+    def create_library(self) -> None:
+        raise NotImplementedError('Sample implementation for "library" not implemented!')
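
SampleImpl is the small interface every per-language generator above implements; anything that provides create_executable() and create_library() can be handed back by the factory. A hypothetical subclass, purely to show the contract (Meson does not ship a shell-script generator):

from mesonbuild.templates.sampleimpl import SampleImpl


class ShellProject(SampleImpl):
    # Illustrative only: mirrors the structure of the real subclasses.
    def __init__(self, options):
        super().__init__()
        self.name = options.name
        self.version = options.version

    def create_executable(self) -> None:
        with open(self.name + '.sh', 'w') as f:
            f.write('#!/bin/sh\necho "This is project {}."\n'.format(self.name))

    def create_library(self) -> None:
        raise NotImplementedError('no library sample for shell scripts')
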
diff -Nru meson-0.53.2/mesonbuild/wrap/__init__.py meson-0.57.0+really0.56.2/mesonbuild/wrap/__init__.py
--- meson-0.53.2/mesonbuild/wrap/__init__.py	2019-12-04 18:45:50.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/wrap/__init__.py	2020-10-18 21:29:13.000000000 +0000
@@ -40,6 +40,7 @@
                    'nofallback': 2,
                    'nodownload': 3,
                    'forcefallback': 4,
+                   'nopromote': 5,
                    }
 
 class WrapMode(Enum):
@@ -47,11 +48,12 @@
     nofallback = 2
     nodownload = 3
     forcefallback = 4
+    nopromote = 5
 
     def __str__(self) -> str:
         return self.name
 
     @staticmethod
-    def from_string(mode_name: str):
+    def from_string(mode_name: str) -> 'WrapMode':
         g = string_to_value[mode_name]
         return WrapMode(g)
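
The new nopromote mode has to be added in two places, the string_to_value table and the WrapMode enum, because from_string() maps option strings to enum members through that table. A short illustration:

from mesonbuild.wrap import WrapMode, string_to_value

mode = WrapMode.from_string('nopromote')
print(mode)                          # prints 'nopromote'; __str__ returns the name
print(mode is WrapMode.nopromote)    # True
print(string_to_value['nopromote'])  # 5
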
diff -Nru meson-0.53.2/mesonbuild/wrap/wrap.py meson-0.57.0+really0.56.2/mesonbuild/wrap/wrap.py
--- meson-0.53.2/mesonbuild/wrap/wrap.py	2020-02-25 18:00:46.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/wrap/wrap.py	2021-01-09 10:14:21.000000000 +0000
@@ -26,9 +26,13 @@
 import sys
 import configparser
 import typing as T
+import textwrap
 
+from .._pathlib import Path
 from . import WrapMode
-from ..mesonlib import git, GIT, ProgressBar, MesonException
+from .. import coredata
+from ..mesonlib import quiet_git, GIT, ProgressBar, MesonException
+from  .. import mesonlib
 
 if T.TYPE_CHECKING:
     import http.client
@@ -47,19 +51,7 @@
 SSL_WARNING_PRINTED = False
 WHITELIST_SUBDOMAIN = 'wrapdb.mesonbuild.com'
 
-def quiet_git(cmd: T.List[str], workingdir: str) -> T.Tuple[bool, str]:
-    if not GIT:
-        return False, 'Git program not found.'
-    pc = git(cmd, workingdir, universal_newlines=True,
-             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    if pc.returncode != 0:
-        return False, pc.stderr
-    return True, pc.stdout
-
-def verbose_git(cmd: T.List[str], workingdir: str, check: bool = False) -> bool:
-    if not GIT:
-        return False
-    return git(cmd, workingdir, check=check).returncode == 0
+ALL_TYPES = ['file', 'git', 'hg', 'svn']
 
 def whitelist_wrapdb(urlstr: str) -> urllib.parse.ParseResult:
     """ raises WrapException if not whitelisted subdomain """
@@ -78,7 +70,7 @@
     url = whitelist_wrapdb(urlstring)
     if has_ssl:
         try:
-            return urllib.request.urlopen(urllib.parse.urlunparse(url), timeout=REQ_TIMEOUT)
+            return T.cast('http.client.HTTPResponse', urllib.request.urlopen(urllib.parse.urlunparse(url), timeout=REQ_TIMEOUT))
         except urllib.error.URLError as excp:
             raise WrapException('WrapDB connection failed to {} with error {}'.format(urlstring, excp))
 
@@ -88,7 +80,7 @@
         mlog.warning('SSL module not available in {}: WrapDB traffic not authenticated.'.format(sys.executable))
         SSL_WARNING_PRINTED = True
     try:
-        return urllib.request.urlopen(urllib.parse.urlunparse(nossl_url), timeout=REQ_TIMEOUT)
+        return T.cast('http.client.HTTPResponse', urllib.request.urlopen(urllib.parse.urlunparse(nossl_url), timeout=REQ_TIMEOUT))
     except urllib.error.URLError as excp:
         raise WrapException('WrapDB connection failed to {} with error {}'.format(urlstring, excp))
 
@@ -102,13 +94,55 @@
 class PackageDefinition:
     def __init__(self, fname: str):
         self.filename = fname
+        self.type = None  # type: T.Optional[str]
+        self.values = {} # type: T.Dict[str, str]
+        self.provided_deps = {} # type: T.Dict[str, T.Optional[str]]
+        self.provided_programs = [] # type: T.List[str]
         self.basename = os.path.basename(fname)
-        self.name = self.basename[:-5]
+        self.has_wrap = self.basename.endswith('.wrap')
+        self.name = self.basename[:-5] if self.has_wrap else self.basename
+        self.directory = self.name
+        self.provided_deps[self.name] = None
+        if self.has_wrap:
+            self.parse_wrap()
+        self.directory = self.values.get('directory', self.name)
+        if os.path.dirname(self.directory):
+            raise WrapException('Directory key must be a name and not a path')
+        if self.type and self.type not in ALL_TYPES:
+            raise WrapException('Unknown wrap type {!r}'.format(self.type))
+        self.filesdir = os.path.join(os.path.dirname(self.filename), 'packagefiles')
+
+    def parse_wrap(self) -> None:
         try:
             self.config = configparser.ConfigParser(interpolation=None)
-            self.config.read(fname)
+            self.config.read(self.filename)
         except configparser.Error:
             raise WrapException('Failed to parse {}'.format(self.basename))
+        self.parse_wrap_section()
+        if self.type == 'redirect':
+            # [wrap-redirect] has a `filename` value pointing to the real wrap
+            # file we should parse instead. It must be relative to the current
+            # wrap file location and must be in the form foo/subprojects/bar.wrap.
+            dirname = Path(self.filename).parent
+            fname = Path(self.values['filename'])
+            for i, p in enumerate(fname.parts):
+                if i % 2 == 0:
+                    if p == '..':
+                        raise WrapException('wrap-redirect filename cannot contain ".."')
+                else:
+                    if p != 'subprojects':
+                        raise WrapException('wrap-redirect filename must be in the form foo/subprojects/bar.wrap')
+            if fname.suffix != '.wrap':
+                raise WrapException('wrap-redirect filename must be a .wrap file')
+            fname = dirname / fname
+            if not fname.is_file():
+                raise WrapException('wrap-redirect filename does not exist')
+            self.filename = str(fname)
+            self.parse_wrap()
+            return
+        self.parse_provide_section()
+
+    def parse_wrap_section(self) -> None:
         if len(self.config.sections()) < 1:
             raise WrapException('Missing sections in {}'.format(self.basename))
         self.wrap_section = self.config.sections()[0]
@@ -118,6 +152,27 @@
         self.type = self.wrap_section[5:]
         self.values = dict(self.config[self.wrap_section])
 
+    def parse_provide_section(self) -> None:
+        if self.config.has_section('provide'):
+            for k, v in self.config['provide'].items():
+                if k == 'dependency_names':
+                    # A comma-separated list of dependency names that do not
+                    # need a variable name
+                    names_dict = {n.strip(): None for n in v.split(',')}
+                    self.provided_deps.update(names_dict)
+                    continue
+                if k == 'program_names':
+                    # A comma-separated list of program names
+                    names_list = [n.strip() for n in v.split(',')]
+                    self.provided_programs += names_list
+                    continue
+                if not v:
+                    m = ('Empty dependency variable name for {!r} in {}. '
+                         'If the subproject uses meson.override_dependency() '
+                         'it can be added in the "dependency_names" special key.')
+                    raise WrapException(m.format(k, self.basename))
+                self.provided_deps[k] = v
+
     def get(self, key: str) -> str:
         try:
             return self.values[key]
@@ -125,36 +180,127 @@
             m = 'Missing key {!r} in {}'
             raise WrapException(m.format(key, self.basename))
 
-    def has_patch(self) -> bool:
-        return 'patch_url' in self.values
-
-def load_wrap(subdir_root: str, packagename: str) -> PackageDefinition:
+def get_directory(subdir_root: str, packagename: str) -> str:
     fname = os.path.join(subdir_root, packagename + '.wrap')
     if os.path.isfile(fname):
-        return PackageDefinition(fname)
-    return None
+        wrap = PackageDefinition(fname)
+        return wrap.directory
+    return packagename
 
-def get_directory(subdir_root: str, packagename: str):
-    directory = packagename
-    # We always have to load the wrap file, if it exists, because it could
-    # override the default directory name.
-    wrap = load_wrap(subdir_root, packagename)
-    if wrap and 'directory' in wrap.values:
-        directory = wrap.get('directory')
-        if os.path.dirname(directory):
-            raise WrapException('Directory key must be a name and not a path')
-    return wrap, directory
+def verbose_git(cmd: T.List[str], workingdir: str, check: bool = False) -> bool:
+    '''
+    Wrapper to convert GitException to WrapException caught in interpreter.
+    '''
+    try:
+        return mesonlib.verbose_git(cmd, workingdir, check=check)
+    except mesonlib.GitException as e:
+        raise WrapException(str(e))
 
 class Resolver:
-    def __init__(self, subdir_root: str, wrap_mode=WrapMode.default):
+    def __init__(self, source_dir: str, subdir: str, wrap_mode: WrapMode = WrapMode.default) -> None:
+        self.source_dir = source_dir
+        self.subdir = subdir
         self.wrap_mode = wrap_mode
-        self.subdir_root = subdir_root
+        self.subdir_root = os.path.join(source_dir, subdir)
         self.cachedir = os.path.join(self.subdir_root, 'packagecache')
+        self.wraps = {} # type: T.Dict[str, PackageDefinition]
+        self.provided_deps = {} # type: T.Dict[str, PackageDefinition]
+        self.provided_programs = {} # type: T.Dict[str, PackageDefinition]
+        self.load_wraps()
+
+    def load_wraps(self) -> None:
+        if not os.path.isdir(self.subdir_root):
+            return
+        root, dirs, files = next(os.walk(self.subdir_root))
+        for i in files:
+            if not i.endswith('.wrap'):
+                continue
+            fname = os.path.join(self.subdir_root, i)
+            wrap = PackageDefinition(fname)
+            self.wraps[wrap.name] = wrap
+            if wrap.directory in dirs:
+                dirs.remove(wrap.directory)
+        # Add dummy package definition for directories not associated with a wrap file.
+        for i in dirs:
+            if i in ['packagecache', 'packagefiles']:
+                continue
+            fname = os.path.join(self.subdir_root, i)
+            wrap = PackageDefinition(fname)
+            self.wraps[wrap.name] = wrap
+
+        for wrap in self.wraps.values():
+            for k in wrap.provided_deps.keys():
+                if k in self.provided_deps:
+                    prev_wrap = self.provided_deps[k]
+                    m = 'Multiple wrap files provide {!r} dependency: {} and {}'
+                    raise WrapException(m.format(k, wrap.basename, prev_wrap.basename))
+                self.provided_deps[k] = wrap
+            for k in wrap.provided_programs:
+                if k in self.provided_programs:
+                    prev_wrap = self.provided_programs[k]
+                    m = 'Multiple wrap files provide {!r} program: {} and {}'
+                    raise WrapException(m.format(k, wrap.basename, prev_wrap.basename))
+                self.provided_programs[k] = wrap
+
+    def merge_wraps(self, other_resolver: 'Resolver') -> None:
+        for k, v in other_resolver.wraps.items():
+            self.wraps.setdefault(k, v)
+        for k, v in other_resolver.provided_deps.items():
+            self.provided_deps.setdefault(k, v)
+        for k, v in other_resolver.provided_programs.items():
+            self.provided_programs.setdefault(k, v)
+
+    def find_dep_provider(self, packagename: str) -> T.Optional[T.Union[str, T.List[str]]]:
+        # Return value is in the same format as fallback kwarg:
+        # ['subproject_name', 'variable_name'], or 'subproject_name'.
+        wrap = self.provided_deps.get(packagename)
+        if wrap:
+            dep_var = wrap.provided_deps.get(packagename)
+            if dep_var:
+                return [wrap.name, dep_var]
+            return wrap.name
+        return None
+
+    def find_program_provider(self, names: T.List[str]) -> T.Optional[str]:
+        for name in names:
+            wrap = self.provided_programs.get(name)
+            if wrap:
+                return wrap.name
+        return None
 
-    def resolve(self, packagename: str, method: str) -> str:
+    def resolve(self, packagename: str, method: str, current_subproject: str = '') -> str:
+        self.current_subproject = current_subproject
         self.packagename = packagename
-        self.wrap, self.directory = get_directory(self.subdir_root, self.packagename)
-        self.dirname = os.path.join(self.subdir_root, self.directory)
+        self.directory = packagename
+        self.wrap = self.wraps.get(packagename)
+        if not self.wrap:
+            m = 'Neither a subproject directory nor a {}.wrap file was found.'
+            raise WrapNotFoundException(m.format(self.packagename))
+        self.directory = self.wrap.directory
+
+        if self.wrap.has_wrap:
+            # We have a .wrap file, source code will be placed into main
+            # project's subproject_dir even if the wrap file comes from another
+            # subproject.
+            self.dirname = os.path.join(self.subdir_root, self.directory)
+            # Check if the wrap comes from the main project.
+            main_fname = os.path.join(self.subdir_root, self.wrap.basename)
+            if self.wrap.filename != main_fname:
+                rel = os.path.relpath(self.wrap.filename, self.source_dir)
+                mlog.log('Using', mlog.bold(rel))
+                # Write a dummy wrap file in the main project that redirects to
+                # the wrap we picked.
+                with open(main_fname, 'w') as f:
+                    f.write(textwrap.dedent('''\
+                        [wrap-redirect]
+                        filename = {}
+                        '''.format(os.path.relpath(self.wrap.filename, self.subdir_root))))
+        else:
+            # No wrap file, it's a dummy package definition for an existing
+            # directory. Use the source code in place.
+            self.dirname = self.wrap.filename
+        rel_path = os.path.relpath(self.dirname, self.source_dir)
+
         meson_file = os.path.join(self.dirname, 'meson.build')
         cmake_file = os.path.join(self.dirname, 'CMakeLists.txt')
 
@@ -163,9 +309,9 @@
 
         # The directory is there and has meson.build? Great, use it.
         if method == 'meson' and os.path.exists(meson_file):
-            return self.directory
+            return rel_path
         if method == 'cmake' and os.path.exists(cmake_file):
-            return self.directory
+            return rel_path
 
         # Check if the subproject is a git submodule
         self.resolve_git_submodule()
@@ -174,11 +320,6 @@
             if not os.path.isdir(self.dirname):
                 raise WrapException('Path already exists but is not a directory')
         else:
-            # A wrap file is required to download
-            if not self.wrap:
-                m = 'Subproject directory not found and {}.wrap file not found'
-                raise WrapNotFoundException(m.format(self.packagename))
-
             if self.wrap.type == 'file':
                 self.get_file()
             else:
@@ -191,6 +332,7 @@
                     self.get_svn()
                 else:
                     raise WrapException('Unknown wrap type {!r}'.format(self.wrap.type))
+            self.apply_patch()
 
         # A meson.build or CMakeLists.txt file is required in the directory
         if method == 'meson' and not os.path.exists(meson_file):
@@ -198,7 +340,7 @@
         if method == 'cmake' and not os.path.exists(cmake_file):
             raise WrapException('Subproject exists but has no CMakeLists.txt file')
 
-        return self.directory
+        return rel_path
 
     def check_can_download(self) -> None:
         # Don't download subproject data based on wrap file if requested.
@@ -250,8 +392,6 @@
             os.mkdir(self.dirname)
             extract_dir = self.dirname
         shutil.unpack_archive(path, extract_dir)
-        if self.wrap.has_patch():
-            self.apply_patch()
 
     def get_git(self) -> None:
         if not GIT:
@@ -330,8 +470,10 @@
             raise WrapException('{} may be a WrapDB-impersonating URL'.format(urlstring))
         else:
             try:
-                resp = urllib.request.urlopen(urlstring, timeout=REQ_TIMEOUT)
-            except urllib.error.URLError:
+                req = urllib.request.Request(urlstring, headers={'User-Agent': 'mesonbuild/{}'.format(coredata.version)})
+                resp = urllib.request.urlopen(req, timeout=REQ_TIMEOUT)
+            except urllib.error.URLError as e:
+                mlog.log(str(e))
                 raise WrapException('could not get {} is the internet available?'.format(urlstring))
         with contextlib.closing(resp) as resp:
             try:
@@ -362,7 +504,9 @@
             hashvalue = h.hexdigest()
         return hashvalue, tmpfile.name
 
-    def check_hash(self, what: str, path: str) -> None:
+    def check_hash(self, what: str, path: str, hash_required: bool = True) -> None:
+        if what + '_hash' not in self.wrap.values and not hash_required:
+            return
         expected = self.wrap.get(what + '_hash')
         h = hashlib.sha256()
         with open(path, 'rb') as f:
@@ -371,39 +515,70 @@
         if dhash != expected:
             raise WrapException('Incorrect hash for {}:\n {} expected\n {} actual.'.format(what, expected, dhash))
 
-    def download(self, what: str, ofname: str) -> None:
+    def download(self, what: str, ofname: str, fallback: bool = False) -> None:
         self.check_can_download()
-        srcurl = self.wrap.get(what + '_url')
+        srcurl = self.wrap.get(what + ('_fallback_url' if fallback else '_url'))
         mlog.log('Downloading', mlog.bold(self.packagename), what, 'from', mlog.bold(srcurl))
-        dhash, tmpfile = self.get_data(srcurl)
-        expected = self.wrap.get(what + '_hash')
-        if dhash != expected:
-            os.remove(tmpfile)
-            raise WrapException('Incorrect hash for {}:\n {} expected\n {} actual.'.format(what, expected, dhash))
+        try:
+            dhash, tmpfile = self.get_data(srcurl)
+            expected = self.wrap.get(what + '_hash')
+            if dhash != expected:
+                os.remove(tmpfile)
+                raise WrapException('Incorrect hash for {}:\n {} expected\n {} actual.'.format(what, expected, dhash))
+        except WrapException:
+            if not fallback:
+                if what + '_fallback_url' in self.wrap.values:
+                    return self.download(what, ofname, fallback=True)
+                mlog.log('A fallback URL could be specified using',
+                         mlog.bold(what + '_fallback_url'), 'key in the wrap file')
+            raise
         os.rename(tmpfile, ofname)
 
     def get_file_internal(self, what: str) -> str:
         filename = self.wrap.get(what + '_filename')
-        cache_path = os.path.join(self.cachedir, filename)
+        if what + '_url' in self.wrap.values:
+            cache_path = os.path.join(self.cachedir, filename)
 
-        if os.path.exists(cache_path):
-            self.check_hash(what, cache_path)
-            mlog.log('Using', mlog.bold(self.packagename), what, 'from cache.')
+            if os.path.exists(cache_path):
+                self.check_hash(what, cache_path)
+                mlog.log('Using', mlog.bold(self.packagename), what, 'from cache.')
+                return cache_path
+
+            if not os.path.isdir(self.cachedir):
+                os.mkdir(self.cachedir)
+            self.download(what, cache_path)
             return cache_path
+        else:
+            from ..interpreterbase import FeatureNew
+            FeatureNew('Local wrap patch files without {}_url'.format(what), '0.55.0').use(self.current_subproject)
+            path = Path(self.wrap.filesdir) / filename
+
+            if not path.exists():
+                raise WrapException('File "{}" does not exist'.format(path))
+            self.check_hash(what, path.as_posix(), hash_required=False)
 
-        if not os.path.isdir(self.cachedir):
-            os.mkdir(self.cachedir)
-        self.download(what, cache_path)
-        return cache_path
+            return path.as_posix()
 
     def apply_patch(self) -> None:
-        path = self.get_file_internal('patch')
-        try:
-            shutil.unpack_archive(path, self.subdir_root)
-        except Exception:
-            with tempfile.TemporaryDirectory() as workdir:
-                shutil.unpack_archive(path, workdir)
-                self.copy_tree(workdir, self.subdir_root)
+        if 'patch_filename' in self.wrap.values and 'patch_directory' in self.wrap.values:
+            m = 'Wrap file {!r} must not have both "patch_filename" and "patch_directory"'
+            raise WrapException(m.format(self.wrap.basename))
+        if 'patch_filename' in self.wrap.values:
+            path = self.get_file_internal('patch')
+            try:
+                shutil.unpack_archive(path, self.subdir_root)
+            except Exception:
+                with tempfile.TemporaryDirectory() as workdir:
+                    shutil.unpack_archive(path, workdir)
+                    self.copy_tree(workdir, self.subdir_root)
+        elif 'patch_directory' in self.wrap.values:
+            from ..interpreterbase import FeatureNew
+            FeatureNew('patch_directory', '0.55.0').use(self.current_subproject)
+            patch_dir = self.wrap.values['patch_directory']
+            src_dir = os.path.join(self.wrap.filesdir, patch_dir)
+            if not os.path.isdir(src_dir):
+                raise WrapException('patch directory does not exist: {}'.format(patch_dir))
+            self.copy_tree(src_dir, self.dirname)
 
     def copy_tree(self, root_src_dir: str, root_dst_dir: str) -> None:
         """
diff -Nru meson-0.53.2/mesonbuild/wrap/wraptool.py meson-0.57.0+really0.56.2/mesonbuild/wrap/wraptool.py
--- meson-0.53.2/mesonbuild/wrap/wraptool.py	2019-12-29 22:47:27.000000000 +0000
+++ meson-0.57.0+really0.56.2/mesonbuild/wrap/wraptool.py	2020-09-17 22:00:44.000000000 +0000
@@ -16,6 +16,7 @@
 import sys, os
 import configparser
 import shutil
+import typing as T
 
 from glob import glob
 
@@ -23,7 +24,10 @@
 
 from .. import mesonlib
 
-def add_arguments(parser):
+if T.TYPE_CHECKING:
+    import argparse
+
+def add_arguments(parser: 'argparse.ArgumentParser') -> None:
     subparsers = parser.add_subparsers(title='Commands', dest='command')
     subparsers.required = True
 
@@ -53,26 +57,28 @@
     p.add_argument('project_path')
     p.set_defaults(wrap_func=promote)
 
-def get_result(urlstring):
+def get_result(urlstring: str) -> T.Dict[str, T.Any]:
     u = open_wrapdburl(urlstring)
     data = u.read().decode('utf-8')
     jd = json.loads(data)
     if jd['output'] != 'ok':
         print('Got bad output from server.', file=sys.stderr)
         raise SystemExit(data)
+    assert isinstance(jd, dict)
     return jd
 
-def get_projectlist():
+def get_projectlist() -> T.List[str]:
     jd = get_result(API_ROOT + 'projects')
     projects = jd['projects']
+    assert isinstance(projects, list)
     return projects
 
-def list_projects(options):
+def list_projects(options: 'argparse.Namespace') -> None:
     projects = get_projectlist()
     for p in projects:
         print(p)
 
-def search(options):
+def search(options: 'argparse.Namespace') -> None:
     name = options.name
     jd = get_result(API_ROOT + 'query/byname/' + name)
     for p in jd['projects']:
@@ -84,7 +90,7 @@
     revision = jd['revision']
     return branch, revision
 
-def install(options):
+def install(options: 'argparse.Namespace') -> None:
     name = options.name
     if not os.path.isdir('subprojects'):
         raise SystemExit('Subprojects dir not found. Run this script in your source root directory.')
@@ -100,25 +106,25 @@
         f.write(data)
     print('Installed', name, 'branch', branch, 'revision', revision)
 
-def parse_patch_url(patch_url):
+def parse_patch_url(patch_url: str) -> T.Tuple[str, int]:
     arr = patch_url.split('/')
     return arr[-3], int(arr[-2])
 
-def get_current_version(wrapfile):
-    cp = configparser.ConfigParser()
+def get_current_version(wrapfile: str) -> T.Tuple[str, int, str, str, str]:
+    cp = configparser.ConfigParser(interpolation=None)
     cp.read(wrapfile)
-    cp = cp['wrap-file']
-    patch_url = cp['patch_url']
+    wrap_data = cp['wrap-file']
+    patch_url = wrap_data['patch_url']
     branch, revision = parse_patch_url(patch_url)
-    return branch, revision, cp['directory'], cp['source_filename'], cp['patch_filename']
+    return branch, revision, wrap_data['directory'], wrap_data['source_filename'], wrap_data['patch_filename']
 
-def update_wrap_file(wrapfile, name, new_branch, new_revision):
+def update_wrap_file(wrapfile: str, name: str, new_branch: str, new_revision: str) -> None:
     u = open_wrapdburl(API_ROOT + 'projects/{}/{}/{}/get_wrap'.format(name, new_branch, new_revision))
     data = u.read()
     with open(wrapfile, 'wb') as f:
         f.write(data)
 
-def update(options):
+def update(options: 'argparse.Namespace') -> None:
     name = options.name
     if not os.path.isdir('subprojects'):
         raise SystemExit('Subprojects dir not found. Run this command in your source root directory.')
@@ -142,7 +148,7 @@
         pass
     print('Updated', name, 'to branch', new_branch, 'revision', new_revision)
 
-def info(options):
+def info(options: 'argparse.Namespace') -> None:
     name = options.name
     jd = get_result(API_ROOT + 'projects/' + name)
     versions = jd['versions']
@@ -152,7 +158,7 @@
     for v in versions:
         print(' ', v['branch'], v['revision'])
 
-def do_promotion(from_path, spdir_name):
+def do_promotion(from_path: str, spdir_name: str) -> None:
     if os.path.isfile(from_path):
         assert(from_path.endswith('.wrap'))
         shutil.copy(from_path, spdir_name)
@@ -163,7 +169,7 @@
             raise SystemExit('Output dir {} already exists. Will not overwrite.'.format(outputdir))
         shutil.copytree(from_path, outputdir, ignore=shutil.ignore_patterns('subprojects'))
 
-def promote(options):
+def promote(options: 'argparse.Namespace') -> None:
     argument = options.project_path
     spdir_name = 'subprojects'
     sprojs = mesonlib.detect_subprojects(spdir_name)
@@ -186,7 +192,7 @@
         raise SystemExit(1)
     do_promotion(matches[0], spdir_name)
 
-def status(options):
+def status(options: 'argparse.Namespace') -> None:
     print('Subproject status')
     for w in glob('subprojects/*.wrap'):
         name = os.path.basename(w)[:-5]
@@ -205,6 +211,6 @@
         else:
             print('', name, 'not up to date. Have {} {}, but {} {} is available.'.format(current_branch, current_revision, latest_branch, latest_revision))
 
-def run(options):
+def run(options: 'argparse.Namespace') -> int:
     options.wrap_func(options)
     return 0
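The wraptool.py hunk above is almost entirely type annotations; the main behavioural tweak is constructing ConfigParser with interpolation=None so that '%' characters in wrap files are not treated as interpolation directives. As a quick, self-contained sketch of the URL-splitting helper annotated there (the WrapDB-style URL below is only an illustrative shape, not taken from the patch):

    import typing as T

    def parse_patch_url(patch_url: str) -> T.Tuple[str, int]:
        # The patch URL has the shape .../<branch>/<revision>/<endpoint>, so the
        # branch is the third-from-last path component and the revision the
        # second-from-last.
        arr = patch_url.split('/')
        return arr[-3], int(arr[-2])

    # Hypothetical WrapDB-style URL, for illustration only:
    print(parse_patch_url('https://wrapdb.mesonbuild.com/v1/projects/zlib/1.2.11/4/get_zip'))
    # -> ('1.2.11', 4)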
diff -Nru meson-0.53.2/meson.egg-info/PKG-INFO meson-0.57.0+really0.56.2/meson.egg-info/PKG-INFO
--- meson-0.53.2/meson.egg-info/PKG-INFO	2020-02-25 16:01:43.000000000 +0000
+++ meson-0.57.0+really0.56.2/meson.egg-info/PKG-INFO	2021-01-10 12:49:44.000000000 +0000
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: meson
-Version: 0.53.2
+Version: 0.56.2
 Summary: A high performance build system
 Home-page: https://mesonbuild.com
 Author: Jussi Pakkanen
diff -Nru meson-0.53.2/meson.egg-info/SOURCES.txt meson-0.57.0+really0.56.2/meson.egg-info/SOURCES.txt
--- meson-0.53.2/meson.egg-info/SOURCES.txt	2020-02-25 16:01:43.000000000 +0000
+++ meson-0.57.0+really0.56.2/meson.egg-info/SOURCES.txt	2021-01-10 12:49:44.000000000 +0000
@@ -2,6 +2,7 @@
 MANIFEST.in
 README.md
 __main__.py
+contributing.md
 ghwt.py
 meson.py
 pyproject.toml
@@ -12,20 +13,30 @@
 run_unittests.py
 setup.cfg
 setup.py
+cross/arm64cl.txt
 cross/armcc.txt
 cross/armclang-linux.txt
 cross/armclang.txt
+cross/c2000.txt
+cross/ccomp-armv7a.txt
 cross/ccrx.txt
 cross/iphone.txt
+cross/linux-mingw-w64-32bit.json
 cross/linux-mingw-w64-32bit.txt
+cross/linux-mingw-w64-64bit.json
 cross/linux-mingw-w64-64bit.txt
+cross/none.txt
 cross/ownstdlib.txt
 cross/tvos.txt
+cross/ubuntu-armhf.json
 cross/ubuntu-armhf.txt
 cross/ubuntu-faketarget.txt
 cross/wasm.txt
+cross/xc16.txt
 data/com.mesonbuild.install.policy
 data/macros.meson
+data/schema.xsd
+data/test.schema.json
 data/shell-completions/bash/meson
 data/shell-completions/zsh/_meson
 data/syntax-highlighting/emacs/meson.el
@@ -82,12 +93,6 @@
 manual tests/7 vala composite widgets/mywidget.vala
 manual tests/8 timeout/meson.build
 manual tests/8 timeout/sleepprog.c
-manual tests/9 nostdlib/meson.build
-manual tests/9 nostdlib/prog.c
-manual tests/9 nostdlib/subprojects/mylibc/libc.c
-manual tests/9 nostdlib/subprojects/mylibc/meson.build
-manual tests/9 nostdlib/subprojects/mylibc/stdio.h
-manual tests/9 nostdlib/subprojects/mylibc/stubstart.s
 meson.egg-info/PKG-INFO
 meson.egg-info/SOURCES.txt
 meson.egg-info/dependency_links.txt
@@ -95,6 +100,8 @@
 meson.egg-info/requires.txt
 meson.egg-info/top_level.txt
 mesonbuild/__init__.py
+mesonbuild/_pathlib.py
+mesonbuild/arglist.py
 mesonbuild/build.py
 mesonbuild/coredata.py
 mesonbuild/depfile.py
@@ -103,8 +110,10 @@
 mesonbuild/interpreter.py
 mesonbuild/interpreterbase.py
 mesonbuild/linkers.py
+mesonbuild/mcompile.py
 mesonbuild/mconf.py
 mesonbuild/mdist.py
+mesonbuild/mesondata.py
 mesonbuild/mesonlib.py
 mesonbuild/mesonmain.py
 mesonbuild/minit.py
@@ -139,9 +148,9 @@
 mesonbuild/cmake/fileapi.py
 mesonbuild/cmake/generator.py
 mesonbuild/cmake/interpreter.py
+mesonbuild/cmake/toolchain.py
 mesonbuild/cmake/traceparser.py
 mesonbuild/cmake/data/preload.cmake
-mesonbuild/cmake/data/run_ctgt.py
 mesonbuild/compilers/__init__.py
 mesonbuild/compilers/c.py
 mesonbuild/compilers/c_function_attributes.py
@@ -159,9 +168,11 @@
 mesonbuild/compilers/vala.py
 mesonbuild/compilers/mixins/__init__.py
 mesonbuild/compilers/mixins/arm.py
+mesonbuild/compilers/mixins/c2000.py
 mesonbuild/compilers/mixins/ccrx.py
 mesonbuild/compilers/mixins/clang.py
 mesonbuild/compilers/mixins/clike.py
+mesonbuild/compilers/mixins/compcert.py
 mesonbuild/compilers/mixins/elbrus.py
 mesonbuild/compilers/mixins/emscripten.py
 mesonbuild/compilers/mixins/gnu.py
@@ -169,6 +180,7 @@
 mesonbuild/compilers/mixins/islinker.py
 mesonbuild/compilers/mixins/pgi.py
 mesonbuild/compilers/mixins/visualstudio.py
+mesonbuild/compilers/mixins/xc16.py
 mesonbuild/dependencies/__init__.py
 mesonbuild/dependencies/base.py
 mesonbuild/dependencies/boost.py
@@ -191,6 +203,7 @@
 mesonbuild/modules/gnome.py
 mesonbuild/modules/hotdoc.py
 mesonbuild/modules/i18n.py
+mesonbuild/modules/keyval.py
 mesonbuild/modules/modtest.py
 mesonbuild/modules/pkgconfig.py
 mesonbuild/modules/python.py
@@ -201,19 +214,21 @@
 mesonbuild/modules/rpm.py
 mesonbuild/modules/sourceset.py
 mesonbuild/modules/unstable_cuda.py
+mesonbuild/modules/unstable_external_project.py
 mesonbuild/modules/unstable_icestorm.py
-mesonbuild/modules/unstable_kconfig.py
 mesonbuild/modules/unstable_simd.py
 mesonbuild/modules/windows.py
 mesonbuild/scripts/__init__.py
 mesonbuild/scripts/clangformat.py
 mesonbuild/scripts/clangtidy.py
 mesonbuild/scripts/cleantrees.py
+mesonbuild/scripts/cmake_run_ctgt.py
 mesonbuild/scripts/commandrunner.py
 mesonbuild/scripts/coverage.py
 mesonbuild/scripts/delwithsuffix.py
 mesonbuild/scripts/depfixer.py
 mesonbuild/scripts/dirchanger.py
+mesonbuild/scripts/externalproject.py
 mesonbuild/scripts/gettext.py
 mesonbuild/scripts/gtkdochelper.py
 mesonbuild/scripts/hotdochelper.py
@@ -228,11 +243,18 @@
 mesonbuild/scripts/yelphelper.py
 mesonbuild/templates/__init__.py
 mesonbuild/templates/cpptemplates.py
+mesonbuild/templates/cstemplates.py
 mesonbuild/templates/ctemplates.py
+mesonbuild/templates/cudatemplates.py
 mesonbuild/templates/dlangtemplates.py
 mesonbuild/templates/fortrantemplates.py
+mesonbuild/templates/javatemplates.py
+mesonbuild/templates/mesontemplates.py
+mesonbuild/templates/objcpptemplates.py
 mesonbuild/templates/objctemplates.py
 mesonbuild/templates/rusttemplates.py
+mesonbuild/templates/samplefactory.py
+mesonbuild/templates/sampleimpl.py
 mesonbuild/wrap/__init__.py
 mesonbuild/wrap/wrap.py
 mesonbuild/wrap/wraptool.py
@@ -241,12 +263,16 @@
 test cases/cmake/1 basic/subprojects/cmMod/CMakeLists.txt
 test cases/cmake/1 basic/subprojects/cmMod/cmMod.cpp
 test cases/cmake/1 basic/subprojects/cmMod/cmMod.hpp
+test cases/cmake/1 basic/subprojects/cmMod/cpp_pch.hpp
 test cases/cmake/10 header only/main.cpp
 test cases/cmake/10 header only/meson.build
 test cases/cmake/10 header only/subprojects/cmMod/CMakeLists.txt
 test cases/cmake/10 header only/subprojects/cmMod/include/cmMod.hpp
 test cases/cmake/11 cmake_module_path/meson.build
+test cases/cmake/11 cmake_module_path/test.json
 test cases/cmake/11 cmake_module_path/cmake/FindSomethingLikePython.cmake
+test cases/cmake/11 cmake_module_path/subprojects/cmMod/CMakeLists.txt
+test cases/cmake/11 cmake_module_path/subprojects/cmMod/gen.py
 test cases/cmake/12 generator expressions/main.cpp
 test cases/cmake/12 generator expressions/meson.build
 test cases/cmake/12 generator expressions/subprojects/cmMod/CMakeLists.txt
@@ -269,22 +295,83 @@
 test cases/cmake/16 threads/main.cpp
 test cases/cmake/16 threads/meson.build
 test cases/cmake/16 threads/meson_options.txt
-test cases/cmake/16 threads/test_matrix.json
+test cases/cmake/16 threads/test.json
 test cases/cmake/16 threads/subprojects/cmMod/CMakeLists.txt
 test cases/cmake/16 threads/subprojects/cmMod/cmMod.cpp
 test cases/cmake/16 threads/subprojects/cmMod/cmMod.hpp
 test cases/cmake/16 threads/subprojects/cmMod/main.cpp
-test cases/cmake/2 advanced/installed_files.txt
+test cases/cmake/17 include path order/main.cpp
+test cases/cmake/17 include path order/meson.build
+test cases/cmake/17 include path order/subprojects/cmMod/CMakeLists.txt
+test cases/cmake/17 include path order/subprojects/cmMod/cmMod.cpp
+test cases/cmake/17 include path order/subprojects/cmMod/incA/cmMod.hpp
+test cases/cmake/17 include path order/subprojects/cmMod/incB/cmMod.hpp
+test cases/cmake/17 include path order/subprojects/cmMod/incC/cmMod.hpp
+test cases/cmake/17 include path order/subprojects/cmMod/incD/cmMod.hpp
+test cases/cmake/17 include path order/subprojects/cmMod/incE/cmMod.hpp
+test cases/cmake/17 include path order/subprojects/cmMod/incF/cmMod.hpp
+test cases/cmake/17 include path order/subprojects/cmMod/incG/cmMod.hpp
+test cases/cmake/17 include path order/subprojects/cmMod/incH/cmMod.hpp
+test cases/cmake/17 include path order/subprojects/cmMod/incI/cmMod.hpp
+test cases/cmake/17 include path order/subprojects/cmMod/incJ/cmMod.hpp
+test cases/cmake/17 include path order/subprojects/cmMod/incL/cmMod.hpp
+test cases/cmake/17 include path order/subprojects/cmMod/incM/cmMod.hpp
+test cases/cmake/17 include path order/subprojects/cmMod/incN/cmMod.hpp
+test cases/cmake/17 include path order/subprojects/cmMod/incO/cmMod.hpp
+test cases/cmake/17 include path order/subprojects/cmMod/incP/cmMod.hpp
+test cases/cmake/18 skip include files/main.cpp
+test cases/cmake/18 skip include files/meson.build
+test cases/cmake/18 skip include files/subprojects/cmMod/CMakeLists.txt
+test cases/cmake/18 skip include files/subprojects/cmMod/cmMod.cpp
+test cases/cmake/18 skip include files/subprojects/cmMod/cmMod.hpp
+test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/CMakeLists.txt
+test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc1.cpp
+test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc2.cpp
+test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc3.cpp
+test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc4.cpp
+test cases/cmake/19 advanced options/main.cpp
+test cases/cmake/19 advanced options/meson.build
+test cases/cmake/19 advanced options/test.json
+test cases/cmake/19 advanced options/subprojects/cmOpts/CMakeLists.txt
+test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.cpp
+test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.hpp
+test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.cpp
+test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.hpp
+test cases/cmake/19 advanced options/subprojects/cmOpts/main.cpp
 test cases/cmake/2 advanced/main.cpp
 test cases/cmake/2 advanced/meson.build
+test cases/cmake/2 advanced/test.json
 test cases/cmake/2 advanced/subprojects/cmMod/CMakeLists.txt
 test cases/cmake/2 advanced/subprojects/cmMod/config.h.in
 test cases/cmake/2 advanced/subprojects/cmMod/main.cpp
 test cases/cmake/2 advanced/subprojects/cmMod/lib/cmMod.cpp
 test cases/cmake/2 advanced/subprojects/cmMod/lib/cmMod.hpp
-test cases/cmake/3 advanced no dep/installed_files.txt
+test cases/cmake/20 cmake file/foolib.cmake.in
+test cases/cmake/20 cmake file/meson.build
+test cases/cmake/20 cmake file/test.json
+test cases/cmake/21 shared module/meson.build
+test cases/cmake/21 shared module/prog.c
+test cases/cmake/21 shared module/runtime.c
+test cases/cmake/21 shared module/subprojects/cmMod/CMakeLists.txt
+test cases/cmake/21 shared module/subprojects/cmMod/module/module.c
+test cases/cmake/21 shared module/subprojects/cmMod/module/module.h
+test cases/cmake/22 cmake module/meson.build
+test cases/cmake/22 cmake module/projectConfig.cmake.in
+test cases/cmake/22 cmake module/test.json
+test cases/cmake/22 cmake module/cmake_project/CMakeLists.txt
+test cases/cmake/23 cmake toolchain/CMakeToolchain.cmake
+test cases/cmake/23 cmake toolchain/meson.build
+test cases/cmake/23 cmake toolchain/nativefile.ini.in
+test cases/cmake/23 cmake toolchain/subprojects/cmMod/CMakeLists.txt
+test cases/cmake/24 mixing languages/main.c
+test cases/cmake/24 mixing languages/meson.build
+test cases/cmake/24 mixing languages/subprojects/cmTest/CMakeLists.txt
+test cases/cmake/24 mixing languages/subprojects/cmTest/cmTest.c
+test cases/cmake/24 mixing languages/subprojects/cmTest/cmTest.h
+test cases/cmake/24 mixing languages/subprojects/cmTest/cmTest.m
 test cases/cmake/3 advanced no dep/main.cpp
 test cases/cmake/3 advanced no dep/meson.build
+test cases/cmake/3 advanced no dep/test.json
 test cases/cmake/3 advanced no dep/subprojects/cmMod/CMakeLists.txt
 test cases/cmake/3 advanced no dep/subprojects/cmMod/config.h.in
 test cases/cmake/3 advanced no dep/subprojects/cmMod/main.cpp
@@ -310,6 +397,7 @@
 test cases/cmake/6 object library no dep/subprojects/cmObjLib/libB.cpp
 test cases/cmake/6 object library no dep/subprojects/cmObjLib/libB.hpp
 test cases/cmake/7 cmake options/meson.build
+test cases/cmake/7 cmake options/test.json
 test cases/cmake/7 cmake options/subprojects/cmOpts/CMakeLists.txt
 test cases/cmake/8 custom command/main.cpp
 test cases/cmake/8 custom command/meson.build
@@ -320,24 +408,26 @@
 test cases/cmake/8 custom command/subprojects/cmMod/cp.cpp
 test cases/cmake/8 custom command/subprojects/cmMod/cpyBase.cpp.am
 test cases/cmake/8 custom command/subprojects/cmMod/cpyBase.hpp.am
+test cases/cmake/8 custom command/subprojects/cmMod/cpyInc.hpp.am
 test cases/cmake/8 custom command/subprojects/cmMod/cpyNext.cpp.am
 test cases/cmake/8 custom command/subprojects/cmMod/cpyNext.hpp.am
 test cases/cmake/8 custom command/subprojects/cmMod/cpyTest.cpp
+test cases/cmake/8 custom command/subprojects/cmMod/genMain.cpp
 test cases/cmake/8 custom command/subprojects/cmMod/macro_name.cpp
-test cases/cmake/8 custom command/subprojects/cmMod/main.cpp
 test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/CMakeLists.txt
 test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest.hpp
 test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest2.hpp
 test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest3.hpp
 test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest4.hpp
+test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest5.hpp
 test cases/cmake/9 disabled subproject/meson.build
 test cases/common/1 trivial/meson.build
 test cases/common/1 trivial/trivial.c
 test cases/common/10 man install/bar.2
 test cases/common/10 man install/baz.1.in
 test cases/common/10 man install/foo.1
-test cases/common/10 man install/installed_files.txt
 test cases/common/10 man install/meson.build
+test cases/common/10 man install/test.json
 test cases/common/10 man install/vanishing/meson.build
 test cases/common/10 man install/vanishing/vanishing.1
 test cases/common/10 man install/vanishing/vanishing.2
@@ -347,10 +437,16 @@
 test cases/common/101 find program path/program.py
 test cases/common/102 subproject subdir/meson.build
 test cases/common/102 subproject subdir/prog.c
+test cases/common/102 subproject subdir/subprojects/sub_implicit.wrap
 test cases/common/102 subproject subdir/subprojects/sub/meson.build
 test cases/common/102 subproject subdir/subprojects/sub/lib/meson.build
 test cases/common/102 subproject subdir/subprojects/sub/lib/sub.c
 test cases/common/102 subproject subdir/subprojects/sub/lib/sub.h
+test cases/common/102 subproject subdir/subprojects/sub_implicit/meson.build
+test cases/common/102 subproject subdir/subprojects/sub_implicit/subprojects/subsub/meson.build
+test cases/common/102 subproject subdir/subprojects/sub_implicit/subprojects/subsub/subprojects/subsubsub.wrap
+test cases/common/102 subproject subdir/subprojects/sub_implicit/subprojects/subsub/subprojects/packagefiles/subsubsub-1.0.zip
+test cases/common/102 subproject subdir/subprojects/sub_novar/meson.build
 test cases/common/103 postconf/meson.build
 test cases/common/103 postconf/postconf.py
 test cases/common/103 postconf/prog.c
@@ -361,7 +457,7 @@
 test cases/common/104 postconf with args/raw.dat
 test cases/common/105 testframework options/meson.build
 test cases/common/105 testframework options/meson_options.txt
-test cases/common/105 testframework options/test_args.txt
+test cases/common/105 testframework options/test.json
 test cases/common/106 extract same name/lib.c
 test cases/common/106 extract same name/main.c
 test cases/common/106 extract same name/meson.build
@@ -389,9 +485,9 @@
 test cases/common/111 spaces backslash/include/comparer.h
 test cases/common/112 ternary/meson.build
 test cases/common/113 custom target capture/data_source.txt
-test cases/common/113 custom target capture/installed_files.txt
 test cases/common/113 custom target capture/meson.build
 test cases/common/113 custom target capture/my_compiler.py
+test cases/common/113 custom target capture/test.json
 test cases/common/114 allgenerate/converter.py
 test cases/common/114 allgenerate/foobar.cpp.in
 test cases/common/114 allgenerate/meson.build
@@ -412,10 +508,10 @@
 test cases/common/12 data/datafile.dat
 test cases/common/12 data/etcfile.dat
 test cases/common/12 data/fileobject_datafile.dat
-test cases/common/12 data/installed_files.txt
 test cases/common/12 data/meson.build
 test cases/common/12 data/runscript.sh
 test cases/common/12 data/somefile.txt
+test cases/common/12 data/test.json
 test cases/common/12 data/to_be_renamed_1.txt
 test cases/common/12 data/to_be_renamed_3.txt
 test cases/common/12 data/to_be_renamed_4.txt
@@ -425,15 +521,16 @@
 test cases/common/12 data/vanishing/vanishing2.dat
 test cases/common/120 test skip/meson.build
 test cases/common/120 test skip/test_skip.c
-test cases/common/121 shared module/installed_files.txt
 test cases/common/121 shared module/meson.build
 test cases/common/121 shared module/module.c
 test cases/common/121 shared module/nosyms.c
 test cases/common/121 shared module/prog.c
 test cases/common/121 shared module/runtime.c
+test cases/common/121 shared module/test.json
 test cases/common/122 llvm ir and assembly/main.c
 test cases/common/122 llvm ir and assembly/main.cpp
 test cases/common/122 llvm ir and assembly/meson.build
+test cases/common/122 llvm ir and assembly/square-aarch64.S
 test cases/common/122 llvm ir and assembly/square-arm.S
 test cases/common/122 llvm ir and assembly/square-x86.S
 test cases/common/122 llvm ir and assembly/square-x86_64.S
@@ -453,7 +550,6 @@
 test cases/common/124 extract all shared library/prog.c
 test cases/common/124 extract all shared library/three.c
 test cases/common/124 extract all shared library/two.c
-test cases/common/125 object only target/installed_files.txt
 test cases/common/125 object only target/meson.build
 test cases/common/125 object only target/obj_generator.py
 test cases/common/125 object only target/prog.c
@@ -461,12 +557,13 @@
 test cases/common/125 object only target/source2.c
 test cases/common/125 object only target/source2.def
 test cases/common/125 object only target/source3.c
+test cases/common/125 object only target/test.json
 test cases/common/126 no buildincdir/meson.build
 test cases/common/126 no buildincdir/prog.c
 test cases/common/126 no buildincdir/include/header.h
 test cases/common/127 custom target directory install/docgen.py
-test cases/common/127 custom target directory install/installed_files.txt
 test cases/common/127 custom target directory install/meson.build
+test cases/common/127 custom target directory install/test.json
 test cases/common/128 dependency file generation/main .c
 test cases/common/128 dependency file generation/meson.build
 test cases/common/129 configure file in generator/meson.build
@@ -586,7 +683,6 @@
 test cases/common/14 configure file/generator-deps.py
 test cases/common/14 configure file/generator-without-input-file.py
 test cases/common/14 configure file/generator.py
-test cases/common/14 configure file/installed_files.txt
 test cases/common/14 configure file/invalid-utf8.bin.in
 test cases/common/14 configure file/meson.build
 test cases/common/14 configure file/nosubst-nocopy1.txt.in
@@ -600,6 +696,7 @@
 test cases/common/14 configure file/prog9.c
 test cases/common/14 configure file/sameafterbasename.in
 test cases/common/14 configure file/sameafterbasename.in2
+test cases/common/14 configure file/test.json
 test cases/common/14 configure file/test.py.in
 test cases/common/14 configure file/touch.py
 test cases/common/14 configure file/subdir/meson.build
@@ -634,11 +731,14 @@
 test cases/common/143 mesonintrospect from scripts/check_introspection.py
 test cases/common/143 mesonintrospect from scripts/meson.build
 test cases/common/144 custom target multiple outputs/generator.py
-test cases/common/144 custom target multiple outputs/installed_files.txt
 test cases/common/144 custom target multiple outputs/meson.build
+test cases/common/144 custom target multiple outputs/test.json
+test cases/common/145 special characters/arg-char-test.c
+test cases/common/145 special characters/arg-string-test.c
+test cases/common/145 special characters/arg-unquoted-test.c
 test cases/common/145 special characters/check_quoting.py
-test cases/common/145 special characters/installed_files.txt
 test cases/common/145 special characters/meson.build
+test cases/common/145 special characters/test.json
 test cases/common/146 nested links/meson.build
 test cases/common/146 nested links/xephyr.c
 test cases/common/147 list of file sources/foo
@@ -743,16 +843,27 @@
 test cases/common/156 index customtarget/subdir/meson.build
 test cases/common/157 wrap file should not failed/meson.build
 test cases/common/157 wrap file should not failed/src/meson.build
+test cases/common/157 wrap file should not failed/src/test.c
 test cases/common/157 wrap file should not failed/src/subprojects/prog.c
 test cases/common/157 wrap file should not failed/src/subprojects/foo/prog2.c
+test cases/common/157 wrap file should not failed/subprojects/.gitignore
+test cases/common/157 wrap file should not failed/subprojects/bar.wrap
 test cases/common/157 wrap file should not failed/subprojects/foo.wrap
+test cases/common/157 wrap file should not failed/subprojects/patchdir.wrap
 test cases/common/157 wrap file should not failed/subprojects/zlib.wrap
+test cases/common/157 wrap file should not failed/subprojects/bar-1.0/bar.c
+test cases/common/157 wrap file should not failed/subprojects/bar-1.0/meson.build
 test cases/common/157 wrap file should not failed/subprojects/foo-1.0/foo.c
 test cases/common/157 wrap file should not failed/subprojects/foo-1.0/meson.build
+test cases/common/157 wrap file should not failed/subprojects/foo-1.0-patchdir/foo.c
+test cases/common/157 wrap file should not failed/subprojects/foo-1.0-patchdir/meson.build
 test cases/common/157 wrap file should not failed/subprojects/packagecache/foo-1.0-patch.tar.xz
 test cases/common/157 wrap file should not failed/subprojects/packagecache/foo-1.0.tar.xz
 test cases/common/157 wrap file should not failed/subprojects/packagecache/zlib-1.2.8-8-wrap.zip
 test cases/common/157 wrap file should not failed/subprojects/packagecache/zlib-1.2.8.tar.gz
+test cases/common/157 wrap file should not failed/subprojects/packagefiles/bar-1.0-patch.tar.xz
+test cases/common/157 wrap file should not failed/subprojects/packagefiles/bar-1.0.tar.xz
+test cases/common/157 wrap file should not failed/subprojects/packagefiles/foo-1.0/meson.build
 test cases/common/157 wrap file should not failed/subprojects/zlib-1.2.8/foo.c
 test cases/common/157 wrap file should not failed/subprojects/zlib-1.2.8/meson.build
 test cases/common/158 includedir subproj/meson.build
@@ -775,10 +886,6 @@
 test cases/common/161 custom target subdir depend files/subdir/dep.dat
 test cases/common/161 custom target subdir depend files/subdir/foo.c.in
 test cases/common/161 custom target subdir depend files/subdir/meson.build
-test cases/common/162 external program shebang parsing/input.txt
-test cases/common/162 external program shebang parsing/main.c
-test cases/common/162 external program shebang parsing/meson.build
-test cases/common/162 external program shebang parsing/script.int.in
 test cases/common/163 disabler/meson.build
 test cases/common/164 array option/meson.build
 test cases/common/164 array option/meson_options.txt
@@ -828,7 +935,7 @@
 test cases/common/175 generator link whole/meson_test_function.tmpl
 test cases/common/175 generator link whole/pull_meson_test_function.c
 test cases/common/176 initial c_args/meson.build
-test cases/common/176 initial c_args/test_args.txt
+test cases/common/176 initial c_args/test.json
 test cases/common/177 identical target name in subproject flat layout/foo.c
 test cases/common/177 identical target name in subproject flat layout/main.c
 test cases/common/177 identical target name in subproject flat layout/meson.build
@@ -875,6 +982,8 @@
 test cases/common/187 find override/subdir/converter.py
 test cases/common/187 find override/subdir/gencodegen.py.in
 test cases/common/187 find override/subdir/meson.build
+test cases/common/187 find override/subprojects/sub.wrap
+test cases/common/187 find override/subprojects/sub/meson.build
 test cases/common/188 partial dependency/meson.build
 test cases/common/188 partial dependency/declare_dependency/main.c
 test cases/common/188 partial dependency/declare_dependency/meson.build
@@ -906,11 +1015,11 @@
 test cases/common/195 install_mode/config.h.in
 test cases/common/195 install_mode/data_source.txt
 test cases/common/195 install_mode/foo.1
-test cases/common/195 install_mode/installed_files.txt
 test cases/common/195 install_mode/meson.build
 test cases/common/195 install_mode/rootdir.h
 test cases/common/195 install_mode/runscript.sh
 test cases/common/195 install_mode/stat.c
+test cases/common/195 install_mode/test.json
 test cases/common/195 install_mode/trivial.c
 test cases/common/195 install_mode/sub1/second.dat
 test cases/common/195 install_mode/sub2/stub
@@ -935,10 +1044,6 @@
 test cases/common/200 generator in subdir/com/mesonbuild/meson.build
 test cases/common/200 generator in subdir/com/mesonbuild/subbie.inp
 test cases/common/200 generator in subdir/com/mesonbuild/testprog.c
-test cases/common/201 override with exe/main2.input
-test cases/common/201 override with exe/meson.build
-test cases/common/201 override with exe/subprojects/sub/foobar.c
-test cases/common/201 override with exe/subprojects/sub/meson.build
 test cases/common/202 subproject with features/meson.build
 test cases/common/202 subproject with features/meson_options.txt
 test cases/common/202 subproject with features/nothing.c
@@ -952,41 +1057,45 @@
 test cases/common/202 subproject with features/subprojects/sub/lib/sub.c
 test cases/common/202 subproject with features/subprojects/sub/lib/sub.h
 test cases/common/203 function attributes/meson.build
+test cases/common/203 function attributes/meson_options.txt
+test cases/common/203 function attributes/test.json
 test cases/common/204 broken subproject/meson.build
 test cases/common/204 broken subproject/subprojects/broken/broken.c
 test cases/common/204 broken subproject/subprojects/broken/meson.build
 test cases/common/205 argument syntax/meson.build
-test cases/common/206 install name_prefix name_suffix/installed_files.txt
 test cases/common/206 install name_prefix name_suffix/libfile.c
 test cases/common/206 install name_prefix name_suffix/meson.build
-test cases/common/207 kwarg entry/installed_files.txt
+test cases/common/206 install name_prefix name_suffix/test.json
 test cases/common/207 kwarg entry/meson.build
 test cases/common/207 kwarg entry/prog.c
+test cases/common/207 kwarg entry/test.json
 test cases/common/207 kwarg entry/inc/prog.h
 test cases/common/208 custom target build by default/docgen.py
-test cases/common/208 custom target build by default/installed_files.txt
 test cases/common/208 custom target build by default/meson.build
+test cases/common/208 custom target build by default/test.json
 test cases/common/209 find_library and headers/foo.h
 test cases/common/209 find_library and headers/meson.build
 test cases/common/21 global arg/meson.build
 test cases/common/21 global arg/prog.c
 test cases/common/21 global arg/prog.cc
 test cases/common/210 line continuation/meson.build
-test cases/common/211 cmake module/installed_files.txt
-test cases/common/211 cmake module/meson.build
-test cases/common/211 cmake module/projectConfig.cmake.in
-test cases/common/211 cmake module/cmake_project/CMakeLists.txt
-test cases/common/212 native file path override/installed_files.txt
 test cases/common/212 native file path override/main.cpp
 test cases/common/212 native file path override/meson.build
 test cases/common/212 native file path override/nativefile.ini
+test cases/common/212 native file path override/test.json
+test cases/common/213 tap tests/cat.c
+test cases/common/213 tap tests/issue7515.txt
 test cases/common/213 tap tests/meson.build
 test cases/common/213 tap tests/tester.c
 test cases/common/214 warning level 0/main.cpp
 test cases/common/214 warning level 0/meson.build
 test cases/common/215 link custom/custom_stlib.py
+test cases/common/215 link custom/custom_target.c
+test cases/common/215 link custom/custom_target.py
+test cases/common/215 link custom/dummy.c
 test cases/common/215 link custom/lib.c
 test cases/common/215 link custom/meson.build
+test cases/common/215 link custom/outerlib.c
 test cases/common/215 link custom/prog.c
 test cases/common/216 link custom_i single from multiple/generate_conflicting_stlibs.py
 test cases/common/216 link custom_i single from multiple/meson.build
@@ -1056,9 +1165,10 @@
 test cases/common/225 include_dir dot/src/main.c
 test cases/common/225 include_dir dot/src/meson.build
 test cases/common/225 include_dir dot/src/rone.c
+test cases/common/226 include_type dependency/main.cpp
 test cases/common/226 include_type dependency/meson.build
+test cases/common/226 include_type dependency/pch/test.hpp
 test cases/common/226 include_type dependency/subprojects/subDep/meson.build
-test cases/common/227 fs module/a_symlink
 test cases/common/227 fs module/meson.build
 test cases/common/227 fs module/subdir/meson.build
 test cases/common/227 fs module/subdir/subdirfile.txt
@@ -1066,35 +1176,66 @@
 test cases/common/227 fs module/subprojects/subbie/subprojectfile.txt
 test cases/common/227 fs module/subprojects/subbie/subsub/meson.build
 test cases/common/227 fs module/subprojects/subbie/subsub/subsubfile.txt
+test cases/common/228 zlib/meson.build
+test cases/common/229 native prop/crossfile.ini
+test cases/common/229 native prop/meson.build
+test cases/common/229 native prop/nativefile.ini
 test cases/common/23 object extraction/lib.c
 test cases/common/23 object extraction/lib2.c
 test cases/common/23 object extraction/main.c
 test cases/common/23 object extraction/meson.build
 test cases/common/23 object extraction/src/lib.c
+test cases/common/230 persubproject options/foo.c
+test cases/common/230 persubproject options/meson.build
+test cases/common/230 persubproject options/subprojects/sub1/foo.c
+test cases/common/230 persubproject options/subprojects/sub1/meson.build
+test cases/common/230 persubproject options/subprojects/sub2/foo.c
+test cases/common/230 persubproject options/subprojects/sub2/meson.build
+test cases/common/231 arithmetic operators/meson.build
+test cases/common/232 link language/c_linkage.cpp
+test cases/common/232 link language/c_linkage.h
+test cases/common/232 link language/lib.cpp
+test cases/common/232 link language/main.c
+test cases/common/232 link language/meson.build
+test cases/common/233 link depends indexed custom target/check_arch.py
+test cases/common/233 link depends indexed custom target/foo.c
+test cases/common/233 link depends indexed custom target/make_file.py
+test cases/common/233 link depends indexed custom target/meson.build
+test cases/common/234 very long commmand line/codegen.py
+test cases/common/234 very long commmand line/main.c
+test cases/common/234 very long commmand line/meson.build
+test cases/common/234 very long commmand line/name_gen.py
+test cases/common/235 custom_target source/a
+test cases/common/235 custom_target source/meson.build
+test cases/common/235 custom_target source/x.py
+test cases/common/235 disabler array addition/meson.build
+test cases/common/235 disabler array addition/test.c
+test cases/common/236 external project/app.c
+test cases/common/236 external project/func.c
+test cases/common/236 external project/func.h
+test cases/common/236 external project/meson.build
+test cases/common/236 external project/test.json
+test cases/common/236 external project/libfoo/configure
+test cases/common/236 external project/libfoo/libfoo.c
+test cases/common/236 external project/libfoo/libfoo.h
+test cases/common/236 external project/libfoo/meson.build
+test cases/common/237 subdir files/meson.build
+test cases/common/237 subdir files/subdir/meson.build
+test cases/common/237 subdir files/subdir/prog.c
+test cases/common/239 dependency allow_fallback/meson.build
+test cases/common/239 dependency allow_fallback/subprojects/foob/meson.build
+test cases/common/239 dependency allow_fallback/subprojects/foob3/meson.build
 test cases/common/24 endian/meson.build
 test cases/common/24 endian/prog.c
-test cases/common/25 library versions/installed_files.txt
 test cases/common/25 library versions/lib.c
 test cases/common/25 library versions/meson.build
+test cases/common/25 library versions/test.json
 test cases/common/25 library versions/subdir/meson.build
 test cases/common/26 config subdir/meson.build
 test cases/common/26 config subdir/include/config.h.in
 test cases/common/26 config subdir/include/meson.build
 test cases/common/26 config subdir/src/meson.build
 test cases/common/26 config subdir/src/prog.c
-test cases/common/27 pipeline/input_src.dat
-test cases/common/27 pipeline/meson.build
-test cases/common/27 pipeline/prog.c
-test cases/common/27 pipeline/srcgen.c
-test cases/common/27 pipeline/depends/copyrunner.py
-test cases/common/27 pipeline/depends/filecopier.c
-test cases/common/27 pipeline/depends/libsrc.c.in
-test cases/common/27 pipeline/depends/meson.build
-test cases/common/27 pipeline/depends/prog.c
-test cases/common/27 pipeline/src/input_src.dat
-test cases/common/27 pipeline/src/meson.build
-test cases/common/27 pipeline/src/prog.c
-test cases/common/27 pipeline/src/srcgen.c
 test cases/common/28 find program/meson.build
 test cases/common/28 find program/print-version-with-prefix.py
 test cases/common/28 find program/print-version.py
@@ -1121,10 +1262,6 @@
 test cases/common/35 run program/meson.build
 test cases/common/35 run program/scripts/hello.bat
 test cases/common/35 run program/scripts/hello.sh
-test cases/common/36 tryrun/error.c
-test cases/common/36 tryrun/meson.build
-test cases/common/36 tryrun/no_compile.c
-test cases/common/36 tryrun/ok.c
 test cases/common/37 logic ops/meson.build
 test cases/common/38 string operations/meson.build
 test cases/common/39 has function/meson.build
@@ -1132,9 +1269,9 @@
 test cases/common/4 shared/meson.build
 test cases/common/40 has member/meson.build
 test cases/common/41 alignment/meson.build
-test cases/common/42 library chain/installed_files.txt
 test cases/common/42 library chain/main.c
 test cases/common/42 library chain/meson.build
+test cases/common/42 library chain/test.json
 test cases/common/42 library chain/subdir/lib1.c
 test cases/common/42 library chain/subdir/meson.build
 test cases/common/42 library chain/subdir/subdir2/lib2.c
@@ -1151,8 +1288,8 @@
 test cases/common/44 test args/tester.c
 test cases/common/44 test args/tester.py
 test cases/common/44 test args/testfile.txt
-test cases/common/45 subproject/installed_files.txt
 test cases/common/45 subproject/meson.build
+test cases/common/45 subproject/test.json
 test cases/common/45 subproject/user.c
 test cases/common/45 subproject/subprojects/sublib/meson.build
 test cases/common/45 subproject/subprojects/sublib/simpletest.c
@@ -1162,20 +1299,22 @@
 test cases/common/46 subproject options/meson_options.txt
 test cases/common/46 subproject options/subprojects/subproject/meson.build
 test cases/common/46 subproject options/subprojects/subproject/meson_options.txt
-test cases/common/47 pkgconfig-gen/installed_files.txt
 test cases/common/47 pkgconfig-gen/meson.build
 test cases/common/47 pkgconfig-gen/simple.c
 test cases/common/47 pkgconfig-gen/simple.h
+test cases/common/47 pkgconfig-gen/simple5.c
+test cases/common/47 pkgconfig-gen/test.json
 test cases/common/47 pkgconfig-gen/dependencies/custom.c
 test cases/common/47 pkgconfig-gen/dependencies/exposed.c
 test cases/common/47 pkgconfig-gen/dependencies/internal.c
+test cases/common/47 pkgconfig-gen/dependencies/main.c
 test cases/common/47 pkgconfig-gen/dependencies/meson.build
 test cases/common/48 custom install dirs/datafile.cat
-test cases/common/48 custom install dirs/installed_files.txt
 test cases/common/48 custom install dirs/meson.build
 test cases/common/48 custom install dirs/prog.1
 test cases/common/48 custom install dirs/prog.c
 test cases/common/48 custom install dirs/sample.h
+test cases/common/48 custom install dirs/test.json
 test cases/common/48 custom install dirs/subdir/datafile.dog
 test cases/common/49 subproject subproject/meson.build
 test cases/common/49 subproject subproject/prog.c
@@ -1208,16 +1347,16 @@
 test cases/common/51 file grabber/subdir/subc.c
 test cases/common/51 file grabber/subdir/subprog.c
 test cases/common/52 custom target/data_source.txt
-test cases/common/52 custom target/installed_files.txt
 test cases/common/52 custom target/meson.build
 test cases/common/52 custom target/my_compiler.py
+test cases/common/52 custom target/test.json
 test cases/common/52 custom target/depfile/dep.py
 test cases/common/52 custom target/depfile/meson.build
 test cases/common/53 custom target chain/data_source.txt
-test cases/common/53 custom target chain/installed_files.txt
 test cases/common/53 custom target chain/meson.build
 test cases/common/53 custom target chain/my_compiler.py
 test cases/common/53 custom target chain/my_compiler2.py
+test cases/common/53 custom target chain/test.json
 test cases/common/53 custom target chain/usetarget/meson.build
 test cases/common/53 custom target chain/usetarget/myexe.c
 test cases/common/53 custom target chain/usetarget/subcomp.py
@@ -1233,11 +1372,12 @@
 test cases/common/55 object generator/source.c
 test cases/common/55 object generator/source2.c
 test cases/common/55 object generator/source3.c
-test cases/common/56 install script/installed_files.txt
+test cases/common/56 install script/customtarget.py
 test cases/common/56 install script/meson.build
 test cases/common/56 install script/myinstall.py
-test cases/common/56 install script/no-installed-files
 test cases/common/56 install script/prog.c
+test cases/common/56 install script/test.json
+test cases/common/56 install script/src/a file.txt
 test cases/common/56 install script/src/meson.build
 test cases/common/56 install script/src/myinstall.py
 test cases/common/57 custom target source output/generator.py
@@ -1254,10 +1394,10 @@
 test cases/common/59 array methods/meson.build
 test cases/common/6 linkshared/cpplib.cpp
 test cases/common/6 linkshared/cppmain.cpp
-test cases/common/6 linkshared/installed_files.txt
 test cases/common/6 linkshared/libfile.c
 test cases/common/6 linkshared/main.c
 test cases/common/6 linkshared/meson.build
+test cases/common/6 linkshared/test.json
 test cases/common/60 custom header generator/input.def
 test cases/common/60 custom header generator/makeheader.py
 test cases/common/60 custom header generator/meson.build
@@ -1269,8 +1409,8 @@
 test cases/common/61 multiple generators/mygen.py
 test cases/common/61 multiple generators/subdir/data.dat
 test cases/common/61 multiple generators/subdir/meson.build
-test cases/common/62 install subdir/installed_files.txt
 test cases/common/62 install subdir/meson.build
+test cases/common/62 install subdir/test.json
 test cases/common/62 install subdir/nested_elided/sub/eighth.dat
 test cases/common/62 install subdir/nested_elided/sub/dircheck/ninth.dat
 test cases/common/62 install subdir/sub/sub1/third.dat
@@ -1286,11 +1426,11 @@
 test cases/common/62 install subdir/subdir/sub1/sub2/data2.dat
 test cases/common/62 install subdir/subdir/sub_elided/sixth.dat
 test cases/common/62 install subdir/subdir/sub_elided/dircheck/seventh.dat
-test cases/common/63 foreach/installed_files.txt
 test cases/common/63 foreach/meson.build
 test cases/common/63 foreach/prog1.c
 test cases/common/63 foreach/prog2.c
 test cases/common/63 foreach/prog3.c
+test cases/common/63 foreach/test.json
 test cases/common/64 number arithmetic/meson.build
 test cases/common/65 string arithmetic/meson.build
 test cases/common/66 array arithmetic/meson.build
@@ -1348,10 +1488,10 @@
 test cases/common/78 custom subproject dir/custom_subproject_dir/C/c.c
 test cases/common/78 custom subproject dir/custom_subproject_dir/C/meson.build
 test cases/common/79 has type/meson.build
-test cases/common/8 install/installed_files.txt
 test cases/common/8 install/meson.build
 test cases/common/8 install/prog.c
 test cases/common/8 install/stat.c
+test cases/common/8 install/test.json
 test cases/common/80 extract from nested subdir/meson.build
 test cases/common/80 extract from nested subdir/src/meson.build
 test cases/common/80 extract from nested subdir/src/first/lib_first.c
@@ -1404,15 +1544,17 @@
 test cases/common/89 private include/stlib/meson.build
 test cases/common/89 private include/user/libuser.c
 test cases/common/89 private include/user/meson.build
-test cases/common/9 header install/installed_files.txt
 test cases/common/9 header install/meson.build
 test cases/common/9 header install/rootdir.h
 test cases/common/9 header install/subdir.h
+test cases/common/9 header install/test.json
 test cases/common/9 header install/sub/fileheader.h
 test cases/common/9 header install/sub/meson.build
 test cases/common/9 header install/vanishing_subdir/meson.build
 test cases/common/9 header install/vanishing_subdir/vanished.h
 test cases/common/90 default options/meson.build
+test cases/common/90 default options/subprojects/sub1/meson.build
+test cases/common/90 default options/subprojects/sub1/meson_options.txt
 test cases/common/91 dep fallback/gensrc.py
 test cases/common/91 dep fallback/meson.build
 test cases/common/91 dep fallback/tester.c
@@ -1425,10 +1567,6 @@
 test cases/common/92 default library/ef.h
 test cases/common/92 default library/eftest.cpp
 test cases/common/92 default library/meson.build
-test cases/common/93 selfbuilt custom/data.dat
-test cases/common/93 selfbuilt custom/mainprog.cpp
-test cases/common/93 selfbuilt custom/meson.build
-test cases/common/93 selfbuilt custom/tool.cpp
 test cases/common/94 gen extra/meson.build
 test cases/common/94 gen extra/name.dat
 test cases/common/94 gen extra/name.l
@@ -1457,21 +1595,21 @@
 test cases/common/99 manygen/subdir/funcinfo.def
 test cases/common/99 manygen/subdir/manygen.py
 test cases/common/99 manygen/subdir/meson.build
-test cases/csharp/1 basic/installed_files.txt
 test cases/csharp/1 basic/meson.build
 test cases/csharp/1 basic/prog.cs
+test cases/csharp/1 basic/test.json
 test cases/csharp/1 basic/text.cs
 test cases/csharp/2 library/helper.cs
-test cases/csharp/2 library/installed_files.txt
 test cases/csharp/2 library/meson.build
 test cases/csharp/2 library/prog.cs
+test cases/csharp/2 library/test.json
 test cases/csharp/3 resource/TestRes.resx
 test cases/csharp/3 resource/meson.build
 test cases/csharp/3 resource/resprog.cs
 test cases/csharp/4 external dep/hello.txt
-test cases/csharp/4 external dep/installed_files.txt
 test cases/csharp/4 external dep/meson.build
 test cases/csharp/4 external dep/prog.cs
+test cases/csharp/4 external dep/test.json
 test cases/cuda/1 simple/meson.build
 test cases/cuda/1 simple/prog.cu
 test cases/cuda/10 cuda dependency/meson.build
@@ -1491,6 +1629,10 @@
 test cases/cuda/12 cuda dependency (mixed)/kernel.cu
 test cases/cuda/12 cuda dependency (mixed)/meson.build
 test cases/cuda/12 cuda dependency (mixed)/prog.cpp
+test cases/cuda/13 cuda compiler setting/meson.build
+test cases/cuda/13 cuda compiler setting/nativefile.ini
+test cases/cuda/13 cuda compiler setting/prog.cu
+test cases/cuda/14 cuda has header symbol/meson.build
 test cases/cuda/2 split/lib.cu
 test cases/cuda/2 split/main.cpp
 test cases/cuda/2 split/meson.build
@@ -1519,8 +1661,8 @@
 test cases/cuda/9 optimize for space/main.cu
 test cases/cuda/9 optimize for space/meson.build
 test cases/d/1 simple/app.d
-test cases/d/1 simple/installed_files.txt
 test cases/d/1 simple/meson.build
+test cases/d/1 simple/test.json
 test cases/d/1 simple/utils.d
 test cases/d/10 d cpp/cppmain.cpp
 test cases/d/10 d cpp/dmain.d
@@ -1530,32 +1672,35 @@
 test cases/d/11 dub/meson.build
 test cases/d/11 dub/test.d
 test cases/d/2 static library/app.d
-test cases/d/2 static library/installed_files.txt
 test cases/d/2 static library/libstuff.d
 test cases/d/2 static library/meson.build
+test cases/d/2 static library/test.json
 test cases/d/3 shared library/app.d
-test cases/d/3 shared library/installed_files.txt
 test cases/d/3 shared library/libstuff.d
 test cases/d/3 shared library/libstuff.di
+test cases/d/3 shared library/lld-test.py
 test cases/d/3 shared library/meson.build
-test cases/d/4 library versions/installed_files.txt
+test cases/d/3 shared library/test.json
+test cases/d/3 shared library/sub/libstuff.d
+test cases/d/3 shared library/sub/meson.build
 test cases/d/4 library versions/lib.d
 test cases/d/4 library versions/meson.build
+test cases/d/4 library versions/test.json
 test cases/d/5 mixed/app.d
-test cases/d/5 mixed/installed_files.txt
 test cases/d/5 mixed/libstuff.c
 test cases/d/5 mixed/meson.build
+test cases/d/5 mixed/test.json
 test cases/d/6 unittest/app.d
-test cases/d/6 unittest/installed_files.txt
 test cases/d/6 unittest/meson.build
 test cases/d/6 unittest/second_unit.d
+test cases/d/6 unittest/test.json
 test cases/d/7 multilib/app.d
-test cases/d/7 multilib/installed_files.txt
 test cases/d/7 multilib/meson.build
 test cases/d/7 multilib/say1.d
 test cases/d/7 multilib/say1.di
 test cases/d/7 multilib/say2.d
 test cases/d/7 multilib/say2.di
+test cases/d/7 multilib/test.json
 test cases/d/8 has multi arguments/meson.build
 test cases/d/9 features/app.d
 test cases/d/9 features/extra.d
@@ -1580,6 +1725,7 @@
 test cases/failing build/4 cmake subproject isolation/subprojects/cmMod/CMakeLists.txt
 test cases/failing build/4 cmake subproject isolation/subprojects/cmMod/cmMod.cpp
 test cases/failing build/4 cmake subproject isolation/subprojects/cmMod/cmMod.hpp
+test cases/failing build/5 failed pickled/meson.build
 test cases/failing test/1 trivial/main.c
 test cases/failing test/1 trivial/meson.build
 test cases/failing test/2 signal/main.c
@@ -1595,124 +1741,223 @@
 test cases/failing test/6 xpass/xpass.c
 test cases/failing/1 project not first/meson.build
 test cases/failing/1 project not first/prog.c
+test cases/failing/1 project not first/test.json
 test cases/failing/10 out of bounds/meson.build
+test cases/failing/10 out of bounds/test.json
+test cases/failing/100 fallback consistency/meson.build
+test cases/failing/100 fallback consistency/test.json
+test cases/failing/100 fallback consistency/subprojects/sub/meson.build
+test cases/failing/101 no native compiler/main.c
+test cases/failing/101 no native compiler/meson.build
+test cases/failing/101 no native compiler/test.json
+test cases/failing/102 subdir parse error/meson.build
+test cases/failing/102 subdir parse error/test.json
+test cases/failing/102 subdir parse error/subdir/meson.build
+test cases/failing/103 invalid option file/meson.build
+test cases/failing/103 invalid option file/meson_options.txt
+test cases/failing/103 invalid option file/test.json
+test cases/failing/104 no lang/main.c
+test cases/failing/104 no lang/meson.build
+test cases/failing/104 no lang/test.json
+test cases/failing/105 no glib-compile-resources/meson.build
+test cases/failing/105 no glib-compile-resources/test.json
+test cases/failing/105 no glib-compile-resources/trivial.gresource.xml
+test cases/failing/106 fallback consistency/meson.build
+test cases/failing/106 fallback consistency/test.json
+test cases/failing/106 fallback consistency/subprojects/foo.wrap
+test cases/failing/106 fallback consistency/subprojects/foo/meson.build
+test cases/failing/107 number in combo/meson.build
+test cases/failing/107 number in combo/nativefile.ini
+test cases/failing/107 number in combo/test.json
+test cases/failing/108 bool in combo/meson.build
+test cases/failing/108 bool in combo/meson_options.txt
+test cases/failing/108 bool in combo/nativefile.ini
+test cases/failing/108 bool in combo/test.json
+test cases/failing/109 compiler no lang/meson.build
+test cases/failing/109 compiler no lang/test.json
 test cases/failing/11 object arithmetic/meson.build
+test cases/failing/11 object arithmetic/test.json
+test cases/failing/110 no fallback/meson.build
+test cases/failing/110 no fallback/test.json
+test cases/failing/110 no fallback/subprojects/foob/meson.build
 test cases/failing/12 string arithmetic/meson.build
+test cases/failing/12 string arithmetic/test.json
 test cases/failing/13 array arithmetic/meson.build
+test cases/failing/13 array arithmetic/test.json
 test cases/failing/14 invalid option name/meson.build
 test cases/failing/14 invalid option name/meson_options.txt
+test cases/failing/14 invalid option name/test.json
 test cases/failing/15 kwarg before arg/meson.build
 test cases/failing/15 kwarg before arg/prog.c
+test cases/failing/15 kwarg before arg/test.json
 test cases/failing/16 extract from subproject/main.c
 test cases/failing/16 extract from subproject/meson.build
+test cases/failing/16 extract from subproject/test.json
 test cases/failing/16 extract from subproject/subprojects/sub_project/meson.build
 test cases/failing/16 extract from subproject/subprojects/sub_project/sub_lib.c
 test cases/failing/17 same target/file.c
 test cases/failing/17 same target/meson.build
+test cases/failing/17 same target/test.json
 test cases/failing/18 wrong plusassign/meson.build
+test cases/failing/18 wrong plusassign/test.json
 test cases/failing/19 target clash/clash.c
 test cases/failing/19 target clash/meson.build
+test cases/failing/19 target clash/test.json
 test cases/failing/2 missing file/meson.build
+test cases/failing/2 missing file/test.json
 test cases/failing/20 version/meson.build
+test cases/failing/20 version/test.json
 test cases/failing/21 subver/meson.build
+test cases/failing/21 subver/test.json
 test cases/failing/21 subver/subprojects/foo/meson.build
 test cases/failing/22 assert/meson.build
+test cases/failing/22 assert/test.json
 test cases/failing/23 rel testdir/meson.build
 test cases/failing/23 rel testdir/simple.c
+test cases/failing/23 rel testdir/test.json
 test cases/failing/24 int conversion/meson.build
+test cases/failing/24 int conversion/test.json
 test cases/failing/25 badlang/meson.build
+test cases/failing/25 badlang/test.json
 test cases/failing/26 output subdir/foo.in
 test cases/failing/26 output subdir/meson.build
+test cases/failing/26 output subdir/test.json
 test cases/failing/26 output subdir/subdir/dummy.txt
 test cases/failing/27 noprog use/meson.build
+test cases/failing/27 noprog use/test.json
 test cases/failing/28 no crossprop/meson.build
+test cases/failing/28 no crossprop/test.json
 test cases/failing/29 nested ternary/meson.build
+test cases/failing/29 nested ternary/test.json
 test cases/failing/3 missing subdir/meson.build
+test cases/failing/3 missing subdir/test.json
+test cases/failing/30 invalid man extension/foo.a1
 test cases/failing/30 invalid man extension/meson.build
+test cases/failing/30 invalid man extension/test.json
+test cases/failing/31 no man extension/foo
 test cases/failing/31 no man extension/meson.build
+test cases/failing/31 no man extension/test.json
 test cases/failing/32 exe static shared/meson.build
 test cases/failing/32 exe static shared/prog.c
 test cases/failing/32 exe static shared/shlib2.c
 test cases/failing/32 exe static shared/stat.c
+test cases/failing/32 exe static shared/test.json
 test cases/failing/33 non-root subproject/meson.build
+test cases/failing/33 non-root subproject/test.json
 test cases/failing/33 non-root subproject/some/meson.build
 test cases/failing/34 dependency not-required then required/meson.build
+test cases/failing/34 dependency not-required then required/test.json
 test cases/failing/35 project argument after target/exe.c
 test cases/failing/35 project argument after target/meson.build
+test cases/failing/35 project argument after target/test.json
 test cases/failing/36 pkgconfig dependency impossible conditions/meson.build
+test cases/failing/36 pkgconfig dependency impossible conditions/test.json
 test cases/failing/37 has function external dependency/meson.build
 test cases/failing/37 has function external dependency/mylib.c
+test cases/failing/37 has function external dependency/test.json
 test cases/failing/38 libdir must be inside prefix/meson.build
+test cases/failing/38 libdir must be inside prefix/test.json
 test cases/failing/39 prefix absolute/meson.build
+test cases/failing/39 prefix absolute/test.json
 test cases/failing/4 missing meson.build/meson.build
+test cases/failing/4 missing meson.build/test.json
 test cases/failing/4 missing meson.build/subdir/dummy.txt
 test cases/failing/40 kwarg assign/dummy.c
 test cases/failing/40 kwarg assign/meson.build
 test cases/failing/40 kwarg assign/prog.c
+test cases/failing/40 kwarg assign/test.json
 test cases/failing/41 custom target plainname many inputs/1.txt
 test cases/failing/41 custom target plainname many inputs/2.txt
 test cases/failing/41 custom target plainname many inputs/catfiles.py
 test cases/failing/41 custom target plainname many inputs/meson.build
+test cases/failing/41 custom target plainname many inputs/test.json
 test cases/failing/42 custom target outputs not matching install_dirs/generator.py
-test cases/failing/42 custom target outputs not matching install_dirs/installed_files.txt
 test cases/failing/42 custom target outputs not matching install_dirs/meson.build
+test cases/failing/42 custom target outputs not matching install_dirs/test.json
 test cases/failing/43 project name colon/meson.build
+test cases/failing/43 project name colon/test.json
 test cases/failing/44 abs subdir/meson.build
+test cases/failing/44 abs subdir/test.json
 test cases/failing/44 abs subdir/bob/meson.build
 test cases/failing/45 abspath to srcdir/meson.build
+test cases/failing/45 abspath to srcdir/test.json
 test cases/failing/46 pkgconfig variables reserved/meson.build
 test cases/failing/46 pkgconfig variables reserved/simple.c
 test cases/failing/46 pkgconfig variables reserved/simple.h
+test cases/failing/46 pkgconfig variables reserved/test.json
 test cases/failing/47 pkgconfig variables zero length/meson.build
 test cases/failing/47 pkgconfig variables zero length/simple.c
 test cases/failing/47 pkgconfig variables zero length/simple.h
+test cases/failing/47 pkgconfig variables zero length/test.json
 test cases/failing/48 pkgconfig variables zero length value/meson.build
 test cases/failing/48 pkgconfig variables zero length value/simple.c
 test cases/failing/48 pkgconfig variables zero length value/simple.h
+test cases/failing/48 pkgconfig variables zero length value/test.json
 test cases/failing/49 pkgconfig variables not key value/meson.build
 test cases/failing/49 pkgconfig variables not key value/simple.c
 test cases/failing/49 pkgconfig variables not key value/simple.h
+test cases/failing/49 pkgconfig variables not key value/test.json
 test cases/failing/5 misplaced option/meson.build
+test cases/failing/5 misplaced option/test.json
 test cases/failing/50 executable comparison/meson.build
 test cases/failing/50 executable comparison/prog.c
+test cases/failing/50 executable comparison/test.json
 test cases/failing/51 inconsistent comparison/meson.build
+test cases/failing/51 inconsistent comparison/test.json
 test cases/failing/52 slashname/meson.build
+test cases/failing/52 slashname/test.json
 test cases/failing/52 slashname/sub/meson.build
 test cases/failing/52 slashname/sub/prog.c
 test cases/failing/53 reserved meson prefix/meson.build
+test cases/failing/53 reserved meson prefix/test.json
 test cases/failing/53 reserved meson prefix/meson-foo/meson.build
 test cases/failing/54 wrong shared crate type/foo.rs
 test cases/failing/54 wrong shared crate type/meson.build
+test cases/failing/54 wrong shared crate type/test.json
 test cases/failing/55 wrong static crate type/foo.rs
 test cases/failing/55 wrong static crate type/meson.build
+test cases/failing/55 wrong static crate type/test.json
 test cases/failing/56 or on new line/meson.build
 test cases/failing/56 or on new line/meson_options.txt
+test cases/failing/56 or on new line/test.json
 test cases/failing/57 kwarg in module/meson.build
+test cases/failing/57 kwarg in module/test.json
 test cases/failing/58 link with executable/meson.build
 test cases/failing/58 link with executable/module.c
 test cases/failing/58 link with executable/prog.c
+test cases/failing/58 link with executable/test.json
 test cases/failing/59 assign custom target index/meson.build
+test cases/failing/59 assign custom target index/test.json
 test cases/failing/6 missing incdir/meson.build
+test cases/failing/6 missing incdir/test.json
 test cases/failing/60 getoption prefix/meson.build
+test cases/failing/60 getoption prefix/test.json
 test cases/failing/60 getoption prefix/subprojects/abc/meson.build
 test cases/failing/60 getoption prefix/subprojects/abc/meson_options.txt
 test cases/failing/61 bad option argument/meson.build
 test cases/failing/61 bad option argument/meson_options.txt
+test cases/failing/61 bad option argument/test.json
 test cases/failing/62 subproj filegrab/meson.build
 test cases/failing/62 subproj filegrab/prog.c
+test cases/failing/62 subproj filegrab/test.json
 test cases/failing/62 subproj filegrab/subprojects/a/meson.build
 test cases/failing/63 grab subproj/meson.build
+test cases/failing/63 grab subproj/test.json
 test cases/failing/63 grab subproj/subprojects/foo/meson.build
 test cases/failing/63 grab subproj/subprojects/foo/sub.c
 test cases/failing/64 grab sibling/meson.build
+test cases/failing/64 grab sibling/test.json
 test cases/failing/64 grab sibling/subprojects/a/meson.build
 test cases/failing/64 grab sibling/subprojects/b/meson.build
 test cases/failing/64 grab sibling/subprojects/b/sneaky.c
 test cases/failing/65 string as link target/meson.build
 test cases/failing/65 string as link target/prog.c
+test cases/failing/65 string as link target/test.json
 test cases/failing/66 dependency not-found and required/meson.build
+test cases/failing/66 dependency not-found and required/test.json
 test cases/failing/67 subproj different versions/main.c
 test cases/failing/67 subproj different versions/meson.build
+test cases/failing/67 subproj different versions/test.json
 test cases/failing/67 subproj different versions/subprojects/a/a.c
 test cases/failing/67 subproj different versions/subprojects/a/a.h
 test cases/failing/67 subproj different versions/subprojects/a/meson.build
@@ -1722,64 +1967,101 @@
 test cases/failing/67 subproj different versions/subprojects/c/c.h
 test cases/failing/67 subproj different versions/subprojects/c/meson.build
 test cases/failing/68 wrong boost module/meson.build
+test cases/failing/68 wrong boost module/test.json
 test cases/failing/69 install_data rename bad size/file1.txt
 test cases/failing/69 install_data rename bad size/file2.txt
 test cases/failing/69 install_data rename bad size/meson.build
+test cases/failing/69 install_data rename bad size/test.json
 test cases/failing/7 go to subproject/meson.build
+test cases/failing/7 go to subproject/test.json
 test cases/failing/7 go to subproject/subprojects/meson.build
 test cases/failing/70 skip only subdir/meson.build
+test cases/failing/70 skip only subdir/test.json
 test cases/failing/70 skip only subdir/subdir/meson.build
-test cases/failing/71 invalid escape char/meson.build
-test cases/failing/72 dual override/meson.build
-test cases/failing/72 dual override/overrides.py
-test cases/failing/73 override used/meson.build
-test cases/failing/73 override used/other.py
-test cases/failing/73 override used/something.py
-test cases/failing/74 run_command unclean exit/meson.build
-test cases/failing/74 run_command unclean exit/returncode.py
-test cases/failing/75 int literal leading zero/meson.build
-test cases/failing/76 configuration immutable/input
-test cases/failing/76 configuration immutable/meson.build
-test cases/failing/77 link with shared module on osx/meson.build
-test cases/failing/77 link with shared module on osx/module.c
-test cases/failing/77 link with shared module on osx/prog.c
-test cases/failing/78 non ascii in ascii encoded configure file/config9.h.in
-test cases/failing/78 non ascii in ascii encoded configure file/meson.build
-test cases/failing/79 subproj dependency not-found and required/meson.build
+test cases/failing/71 dual override/meson.build
+test cases/failing/71 dual override/overrides.py
+test cases/failing/71 dual override/test.json
+test cases/failing/72 override used/meson.build
+test cases/failing/72 override used/other.py
+test cases/failing/72 override used/something.py
+test cases/failing/72 override used/test.json
+test cases/failing/73 run_command unclean exit/meson.build
+test cases/failing/73 run_command unclean exit/returncode.py
+test cases/failing/73 run_command unclean exit/test.json
+test cases/failing/74 int literal leading zero/meson.build
+test cases/failing/74 int literal leading zero/test.json
+test cases/failing/75 configuration immutable/input
+test cases/failing/75 configuration immutable/meson.build
+test cases/failing/75 configuration immutable/test.json
+test cases/failing/76 link with shared module on osx/meson.build
+test cases/failing/76 link with shared module on osx/module.c
+test cases/failing/76 link with shared module on osx/prog.c
+test cases/failing/76 link with shared module on osx/test.json
+test cases/failing/77 non ascii in ascii encoded configure file/config9.h.in
+test cases/failing/77 non ascii in ascii encoded configure file/meson.build
+test cases/failing/77 non ascii in ascii encoded configure file/test.json
+test cases/failing/78 subproj dependency not-found and required/meson.build
+test cases/failing/78 subproj dependency not-found and required/test.json
+test cases/failing/79 unfound run/meson.build
+test cases/failing/79 unfound run/test.json
 test cases/failing/8 recursive/meson.build
+test cases/failing/8 recursive/test.json
 test cases/failing/8 recursive/subprojects/a/meson.build
 test cases/failing/8 recursive/subprojects/b/meson.build
-test cases/failing/80 unfound run/meson.build
-test cases/failing/81 framework dependency with version/meson.build
-test cases/failing/82 override exe config/foo.c
-test cases/failing/82 override exe config/meson.build
-test cases/failing/83 gl dependency with version/meson.build
-test cases/failing/84 threads dependency with version/meson.build
-test cases/failing/85 gtest dependency with version/meson.build
-test cases/failing/86 dub libray/meson.build
-test cases/failing/87 dub executable/meson.build
-test cases/failing/88 dub compiler/meson.build
-test cases/failing/89 subproj not-found dep/meson.build
-test cases/failing/89 subproj not-found dep/subprojects/somesubproj/meson.build
+test cases/failing/80 framework dependency with version/meson.build
+test cases/failing/80 framework dependency with version/test.json
+test cases/failing/81 override exe config/foo.c
+test cases/failing/81 override exe config/meson.build
+test cases/failing/81 override exe config/test.json
+test cases/failing/82 gl dependency with version/meson.build
+test cases/failing/82 gl dependency with version/test.json
+test cases/failing/83 threads dependency with version/meson.build
+test cases/failing/83 threads dependency with version/test.json
+test cases/failing/84 gtest dependency with version/meson.build
+test cases/failing/84 gtest dependency with version/test.json
+test cases/failing/85 dub libray/meson.build
+test cases/failing/85 dub libray/test.json
+test cases/failing/86 dub executable/meson.build
+test cases/failing/86 dub executable/test.json
+test cases/failing/87 dub compiler/meson.build
+test cases/failing/87 dub compiler/test.json
+test cases/failing/88 subproj not-found dep/meson.build
+test cases/failing/88 subproj not-found dep/test.json
+test cases/failing/88 subproj not-found dep/subprojects/somesubproj/meson.build
+test cases/failing/89 invalid configure file/input
+test cases/failing/89 invalid configure file/meson.build
+test cases/failing/89 invalid configure file/test.json
 test cases/failing/9 missing extra file/meson.build
 test cases/failing/9 missing extra file/prog.c
-test cases/failing/90 invalid configure file/input
-test cases/failing/90 invalid configure file/meson.build
-test cases/failing/91 kwarg dupe/meson.build
-test cases/failing/91 kwarg dupe/prog.c
-test cases/failing/92 missing pch file/meson.build
-test cases/failing/92 missing pch file/prog.c
-test cases/failing/93 pch source different folder/meson.build
-test cases/failing/93 pch source different folder/prog.c
-test cases/failing/93 pch source different folder/include/pch.h
-test cases/failing/93 pch source different folder/src/pch.c
-test cases/failing/94 vala without c/meson.build
-test cases/failing/95 unknown config tool/meson.build
-test cases/failing/96 custom target install data/Info.plist.cpp
-test cases/failing/96 custom target install data/meson.build
-test cases/failing/96 custom target install data/preproc.py
-test cases/failing/97 add dict non string key/meson.build
-test cases/failing/98 add dict duplicate keys/meson.build
+test cases/failing/9 missing extra file/test.json
+test cases/failing/90 kwarg dupe/meson.build
+test cases/failing/90 kwarg dupe/prog.c
+test cases/failing/90 kwarg dupe/test.json
+test cases/failing/91 missing pch file/meson.build
+test cases/failing/91 missing pch file/prog.c
+test cases/failing/91 missing pch file/test.json
+test cases/failing/92 pch source different folder/meson.build
+test cases/failing/92 pch source different folder/prog.c
+test cases/failing/92 pch source different folder/test.json
+test cases/failing/92 pch source different folder/include/pch.h
+test cases/failing/92 pch source different folder/src/pch.c
+test cases/failing/93 vala without c/meson.build
+test cases/failing/93 vala without c/test.json
+test cases/failing/94 unknown config tool/meson.build
+test cases/failing/94 unknown config tool/test.json
+test cases/failing/95 custom target install data/Info.plist.cpp
+test cases/failing/95 custom target install data/meson.build
+test cases/failing/95 custom target install data/preproc.py
+test cases/failing/95 custom target install data/test.json
+test cases/failing/96 add dict non string key/meson.build
+test cases/failing/96 add dict non string key/test.json
+test cases/failing/97 add dict duplicate keys/meson.build
+test cases/failing/97 add dict duplicate keys/test.json
+test cases/failing/98 fallback consistency/meson.build
+test cases/failing/98 fallback consistency/test.json
+test cases/failing/98 fallback consistency/subprojects/sub/meson.build
+test cases/failing/99 no native prop/meson.build
+test cases/failing/99 no native prop/test.json
 test cases/fortran/1 basic/meson.build
 test cases/fortran/1 basic/simple.f90
 test cases/fortran/10 find library/gzip.f90
@@ -1802,12 +2084,15 @@
 test cases/fortran/15 include/inc2.f90
 test cases/fortran/15 include/include_hierarchy.f90
 test cases/fortran/15 include/include_syntax.f90
-test cases/fortran/15 include/include_tests.f90
 test cases/fortran/15 include/meson.build
 test cases/fortran/15 include/timestwo.f90
+test cases/fortran/15 include/subprojects/cmake_inc/CMakeLists.txt
+test cases/fortran/15 include/subprojects/cmake_inc/main.f90
+test cases/fortran/15 include/subprojects/cmake_inc/thousand.f90
 test cases/fortran/16 openmp/main.f90
 test cases/fortran/16 openmp/meson.build
 test cases/fortran/17 add_languages/meson.build
+test cases/fortran/18 first_arg/main.f90
 test cases/fortran/18 first_arg/meson.build
 test cases/fortran/19 fortran_std/legacy.f
 test cases/fortran/19 fortran_std/meson.build
@@ -1837,6 +2122,7 @@
 test cases/fortran/7 generated/meson.build
 test cases/fortran/7 generated/mod1.fpp
 test cases/fortran/7 generated/mod2.fpp
+test cases/fortran/7 generated/mod3.fpp
 test cases/fortran/7 generated/prog.f90
 test cases/fortran/8 module names/meson.build
 test cases/fortran/8 module names/mod1.f90
@@ -1851,16 +2137,18 @@
 test cases/frameworks/1 boost/extralib.cpp
 test cases/frameworks/1 boost/linkexe.cc
 test cases/frameworks/1 boost/meson.build
+test cases/frameworks/1 boost/meson_options.txt
 test cases/frameworks/1 boost/nomod.cpp
 test cases/frameworks/1 boost/python_module.cpp
+test cases/frameworks/1 boost/test.json
 test cases/frameworks/1 boost/test_python_module.py
 test cases/frameworks/1 boost/unit_test.cpp
 test cases/frameworks/1 boost/partial_dep/foo.cpp
 test cases/frameworks/1 boost/partial_dep/foo.hpp
 test cases/frameworks/1 boost/partial_dep/main.cpp
 test cases/frameworks/1 boost/partial_dep/meson.build
-test cases/frameworks/10 gtk-doc/installed_files.txt
 test cases/frameworks/10 gtk-doc/meson.build
+test cases/frameworks/10 gtk-doc/test.json
 test cases/frameworks/10 gtk-doc/doc/foobar-docs.sgml
 test cases/frameworks/10 gtk-doc/doc/meson.build
 test cases/frameworks/10 gtk-doc/doc/version.xml.in
@@ -1876,8 +2164,8 @@
 test cases/frameworks/10 gtk-doc/include/foo.h
 test cases/frameworks/10 gtk-doc/include/generate-enums-docbook.py
 test cases/frameworks/10 gtk-doc/include/meson.build
-test cases/frameworks/11 gir subproject/installed_files.txt
 test cases/frameworks/11 gir subproject/meson.build
+test cases/frameworks/11 gir subproject/test.json
 test cases/frameworks/11 gir subproject/gir/meson-subsample.c
 test cases/frameworks/11 gir subproject/gir/meson-subsample.h
 test cases/frameworks/11 gir subproject/gir/meson.build
@@ -1886,8 +2174,8 @@
 test cases/frameworks/11 gir subproject/subprojects/mesongir/meson-sample.c
 test cases/frameworks/11 gir subproject/subprojects/mesongir/meson-sample.h
 test cases/frameworks/11 gir subproject/subprojects/mesongir/meson.build
-test cases/frameworks/12 multiple gir/installed_files.txt
 test cases/frameworks/12 multiple gir/meson.build
+test cases/frameworks/12 multiple gir/test.json
 test cases/frameworks/12 multiple gir/gir/meson-subsample.c
 test cases/frameworks/12 multiple gir/gir/meson-subsample.h
 test cases/frameworks/12 multiple gir/gir/meson.build
@@ -1895,8 +2183,8 @@
 test cases/frameworks/12 multiple gir/mesongir/meson-sample.c
 test cases/frameworks/12 multiple gir/mesongir/meson-sample.h.in
 test cases/frameworks/12 multiple gir/mesongir/meson.build
-test cases/frameworks/13 yelp/installed_files.txt
 test cases/frameworks/13 yelp/meson.build
+test cases/frameworks/13 yelp/test.json
 test cases/frameworks/13 yelp/help/LINGUAS
 test cases/frameworks/13 yelp/help/meson.build
 test cases/frameworks/13 yelp/help/C/index.page
@@ -1904,21 +2192,27 @@
 test cases/frameworks/13 yelp/help/de/de.po
 test cases/frameworks/13 yelp/help/es/es.po
 test cases/frameworks/13 yelp/help/es/media/test.txt
-test cases/frameworks/14 doxygen/installed_files.txt
 test cases/frameworks/14 doxygen/meson.build
+test cases/frameworks/14 doxygen/test.json
 test cases/frameworks/14 doxygen/doc/Doxyfile.in
 test cases/frameworks/14 doxygen/doc/meson.build
 test cases/frameworks/14 doxygen/include/comedian.h
 test cases/frameworks/14 doxygen/include/spede.h
 test cases/frameworks/14 doxygen/src/spede.cpp
 test cases/frameworks/15 llvm/meson.build
+test cases/frameworks/15 llvm/meson_options.txt
 test cases/frameworks/15 llvm/sum.c
+test cases/frameworks/15 llvm/test.json
 test cases/frameworks/16 sdl2/meson.build
+test cases/frameworks/16 sdl2/meson_options.txt
 test cases/frameworks/16 sdl2/sdl2prog.c
+test cases/frameworks/16 sdl2/test.json
 test cases/frameworks/17 mpi/main.c
 test cases/frameworks/17 mpi/main.cpp
 test cases/frameworks/17 mpi/main.f90
 test cases/frameworks/17 mpi/meson.build
+test cases/frameworks/17 mpi/meson_options.txt
+test cases/frameworks/17 mpi/test.json
 test cases/frameworks/18 vulkan/meson.build
 test cases/frameworks/18 vulkan/vulkanprog.c
 test cases/frameworks/19 pcap/meson.build
@@ -1939,8 +2233,8 @@
 test cases/frameworks/22 gir link order/get-prgname/get-prgname.c
 test cases/frameworks/22 gir link order/get-prgname/get-prgname.h
 test cases/frameworks/22 gir link order/get-prgname/meson.build
-test cases/frameworks/23 hotdoc/installed_files.txt
 test cases/frameworks/23 hotdoc/meson.build
+test cases/frameworks/23 hotdoc/test.json
 test cases/frameworks/23 hotdoc/doc/index.md
 test cases/frameworks/23 hotdoc/doc/meson.build
 test cases/frameworks/23 hotdoc/doc/sitemap.txt
@@ -1950,6 +2244,8 @@
 test cases/frameworks/25 hdf5/main.cpp
 test cases/frameworks/25 hdf5/main.f90
 test cases/frameworks/25 hdf5/meson.build
+test cases/frameworks/25 hdf5/meson_options.txt
+test cases/frameworks/25 hdf5/test.json
 test cases/frameworks/26 netcdf/main.c
 test cases/frameworks/26 netcdf/main.cpp
 test cases/frameworks/26 netcdf/main.f90
@@ -1968,6 +2264,22 @@
 test cases/frameworks/30 scalapack/main.f90
 test cases/frameworks/30 scalapack/meson.build
 test cases/frameworks/30 scalapack/cmake/FindSCALAPACK.cmake
+test cases/frameworks/31 curses/main.c
+test cases/frameworks/31 curses/meson.build
+test cases/frameworks/31 curses/meson_options.txt
+test cases/frameworks/31 curses/test.json
+test cases/frameworks/32 boost root/meson.build
+test cases/frameworks/32 boost root/nativefile.ini.in
+test cases/frameworks/32 boost root/boost/include/boost/version.hpp
+test cases/frameworks/32 boost root/boost/lib/boost_regex-vc142-mt-gd-x32-0_1.lib
+test cases/frameworks/32 boost root/boost/lib/boost_regex-vc142-mt-gd-x64-0_1.lib
+test cases/frameworks/32 boost root/boost/lib/libboost_regex.so.0.1.0
+test cases/frameworks/33 boost split root/meson.build
+test cases/frameworks/33 boost split root/nativefile.ini.in
+test cases/frameworks/33 boost split root/boost/extra-dir/include/boost/version.hpp
+test cases/frameworks/33 boost split root/boost/lib/boost_regex-vc142-mt-gd-x32-0_2.lib
+test cases/frameworks/33 boost split root/boost/lib/boost_regex-vc142-mt-gd-x64-0_2.lib
+test cases/frameworks/33 boost split root/boost/lib/libboost_regex.so.0.2.0
 test cases/frameworks/4 qt/main.cpp
 test cases/frameworks/4 qt/mainWindow.cpp
 test cases/frameworks/4 qt/mainWindow.h
@@ -2006,8 +2318,8 @@
 test cases/frameworks/5 protocol buffers/withpath/pathprog.cpp
 test cases/frameworks/5 protocol buffers/withpath/com/mesonbuild/simple.proto
 test cases/frameworks/5 protocol buffers/withpath/com/mesonbuild/subsite/complex.proto
-test cases/frameworks/6 gettext/installed_files.txt
 test cases/frameworks/6 gettext/meson.build
+test cases/frameworks/6 gettext/test.json
 test cases/frameworks/6 gettext/data/meson.build
 test cases/frameworks/6 gettext/data/test.desktop.in
 test cases/frameworks/6 gettext/data/test2.desktop.in
@@ -2027,8 +2339,8 @@
 test cases/frameworks/6 gettext/po/ru.po
 test cases/frameworks/6 gettext/src/intlmain.c
 test cases/frameworks/6 gettext/src/meson.build
-test cases/frameworks/7 gnome/installed_files.txt
 test cases/frameworks/7 gnome/meson.build
+test cases/frameworks/7 gnome/test.json
 test cases/frameworks/7 gnome/gdbus/gdbusprog.c
 test cases/frameworks/7 gnome/gdbus/meson.build
 test cases/frameworks/7 gnome/gdbus/data/com.example.Sample.xml
@@ -2048,6 +2360,9 @@
 test cases/frameworks/7 gnome/gir/dep1/dep2/dep2.c
 test cases/frameworks/7 gnome/gir/dep1/dep2/dep2.h
 test cases/frameworks/7 gnome/gir/dep1/dep2/meson.build
+test cases/frameworks/7 gnome/gir/dep1/dep3/dep3.c
+test cases/frameworks/7 gnome/gir/dep1/dep3/dep3.h
+test cases/frameworks/7 gnome/gir/dep1/dep3/meson.build
 test cases/frameworks/7 gnome/mkenums/enums.c.in
 test cases/frameworks/7 gnome/mkenums/enums.h.in
 test cases/frameworks/7 gnome/mkenums/enums2.c.in
@@ -2085,8 +2400,8 @@
 test cases/frameworks/9 wxwidgets/meson.build
 test cases/frameworks/9 wxwidgets/wxprog.cpp
 test cases/frameworks/9 wxwidgets/wxstc.cpp
-test cases/java/1 basic/installed_files.txt
 test cases/java/1 basic/meson.build
+test cases/java/1 basic/test.json
 test cases/java/1 basic/com/mesonbuild/Simple.java
 test cases/java/2 subdir/meson.build
 test cases/java/2 subdir/sub/meson.build
@@ -2113,16 +2428,17 @@
 test cases/java/8 codegen custom target/com/mesonbuild/Simple.java
 test cases/java/8 codegen custom target/com/mesonbuild/TextPrinter.java
 test cases/java/8 codegen custom target/com/mesonbuild/meson.build
-test cases/kconfig/1 basic/.config
-test cases/kconfig/1 basic/meson.build
-test cases/kconfig/2 subdir/.config
-test cases/kconfig/2 subdir/meson.build
-test cases/kconfig/2 subdir/dir/meson.build
-test cases/kconfig/3 load_config files/meson.build
-test cases/kconfig/3 load_config files/dir/config
-test cases/kconfig/3 load_config files/dir/meson.build
-test cases/kconfig/4 load_config builddir/config
-test cases/kconfig/4 load_config builddir/meson.build
+test cases/keyval/1 basic/.config
+test cases/keyval/1 basic/meson.build
+test cases/keyval/1 basic/test.json
+test cases/keyval/2 subdir/.config
+test cases/keyval/2 subdir/meson.build
+test cases/keyval/2 subdir/dir/meson.build
+test cases/keyval/3 load_config files/meson.build
+test cases/keyval/3 load_config files/dir/config
+test cases/keyval/3 load_config files/dir/meson.build
+test cases/keyval/4 load_config builddir/config
+test cases/keyval/4 load_config builddir/meson.build
 test cases/linuxlike/1 pkg-config/meson.build
 test cases/linuxlike/1 pkg-config/prog-checkver.c
 test cases/linuxlike/1 pkg-config/prog.c
@@ -2143,13 +2459,16 @@
 test cases/linuxlike/12 subprojects in subprojects/subprojects/b/meson.build
 test cases/linuxlike/12 subprojects in subprojects/subprojects/c/c.h
 test cases/linuxlike/12 subprojects in subprojects/subprojects/c/meson.build
+test cases/linuxlike/13 cmake dependency/cmVers.sh
 test cases/linuxlike/13 cmake dependency/meson.build
 test cases/linuxlike/13 cmake dependency/prog-checkver.c
 test cases/linuxlike/13 cmake dependency/prog.c
-test cases/linuxlike/13 cmake dependency/setup_env.json
+test cases/linuxlike/13 cmake dependency/test.json
 test cases/linuxlike/13 cmake dependency/testFlagSet.c
 test cases/linuxlike/13 cmake dependency/cmake/FindImportedTarget.cmake
 test cases/linuxlike/13 cmake dependency/cmake/FindSomethingLikeZLIB.cmake
+test cases/linuxlike/13 cmake dependency/cmake_fake1/cmMesonTestF1Config.cmake
+test cases/linuxlike/13 cmake dependency/cmake_fake2/cmMesonTestF2Config.cmake
 test cases/linuxlike/13 cmake dependency/cmake_pref_env/lib/cmake/cmMesonTestDep/cmMesonTestDepConfig.cmake
 test cases/linuxlike/13 cmake dependency/incdir/myinc.h
 test cases/linuxlike/14 static dynamic linkage/main.c
@@ -2171,6 +2490,7 @@
 test cases/linuxlike/4 extdep static lib/meson.build
 test cases/linuxlike/4 extdep static lib/prog.c
 test cases/linuxlike/5 dependency versions/meson.build
+test cases/linuxlike/5 dependency versions/subprojects/fakezlib/meson.build
 test cases/linuxlike/5 dependency versions/subprojects/somelib/lib.c
 test cases/linuxlike/5 dependency versions/subprojects/somelib/meson.build
 test cases/linuxlike/5 dependency versions/subprojects/somelibnover/lib.c
@@ -2181,17 +2501,60 @@
 test cases/linuxlike/6 subdir include order/prog.c
 test cases/linuxlike/6 subdir include order/subdir/glib.h
 test cases/linuxlike/7 library versions/exe.orig.c
-test cases/linuxlike/7 library versions/installed_files.txt
 test cases/linuxlike/7 library versions/lib.c
 test cases/linuxlike/7 library versions/meson.build
-test cases/linuxlike/8 subproject library install/installed_files.txt
+test cases/linuxlike/7 library versions/test.json
 test cases/linuxlike/8 subproject library install/meson.build
+test cases/linuxlike/8 subproject library install/test.json
 test cases/linuxlike/8 subproject library install/subprojects/sublib/meson.build
 test cases/linuxlike/8 subproject library install/subprojects/sublib/sublib.c
 test cases/linuxlike/8 subproject library install/subprojects/sublib/include/subdefs.h
 test cases/linuxlike/9 compiler checks with dependencies/meson.build
 test cases/nasm/1 configure file/hello.asm
 test cases/nasm/1 configure file/meson.build
+test cases/native/1 trivial/meson.build
+test cases/native/1 trivial/trivial.c
+test cases/native/162 external program shebang parsing/input.txt
+test cases/native/162 external program shebang parsing/main.c
+test cases/native/162 external program shebang parsing/meson.build
+test cases/native/162 external program shebang parsing/script.int.in
+test cases/native/201 override with exe/main2.input
+test cases/native/201 override with exe/meson.build
+test cases/native/201 override with exe/subprojects/sub/foobar.c
+test cases/native/201 override with exe/subprojects/sub/meson.build
+test cases/native/21 global arg/meson.build
+test cases/native/21 global arg/prog.c
+test cases/native/21 global arg/prog.cc
+test cases/native/27 pipeline/input_src.dat
+test cases/native/27 pipeline/meson.build
+test cases/native/27 pipeline/prog.c
+test cases/native/27 pipeline/srcgen.c
+test cases/native/27 pipeline/depends/copyrunner.py
+test cases/native/27 pipeline/depends/filecopier.c
+test cases/native/27 pipeline/depends/libsrc.c.in
+test cases/native/27 pipeline/depends/meson.build
+test cases/native/27 pipeline/depends/prog.c
+test cases/native/27 pipeline/src/input_src.dat
+test cases/native/27 pipeline/src/meson.build
+test cases/native/27 pipeline/src/prog.c
+test cases/native/27 pipeline/src/srcgen.c
+test cases/native/36 tryrun/error.c
+test cases/native/36 tryrun/meson.build
+test cases/native/36 tryrun/no_compile.c
+test cases/native/36 tryrun/ok.c
+test cases/native/56 install script/file.txt
+test cases/native/56 install script/meson.build
+test cases/native/56 install script/test.json
+test cases/native/56 install script/wrap.py
+test cases/native/56 install script/src/exe.c
+test cases/native/56 install script/src/meson.build
+test cases/native/85 add language/meson.build
+test cases/native/85 add language/prog.cc
+test cases/native/93 selfbuilt custom/checkarg.cpp
+test cases/native/93 selfbuilt custom/data.dat
+test cases/native/93 selfbuilt custom/mainprog.cpp
+test cases/native/93 selfbuilt custom/meson.build
+test cases/native/93 selfbuilt custom/tool.cpp
 test cases/objc/1 simple/meson.build
 test cases/objc/1 simple/prog.m
 test cases/objc/2 nsstring/meson.build
@@ -2210,20 +2573,20 @@
 test cases/osx/1 basic/meson.build
 test cases/osx/2 library versions/CMakeLists.txt
 test cases/osx/2 library versions/exe.orig.c
-test cases/osx/2 library versions/installed_files.txt
 test cases/osx/2 library versions/lib.c
 test cases/osx/2 library versions/meson.build
 test cases/osx/2 library versions/require_pkgconfig.py
+test cases/osx/2 library versions/test.json
 test cases/osx/3 has function xcode8/meson.build
-test cases/osx/4 framework/installed_files.txt
 test cases/osx/4 framework/meson.build
 test cases/osx/4 framework/prog.c
 test cases/osx/4 framework/stat.c
+test cases/osx/4 framework/test.json
 test cases/osx/4 framework/xcode-frameworks.png
-test cases/osx/5 extra frameworks/installed_files.txt
 test cases/osx/5 extra frameworks/meson.build
 test cases/osx/5 extra frameworks/prog.c
 test cases/osx/5 extra frameworks/stat.c
+test cases/osx/5 extra frameworks/test.json
 test cases/osx/6 multiframework/main.m
 test cases/osx/6 multiframework/meson.build
 test cases/osx/7 bitcode/libbar.mm
@@ -2309,35 +2672,35 @@
 test cases/rewrite/4 same name targets/meson.build
 test cases/rewrite/4 same name targets/sub1/meson.build
 test cases/rewrite/5 sorting/meson.build
-test cases/rust/1 basic/installed_files.txt
 test cases/rust/1 basic/meson.build
 test cases/rust/1 basic/prog.rs
+test cases/rust/1 basic/test.json
 test cases/rust/1 basic/subdir/meson.build
 test cases/rust/1 basic/subdir/prog.rs
-test cases/rust/2 sharedlib/installed_files.txt
 test cases/rust/2 sharedlib/meson.build
 test cases/rust/2 sharedlib/prog.rs
 test cases/rust/2 sharedlib/stuff.rs
-test cases/rust/3 staticlib/installed_files.txt
+test cases/rust/2 sharedlib/test.json
 test cases/rust/3 staticlib/meson.build
 test cases/rust/3 staticlib/prog.rs
 test cases/rust/3 staticlib/stuff.rs
-test cases/rust/4 polyglot/installed_files.txt
+test cases/rust/3 staticlib/test.json
 test cases/rust/4 polyglot/meson.build
 test cases/rust/4 polyglot/prog.c
 test cases/rust/4 polyglot/stuff.rs
-test cases/rust/5 polyglot static/installed_files.txt
+test cases/rust/4 polyglot/test.json
 test cases/rust/5 polyglot static/meson.build
 test cases/rust/5 polyglot static/prog.c
 test cases/rust/5 polyglot static/stuff.rs
-test cases/rust/6 named staticlib/installed_files.txt
+test cases/rust/5 polyglot static/test.json
 test cases/rust/6 named staticlib/meson.build
 test cases/rust/6 named staticlib/prog.rs
 test cases/rust/6 named staticlib/stuff.rs
-test cases/rust/7 private crate collision/installed_files.txt
+test cases/rust/6 named staticlib/test.json
 test cases/rust/7 private crate collision/meson.build
 test cases/rust/7 private crate collision/prog.rs
 test cases/rust/7 private crate collision/rand.rs
+test cases/rust/7 private crate collision/test.json
 test cases/rust/8 many files/foo.rs
 test cases/rust/8 many files/main.rs
 test cases/rust/8 many files/meson.build
@@ -2580,6 +2943,7 @@
 test cases/unit/54 clang-format/meson.build
 test cases/unit/54 clang-format/prog_expected_c
 test cases/unit/54 clang-format/prog_orig_c
+test cases/unit/54 clang-format/dummydir.h/dummy.dat
 test cases/unit/55 introspect buildoptions/subprojects/projectBad/meson.build
 test cases/unit/55 introspect buildoptions/subprojects/projectBad/meson_options.txt
 test cases/unit/56 dedup compiler libs/meson.build
@@ -2591,6 +2955,7 @@
 test cases/unit/56 dedup compiler libs/libb/libb.c
 test cases/unit/56 dedup compiler libs/libb/libb.h
 test cases/unit/56 dedup compiler libs/libb/meson.build
+test cases/unit/57 introspection/cp.py
 test cases/unit/57 introspection/meson.build
 test cases/unit/57 introspection/meson_options.txt
 test cases/unit/57 introspection/t1.cpp
@@ -2636,39 +3001,39 @@
 test cases/unit/65 cmake parser/prefix/lib/cmake/mesontest/mesontest-config.cmake
 test cases/unit/66 alias target/main.c
 test cases/unit/66 alias target/meson.build
-test cases/unit/68 static archive stripping/app/appA.c
-test cases/unit/68 static archive stripping/app/appB.c
-test cases/unit/68 static archive stripping/app/meson.build
-test cases/unit/68 static archive stripping/lib/libA.c
-test cases/unit/68 static archive stripping/lib/libA.h
-test cases/unit/68 static archive stripping/lib/libB.c
-test cases/unit/68 static archive stripping/lib/libB.h
-test cases/unit/68 static archive stripping/lib/meson.build
-test cases/unit/69 static link/meson.build
-test cases/unit/69 static link/test1.c
-test cases/unit/69 static link/test2.c
-test cases/unit/69 static link/test3.c
-test cases/unit/69 static link/test4.c
-test cases/unit/69 static link/test5.c
-test cases/unit/69 static link/lib/func1.c
-test cases/unit/69 static link/lib/func10.c
-test cases/unit/69 static link/lib/func11.c
-test cases/unit/69 static link/lib/func12.c
-test cases/unit/69 static link/lib/func14.c
-test cases/unit/69 static link/lib/func15.c
-test cases/unit/69 static link/lib/func16.c
-test cases/unit/69 static link/lib/func17.c
-test cases/unit/69 static link/lib/func18.c
-test cases/unit/69 static link/lib/func19.c
-test cases/unit/69 static link/lib/func2.c
-test cases/unit/69 static link/lib/func3.c
-test cases/unit/69 static link/lib/func4.c
-test cases/unit/69 static link/lib/func5.c
-test cases/unit/69 static link/lib/func6.c
-test cases/unit/69 static link/lib/func7.c
-test cases/unit/69 static link/lib/func8.c
-test cases/unit/69 static link/lib/func9.c
-test cases/unit/69 static link/lib/meson.build
+test cases/unit/67 static archive stripping/app/appA.c
+test cases/unit/67 static archive stripping/app/appB.c
+test cases/unit/67 static archive stripping/app/meson.build
+test cases/unit/67 static archive stripping/lib/libA.c
+test cases/unit/67 static archive stripping/lib/libA.h
+test cases/unit/67 static archive stripping/lib/libB.c
+test cases/unit/67 static archive stripping/lib/libB.h
+test cases/unit/67 static archive stripping/lib/meson.build
+test cases/unit/68 static link/meson.build
+test cases/unit/68 static link/test1.c
+test cases/unit/68 static link/test2.c
+test cases/unit/68 static link/test3.c
+test cases/unit/68 static link/test4.c
+test cases/unit/68 static link/test5.c
+test cases/unit/68 static link/lib/func1.c
+test cases/unit/68 static link/lib/func10.c
+test cases/unit/68 static link/lib/func11.c
+test cases/unit/68 static link/lib/func12.c
+test cases/unit/68 static link/lib/func14.c
+test cases/unit/68 static link/lib/func15.c
+test cases/unit/68 static link/lib/func16.c
+test cases/unit/68 static link/lib/func17.c
+test cases/unit/68 static link/lib/func18.c
+test cases/unit/68 static link/lib/func19.c
+test cases/unit/68 static link/lib/func2.c
+test cases/unit/68 static link/lib/func3.c
+test cases/unit/68 static link/lib/func4.c
+test cases/unit/68 static link/lib/func5.c
+test cases/unit/68 static link/lib/func6.c
+test cases/unit/68 static link/lib/func7.c
+test cases/unit/68 static link/lib/func8.c
+test cases/unit/68 static link/lib/func9.c
+test cases/unit/68 static link/lib/meson.build
 test cases/unit/69 test env value/meson.build
 test cases/unit/69 test env value/test.py
 test cases/unit/7 run installed/meson.build
@@ -2678,19 +3043,66 @@
 test cases/unit/70 clang-tidy/.clang-tidy
 test cases/unit/70 clang-tidy/cttest.cpp
 test cases/unit/70 clang-tidy/meson.build
+test cases/unit/70 clang-tidy/dummydir.h/dummy.dat
 test cases/unit/71 cross/crossfile.in
 test cases/unit/71 cross/meson.build
 test cases/unit/71 cross/meson_options.txt
-test cases/unit/73 wrap file url/meson.build
-test cases/unit/73 wrap file url/subprojects/foo-patch.tar.xz
-test cases/unit/73 wrap file url/subprojects/foo.tar.xz
-test cases/unit/74 summary/meson.build
-test cases/unit/74 summary/subprojects/sub/meson.build
-test cases/unit/74 summary/subprojects/sub2/meson.build
+test cases/unit/72 cross test passed/exewrapper.py
+test cases/unit/72 cross test passed/meson.build
+test cases/unit/72 cross test passed/meson_options.txt
+test cases/unit/72 cross test passed/script.py
+test cases/unit/72 cross test passed/src/main.c
+test cases/unit/73 summary/meson.build
+test cases/unit/73 summary/subprojects/sub/meson.build
+test cases/unit/73 summary/subprojects/sub2/meson.build
+test cases/unit/74 wrap file url/meson.build
+test cases/unit/74 wrap file url/subprojects/foo-patch.tar.xz
+test cases/unit/74 wrap file url/subprojects/foo.tar.xz
+test cases/unit/75 dep files/foo.c
+test cases/unit/75 dep files/meson.build
+test cases/unit/77 pkgconfig prefixes/client/client.c
+test cases/unit/77 pkgconfig prefixes/client/meson.build
+test cases/unit/77 pkgconfig prefixes/val1/meson.build
+test cases/unit/77 pkgconfig prefixes/val1/val1.c
+test cases/unit/77 pkgconfig prefixes/val1/val1.h
+test cases/unit/77 pkgconfig prefixes/val2/meson.build
+test cases/unit/77 pkgconfig prefixes/val2/val2.c
+test cases/unit/77 pkgconfig prefixes/val2/val2.h
+test cases/unit/78 subdir libdir/meson.build
+test cases/unit/78 subdir libdir/subprojects/flub/meson.build
+test cases/unit/79 as link whole/bar.c
+test cases/unit/79 as link whole/foo.c
+test cases/unit/79 as link whole/meson.build
+test cases/unit/79 nostdlib/meson.build
+test cases/unit/79 nostdlib/prog.c
+test cases/unit/79 nostdlib/subprojects/mylibc/libc.c
+test cases/unit/79 nostdlib/subprojects/mylibc/meson.build
+test cases/unit/79 nostdlib/subprojects/mylibc/stdio.h
+test cases/unit/79 nostdlib/subprojects/mylibc/stubstart.s
+test cases/unit/79 user options for subproject/.gitignore
+test cases/unit/79 user options for subproject/75 user options for subproject/.gitignore
+test cases/unit/79 user options for subproject/75 user options for subproject/meson.build
 test cases/unit/8 -L -l order/first.pc
 test cases/unit/8 -L -l order/meson.build
 test cases/unit/8 -L -l order/prog.c
 test cases/unit/8 -L -l order/second.pc
+test cases/unit/80 global-rpath/meson.build
+test cases/unit/80 global-rpath/rpathified.cpp
+test cases/unit/80 global-rpath/yonder/meson.build
+test cases/unit/80 global-rpath/yonder/yonder.cpp
+test cases/unit/80 global-rpath/yonder/yonder.h
+test cases/unit/81 wrap-git/meson.build
+test cases/unit/81 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build
+test cases/unit/81 wrap-git/subprojects/wrap_git_upstream/main.c
+test cases/unit/82 meson version compare/meson.build
+test cases/unit/83 cross only introspect/meson.build
+test cases/unit/84 change option choices/meson.build
+test cases/unit/84 change option choices/meson_options.1.txt
+test cases/unit/84 change option choices/meson_options.2.txt
+test cases/unit/85 nested subproject regenerate depends/main.c
+test cases/unit/85 nested subproject regenerate depends/meson.build
+test cases/unit/85 nested subproject regenerate depends/subprojects/sub1/meson.build
+test cases/unit/85 nested subproject regenerate depends/subprojects/sub2/CMakeLists.txt
 test cases/unit/9 d dedup/meson.build
 test cases/unit/9 d dedup/prog.c
 test cases/vala/1 basic/meson.build
@@ -2700,9 +3112,9 @@
 test cases/vala/10 mixed sources/c/meson.build
 test cases/vala/10 mixed sources/c/writec.py
 test cases/vala/10 mixed sources/vala/bar.vala
-test cases/vala/11 generated vapi/installed_files.txt
 test cases/vala/11 generated vapi/main.vala
 test cases/vala/11 generated vapi/meson.build
+test cases/vala/11 generated vapi/test.json
 test cases/vala/11 generated vapi/libbar/bar.c
 test cases/vala/11 generated vapi/libbar/bar.h
 test cases/vala/11 generated vapi/libbar/meson.build
@@ -2780,18 +3192,18 @@
 test cases/vala/5 target glib/GLib.Thread.vala
 test cases/vala/5 target glib/meson.build
 test cases/vala/5 target glib/retcode.c
-test cases/vala/6 static library/installed_files.txt
 test cases/vala/6 static library/meson.build
 test cases/vala/6 static library/mylib.vala
 test cases/vala/6 static library/prog.vala
-test cases/vala/7 shared library/installed_files.txt
+test cases/vala/6 static library/test.json
 test cases/vala/7 shared library/meson.build
+test cases/vala/7 shared library/test.json
 test cases/vala/7 shared library/lib/meson.build
 test cases/vala/7 shared library/lib/mylib.vala
 test cases/vala/7 shared library/prog/meson.build
 test cases/vala/7 shared library/prog/prog.vala
-test cases/vala/8 generated sources/installed_files.txt
 test cases/vala/8 generated sources/meson.build
+test cases/vala/8 generated sources/test.json
 test cases/vala/8 generated sources/dependency-generated/enum-types.c.template
 test cases/vala/8 generated sources/dependency-generated/enum-types.h.template
 test cases/vala/8 generated sources/dependency-generated/enums.h
@@ -2809,24 +3221,31 @@
 test cases/vala/8 generated sources/src/write_wrapper.py
 test cases/vala/8 generated sources/tools/meson.build
 test cases/vala/9 gir/foo.vala
-test cases/vala/9 gir/installed_files.txt
 test cases/vala/9 gir/meson.build
+test cases/vala/9 gir/test.json
 test cases/warning/1 version for string div/meson.build
+test cases/warning/1 version for string div/test.json
 test cases/warning/1 version for string div/a/b.c
+test cases/warning/2 languages missing native/meson.build
+test cases/warning/2 languages missing native/test.json
+test cases/wasm/1 basic/hello.c
 test cases/wasm/1 basic/hello.cpp
 test cases/wasm/1 basic/hello.html
 test cases/wasm/1 basic/meson.build
-test cases/windows/1 basic/installed_files.txt
+test cases/wasm/2 threads/meson.build
+test cases/wasm/2 threads/threads.c
+test cases/wasm/2 threads/threads.cpp
 test cases/windows/1 basic/meson.build
 test cases/windows/1 basic/prog.c
+test cases/windows/1 basic/test.json
 test cases/windows/10 vs module defs generated custom target/meson.build
 test cases/windows/10 vs module defs generated custom target/prog.c
 test cases/windows/10 vs module defs generated custom target/subdir/make_def.py
 test cases/windows/10 vs module defs generated custom target/subdir/meson.build
 test cases/windows/10 vs module defs generated custom target/subdir/somedll.c
-test cases/windows/11 exe implib/installed_files.txt
 test cases/windows/11 exe implib/meson.build
 test cases/windows/11 exe implib/prog.c
+test cases/windows/11 exe implib/test.json
 test cases/windows/12 resources with custom targets/meson.build
 test cases/windows/12 resources with custom targets/prog.c
 test cases/windows/12 resources with custom targets/res/gen-res.py
@@ -2874,6 +3293,8 @@
 test cases/windows/16 gui app/gui_app_tester.py
 test cases/windows/16 gui app/gui_prog.c
 test cases/windows/16 gui app/meson.build
+test cases/windows/17 msvc ndebug/main.cpp
+test cases/windows/17 msvc ndebug/meson.build
 test cases/windows/2 winmain/meson.build
 test cases/windows/2 winmain/prog.c
 test cases/windows/3 cpp/meson.build
@@ -2895,9 +3316,9 @@
 test cases/windows/6 vs module defs/subdir/somedll.def
 test cases/windows/7 dll versioning/copyfile.py
 test cases/windows/7 dll versioning/exe.orig.c
-test cases/windows/7 dll versioning/installed_files.txt
 test cases/windows/7 dll versioning/lib.c
 test cases/windows/7 dll versioning/meson.build
+test cases/windows/7 dll versioning/test.json
 test cases/windows/8 find program/meson.build
 test cases/windows/8 find program/test-script
 test cases/windows/8 find program/test-script-ext.py
@@ -2908,5 +3329,9 @@
 test cases/windows/9 vs module defs generated/subdir/somedll.def.in
 tools/ac_converter.py
 tools/boost_names.py
+tools/build_website.py
 tools/cmake2meson.py
-tools/dircondenser.py
\ No newline at end of file
+tools/copy_files.py
+tools/dircondenser.py
+tools/gen_data.py
+tools/regenerate_docs.py
\ No newline at end of file
diff -Nru meson-0.53.2/meson.py meson-0.57.0+really0.56.2/meson.py
--- meson-0.53.2/meson.py	2018-08-25 08:05:43.000000000 +0000
+++ meson-0.57.0+really0.56.2/meson.py	2021-01-06 10:39:48.000000000 +0000
@@ -15,7 +15,7 @@
 # limitations under the License.
 
 import sys
-from pathlib import Path
+from mesonbuild._pathlib import Path
 
 # If we're run uninstalled, add the script directory to sys.path to ensure that
 # we always import the correct mesonbuild modules even if PYTHONPATH is mangled
diff -Nru meson-0.53.2/PKG-INFO meson-0.57.0+really0.56.2/PKG-INFO
--- meson-0.53.2/PKG-INFO	2020-02-25 16:02:10.000000000 +0000
+++ meson-0.57.0+really0.56.2/PKG-INFO	2021-01-10 12:49:51.051664400 +0000
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: meson
-Version: 0.53.2
+Version: 0.56.2
 Summary: A high performance build system
 Home-page: https://mesonbuild.com
 Author: Jussi Pakkanen
diff -Nru meson-0.53.2/README.md meson-0.57.0+really0.56.2/README.md
--- meson-0.53.2/README.md	2019-12-04 18:45:50.000000000 +0000
+++ meson-0.57.0+really0.56.2/README.md	2021-01-09 10:14:21.000000000 +0000
@@ -7,7 +7,6 @@
 #### Status
 
 [![PyPI](https://img.shields.io/pypi/v/meson.svg)](https://pypi.python.org/pypi/meson)
-[![Travis](https://travis-ci.org/mesonbuild/meson.svg?branch=master)](https://travis-ci.org/mesonbuild/meson)
 [![Build Status](https://dev.azure.com/jussi0947/jussi/_apis/build/status/mesonbuild.meson)](https://dev.azure.com/jussi0947/jussi/_build/latest?definitionId=1)
 [![Codecov](https://codecov.io/gh/mesonbuild/meson/coverage.svg?branch=master)](https://codecov.io/gh/mesonbuild/meson/branch/master)
 [![Code Quality: Python](https://img.shields.io/lgtm/grade/python/g/mesonbuild/meson.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/mesonbuild/meson/context:python)
@@ -16,31 +15,32 @@
 #### Dependencies
 
  - [Python](https://python.org) (version 3.5 or newer)
- - [Ninja](https://ninja-build.org) (version 1.5 or newer)
+ - [Ninja](https://ninja-build.org) (version 1.7 or newer)
 
 #### Installing from source
 
-You can run Meson directly from a revision control checkout or an
-extracted tarball. If you wish you can install it locally with the
-standard Python command
+Meson is available on [PyPi](https://pypi.python.org/pypi/meson), so
+it can be installed with `pip3 install meson`. The exact command to
+type to install with `pip` can vary between systems; be sure to use
+the Python 3 version of `pip`.
 
-```sh
-python3 -m pip install meson 
-```
+If you wish you can install it locally with the standard Python command:
 
-Meson is also available from
-[PyPi](https://pypi.python.org/pypi/meson), so it can be installed
-with `pip3 install meson` (this does not require a source checkout,
-pip will download the package automatically). The exact command to
-type to install with Pip can vary between systems, be sure to use the
-Python 3 version of Pip.
+```console
+python3 -m pip install meson
+```
 
-For builds using Ninja, Ninja can be [downloaded directly](https://github.com/ninja-build/ninja/releases) or via
+For builds using Ninja, Ninja can be downloaded directly from the Ninja
+[GitHub release page](https://github.com/ninja-build/ninja/releases)
+or via [PyPi](https://pypi.python.org/pypi/ninja):
 
-```sh
+```console
 python3 -m pip install ninja
 ```
 
+More on installing Meson can be found at the
+[getting meson page](https://mesonbuild.com/Getting-meson.html).
+
 #### Running
 
 Meson requires that you have a source directory and a build directory
@@ -48,7 +48,7 @@
 file called `meson.build`. To generate the build system run this
 command:
 
-`meson <source directory> <build directory>`
+`meson setup <source directory> <build directory>`
 
 Depending on how you obtained Meson the command might also be called
 `meson.py` instead of plain `meson`. In the rest of this document we
@@ -58,27 +58,22 @@
 the current directory and autodetect what you mean. This allows you to
 do things like this:
 
-`cd source_root; mkdir builddir; cd builddir; meson ..`
-
-or
-
-`cd source_root; mkdir builddir; meson builddir`
+```console
+cd <source root>
+meson setup builddir
+```
 
 To compile, cd into your build directory and type `ninja`. To run unit
 tests, type `ninja test`.
 
-Install is the same but it can take an extra argument:
-
-`DESTDIR=/destdir/path ninja install`
-
-`DESTDIR` can be omitted. If you are installing to system directories,
-you may need to run this command with sudo.
-
+More on running Meson build system commands can be found at the
+[running meson page](https://mesonbuild.com/Running-Meson.html)
+or by typing `meson --help`.
 
 #### Contributing
 
 We love code contributions. See the [contribution
-page](https://mesonbuild.com/Contributing.html) on the web site for
+page](https://mesonbuild.com/Contributing.html) on the website for
 details.
 
 
@@ -96,4 +91,4 @@
 More information about the Meson build system can be found at the
 [project's home page](https://mesonbuild.com).
 
-Meson is a registered trademark of Jussi Pakkanen.
+Meson is a registered trademark of ***Jussi Pakkanen***.
diff -Nru meson-0.53.2/run_cross_test.py meson-0.57.0+really0.56.2/run_cross_test.py
--- meson-0.53.2/run_cross_test.py	2019-04-17 08:08:43.000000000 +0000
+++ meson-0.57.0+really0.56.2/run_cross_test.py	2020-10-18 21:29:13.000000000 +0000
@@ -15,44 +15,49 @@
 # limitations under the License.
 
 '''Runs the basic test suite through a cross compiler.
-Not part of the main test suite because of two reasons:
 
-1) setup of the cross build is platform specific
-2) it can be slow (e.g. when invoking test apps via wine)
+This is now just a wrapper around run_project_tests.py with specific arguments.
+'''
 
-Eventually migrate to something fancier.'''
-
-import sys
-import os
-from pathlib import Path
 import argparse
+import subprocess
+from mesonbuild import mesonlib
+from mesonbuild.coredata import version as meson_version
+from pathlib import Path
+import json
+import os
 
-from run_project_tests import gather_tests, run_tests, StopException, setup_commands
-from run_project_tests import failing_logs
 
-def runtests(cross_file, failfast):
-    commontests = [('common', gather_tests(Path('test cases', 'common')), False)]
-    try:
-        (passing_tests, failing_tests, skipped_tests) = \
-            run_tests(commontests, 'meson-cross-test-run', failfast, ['--cross-file', cross_file])
-    except StopException:
-        pass
-    print('\nTotal passed cross tests:', passing_tests)
-    print('Total failed cross tests:', failing_tests)
-    print('Total skipped cross tests:', skipped_tests)
-    if failing_tests > 0 and ('CI' in os.environ):
-        print('\nMesonlogs of failing tests\n')
-        for log in failing_logs:
-            print(log, '\n')
-    return failing_tests
+def runtests(cross_file, failfast, cross_only, test_list, env=None):
+    tests = ['--only'] + test_list
+    if not cross_only:
+        tests.append('native')
+    cmd = mesonlib.python_command + ['run_project_tests.py', '--backend', 'ninja']
+    if failfast:
+        cmd += ['--failfast']
+    cmd += tests
+    cmd += ['--cross-file', cross_file]
+    if cross_only:
+        cmd += ['--native-file', 'cross/none.txt']
+    return subprocess.call(cmd, env=env)
 
 def main():
     parser = argparse.ArgumentParser()
     parser.add_argument('--failfast', action='store_true')
+    parser.add_argument('--cross-only', action='store_true')
     parser.add_argument('cross_file')
     options = parser.parse_args()
-    setup_commands('ninja')
-    return runtests(options.cross_file, options.failfast)
+    cf_path = Path(options.cross_file)
+    try:
+        data = json.loads(cf_path.read_text())
+        real_cf = cf_path.resolve().parent / data['file']
+        assert real_cf.exists()
+        env = os.environ.copy()
+        env.update(data['env'])
+        return runtests(real_cf.as_posix(), options.failfast, options.cross_only, data['tests'], env=env)
+    except Exception:
+        return runtests(options.cross_file, options.failfast, options.cross_only, ['common'])
 
 if __name__ == '__main__':
-    sys.exit(main())
+    print('Meson build system', meson_version, 'Cross Tests')
+    raise SystemExit(main())
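For reference, the rewritten main() above treats its cross_file argument as either a plain cross file or a small JSON wrapper whose `file`, `tests` and `env` keys it reads before dispatching to run_project_tests.py. A minimal sketch of that lookup, using a hypothetical wrapper path, could look like this:

```python
import json
import os
from pathlib import Path

# Hypothetical wrapper path; real wrappers live next to the .txt cross file they name.
wrapper = Path('cross/some-cross.json')

if wrapper.exists():
    data = json.loads(wrapper.read_text())          # expected keys: 'file', 'tests', 'env'
    real_cross_file = wrapper.resolve().parent / data['file']
    env = os.environ.copy()
    env.update(data.get('env', {}))                 # e.g. WINEPATH for wine-based test runs
    # runtests(real_cross_file.as_posix(), failfast=False,
    #          cross_only=True, test_list=data['tests'], env=env)
```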
diff -Nru meson-0.53.2/run_meson_command_tests.py meson-0.57.0+really0.56.2/run_meson_command_tests.py
--- meson-0.53.2/run_meson_command_tests.py	2019-08-28 17:15:39.000000000 +0000
+++ meson-0.57.0+really0.56.2/run_meson_command_tests.py	2021-01-06 10:39:48.000000000 +0000
@@ -14,15 +14,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import sys
 import os
 import tempfile
 import unittest
 import subprocess
 import zipapp
-from pathlib import Path
+from mesonbuild._pathlib import Path
 
 from mesonbuild.mesonlib import windows_proof_rmtree, python_command, is_windows
+from mesonbuild.coredata import version as meson_version
+
 
 def get_pypath():
     import sysconfig
@@ -128,6 +129,10 @@
         os.environ['PYTHONPATH'] = os.path.join(str(pylibdir), '')
         os.environ['PATH'] = str(bindir) + os.pathsep + os.environ['PATH']
         self._run(python_command + ['setup.py', 'install', '--prefix', str(prefix)])
+        # Fix importlib-metadata by appending all dirs in pylibdir
+        PYTHONPATHS = [pylibdir] + [x for x in pylibdir.iterdir()]
+        PYTHONPATHS = [os.path.join(str(x), '') for x in PYTHONPATHS]
+        os.environ['PYTHONPATH'] = os.pathsep.join(PYTHONPATHS)
         # Check that all the files were installed correctly
         self.assertTrue(bindir.is_dir())
         self.assertTrue(pylibdir.is_dir())
@@ -195,4 +200,5 @@
 
 
 if __name__ == '__main__':
-    sys.exit(unittest.main(buffer=True))
+    print('Meson build system', meson_version, 'Command Tests')
+    raise SystemExit(unittest.main(buffer=True))
diff -Nru meson-0.53.2/run_project_tests.py meson-0.57.0+really0.56.2/run_project_tests.py
--- meson-0.53.2/run_project_tests.py	2020-02-25 18:00:46.000000000 +0000
+++ meson-0.57.0+really0.56.2/run_project_tests.py	2021-01-06 10:39:48.000000000 +0000
@@ -14,41 +14,44 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import typing as T
+from concurrent.futures import ProcessPoolExecutor, CancelledError
+from enum import Enum
+from io import StringIO
+from mesonbuild._pathlib import Path, PurePath
+import argparse
+import functools
 import itertools
+import json
+import multiprocessing
 import os
-import subprocess
+import re
+import shlex
 import shutil
-import sys
 import signal
-import shlex
-from io import StringIO
-from ast import literal_eval
-from enum import Enum
+import subprocess
+import sys
 import tempfile
-from pathlib import Path, PurePath
+import time
+import typing as T
+import xml.etree.ElementTree as ET
+
 from mesonbuild import build
 from mesonbuild import environment
 from mesonbuild import compilers
 from mesonbuild import mesonlib
 from mesonbuild import mlog
 from mesonbuild import mtest
-from mesonbuild.mesonlib import MachineChoice, stringlistify, Popen_safe
-from mesonbuild.coredata import backendlist
-import argparse
-import json
-import xml.etree.ElementTree as ET
-import time
-import multiprocessing
-from concurrent.futures import ProcessPoolExecutor, CancelledError
-import re
+from mesonbuild.build import ConfigurationData
+from mesonbuild.mesonlib import MachineChoice, Popen_safe
+from mesonbuild.coredata import backendlist, version as meson_version
+
 from run_tests import get_fake_options, run_configure, get_meson_script
 from run_tests import get_backend_commands, get_backend_args_for_dir, Backend
 from run_tests import ensure_backend_detects_changes
 from run_tests import guess_backend
 
-ALL_TESTS = ['cmake', 'common', 'warning-meson', 'failing-meson', 'failing-build', 'failing-test',
-             'kconfig', 'platform-osx', 'platform-windows', 'platform-linux',
+ALL_TESTS = ['cmake', 'common', 'native', 'warning-meson', 'failing-meson', 'failing-build', 'failing-test',
+             'keyval', 'platform-osx', 'platform-windows', 'platform-linux',
              'java', 'C#', 'vala',  'rust', 'd', 'objective c', 'objective c++',
              'fortran', 'swift', 'cuda', 'python3', 'python', 'fpga', 'frameworks', 'nasm', 'wasm'
              ]
@@ -63,25 +66,147 @@
     validate = 6
 
 
-class TestResult:
-    def __init__(self, msg, step, stdo, stde, mlog, cicmds, conftime=0, buildtime=0, testtime=0):
-        self.msg = msg
-        self.step = step
-        self.stdo = stdo
-        self.stde = stde
-        self.mlog = mlog
+class TestResult(BaseException):
+    def __init__(self, cicmds):
+        self.msg = ''  # empty msg indicates test success
+        self.stdo = ''
+        self.stde = ''
+        self.mlog = ''
         self.cicmds = cicmds
-        self.conftime = conftime
-        self.buildtime = buildtime
-        self.testtime = testtime
+        self.conftime = 0
+        self.buildtime = 0
+        self.testtime = 0
 
+    def add_step(self, step, stdo, stde, mlog='', time=0):
+        self.step = step
+        self.stdo += stdo
+        self.stde += stde
+        self.mlog += mlog
+        if step == BuildStep.configure:
+            self.conftime = time
+        elif step == BuildStep.build:
+            self.buildtime = time
+        elif step == BuildStep.test:
+            self.testtime = time
+
+    def fail(self, msg):
+        self.msg = msg
+
+class InstalledFile:
+    def __init__(self, raw: T.Dict[str, str]):
+        self.path = raw['file']
+        self.typ = raw['type']
+        self.platform = raw.get('platform', None)
+        self.language = raw.get('language', 'c')  # type: str
+
+        version = raw.get('version', '')  # type: str
+        if version:
+            self.version = version.split('.')  # type: T.List[str]
+        else:
+            # split on '' will return [''], we want an empty list though
+            self.version = []
 
+    def get_path(self, compiler: str, env: environment.Environment) -> T.Optional[Path]:
+        p = Path(self.path)
+        canonical_compiler = compiler
+        if ((compiler in ['clang-cl', 'intel-cl']) or
+                (env.machines.host.is_windows() and compiler in {'pgi', 'dmd', 'ldc'})):
+            canonical_compiler = 'msvc'
+
+        has_pdb = False
+        if self.language in {'c', 'cpp'}:
+            has_pdb = canonical_compiler == 'msvc'
+        elif self.language == 'd':
+            # dmd's optlink does not generate pdb files
+            has_pdb = env.coredata.compilers.host['d'].linker.id in {'link', 'lld-link'}
+
+        # Abort if the platform does not match
+        matches = {
+            'msvc': canonical_compiler == 'msvc',
+            'gcc': canonical_compiler != 'msvc',
+            'cygwin': env.machines.host.is_cygwin(),
+            '!cygwin': not env.machines.host.is_cygwin(),
+        }.get(self.platform or '', True)
+        if not matches:
+            return None
+
+        # Handle the different types
+        if self.typ in ['file', 'dir']:
+            return p
+        elif self.typ == 'shared_lib':
+            if env.machines.host.is_windows() or env.machines.host.is_cygwin():
+                # Windows only has foo.dll and foo-X.dll
+                if len(self.version) > 1:
+                    return None
+                if self.version:
+                    p = p.with_name('{}-{}'.format(p.name, self.version[0]))
+                return p.with_suffix('.dll')
+
+            p = p.with_name('lib{}'.format(p.name))
+            if env.machines.host.is_darwin():
+                # MacOS only has libfoo.dylib and libfoo.X.dylib
+                if len(self.version) > 1:
+                    return None
+
+                # pathlib.Path.with_suffix replaces, not appends
+                suffix = '.dylib'
+                if self.version:
+                    suffix = '.{}{}'.format(self.version[0], suffix)
+            else:
+                # pathlib.Path.with_suffix replaces, not appends
+                suffix = '.so'
+                if self.version:
+                    suffix = '{}.{}'.format(suffix, '.'.join(self.version))
+            return p.with_suffix(suffix)
+        elif self.typ == 'exe':
+            if env.machines.host.is_windows() or env.machines.host.is_cygwin():
+                return p.with_suffix('.exe')
+        elif self.typ == 'pdb':
+            if self.version:
+                p = p.with_name('{}-{}'.format(p.name, self.version[0]))
+            return p.with_suffix('.pdb') if has_pdb else None
+        elif self.typ == 'implib' or self.typ == 'implibempty':
+            if env.machines.host.is_windows() and canonical_compiler == 'msvc':
+                # only MSVC doesn't generate empty implibs
+                if self.typ == 'implibempty' and compiler == 'msvc':
+                    return None
+                return p.parent / (re.sub(r'^lib', '', p.name) + '.lib')
+            elif env.machines.host.is_windows() or env.machines.host.is_cygwin():
+                return p.with_suffix('.dll.a')
+            else:
+                return None
+        elif self.typ == 'expr':
+            return Path(platform_fix_name(p.as_posix(), canonical_compiler, env))
+        else:
+            raise RuntimeError('Invalid installed file type {}'.format(self.typ))
+
+        return p
+
+    def get_paths(self, compiler: str, env: environment.Environment, installdir: Path) -> T.List[Path]:
+        p = self.get_path(compiler, env)
+        if not p:
+            return []
+        if self.typ == 'dir':
+            abs_p = installdir / p
+            if not abs_p.exists():
+                raise RuntimeError('{} does not exist'.format(p))
+            if not abs_p.is_dir():
+                raise RuntimeError('{} is not a directory'.format(p))
+            return [x.relative_to(installdir) for x in abs_p.rglob('*') if x.is_file() or x.is_symlink()]
+        else:
+            return [p]
+
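To make the shape of these entries concrete, here is a hedged sketch of one raw dictionary as InstalledFile.__init__ above would receive it; the file name and version are invented:

```python
# Invented 'installed' entry from a test.json file, using only the keys
# InstalledFile.__init__ reads: 'file', 'type', 'platform', 'language', 'version'.
raw_entry = {
    'file': 'usr/lib/foo',
    'type': 'shared_lib',
    'platform': 'gcc',      # optional: one of msvc / gcc / cygwin / !cygwin
    'version': '1.2.3',     # optional; split on '.' into a version list
}
# installed = InstalledFile(raw_entry)
# With a non-MSVC compiler on Linux, get_path() would resolve this entry to
# usr/lib/libfoo.so.1.2.3.
```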
+@functools.total_ordering
 class TestDef:
     def __init__(self, path: Path, name: T.Optional[str], args: T.List[str], skip: bool = False):
         self.path = path
         self.name = name
         self.args = args
         self.skip = skip
+        self.env = os.environ.copy()
+        self.installed_files = []  # type: T.List[InstalledFile]
+        self.do_not_set_opts = []  # type: T.List[str]
+        self.stdout = [] # type: T.List[T.Dict[str, str]]
 
     def __repr__(self) -> str:
         return '<{}: {:<48} [{}: {}] -- {}>'.format(type(self).__name__, str(self.path), self.name, self.args, self.skip)
@@ -91,6 +216,14 @@
             return '{}   ({})'.format(self.path.as_posix(), self.name)
         return self.path.as_posix()
 
+    def __lt__(self, other: object) -> bool:
+        if isinstance(other, TestDef):
+            # None is not sortable, so replace it with an empty string
+            s_id = int(self.path.name.split(' ')[0])
+            o_id = int(other.path.name.split(' ')[0])
+            return (s_id, self.path, self.name or '') < (o_id, other.path, other.name or '')
+        return NotImplemented
+
 class AutoDeletedDir:
     def __init__(self, d):
         self.dir = d
@@ -109,11 +242,14 @@
 failing_logs = []
 print_debug = 'MESON_PRINT_TEST_OUTPUT' in os.environ
 under_ci = 'CI' in os.environ
-under_old_os_ci = 'OLD_OS_CI' in os.environ
+under_xenial_ci = under_ci and ('XENIAL' in os.environ)
+skip_scientific = under_ci and ('SKIP_SCIENTIFIC' in os.environ)
 do_debug = under_ci or print_debug
 no_meson_log_msg = 'No meson-log.txt found.'
 
-system_compiler = None
+host_c_compiler = None
+compiler_id_map = {}  # type: T.Dict[str, str]
+tool_vers_map = {}    # type: T.Dict[str, str]
 
 class StopException(Exception):
     def __init__(self):
@@ -133,17 +269,8 @@
     compile_commands, clean_commands, test_commands, install_commands, \
         uninstall_commands = get_backend_commands(backend, do_debug)
 
-def get_relative_files_list_from_dir(fromdir: Path) -> T.List[Path]:
-    return [file.relative_to(fromdir) for file in fromdir.rglob('*') if file.is_file()]
-
-def platform_fix_name(fname: str, compiler, env) -> str:
-    # canonicalize compiler
-    if (compiler in {'clang-cl', 'intel-cl'} or
-       (env.machines.host.is_windows() and compiler == 'pgi')):
-        canonical_compiler = 'msvc'
-    else:
-        canonical_compiler = compiler
-
+# TODO try to eliminate or at least reduce this function
+def platform_fix_name(fname: str, canonical_compiler: str, env: environment.Environment) -> str:
     if '?lib' in fname:
         if env.machines.host.is_windows() and canonical_compiler == 'msvc':
             fname = re.sub(r'lib/\?lib(.*)\.', r'bin/\1.', fname)
@@ -160,31 +287,6 @@
         else:
             fname = re.sub(r'\?lib', 'lib', fname)
 
-    if fname.endswith('?exe'):
-        fname = fname[:-4]
-        if env.machines.host.is_windows() or env.machines.host.is_cygwin():
-            return fname + '.exe'
-
-    if fname.startswith('?msvc:'):
-        fname = fname[6:]
-        if canonical_compiler != 'msvc':
-            return None
-
-    if fname.startswith('?gcc:'):
-        fname = fname[5:]
-        if canonical_compiler == 'msvc':
-            return None
-
-    if fname.startswith('?cygwin:'):
-        fname = fname[8:]
-        if not env.machines.host.is_cygwin():
-            return None
-
-    if fname.startswith('?!cygwin:'):
-        fname = fname[9:]
-        if env.machines.host.is_cygwin():
-            return None
-
     if fname.endswith('?so'):
         if env.machines.host.is_windows() and canonical_compiler == 'msvc':
             fname = re.sub(r'lib/([^/]*)\?so$', r'bin/\1.dll', fname)
@@ -204,53 +306,33 @@
         else:
             return fname[:-3] + '.so'
 
-    if fname.endswith('?implib') or fname.endswith('?implibempty'):
-        if env.machines.host.is_windows() and canonical_compiler == 'msvc':
-            # only MSVC doesn't generate empty implibs
-            if fname.endswith('?implibempty') and compiler == 'msvc':
-                return None
-            return re.sub(r'/(?:lib|)([^/]*?)\?implib(?:empty|)$', r'/\1.lib', fname)
-        elif env.machines.host.is_windows() or env.machines.host.is_cygwin():
-            return re.sub(r'\?implib(?:empty|)$', r'.dll.a', fname)
-        else:
-            return None
-
     return fname
 
-def validate_install(srcdir: str, installdir: Path, compiler, env) -> str:
-    # List of installed files
-    info_file = Path(srcdir) / 'installed_files.txt'
-    installdir = Path(installdir)
-    # If this exists, the test does not install any other files
-    noinst_file = Path('usr/no-installed-files')
-    expected = {}  # type: T.Dict[Path, bool]
+def validate_install(test: TestDef, installdir: Path, compiler: str, env: environment.Environment) -> str:
     ret_msg = ''
-    # Generate list of expected files
-    if (installdir / noinst_file).is_file():
-        expected[noinst_file] = False
-    elif info_file.is_file():
-        with info_file.open() as f:
-            for line in f:
-                line = platform_fix_name(line.strip(), compiler, env)
-                if line:
-                    expected[Path(line)] = False
-    # Check if expected files were found
-    for fname in expected:
-        file_path = installdir / fname
-        if file_path.is_file() or file_path.is_symlink():
-            expected[fname] = True
-    for (fname, found) in expected.items():
-        if not found:
-            ret_msg += 'Expected file {} missing.\n'.format(fname)
-    # Check if there are any unexpected files
-    found = get_relative_files_list_from_dir(installdir)
+    expected_raw = []  # type: T.List[Path]
+    for i in test.installed_files:
+        try:
+            expected_raw += i.get_paths(compiler, env, installdir)
+        except RuntimeError as err:
+            ret_msg += 'Expected path error: {}\n'.format(err)
+    expected = {x: False for x in expected_raw}
+    found = [x.relative_to(installdir) for x in installdir.rglob('*') if x.is_file() or x.is_symlink()]
+    # Mark all found files as found and detect unexpected files
     for fname in found:
         if fname not in expected:
             ret_msg += 'Extra file {} found.\n'.format(fname)
+            continue
+        expected[fname] = True
+    # Check if expected files were found
+    for p, f in expected.items():
+        if not f:
+            ret_msg += 'Expected file {} missing.\n'.format(p)
+    # List dir content on error
     if ret_msg != '':
         ret_msg += '\nInstall dir contents:\n'
         for i in found:
-            ret_msg += '  - {}'.format(i)
+            ret_msg += '  - {}\n'.format(i)
     return ret_msg
 
 def log_text_file(logfile, testdir, stdo, stde):
@@ -280,30 +362,29 @@
 
 
 def bold(text):
-    return mlog.bold(text).get_text(mlog.colorize_console)
+    return mlog.bold(text).get_text(mlog.colorize_console())
 
 
 def green(text):
-    return mlog.green(text).get_text(mlog.colorize_console)
+    return mlog.green(text).get_text(mlog.colorize_console())
 
 
 def red(text):
-    return mlog.red(text).get_text(mlog.colorize_console)
+    return mlog.red(text).get_text(mlog.colorize_console())
 
 
 def yellow(text):
-    return mlog.yellow(text).get_text(mlog.colorize_console)
+    return mlog.yellow(text).get_text(mlog.colorize_console())
 
 
 def _run_ci_include(args: T.List[str]) -> str:
     if not args:
         return 'At least one parameter required'
     try:
-        file_path = Path(args[0])
-        data = file_path.open(errors='ignore', encoding='utf-8').read()
+        data = Path(args[0]).read_text(errors='ignore', encoding='utf-8')
         return 'Included file {}:\n{}\n'.format(args[0], data)
     except Exception:
-        return 'Failed to open {} ({})'.format(args[0])
+        return 'Failed to open {}'.format(args[0])
 
 ci_commands = {
     'ci_include': _run_ci_include
@@ -320,6 +401,66 @@
         res += ['CI COMMAND {}:\n{}\n'.format(cmd[0], ci_commands[cmd[0]](cmd[1:]))]
     return res
 
+def _compare_output(expected: T.List[T.Dict[str, str]], output: str, desc: str) -> str:
+    if expected:
+        i = iter(expected)
+
+        def next_expected(i):
+            # Get the next expected line
+            item = next(i)
+            how = item.get('match', 'literal')
+            expected = item.get('line')
+
+            # Simple heuristic to automatically convert path separators for
+            # Windows:
+            #
+            # Any '/' appearing before 'WARNING' or 'ERROR' (i.e. a path in a
+            # filename part of a location) is replaced with '\' (in a re: '\\'
+            # which matches a literal '\')
+            #
+            # (There should probably be a way to turn this off for more complex
+            # cases which don't fit this)
+            if mesonlib.is_windows():
+                if how != "re":
+                    sub = r'\\'
+                else:
+                    sub = r'\\\\'
+                expected = re.sub(r'/(?=.*(WARNING|ERROR))', sub, expected)
+
+            return how, expected
+
+        try:
+            how, expected = next_expected(i)
+            for actual in output.splitlines():
+                if how == "re":
+                    match = bool(re.match(expected, actual))
+                else:
+                    match = (expected == actual)
+                if match:
+                    how, expected = next_expected(i)
+
+            # reached the end of output without finding expected
+            return 'expected "{}" not found in {}'.format(expected, desc)
+        except StopIteration:
+            # matched all expected lines
+            pass
+
+    return ''
+
+def validate_output(test: TestDef, stdo: str, stde: str) -> str:
+    return _compare_output(test.stdout, stdo, 'stdout')
+
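For illustration, a hedged example of the expected-output list that _compare_output() walks through; the message texts are invented, only the 'line' and 'match' keys come from the code above:

```python
# Invented expected-stdout entries in the form _compare_output() iterates over.
expected_stdout = [
    {'line': 'WARNING: something deprecated was used'},    # 'match' defaults to 'literal'
    {'line': r'ERROR: .* not found', 'match': 're'},        # regular-expression match
]
# _compare_output(expected_stdout, actual_output, 'stdout') returns '' when each
# entry is found, in order, in the output lines; otherwise it returns a message
# naming the first expected line that was never matched.
```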
+# There are some class variables and such that cache
+# information. Clear all of these. The better solution
+# would be to change the code so that no state is persisted
+# but that would be a lot of work given that Meson was originally
+# coded to run as a batch process.
+def clear_internal_caches():
+    import mesonbuild.interpreterbase
+    from mesonbuild.dependencies import CMakeDependency
+    from mesonbuild.mesonlib import PerMachine
+    mesonbuild.interpreterbase.FeatureNew.feature_registry = {}
+    CMakeDependency.class_cmakeinfo = PerMachine(None, None)
 
 def run_test_inprocess(testdir):
     old_stdout = sys.stdout
@@ -342,170 +483,225 @@
         os.chdir(old_cwd)
     return max(returncode_test, returncode_benchmark), mystdout.getvalue(), mystderr.getvalue(), test_log
 
-def parse_test_args(testdir):
-    args = []
-    try:
-        with open(os.path.join(testdir, 'test_args.txt'), 'r') as f:
-            content = f.read()
-            try:
-                args = literal_eval(content)
-            except Exception:
-                raise Exception('Malformed test_args file.')
-            args = stringlistify(args)
-    except FileNotFoundError:
-        pass
-    return args
-
 # Build directory name must be the same so Ccache works over
 # consecutive invocations.
-def create_deterministic_builddir(src_dir, name):
+def create_deterministic_builddir(test: TestDef, use_tmpdir: bool) -> str:
     import hashlib
-    if name:
-        src_dir += name
+    src_dir = test.path.as_posix()
+    if test.name:
+        src_dir += test.name
     rel_dirname = 'b ' + hashlib.sha256(src_dir.encode(errors='ignore')).hexdigest()[0:10]
-    os.mkdir(rel_dirname)
-    abs_pathname = os.path.join(os.getcwd(), rel_dirname)
+    abs_pathname = os.path.join(tempfile.gettempdir() if use_tmpdir else os.getcwd(), rel_dirname)
+    os.mkdir(abs_pathname)
     return abs_pathname
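The naming scheme above is easy to reproduce by hand; a small sketch with an invented test path shows why the directory name stays stable across runs, which is what keeps ccache effective:

```python
import hashlib

# Invented test path; the scheme matches create_deterministic_builddir() above.
src_dir = 'test cases/common/1 trivial'
rel_dirname = 'b ' + hashlib.sha256(src_dir.encode(errors='ignore')).hexdigest()[0:10]
print(rel_dirname)  # the same ten-character hash prefix on every invocation
```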
 
-def run_test(skipped, testdir, name, extra_args, compiler, backend, flags, commands, should_fail):
-    if skipped:
+def format_parameter_file(file_basename: str, test: TestDef, test_build_dir: str) -> Path:
+    confdata = ConfigurationData()
+    confdata.values = {'MESON_TEST_ROOT': (str(test.path.absolute()), 'base directory of current test')}
+
+    template = test.path / (file_basename + '.in')
+    destination = Path(test_build_dir) / file_basename
+    mesonlib.do_conf_file(str(template), str(destination), confdata, 'meson')
+
+    return destination
+
+def detect_parameter_files(test: TestDef, test_build_dir: str) -> (Path, Path):
+    nativefile = test.path / 'nativefile.ini'
+    crossfile = test.path / 'crossfile.ini'
+
+    if os.path.exists(str(test.path / 'nativefile.ini.in')):
+        nativefile = format_parameter_file('nativefile.ini', test, test_build_dir)
+
+    if os.path.exists(str(test.path / 'crossfile.ini.in')):
+        crossfile = format_parameter_file('crossfile.ini', test, test_build_dir)
+
+    return nativefile, crossfile
+
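As a hedged illustration of the templating step, a nativefile.ini.in in a test directory could reference the single value format_parameter_file() substitutes; the binary name and path below are invented:

```python
# Invented nativefile.ini.in contents; '@MESON_TEST_ROOT@' is the only value the
# ConfigurationData above provides, substituted via do_conf_file with the 'meson' format.
template_text = """\
[binaries]
; made-up helper shipped inside the test's source directory
mytool = '@MESON_TEST_ROOT@/tools/mytool.py'
"""
```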
+def run_test(test: TestDef, extra_args, compiler, backend, flags, commands, should_fail, use_tmp: bool):
+    if test.skip:
         return None
-    with AutoDeletedDir(create_deterministic_builddir(testdir, name)) as build_dir:
-        with AutoDeletedDir(tempfile.mkdtemp(prefix='i ', dir=os.getcwd())) as install_dir:
+    with AutoDeletedDir(create_deterministic_builddir(test, use_tmp)) as build_dir:
+        with AutoDeletedDir(tempfile.mkdtemp(prefix='i ', dir=None if use_tmp else os.getcwd())) as install_dir:
             try:
-                return _run_test(testdir, build_dir, install_dir, extra_args, compiler, backend, flags, commands, should_fail)
+                return _run_test(test, build_dir, install_dir, extra_args, compiler, backend, flags, commands, should_fail)
+            except TestResult as r:
+                return r
             finally:
                 mlog.shutdown() # Close the log file because otherwise Windows wets itself.
 
-def pass_prefix_to_test(dirname):
-    if '39 prefix absolute' in dirname:
-        return False
-    return True
-
-def pass_libdir_to_test(dirname):
-    if '8 install' in dirname:
-        return False
-    if '38 libdir must be inside prefix' in dirname:
-        return False
-    if '195 install_mode' in dirname:
-        return False
-    return True
-
-def _run_test(testdir, test_build_dir, install_dir, extra_args, compiler, backend, flags, commands, should_fail):
+def _run_test(test: TestDef, test_build_dir: str, install_dir: str, extra_args, compiler, backend, flags, commands, should_fail):
     compile_commands, clean_commands, install_commands, uninstall_commands = commands
-    test_args = parse_test_args(testdir)
     gen_start = time.time()
-    setup_env = None
     # Configure in-process
-    if pass_prefix_to_test(testdir):
-        gen_args = ['--prefix', '/usr']
-    else:
-        gen_args = []
-    if pass_libdir_to_test(testdir):
+    gen_args = []  # type: T.List[str]
+    if 'prefix' not in test.do_not_set_opts:
+        gen_args += ['--prefix', 'x:/usr'] if mesonlib.is_windows() else ['--prefix', '/usr']
+    if 'libdir' not in test.do_not_set_opts:
         gen_args += ['--libdir', 'lib']
-    gen_args += [testdir, test_build_dir] + flags + test_args + extra_args
-    nativefile = os.path.join(testdir, 'nativefile.ini')
-    if os.path.exists(nativefile):
-        gen_args.extend(['--native-file', nativefile])
-    crossfile = os.path.join(testdir, 'crossfile.ini')
-    if os.path.exists(crossfile):
-        gen_args.extend(['--cross-file', crossfile])
-    setup_env_file = os.path.join(testdir, 'setup_env.json')
-    if os.path.exists(setup_env_file):
-        setup_env = os.environ.copy()
-        with open(setup_env_file, 'r') as fp:
-            data = json.load(fp)
-            for key, val in data.items():
-                val = val.replace('@ROOT@', os.path.abspath(testdir))
-                setup_env[key] = val
-    (returncode, stdo, stde) = run_configure(gen_args, env=setup_env)
+    gen_args += [test.path.as_posix(), test_build_dir] + flags + extra_args
+
+    nativefile, crossfile = detect_parameter_files(test, test_build_dir)
+
+    if nativefile.exists():
+        gen_args.extend(['--native-file', nativefile.as_posix()])
+    if crossfile.exists():
+        gen_args.extend(['--cross-file', crossfile.as_posix()])
+    (returncode, stdo, stde) = run_configure(gen_args, env=test.env)
     try:
         logfile = Path(test_build_dir, 'meson-logs', 'meson-log.txt')
         mesonlog = logfile.open(errors='ignore', encoding='utf-8').read()
     except Exception:
         mesonlog = no_meson_log_msg
     cicmds = run_ci_commands(mesonlog)
-    gen_time = time.time() - gen_start
+    testresult = TestResult(cicmds)
+    testresult.add_step(BuildStep.configure, stdo, stde, mesonlog, time.time() - gen_start)
+    output_msg = validate_output(test, stdo, stde)
+    testresult.mlog += output_msg
+    if output_msg:
+        testresult.fail('Unexpected output while configuring.')
+        return testresult
     if should_fail == 'meson':
         if returncode == 1:
-            return TestResult('', BuildStep.configure, stdo, stde, mesonlog, cicmds, gen_time)
+            return testresult
         elif returncode != 0:
-            return TestResult('Test exited with unexpected status {}'.format(returncode), BuildStep.configure, stdo, stde, mesonlog, cicmds, gen_time)
+            testresult.fail('Test exited with unexpected status {}.'.format(returncode))
+            return testresult
         else:
-            return TestResult('Test that should have failed succeeded', BuildStep.configure, stdo, stde, mesonlog, cicmds, gen_time)
+            testresult.fail('Test that should have failed succeeded.')
+            return testresult
     if returncode != 0:
-        return TestResult('Generating the build system failed.', BuildStep.configure, stdo, stde, mesonlog, cicmds, gen_time)
+        testresult.fail('Generating the build system failed.')
+        return testresult
     builddata = build.load(test_build_dir)
-    # Touch the meson.build file to force a regenerate so we can test that
-    # regeneration works before a build is run.
-    ensure_backend_detects_changes(backend)
-    os.utime(os.path.join(testdir, 'meson.build'))
-    # Build with subprocess
     dir_args = get_backend_args_for_dir(backend, test_build_dir)
-    build_start = time.time()
-    pc, o, e = Popen_safe(compile_commands + dir_args, cwd=test_build_dir)
-    build_time = time.time() - build_start
-    stdo += o
-    stde += e
-    if should_fail == 'build':
+
+    # Build with subprocess
+    def build_step():
+        build_start = time.time()
+        pc, o, e = Popen_safe(compile_commands + dir_args, cwd=test_build_dir)
+        testresult.add_step(BuildStep.build, o, e, '', time.time() - build_start)
+        if should_fail == 'build':
+            if pc.returncode != 0:
+                raise testresult
+            testresult.fail('Test that should have failed to build succeeded.')
+            raise testresult
         if pc.returncode != 0:
-            return TestResult('', BuildStep.build, stdo, stde, mesonlog, cicmds, gen_time)
-        return TestResult('Test that should have failed to build succeeded', BuildStep.build, stdo, stde, mesonlog, cicmds, gen_time)
-    if pc.returncode != 0:
-        return TestResult('Compiling source code failed.', BuildStep.build, stdo, stde, mesonlog, cicmds, gen_time, build_time)
-    # Touch the meson.build file to force a regenerate so we can test that
-    # regeneration works after a build is complete.
-    ensure_backend_detects_changes(backend)
-    os.utime(os.path.join(testdir, 'meson.build'))
-    test_start = time.time()
+            testresult.fail('Compiling source code failed.')
+            raise testresult
+
+    # Touch the meson.build file to force a regenerate
+    def force_regenerate():
+        ensure_backend_detects_changes(backend)
+        os.utime(str(test.path / 'meson.build'))
+
+    # just test building
+    build_step()
+
+    # test that regeneration works for build step
+    force_regenerate()
+    build_step()  # TBD: assert nothing gets built after the regenerate?
+
+    # test that regeneration works for test step
+    force_regenerate()
+
     # Test in-process
+    clear_internal_caches()
+    test_start = time.time()
     (returncode, tstdo, tstde, test_log) = run_test_inprocess(test_build_dir)
-    test_time = time.time() - test_start
-    stdo += tstdo
-    stde += tstde
-    mesonlog += test_log
+    testresult.add_step(BuildStep.test, tstdo, tstde, test_log, time.time() - test_start)
     if should_fail == 'test':
         if returncode != 0:
-            return TestResult('', BuildStep.test, stdo, stde, mesonlog, cicmds, gen_time)
-        return TestResult('Test that should have failed to run unit tests succeeded', BuildStep.test, stdo, stde, mesonlog, cicmds, gen_time)
+            return testresult
+        testresult.fail('Test that should have failed to run unit tests succeeded.')
+        return testresult
     if returncode != 0:
-        return TestResult('Running unit tests failed.', BuildStep.test, stdo, stde, mesonlog, cicmds, gen_time, build_time, test_time)
+        testresult.fail('Running unit tests failed.')
+        return testresult
+
     # Do installation, if the backend supports it
     if install_commands:
         env = os.environ.copy()
         env['DESTDIR'] = install_dir
         # Install with subprocess
         pi, o, e = Popen_safe(install_commands, cwd=test_build_dir, env=env)
-        stdo += o
-        stde += e
+        testresult.add_step(BuildStep.install, o, e)
         if pi.returncode != 0:
-            return TestResult('Running install failed.', BuildStep.install, stdo, stde, mesonlog, cicmds, gen_time, build_time, test_time)
+            testresult.fail('Running install failed.')
+            return testresult
+
     # Clean with subprocess
     env = os.environ.copy()
     pi, o, e = Popen_safe(clean_commands + dir_args, cwd=test_build_dir, env=env)
-    stdo += o
-    stde += e
+    testresult.add_step(BuildStep.clean, o, e)
     if pi.returncode != 0:
-        return TestResult('Running clean failed.', BuildStep.clean, stdo, stde, mesonlog, cicmds, gen_time, build_time, test_time)
+        testresult.fail('Running clean failed.')
+        return testresult
+
+    # Validate installed files
+    testresult.add_step(BuildStep.install, '', '')
     if not install_commands:
-        return TestResult('', BuildStep.install, '', '', mesonlog, cicmds, gen_time, build_time, test_time)
-    return TestResult(validate_install(testdir, install_dir, compiler, builddata.environment),
-                      BuildStep.validate, stdo, stde, mesonlog, cicmds, gen_time, build_time, test_time)
+        return testresult
+    install_msg = validate_install(test, Path(install_dir), compiler, builddata.environment)
+    if install_msg:
+        testresult.fail('\n' + install_msg)
+        return testresult
+
+    return testresult
 
-def gather_tests(testdir: Path) -> T.List[TestDef]:
-    tests = [t.name for t in testdir.glob('*') if t.is_dir()]
+def gather_tests(testdir: Path, stdout_mandatory: bool) -> T.List[TestDef]:
+    tests = [t.name for t in testdir.iterdir() if t.is_dir()]
     tests = [t for t in tests if not t.startswith('.')]  # Filter non-tests files (dot files, etc)
-    tests = [TestDef(testdir / t, None, []) for t in tests]
-    all_tests = []
-    for t in tests:
-        matrix_file = t.path / 'test_matrix.json'
-        if not matrix_file.is_file():
+    test_defs = [TestDef(testdir / t, None, []) for t in tests]
+    all_tests = []  # type: T.List[TestDef]
+    for t in test_defs:
+        test_def = {}
+        test_def_file = t.path / 'test.json'
+        if test_def_file.is_file():
+            test_def = json.loads(test_def_file.read_text())
+
+        # Handle additional environment variables
+        env = {}  # type: T.Dict[str, str]
+        if 'env' in test_def:
+            assert isinstance(test_def['env'], dict)
+            env = test_def['env']
+            for key, val in env.items():
+                val = val.replace('@ROOT@', t.path.resolve().as_posix())
+                env[key] = val
+
+        # Handle installed files
+        installed = []  # type: T.List[InstalledFile]
+        if 'installed' in test_def:
+            installed = [InstalledFile(x) for x in test_def['installed']]
+
+        # Handle expected output
+        stdout = test_def.get('stdout', [])
+        if stdout_mandatory and not stdout:
+            raise RuntimeError("{} must contain a non-empty stdout key".format(test_def_file))
+
+        # Handle the do_not_set_opts list
+        do_not_set_opts = test_def.get('do_not_set_opts', [])  # type: T.List[str]
+
+        # Skip tests if the tool requirements are not met
+        if 'tools' in test_def:
+            assert isinstance(test_def['tools'], dict)
+            for tool, vers_req in test_def['tools'].items():
+                if tool not in tool_vers_map:
+                    t.skip = True
+                elif not mesonlib.version_compare(tool_vers_map[tool], vers_req):
+                    t.skip = True
+
+        # Skip the matrix code and just update the existing test
+        if 'matrix' not in test_def:
+            t.env.update(env)
+            t.installed_files = installed
+            t.do_not_set_opts = do_not_set_opts
+            t.stdout = stdout
             all_tests += [t]
             continue
 
-        # Build multiple tests from matrix definition
+        # A 'matrix' entry is present, so build multiple tests from the matrix definition
         opt_list = []  # type: T.List[T.List[T.Tuple[str, bool]]]
-        matrix = json.loads(matrix_file.read_text())
+        matrix = test_def['matrix']
         assert "options" in matrix
         for key, val in matrix["options"].items():
             assert isinstance(val, list)
@@ -517,10 +713,22 @@
 
                 # Skip the matrix entry if environment variable is present
                 if 'skip_on_env' in i:
-                    for env in i['skip_on_env']:
-                        if env in os.environ:
+                    for skip_env_var in i['skip_on_env']:
+                        if skip_env_var in os.environ:
                             skip = True
 
+                # Only run the test if all compiler ID's match
+                if 'compilers' in i:
+                    for lang, id_list in i['compilers'].items():
+                        if lang not in compiler_id_map or compiler_id_map[lang] not in id_list:
+                            skip = True
+                            break
+
+                # Add an empty matrix entry
+                if i['val'] is None:
+                    tmp_opts += [(None, skip)]
+                    continue
+
                 tmp_opts += [('{}={}'.format(key, i['val']), skip)]
 
             if opt_list:
@@ -532,16 +740,36 @@
             else:
                 opt_list = [[x] for x in tmp_opts]
 
+        # Exclude specific configurations
+        if 'exclude' in matrix:
+            assert isinstance(matrix['exclude'], list)
+            new_opt_list = []  # type: T.List[T.List[T.Tuple[str, bool]]]
+            for i in opt_list:
+                exclude = False
+                opt_names = [x[0] for x in i]
+                for j in matrix['exclude']:
+                    ex_list = ['{}={}'.format(k, v) for k, v in j.items()]
+                    if all([x in opt_names for x in ex_list]):
+                        exclude = True
+                        break
+
+                if not exclude:
+                    new_opt_list += [i]
+
+            opt_list = new_opt_list
+
         for i in opt_list:
-            name = ' '.join([x[0] for x in i])
-            opts = ['-D' + x[0] for x in i]
+            name = ' '.join([x[0] for x in i if x[0] is not None])
+            opts = ['-D' + x[0] for x in i if x[0] is not None]
             skip = any([x[1] for x in i])
-            all_tests += [TestDef(t.path, name, opts, skip)]
+            test = TestDef(t.path, name, opts, skip or t.skip)
+            test.env.update(env)
+            test.installed_files = installed
+            test.do_not_set_opts = do_not_set_opts
+            test.stdout = stdout
+            all_tests += [test]
 
-    all_tests = [(int(t.path.name.split()[0]), t.name or '', t) for t in all_tests]
-    all_tests.sort()
-    all_tests = [t[2] for t in all_tests]
-    return all_tests
+    return sorted(all_tests)
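Putting the keys handled above together, a hedged sketch of a complete test.json, shown as the dictionary json.loads() would return to gather_tests(); every value is invented:

```python
# Invented test.json contents covering the keys gather_tests() understands.
test_def = {
    'env': {'TEST_DATA_DIR': '@ROOT@/data'},            # '@ROOT@' becomes the test directory
    'installed': [{'file': 'usr/bin/prog', 'type': 'exe'}],
    'stdout': [{'line': 'WARNING: example warning'}],
    'do_not_set_opts': ['libdir'],
    'tools': {'cmake': '>=3.11'},                        # unmet requirements skip the test
    'matrix': {
        'options': {
            'use_feature': [{'val': 'true'}, {'val': 'false'}],
            'b_lto':       [{'val': 'true'}, {'val': 'false'}],
        },
        'exclude': [{'use_feature': 'false', 'b_lto': 'true'}],
    },
}
```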
 
 def have_d_compiler():
     if shutil.which("ldc2"):
@@ -562,8 +790,8 @@
         return True
     return False
 
-def have_objc_compiler():
-    with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir='.')) as build_dir:
+def have_objc_compiler(use_tmp: bool) -> bool:
+    with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir=None if use_tmp else '.')) as build_dir:
         env = environment.Environment(None, build_dir, get_fake_options('/'))
         try:
             objc_comp = env.detect_objc_compiler(MachineChoice.HOST)
@@ -578,8 +806,8 @@
             return False
     return True
 
-def have_objcpp_compiler():
-    with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir='.')) as build_dir:
+def have_objcpp_compiler(use_tmp: bool) -> bool:
+    with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir=None if use_tmp else '.')) as build_dir:
         env = environment.Environment(None, build_dir, get_fake_options('/'))
         try:
             objcpp_comp = env.detect_objcpp_compiler(MachineChoice.HOST)
@@ -601,12 +829,16 @@
 
 def skippable(suite, test):
     # Everything is optional when not running on CI, or on Ubuntu 16.04 CI
-    if not under_ci or under_old_os_ci:
+    if not under_ci or under_xenial_ci:
         return True
 
     if not suite.endswith('frameworks'):
         return True
 
+    # this test's assumptions aren't valid for Windows paths
+    if test.endswith('38 libdir must be inside prefix'):
+        return True
+
     # gtk-doc test may be skipped, pending upstream fixes for spaces in
     # filenames landing in the distro used for CI
     if test.endswith('10 gtk-doc'):
@@ -624,6 +856,21 @@
     if test.endswith('29 blocks'):
         return True
 
+    # Scientific libraries are skippable on certain systems
+    # See the discussion here: https://github.com/mesonbuild/meson/pull/6562
+    if any([x in test for x in ['17 mpi', '25 hdf5', '30 scalapack']]) and skip_scientific:
+        return True
+
+    # These create OS specific tests, and need to be skippable
+    if any([x in test for x in ['16 sdl', '17 mpi']]):
+        return True
+
+    # We test cmake, and llvm-config. Some linux spins don't provide cmake or
+    # don't provide either the static or shared llvm libraries (fedora and
+    # opensuse only have the dynamic ones, for example).
+    if test.endswith('15 llvm'):
+        return True
+
     # No frameworks test should be skipped on linux CI, as we expect all
     # prerequisites to be installed
     if mesonlib.is_linux():
@@ -692,7 +939,7 @@
         return True
     return False
 
-def detect_tests_to_run(only: T.List[str]) -> T.List[T.Tuple[str, T.List[TestDef], bool]]:
+def detect_tests_to_run(only: T.List[str], use_tmp: bool) -> T.List[T.Tuple[str, T.List[TestDef], bool]]:
     """
     Parameters
     ----------
@@ -710,60 +957,66 @@
                        shutil.which('pgfortran') or
                        shutil.which('ifort'))
 
-    # Name, subdirectory, skip condition.
+    class TestCategory:
+        def __init__(self, category: str, subdir: str, skip: bool = False, stdout_mandatory: bool = False):
+            self.category = category                  # category name
+            self.subdir = subdir                      # subdirectory
+            self.skip = skip                          # skip condition
+            self.stdout_mandatory = stdout_mandatory  # expected stdout is mandatory for tests in this category
+
     all_tests = [
-        ('cmake', 'cmake', not shutil.which('cmake') or (os.environ.get('compiler') == 'msvc2015' and under_ci)),
-        ('common', 'common', False),
-        ('warning-meson', 'warning', False),
-        ('failing-meson', 'failing', False),
-        ('failing-build', 'failing build', False),
-        ('failing-test',  'failing test', False),
-        ('kconfig', 'kconfig', False),
-
-        ('platform-osx', 'osx', not mesonlib.is_osx()),
-        ('platform-windows', 'windows', not mesonlib.is_windows() and not mesonlib.is_cygwin()),
-        ('platform-linux', 'linuxlike', mesonlib.is_osx() or mesonlib.is_windows()),
-
-        ('java', 'java', backend is not Backend.ninja or mesonlib.is_osx() or not have_java()),
-        ('C#', 'csharp', skip_csharp(backend)),
-        ('vala', 'vala', backend is not Backend.ninja or not shutil.which(os.environ.get('VALAC', 'valac'))),
-        ('rust', 'rust', should_skip_rust(backend)),
-        ('d', 'd', backend is not Backend.ninja or not have_d_compiler()),
-        ('objective c', 'objc', backend not in (Backend.ninja, Backend.xcode) or not have_objc_compiler()),
-        ('objective c++', 'objcpp', backend not in (Backend.ninja, Backend.xcode) or not have_objcpp_compiler()),
-        ('fortran', 'fortran', skip_fortran or backend != Backend.ninja),
-        ('swift', 'swift', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('swiftc')),
+        TestCategory('cmake', 'cmake', not shutil.which('cmake') or (os.environ.get('compiler') == 'msvc2015' and under_ci)),
+        TestCategory('common', 'common'),
+        TestCategory('native', 'native'),
+        TestCategory('warning-meson', 'warning', stdout_mandatory=True),
+        TestCategory('failing-meson', 'failing', stdout_mandatory=True),
+        TestCategory('failing-build', 'failing build'),
+        TestCategory('failing-test',  'failing test'),
+        TestCategory('keyval', 'keyval'),
+        TestCategory('platform-osx', 'osx', not mesonlib.is_osx()),
+        TestCategory('platform-windows', 'windows', not mesonlib.is_windows() and not mesonlib.is_cygwin()),
+        TestCategory('platform-linux', 'linuxlike', mesonlib.is_osx() or mesonlib.is_windows()),
+        TestCategory('java', 'java', backend is not Backend.ninja or mesonlib.is_osx() or not have_java()),
+        TestCategory('C#', 'csharp', skip_csharp(backend)),
+        TestCategory('vala', 'vala', backend is not Backend.ninja or not shutil.which(os.environ.get('VALAC', 'valac'))),
+        TestCategory('rust', 'rust', should_skip_rust(backend)),
+        TestCategory('d', 'd', backend is not Backend.ninja or not have_d_compiler()),
+        TestCategory('objective c', 'objc', backend not in (Backend.ninja, Backend.xcode) or not have_objc_compiler(options.use_tmpdir)),
+        TestCategory('objective c++', 'objcpp', backend not in (Backend.ninja, Backend.xcode) or not have_objcpp_compiler(options.use_tmpdir)),
+        TestCategory('fortran', 'fortran', skip_fortran or backend != Backend.ninja),
+        TestCategory('swift', 'swift', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('swiftc')),
         # CUDA tests on Windows: use Ninja backend:  python run_project_tests.py --only cuda --backend ninja
-        ('cuda', 'cuda', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('nvcc')),
-        ('python3', 'python3', backend is not Backend.ninja),
-        ('python', 'python', backend is not Backend.ninja),
-        ('fpga', 'fpga', shutil.which('yosys') is None),
-        ('frameworks', 'frameworks', False),
-        ('nasm', 'nasm', False),
-        ('wasm', 'wasm', shutil.which('emcc') is None or backend is not Backend.ninja),
+        TestCategory('cuda', 'cuda', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('nvcc')),
+        TestCategory('python3', 'python3', backend is not Backend.ninja),
+        TestCategory('python', 'python'),
+        TestCategory('fpga', 'fpga', shutil.which('yosys') is None),
+        TestCategory('frameworks', 'frameworks'),
+        TestCategory('nasm', 'nasm'),
+        TestCategory('wasm', 'wasm', shutil.which('emcc') is None or backend is not Backend.ninja),
     ]
 
-    names = [t[0] for t in all_tests]
-    assert names == ALL_TESTS, 'argparse("--only", choices=ALL_TESTS) need to be updated to match all_tests names'
+    categories = [t.category for t in all_tests]
+    assert categories == ALL_TESTS, 'argparse("--only", choices=ALL_TESTS) needs to be updated to match all_tests categories'
+
     if only:
-        ind = [names.index(o) for o in only]
-        all_tests = [all_tests[i] for i in ind]
-    gathered_tests = [(name, gather_tests(Path('test cases', subdir)), skip) for name, subdir, skip in all_tests]
+        all_tests = [t for t in all_tests if t.category in only]
+
+    gathered_tests = [(t.category, gather_tests(Path('test cases', t.subdir), t.stdout_mandatory), t.skip) for t in all_tests]
     return gathered_tests
 
 def run_tests(all_tests: T.List[T.Tuple[str, T.List[TestDef], bool]],
               log_name_base: str, failfast: bool,
-              extra_args: T.List[str]) -> T.Tuple[int, int, int]:
+              extra_args: T.List[str], use_tmp: bool) -> T.Tuple[int, int, int]:
     global logfile
     txtname = log_name_base + '.txt'
     with open(txtname, 'w', encoding='utf-8', errors='ignore') as lf:
         logfile = lf
-        return _run_tests(all_tests, log_name_base, failfast, extra_args)
+        return _run_tests(all_tests, log_name_base, failfast, extra_args, use_tmp)
 
 def _run_tests(all_tests: T.List[T.Tuple[str, T.List[TestDef], bool]],
                log_name_base: str, failfast: bool,
-               extra_args: T.List[str]) -> T.Tuple[int, int, int]:
-    global stop, executor, futures, system_compiler
+               extra_args: T.List[str], use_tmp: bool) -> T.Tuple[int, int, int]:
+    global stop, executor, futures, host_c_compiler
     xmlname = log_name_base + '.xml'
     junit_root = ET.Element('testsuites')
     conf_time = 0
@@ -814,8 +1067,9 @@
                 suite_args = ['--fatal-meson-warnings']
                 should_fail = name.split('warning-')[1]
 
-            result = executor.submit(run_test, skipped or t.skip, t.path.as_posix(), t.name, extra_args + suite_args + t.args,
-                                     system_compiler, backend, backend_flags, commands, should_fail)
+            t.skip = skipped or t.skip
+            result = executor.submit(run_test, t, extra_args + suite_args + t.args,
+                                     host_c_compiler, backend, backend_flags, commands, should_fail, use_tmp)
             futures.append((testname, t, result))
         for (testname, t, result) in futures:
             sys.stdout.flush()
@@ -911,14 +1165,16 @@
                       '.build',
                       '.md',
                       }
+    skip_dirs = {
+        '.dub',                         # external deps are here
+        '.pytest_cache',
+        'meson-logs', 'meson-private',
+        'work area',
+        '.eggs', '_cache',              # e.g. .mypy_cache
+        'venv',                         # virtualenvs have DOS line endings
+    }
     for (root, _, filenames) in os.walk('.'):
-        if '.dub' in root: # external deps are here
-            continue
-        if '.pytest_cache' in root:
-            continue
-        if 'meson-logs' in root or 'meson-private' in root:
-            continue
-        if '.eggs' in root or '_cache' in root:  # e.g. .mypy_cache
+        if any([x in root for x in skip_dirs]):
             continue
         for fname in filenames:
             file = Path(fname)
@@ -927,16 +1183,21 @@
                     continue
                 check_file(root / file)
 
-def check_meson_commands_work():
+def check_meson_commands_work(options):
     global backend, compile_commands, test_commands, install_commands
     testdir = PurePath('test cases', 'common', '1 trivial').as_posix()
     meson_commands = mesonlib.python_command + [get_meson_script()]
-    with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir='.')) as build_dir:
+    with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir=None if options.use_tmpdir else '.')) as build_dir:
         print('Checking that configuring works...')
-        gen_cmd = meson_commands + [testdir, build_dir] + backend_flags
+        gen_cmd = meson_commands + [testdir, build_dir] + backend_flags + options.extra_args
         pc, o, e = Popen_safe(gen_cmd)
         if pc.returncode != 0:
             raise RuntimeError('Failed to configure {!r}:\n{}\n{}'.format(testdir, e, o))
+        print('Checking that introspect works...')
+        pc, o, e = Popen_safe(meson_commands + ['introspect', '--targets'], cwd=build_dir)
+        json.loads(o)
+        if pc.returncode != 0:
+            raise RuntimeError('Failed to introspect --targets {!r}:\n{}\n{}'.format(testdir, e, o))
         print('Checking that building works...')
         dir_args = get_backend_args_for_dir(backend, build_dir)
         pc, o, e = Popen_safe(compile_commands + dir_args, cwd=build_dir)
@@ -953,35 +1214,69 @@
                 raise RuntimeError('Failed to install {!r}:\n{}\n{}'.format(testdir, e, o))
 
 
-def detect_system_compiler():
-    global system_compiler
+def detect_system_compiler(options):
+    global host_c_compiler, compiler_id_map
+
+    with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir=None if options.use_tmpdir else '.')) as build_dir:
+        fake_opts = get_fake_options('/')
+        if options.cross_file:
+            fake_opts.cross_file = [options.cross_file]
+        if options.native_file:
+            fake_opts.native_file = [options.native_file]
+
+        env = environment.Environment(None, build_dir, fake_opts)
+
+        print_compilers(env, MachineChoice.HOST)
+        if options.cross_file:
+            print_compilers(env, MachineChoice.BUILD)
 
-    with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir='.')) as build_dir:
-        env = environment.Environment(None, build_dir, get_fake_options('/'))
-        print()
         for lang in sorted(compilers.all_languages):
             try:
                 comp = env.compiler_from_language(lang, MachineChoice.HOST)
-                details = '%s %s' % (' '.join(comp.get_exelist()), comp.get_version_string())
+                # note compiler id for later use with test.json matrix
+                compiler_id_map[lang] = comp.get_id()
             except mesonlib.MesonException:
                 comp = None
-                details = 'not found'
-            print('%-7s: %s' % (lang, details))
 
             # note C compiler for later use by platform_fix_name()
             if lang == 'c':
                 if comp:
-                    system_compiler = comp.get_id()
+                    host_c_compiler = comp.get_id()
                 else:
                     raise RuntimeError("Could not find C compiler.")
-        print()
+
+
+def print_compilers(env, machine):
+    print()
+    print('{} machine compilers'.format(machine.get_lower_case_name()))
+    print()
+    for lang in sorted(compilers.all_languages):
+        try:
+            comp = env.compiler_from_language(lang, machine)
+            details = '{:<10} {} {}'.format('[' + comp.get_id() + ']', ' '.join(comp.get_exelist()), comp.get_version_string())
+        except mesonlib.MesonException:
+            details = '[not found]'
+        print('%-7s: %s' % (lang, details))
+
 
 def print_tool_versions():
     tools = [
         {
+            'tool': 'ninja',
+            'args': ['--version'],
+            'regex': re.compile(r'^([0-9]+(\.[0-9]+)*(-[a-z0-9]+)?)$'),
+            'match_group': 1,
+        },
+        {
             'tool': 'cmake',
             'args': ['--version'],
-            'regex': re.compile(r'^cmake version ([0-9]+(\.[0-9]+)*)$'),
+            'regex': re.compile(r'^cmake version ([0-9]+(\.[0-9]+)*(-[a-z0-9]+)?)$'),
+            'match_group': 1,
+        },
+        {
+            'tool': 'hotdoc',
+            'args': ['--version'],
+            'regex': re.compile(r'^([0-9]+(\.[0-9]+)*(-[a-z0-9]+)?)$'),
             'match_group': 1,
         },
     ]
@@ -999,10 +1294,15 @@
             i = i.strip('\n\r\t ')
             m = t['regex'].match(i)
             if m is not None:
+                tool_vers_map[t['tool']] = m.group(t['match_group'])
                 return '{} ({})'.format(exe, m.group(t['match_group']))
 
         return '{} (unknown)'.format(exe)
 
+    print()
+    print('tools')
+    print()
+
     max_width = max([len(x['tool']) for x in tools] + [7])
     for tool in tools:
         print('{0:<{2}}: {1}'.format(tool['tool'], get_version(tool), max_width))
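The tool-version regexes above (ninja, cmake, hotdoc) now share an optional '(-[a-z0-9]+)?' group; a small standalone illustration, assuming it is run outside the test harness, of what that group admits:

    import re

    version_re = re.compile(r'^([0-9]+(\.[0-9]+)*(-[a-z0-9]+)?)$')
    assert version_re.match('1.10.2').group(1) == '1.10.2'
    assert version_re.match('1.8.2-rc1').group(1) == '1.8.2-rc1'   # pre-release suffixes now pass
    assert version_re.match('git version 2.30') is None            # prefixed output is still rejected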
@@ -1018,19 +1318,29 @@
     parser.add_argument('--no-unittests', action='store_true',
                         help='Not used, only here to simplify run_tests.py')
     parser.add_argument('--only', help='name of test(s) to run', nargs='+', choices=ALL_TESTS)
+    parser.add_argument('--cross-file', action='store', help='File describing cross compilation environment.')
+    parser.add_argument('--native-file', action='store', help='File describing native compilation environment.')
+    parser.add_argument('--use-tmpdir', action='store_true', help='Use tmp directory for temporary files.')
     options = parser.parse_args()
-    setup_commands(options.backend)
 
-    detect_system_compiler()
+    if options.cross_file:
+        options.extra_args += ['--cross-file', options.cross_file]
+    if options.native_file:
+        options.extra_args += ['--native-file', options.native_file]
+
+    print('Meson build system', meson_version, 'Project Tests')
+    print('Using python', sys.version.split('\n')[0])
+    setup_commands(options.backend)
+    detect_system_compiler(options)
     print_tool_versions()
     script_dir = os.path.split(__file__)[0]
     if script_dir != '':
         os.chdir(script_dir)
     check_format()
-    check_meson_commands_work()
+    check_meson_commands_work(options)
     try:
-        all_tests = detect_tests_to_run(options.only)
-        (passing_tests, failing_tests, skipped_tests) = run_tests(all_tests, 'meson-test-run', options.failfast, options.extra_args)
+        all_tests = detect_tests_to_run(options.only, options.use_tmpdir)
+        (passing_tests, failing_tests, skipped_tests) = run_tests(all_tests, 'meson-test-run', options.failfast, options.extra_args, options.use_tmpdir)
     except StopException:
         pass
     print('\nTotal passed tests:', green(str(passing_tests)))
diff -Nru meson-0.53.2/run_tests.py meson-0.57.0+really0.56.2/run_tests.py
--- meson-0.53.2/run_tests.py	2020-02-25 18:00:46.000000000 +0000
+++ meson-0.57.0+really0.56.2/run_tests.py	2021-01-06 10:39:48.000000000 +0000
@@ -25,7 +25,8 @@
 from io import StringIO
 from enum import Enum
 from glob import glob
-from pathlib import Path
+from mesonbuild._pathlib import Path
+from unittest import mock
 from mesonbuild import compilers
 from mesonbuild import dependencies
 from mesonbuild import mesonlib
@@ -33,9 +34,27 @@
 from mesonbuild import mtest
 from mesonbuild import mlog
 from mesonbuild.environment import Environment, detect_ninja
-from mesonbuild.coredata import backendlist
+from mesonbuild.coredata import backendlist, version as meson_version
 
 NINJA_1_9_OR_NEWER = False
+NINJA_CMD = None
+# If we're on CI, just assume we have ninja in PATH and it's new enough because
+# we provide that. This avoids having to detect ninja for every subprocess unit
+# test that we run.
+if 'CI' in os.environ:
+    NINJA_1_9_OR_NEWER = True
+    NINJA_CMD = ['ninja']
+else:
+    # Look for 1.9 to see if https://github.com/ninja-build/ninja/issues/1219
+    # is fixed
+    NINJA_CMD = detect_ninja('1.9')
+    if NINJA_CMD is not None:
+        NINJA_1_9_OR_NEWER = True
+    else:
+        mlog.warning('Found ninja <1.9, tests will run slower', once=True)
+        NINJA_CMD = detect_ninja()
+if NINJA_CMD is None:
+    raise RuntimeError('Could not find Ninja v1.7 or newer')
 
 def guess_backend(backend, msbuild_exe: str):
     # Auto-detect backend if unspecified
@@ -108,7 +127,7 @@
     if opts is None:
         opts = get_fake_options(prefix)
     env = Environment(sdir, bdir, opts)
-    env.coredata.compiler_options.host['c_args'] = FakeCompilerOptions()
+    env.coredata.compiler_options.host['c']['args'] = FakeCompilerOptions()
     env.machines.host.cpu_family = 'x86_64' # Used on macOS inside find_library
     return env
 
@@ -202,22 +221,8 @@
         clean_cmd = cmd + ['-alltargets', 'clean', '-UseNewBuildSystem=FALSE']
         test_cmd = cmd + ['-target', 'RUN_TESTS']
     elif backend is Backend.ninja:
-        global NINJA_1_9_OR_NEWER
-        # Look for 1.9 to see if https://github.com/ninja-build/ninja/issues/1219
-        # is fixed, else require 1.6 for -w dupbuild=err
-        for v in ('1.9', '1.6'):
-            ninja_cmd = detect_ninja(v)
-            if ninja_cmd is not None:
-                if v == '1.9':
-                    NINJA_1_9_OR_NEWER = True
-                else:
-                    mlog.warning('Found ninja <1.9, tests will run slower', once=True)
-                    if 'CI' in os.environ and 'OLD_OS_CI' not in os.environ:
-                        raise RuntimeError('Require ninja >= 1.9 when running on Meson CI')
-                break
-        cmd = [ninja_cmd, '-w', 'dupbuild=err', '-d', 'explain']
-        if cmd[0] is None:
-            raise RuntimeError('Could not find Ninja v1.6 or newer')
+        global NINJA_CMD
+        cmd = NINJA_CMD + ['-w', 'dupbuild=err', '-d', 'explain']
         if debug:
             cmd += ['-v']
         clean_cmd = cmd + ['clean']
@@ -251,42 +256,27 @@
         time.sleep(1)
 
 def run_mtest_inprocess(commandlist):
-    old_stdout = sys.stdout
-    sys.stdout = mystdout = StringIO()
-    old_stderr = sys.stderr
-    sys.stderr = mystderr = StringIO()
-    try:
+    stderr = StringIO()
+    stdout = StringIO()
+    with mock.patch.object(sys, 'stdout', stdout), mock.patch.object(sys, 'stderr', stderr):
         returncode = mtest.run_with_args(commandlist)
-    finally:
-        sys.stdout = old_stdout
-        sys.stderr = old_stderr
-    return returncode, mystdout.getvalue(), mystderr.getvalue()
+    return returncode, stdout.getvalue(), stderr.getvalue()
 
 def clear_meson_configure_class_caches():
-    compilers.CCompiler.library_dirs_cache = {}
-    compilers.CCompiler.program_dirs_cache = {}
     compilers.CCompiler.find_library_cache = {}
     compilers.CCompiler.find_framework_cache = {}
     dependencies.PkgConfigDependency.pkgbin_cache = {}
     dependencies.PkgConfigDependency.class_pkgbin = mesonlib.PerMachine(None, None)
 
 def run_configure_inprocess(commandlist, env=None):
-    old_stdout = sys.stdout
-    sys.stdout = mystdout = StringIO()
-    old_stderr = sys.stderr
-    sys.stderr = mystderr = StringIO()
-    old_environ = os.environ.copy()
-    if env is not None:
-        os.environ.update(env)
-    try:
-        returncode = mesonmain.run(commandlist, get_meson_script())
-    finally:
-        sys.stdout = old_stdout
-        sys.stderr = old_stderr
-        clear_meson_configure_class_caches()
-        os.environ.clear()
-        os.environ.update(old_environ)
-    return returncode, mystdout.getvalue(), mystderr.getvalue()
+    stderr = StringIO()
+    stdout = StringIO()
+    with mock.patch.dict(os.environ, env or {}), mock.patch.object(sys, 'stdout', stdout), mock.patch.object(sys, 'stderr', stderr):
+        try:
+            returncode = mesonmain.run(commandlist, get_meson_script())
+        finally:
+            clear_meson_configure_class_caches()
+    return returncode, stdout.getvalue(), stderr.getvalue()
 
 def run_configure_external(full_command, env=None):
     pc, o, e = mesonlib.Popen_safe(full_command, env=env)
@@ -299,13 +289,14 @@
     return run_configure_inprocess(commandlist, env=env)
 
 def print_system_info():
-    print(mlog.bold('System information.').get_text(mlog.colorize_console))
+    print(mlog.bold('System information.').get_text(mlog.colorize_console()))
     print('Architecture:', platform.architecture())
     print('Machine:', platform.machine())
     print('Platform:', platform.system())
     print('Processor:', platform.processor())
     print('System:', platform.system())
     print('')
+    print(flush=True)
 
 def main():
     print_system_info()
@@ -313,7 +304,8 @@
     parser.add_argument('--cov', action='store_true')
     parser.add_argument('--backend', default=None, dest='backend',
                         choices=backendlist)
-    parser.add_argument('--cross', default=False, dest='cross', action='store_true')
+    parser.add_argument('--cross', default=[], dest='cross', action='append')
+    parser.add_argument('--cross-only', action='store_true')
     parser.add_argument('--failfast', action='store_true')
     parser.add_argument('--no-unittests', action='store_true', default=False)
     (options, _) = parser.parse_known_args()
@@ -325,7 +317,6 @@
         import coverage
         coverage.process_startup()
     returncode = 0
-    cross = options.cross
     backend, _ = guess_backend(options.backend, shutil.which('msbuild'))
     no_unittests = options.no_unittests
     # Running on a developer machine? Be nice!
@@ -346,8 +337,6 @@
     if 'APPVEYOR' in os.environ and os.environ['arch'] == 'x86':
         os.environ.pop('platform')
     # Run tests
-    print(mlog.bold('Running unittests.').get_text(mlog.colorize_console))
-    print(flush=True)
     # Can't pass arguments to unit tests, so set the backend to use in the environment
     env = os.environ.copy()
     env['MESON_UNIT_TEST_BACKEND'] = backend.name
@@ -362,7 +351,7 @@
                 env['PYTHONPATH'] = os.pathsep.join([temp_dir, env.get('PYTHONPATH')])
             else:
                 env['PYTHONPATH'] = temp_dir
-        if not cross:
+        if not options.cross:
             cmd = mesonlib.python_command + ['run_meson_command_tests.py', '-v']
             if options.failfast:
                 cmd += ['--failfast']
@@ -371,8 +360,11 @@
                 return returncode
             if no_unittests:
                 print('Skipping all unit tests.')
+                print(flush=True)
                 returncode = 0
             else:
+                print(mlog.bold('Running unittests.').get_text(mlog.colorize_console()))
+                print(flush=True)
                 cmd = mesonlib.python_command + ['run_unittests.py', '-v']
                 if options.failfast:
                     cmd += ['--failfast']
@@ -383,22 +375,19 @@
             returncode += subprocess.call(cmd, env=env)
         else:
             cross_test_args = mesonlib.python_command + ['run_cross_test.py']
-            print(mlog.bold('Running armhf cross tests.').get_text(mlog.colorize_console))
-            print(flush=True)
-            cmd = cross_test_args + ['cross/ubuntu-armhf.txt']
-            if options.failfast:
-                cmd += ['--failfast']
-            returncode += subprocess.call(cmd, env=env)
-            if options.failfast and returncode != 0:
-                return returncode
-            print(mlog.bold('Running mingw-w64 64-bit cross tests.')
-                  .get_text(mlog.colorize_console))
-            print(flush=True)
-            cmd = cross_test_args + ['cross/linux-mingw-w64-64bit.txt']
-            if options.failfast:
-                cmd += ['--failfast']
-            returncode += subprocess.call(cmd, env=env)
+            for cf in options.cross:
+                print(mlog.bold('Running {} cross tests.'.format(cf)).get_text(mlog.colorize_console()))
+                print(flush=True)
+                cmd = cross_test_args + ['cross/' + cf]
+                if options.failfast:
+                    cmd += ['--failfast']
+                if options.cross_only:
+                    cmd += ['--cross-only']
+                returncode += subprocess.call(cmd, env=env)
+                if options.failfast and returncode != 0:
+                    return returncode
     return returncode
 
 if __name__ == '__main__':
-    sys.exit(main())
+    print('Meson build system', meson_version, 'Project and Unit Tests')
+    raise SystemExit(main())
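The StringIO rewrites above in run_mtest_inprocess and run_configure_inprocess both rely on the same idea: patch sys.stdout/sys.stderr (and, for configure, os.environ) only for the duration of the call, so everything is restored even when the wrapped call raises. A minimal standalone sketch of that pattern (the helper name and wrapped callable are illustrative only):

    import os
    import sys
    from io import StringIO
    from unittest import mock

    def call_captured(func, *args, env=None):
        # Overlay 'env' onto os.environ and swap the streams only while 'func' runs;
        # mock.patch undoes both on exit, even if 'func' raises.
        out, err = StringIO(), StringIO()
        with mock.patch.dict(os.environ, env or {}), \
             mock.patch.object(sys, 'stdout', out), \
             mock.patch.object(sys, 'stderr', err):
            rc = func(*args)
        return rc, out.getvalue(), err.getvalue()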
diff -Nru meson-0.53.2/run_unittests.py meson-0.57.0+really0.56.2/run_unittests.py
--- meson-0.53.2/run_unittests.py	2020-02-25 18:00:47.000000000 +0000
+++ meson-0.57.0+really0.56.2/run_unittests.py	2021-01-09 10:14:21.000000000 +0000
@@ -13,6 +13,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from mesonbuild.compilers.objc import AppleClangObjCCompiler
+import time
 import stat
 import subprocess
 import re
@@ -38,11 +40,13 @@
 from configparser import ConfigParser
 from contextlib import contextmanager
 from glob import glob
-from pathlib import (PurePath, Path)
+from mesonbuild._pathlib import (PurePath, Path)
 from distutils.dir_util import copy_tree
+import typing as T
 
 import mesonbuild.mlog
 import mesonbuild.depfile
+import mesonbuild.dependencies.base
 import mesonbuild.compilers
 import mesonbuild.envconfig
 import mesonbuild.environment
@@ -55,17 +59,19 @@
     BuildDirLock, LibType, MachineChoice, PerMachine, Version, is_windows,
     is_osx, is_cygwin, is_dragonflybsd, is_openbsd, is_haiku, is_sunos,
     windows_proof_rmtree, python_command, version_compare, split_args,
-    quote_arg
+    quote_arg, relpath, is_linux, git, GIT
 )
 from mesonbuild.environment import detect_ninja
 from mesonbuild.mesonlib import MesonException, EnvironmentException
 from mesonbuild.dependencies import PkgConfigDependency, ExternalProgram
 import mesonbuild.dependencies.base
-from mesonbuild.build import Target
+from mesonbuild.build import Target, ConfigurationData
 import mesonbuild.modules.pkgconfig
 
 from mesonbuild.mtest import TAPParser, TestResult
 
+from mesonbuild.wrap.wrap import PackageDefinition, WrapException
+
 from run_tests import (
     Backend, FakeBuild, FakeCompilerOptions,
     ensure_backend_detects_changes, exe_suffix, get_backend_commands,
@@ -76,6 +82,15 @@
 
 URLOPEN_TIMEOUT = 5
 
+@contextmanager
+def chdir(path: str):
+    curdir = os.getcwd()
+    os.chdir(path)
+    try:
+        yield
+    finally:
+        os.chdir(curdir)
+
 
 def get_dynamic_section_entry(fname, entry):
     if is_cygwin() or is_osx():
@@ -110,15 +125,6 @@
         return True
     return False
 
-def is_pull():
-    # Travis
-    if os.environ.get('TRAVIS_PULL_REQUEST', 'false') != 'false':
-        return True
-    # Azure
-    if 'SYSTEM_PULLREQUEST_ISFORK' in os.environ:
-        return True
-    return False
-
 def _git_init(project_dir):
     subprocess.check_call(['git', 'init'], cwd=project_dir, stdout=subprocess.DEVNULL)
     subprocess.check_call(['git', 'config',
@@ -303,8 +309,14 @@
         self.assertEqual(searchfunc('1.2.3'), '1.2.3')
         self.assertEqual(searchfunc('foobar 2016.10.28 1.2.3'), '1.2.3')
         self.assertEqual(searchfunc('2016.10.28 1.2.3'), '1.2.3')
-        self.assertEqual(searchfunc('foobar 2016.10.128'), 'unknown version')
-        self.assertEqual(searchfunc('2016.10.128'), 'unknown version')
+        self.assertEqual(searchfunc('foobar 2016.10.128'), '2016.10.128')
+        self.assertEqual(searchfunc('2016.10.128'), '2016.10.128')
+        self.assertEqual(searchfunc('2016.10'), '2016.10')
+        self.assertEqual(searchfunc('2016.10 1.2.3'), '1.2.3')
+        self.assertEqual(searchfunc('oops v1.2.3'), '1.2.3')
+        self.assertEqual(searchfunc('2016.oops 1.2.3'), '1.2.3')
+        self.assertEqual(searchfunc('2016.x'), 'unknown version')
+
 
     def test_mode_symbolic_to_bits(self):
         modefunc = mesonbuild.mesonlib.FileMode.perms_s_to_bits
@@ -341,17 +353,40 @@
                          stat.S_IRWXU | stat.S_ISUID |
                          stat.S_IRGRP | stat.S_IXGRP)
 
-    def test_compiler_args_class(self):
-        cargsfunc = mesonbuild.compilers.CompilerArgs
-        cc = mesonbuild.compilers.CCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock())
+    def test_compiler_args_class_none_flush(self):
+        cc = mesonbuild.compilers.ClangCCompiler([], 'fake', MachineChoice.HOST, False, mock.Mock())
+        a = cc.compiler_args(['-I.'])
+        # First, check that the tree construction deduplicates the correct -I argument
+        a += ['-I..']
+        a += ['-I./tests/']
+        a += ['-I./tests2/']
+        # Think of the following as an assertion; we cannot actually run it, otherwise CompilerArgs would already flush the pending changes:
+        # assertEqual(a, ['-I.', '-I./tests2/', '-I./tests/', '-I..', '-I.'])
+        a += ['-I.']
+        a += ['-I.', '-I./tests/']
+        self.assertEqual(a, ['-I.', '-I./tests/', '-I./tests2/', '-I..'])
+
+        # Then check that, when the CompilerArgs already have a built container list, the deduplication picks the correct entry
+        a += ['-I.', '-I./tests2/']
+        self.assertEqual(a, ['-I.', '-I./tests2/', '-I./tests/', '-I..'])
+
+    def test_compiler_args_class_d(self):
+        d = mesonbuild.compilers.DmdDCompiler([], 'fake', MachineChoice.HOST, 'info', 'arch')
+        # check include order is kept when deduplicating
+        a = d.compiler_args(['-Ifirst', '-Isecond', '-Ithird'])
+        a += ['-Ifirst']
+        self.assertEqual(a, ['-Ifirst', '-Isecond', '-Ithird'])
+
+    def test_compiler_args_class_clike(self):
+        cc = mesonbuild.compilers.ClangCCompiler([], 'fake', MachineChoice.HOST, False, mock.Mock())
         # Test that empty initialization works
-        a = cargsfunc(cc)
+        a = cc.compiler_args()
         self.assertEqual(a, [])
         # Test that list initialization works
-        a = cargsfunc(cc, ['-I.', '-I..'])
+        a = cc.compiler_args(['-I.', '-I..'])
         self.assertEqual(a, ['-I.', '-I..'])
         # Test that there is no de-dup on initialization
-        self.assertEqual(cargsfunc(cc, ['-I.', '-I.']), ['-I.', '-I.'])
+        self.assertEqual(cc.compiler_args(['-I.', '-I.']), ['-I.', '-I.'])
 
         ## Test that appending works
         a.append('-I..')
@@ -397,7 +432,7 @@
         self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-Ldir', '-Lbah', '-Werror', '-O3', '-O2', '-Wall'])
 
         ## Test that adding libraries works
-        l = cargsfunc(cc, ['-Lfoodir', '-lfoo'])
+        l = cc.compiler_args(['-Lfoodir', '-lfoo'])
         self.assertEqual(l, ['-Lfoodir', '-lfoo'])
         # Adding a library and a libpath appends both correctly
         l += ['-Lbardir', '-lbar']
@@ -407,7 +442,7 @@
         self.assertEqual(l, ['-Lbardir', '-Lfoodir', '-lfoo', '-lbar'])
 
         ## Test that 'direct' append and extend works
-        l = cargsfunc(cc, ['-Lfoodir', '-lfoo'])
+        l = cc.compiler_args(['-Lfoodir', '-lfoo'])
         self.assertEqual(l, ['-Lfoodir', '-lfoo'])
         # Direct-adding a library and a libpath appends both correctly
         l.extend_direct(['-Lbardir', '-lbar'])
@@ -423,14 +458,13 @@
         self.assertEqual(l, ['-Lfoodir', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a'])
 
     def test_compiler_args_class_gnuld(self):
-        cargsfunc = mesonbuild.compilers.CompilerArgs
         ## Test --start/end-group
-        linker = mesonbuild.linkers.GnuDynamicLinker([], MachineChoice.HOST, 'fake', '-Wl,', [])
+        linker = mesonbuild.linkers.GnuBFDDynamicLinker([], MachineChoice.HOST, '-Wl,', [])
         gcc = mesonbuild.compilers.GnuCCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock(), linker=linker)
         ## Ensure that the fake compiler is never called by overriding the relevant function
         gcc.get_default_include_dirs = lambda: ['/usr/include', '/usr/share/include', '/usr/local/include']
         ## Test that 'direct' append and extend works
-        l = cargsfunc(gcc, ['-Lfoodir', '-lfoo'])
+        l = gcc.compiler_args(['-Lfoodir', '-lfoo'])
         self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Wl,--end-group'])
         # Direct-adding a library and a libpath appends both correctly
         l.extend_direct(['-Lbardir', '-lbar'])
@@ -452,14 +486,13 @@
         self.assertEqual(l.to_native(copy=True), ['-Lfoo', '-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a', '-Wl,--export-dynamic', '-Wl,-ldl', '-Wl,--end-group'])
 
     def test_compiler_args_remove_system(self):
-        cargsfunc = mesonbuild.compilers.CompilerArgs
         ## Test --start/end-group
-        linker = mesonbuild.linkers.GnuDynamicLinker([], MachineChoice.HOST, 'fake', '-Wl,', [])
+        linker = mesonbuild.linkers.GnuBFDDynamicLinker([], MachineChoice.HOST, '-Wl,', [])
         gcc = mesonbuild.compilers.GnuCCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock(), linker=linker)
         ## Ensure that the fake compiler is never called by overriding the relevant function
         gcc.get_default_include_dirs = lambda: ['/usr/include', '/usr/share/include', '/usr/local/include']
         ## Test that 'direct' append and extend works
-        l = cargsfunc(gcc, ['-Lfoodir', '-lfoo'])
+        l = gcc.compiler_args(['-Lfoodir', '-lfoo'])
         self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Wl,--end-group'])
         ## Test that to_native removes all system includes
         l += ['-isystem/usr/include', '-isystem=/usr/share/include', '-DSOMETHING_IMPORTANT=1', '-isystem', '/usr/local/include']
@@ -673,17 +706,21 @@
         self.assertEqual([1, [2, [3]]], listify([1, [2, [3]]], flatten=False))
         # Test flattening and unholdering
         holder1 = ObjectHolder(1)
-        holder3 = ObjectHolder(3)
         self.assertEqual([holder1], listify(holder1))
         self.assertEqual([holder1], listify([holder1]))
         self.assertEqual([holder1, 2], listify([holder1, 2]))
         self.assertEqual([holder1, 2, 3], listify([holder1, 2, [3]]))
-        self.assertEqual([1], listify(holder1, unholder=True))
-        self.assertEqual([1], listify([holder1], unholder=True))
-        self.assertEqual([1, 2], listify([holder1, 2], unholder=True))
-        self.assertEqual([1, 2, 3], listify([holder1, 2, [holder3]], unholder=True))
-        # Unholding doesn't work recursively when not flattening
-        self.assertEqual([1, [2], [holder3]], listify([holder1, [2], [holder3]], unholder=True, flatten=False))
+
+    def test_unholder(self):
+        unholder = mesonbuild.mesonlib.unholder
+
+        holder1 = ObjectHolder(1)
+        holder3 = ObjectHolder(3)
+        holders = [holder1, holder3]
+
+        self.assertEqual(1, unholder(holder1))
+        self.assertEqual([1], unholder([holder1]))
+        self.assertEqual([1, 3], unholder(holders))
 
     def test_extract_as_list(self):
         extract = mesonbuild.mesonlib.extract_as_list
@@ -693,35 +730,33 @@
         self.assertEqual(kwargs, {'sources': [1, 2, 3]})
         self.assertEqual([1, 2, 3], extract(kwargs, 'sources', pop=True))
         self.assertEqual(kwargs, {})
+
         # Test unholding
         holder3 = ObjectHolder(3)
         kwargs = {'sources': [1, 2, holder3]}
-        self.assertEqual([1, 2, 3], extract(kwargs, 'sources', unholder=True))
         self.assertEqual(kwargs, {'sources': [1, 2, holder3]})
-        self.assertEqual([1, 2, 3], extract(kwargs, 'sources', unholder=True, pop=True))
-        self.assertEqual(kwargs, {})
-        # Test listification
-        kwargs = {'sources': [1, 2, 3], 'pch_sources': [4, 5, 6]}
-        self.assertEqual([[1, 2, 3], [4, 5, 6]], extract(kwargs, 'sources', 'pch_sources'))
-
-    def test_pkgconfig_module(self):
 
-        class Mock:
-            pass
+        # flatten nested lists
+        kwargs = {'sources': [1, [2, [3]]]}
+        self.assertEqual([1, 2, 3], extract(kwargs, 'sources'))
 
-        mock = Mock()
-        mock.pcdep = Mock()
-        mock.pcdep.name = "some_name"
-        mock.version_reqs = []
+    def test_pkgconfig_module(self):
+        dummystate = mock.Mock()
+        dummystate.subproject = 'dummy'
+        _mock = mock.Mock(spec=mesonbuild.dependencies.ExternalDependency)
+        _mock.pcdep = mock.Mock()
+        _mock.pcdep.name = "some_name"
+        _mock.version_reqs = []
+        _mock = mock.Mock(held_object=_mock)
 
         # pkgconfig dependency as lib
-        deps = mesonbuild.modules.pkgconfig.DependenciesHelper("thislib")
-        deps.add_pub_libs([mock])
+        deps = mesonbuild.modules.pkgconfig.DependenciesHelper(dummystate, "thislib")
+        deps.add_pub_libs([_mock])
         self.assertEqual(deps.format_reqs(deps.pub_reqs), "some_name")
 
         # pkgconfig dependency as requires
-        deps = mesonbuild.modules.pkgconfig.DependenciesHelper("thislib")
-        deps.add_pub_reqs([mock])
+        deps = mesonbuild.modules.pkgconfig.DependenciesHelper(dummystate, "thislib")
+        deps.add_pub_reqs([_mock])
         self.assertEqual(deps.format_reqs(deps.pub_reqs), "some_name")
 
     def _test_all_naming(self, cc, env, patterns, platform):
@@ -751,7 +786,7 @@
                 f.write('')
             with open(os.path.join(tmpdir, 'libfoo.so.70.0.so.1'), 'w') as f:
                 f.write('')
-            found = cc.find_library_real('foo', env, [tmpdir], '', LibType.PREFER_SHARED)
+            found = cc._find_library_real('foo', env, [tmpdir], '', LibType.PREFER_SHARED)
             self.assertEqual(os.path.basename(found[0]), 'libfoo.so.54.0')
 
     def test_find_library_patterns(self):
@@ -799,6 +834,7 @@
             env.machines.host.system = 'windows'
             self._test_all_naming(cc, env, patterns, 'windows-mingw')
 
+    @skipIfNoPkgconfig
     def test_pkgconfig_parse_libs(self):
         '''
         Unit test for parsing of pkg-config output to search for libraries
@@ -821,7 +857,7 @@
             env = get_fake_env()
             compiler = env.detect_c_compiler(MachineChoice.HOST)
             env.coredata.compilers.host = {'c': compiler}
-            env.coredata.compiler_options.host['c_link_args'] = FakeCompilerOptions()
+            env.coredata.compiler_options.host['c']['link_args'] = FakeCompilerOptions()
             p1 = Path(tmpdir) / '1'
             p2 = Path(tmpdir) / '2'
             p1.mkdir()
@@ -1192,6 +1228,52 @@
             ['/home/mesonuser/.local/lib/pkgconfig', '/usr/local/libdata/pkgconfig']),
             ['/home/mesonuser/.local/lib', '/usr/local/lib', '/usr/lib'])
 
+    def test_dependency_factory_order(self):
+        b = mesonbuild.dependencies.base
+        with tempfile.TemporaryDirectory() as tmpdir:
+            with chdir(tmpdir):
+                env = get_fake_env()
+                env.scratch_dir = tmpdir
+
+                f = b.DependencyFactory(
+                    'test_dep',
+                    methods=[b.DependencyMethods.PKGCONFIG, b.DependencyMethods.CMAKE]
+                )
+                actual = [m() for m in f(env, MachineChoice.HOST, {'required': False})]
+                self.assertListEqual([m.type_name for m in actual], ['pkgconfig', 'cmake'])
+
+                f = b.DependencyFactory(
+                    'test_dep',
+                    methods=[b.DependencyMethods.CMAKE, b.DependencyMethods.PKGCONFIG]
+                )
+                actual = [m() for m in f(env, MachineChoice.HOST, {'required': False})]
+                self.assertListEqual([m.type_name for m in actual], ['cmake', 'pkgconfig'])
+
+    def test_validate_json(self) -> None:
+        """Validate the json schema for the test cases."""
+        try:
+            from jsonschema import validate, ValidationError
+        except ImportError:
+            if is_ci():
+                raise
+            raise unittest.SkipTest('Python jsonschema module not found.')
+
+        with Path('data/test.schema.json').open() as f:
+            schema = json.load(f)
+
+        errors = []  # type: T.List[T.Tuple[Path, Exception]]
+        for p in Path('test cases').glob('**/test.json'):
+            with p.open() as f:
+                try:
+                    validate(json.load(f), schema=schema)
+                except ValidationError as e:
+                    errors.append((p.resolve(), e))
+
+        for f, e in errors:
+            print('Failed to validate: "{}"'.format(f))
+            print(str(e))
+
+        self.assertFalse(errors)
 
 @unittest.skipIf(is_tarball(), 'Skipping because this is a tarball release')
 class DataTests(unittest.TestCase):
@@ -1243,60 +1325,95 @@
                 self.assertIn(opt, md)
         self.assertNotIn('b_unknown', md)
 
+    @staticmethod
+    def _get_section_content(name, sections, md):
+        for section in sections:
+            if section and section.group(1) == name:
+                try:
+                    next_section = next(sections)
+                    end = next_section.start()
+                except StopIteration:
+                    end = len(md)
+                # Extract the content for this section
+                return md[section.end():end]
+        raise RuntimeError('Could not find "{}" heading'.format(name))
+
     def test_builtin_options_documented(self):
         '''
         Test that universal options and base options are documented in
         Builtin-Options.md.
         '''
+        from itertools import tee
         md = None
         with open('docs/markdown/Builtin-options.md', encoding='utf-8') as f:
             md = f.read()
         self.assertIsNotNone(md)
 
         found_entries = set()
-        sections = list(re.finditer(r"^## (.+)$", md, re.MULTILINE)) + [None]
-
-        for s1, s2 in zip(sections[:], sections[1:]):
-            if s1.group(1) == "Universal options":
-                # Extract the content for this section
-                end = s2.start() if s2 is not None else len(md)
-                content = md[s1.end():end]
-                subsections = list(re.finditer(r"^### (.+)$", content, re.MULTILINE)) + [None]
-
-                for sub1, sub2 in zip(subsections[:], subsections[1:]):
-                    if sub1.group(1) == "Directories" or sub1.group(1) == "Core options":
-                        # Extract the content for this subsection
-                        sub_end = sub2.start() if sub2 is not None else len(content)
-                        subcontent = content[sub1.end():sub_end]
-                        # Find the list entries
-                        arches = [m.group(1) for m in re.finditer(r"^\| (\w+) .* \|", subcontent, re.MULTILINE)]
-                        # Drop the header
-                        arches = set(arches[1:])
-
-                        self.assertEqual(len(found_entries & arches), 0)
-                        found_entries |= arches
-            break
+        sections = re.finditer(r"^## (.+)$", md, re.MULTILINE)
+        # Extract the content for this section
+        content = self._get_section_content("Universal options", sections, md)
+        subsections = tee(re.finditer(r"^### (.+)$", content, re.MULTILINE))
+        subcontent1 = self._get_section_content("Directories", subsections[0], content)
+        subcontent2 = self._get_section_content("Core options", subsections[1], content)
+        for subcontent in (subcontent1, subcontent2):
+            # Find the option names
+            options = set()
+            # Match either a table row or a table heading separator: | ------ |
+            rows = re.finditer(r"^\|(?: (\w+) .* | *-+ *)\|", subcontent, re.MULTILINE)
+            # Skip the header of the first table
+            next(rows)
+            # Skip the heading separator of the first table
+            next(rows)
+            for m in rows:
+                value = m.group(1)
+                # End when the `buildtype` table starts
+                if value is None:
+                    break
+                options.add(value)
+            self.assertEqual(len(found_entries & options), 0)
+            found_entries |= options
 
         self.assertEqual(found_entries, set([
-            *mesonbuild.coredata.builtin_options.keys(),
-            *mesonbuild.coredata.builtin_options_per_machine.keys()
+            *mesonbuild.coredata.BUILTIN_OPTIONS.keys(),
+            *mesonbuild.coredata.BUILTIN_OPTIONS_PER_MACHINE.keys()
         ]))
 
+        # Check that `buildtype` table inside `Core options` matches how
+        # setting of builtin options behaves
+        #
+        # Find all tables inside this subsection
+        tables = re.finditer(r"^\| (\w+) .* \|\n\| *[-|\s]+ *\|$", subcontent2, re.MULTILINE)
+        # Get the table we want using the header of the first column
+        table = self._get_section_content('buildtype', tables, subcontent2)
+        # Get table row data
+        rows = re.finditer(r"^\|(?: (\w+)\s+\| (\w+)\s+\| (\w+) .* | *-+ *)\|", table, re.MULTILINE)
+        env = get_fake_env()
+        for m in rows:
+            buildtype, debug, opt = m.groups()
+            if debug == 'true':
+                debug = True
+            elif debug == 'false':
+                debug = False
+            else:
+                raise RuntimeError('Invalid debug value {!r} in row:\n{}'.format(debug, m.group()))
+            env.coredata.set_builtin_option('buildtype', buildtype)
+            self.assertEqual(env.coredata.builtins['buildtype'].value, buildtype)
+            self.assertEqual(env.coredata.builtins['optimization'].value, opt)
+            self.assertEqual(env.coredata.builtins['debug'].value, debug)
+
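The _get_section_content helper above consumes the iterator it is given, which is why test_builtin_options_documented duplicates the subsection iterator with itertools.tee before looking up two different headings. A tiny standalone illustration of that behaviour (the sample markdown is made up):

    import re
    from itertools import tee

    md = "### Directories\ntext\n### Core options\ntext\n"
    first, second = tee(re.finditer(r"^### (.+)$", md, re.MULTILINE))
    print([m.group(1) for m in first])    # ['Directories', 'Core options']
    print([m.group(1) for m in second])   # same again; draining 'first' did not affect 'second'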
     def test_cpu_families_documented(self):
         with open("docs/markdown/Reference-tables.md", encoding='utf-8') as f:
             md = f.read()
         self.assertIsNotNone(md)
 
-        sections = list(re.finditer(r"^## (.+)$", md, re.MULTILINE))
-        for s1, s2 in zip(sections[::2], sections[1::2]):
-            if s1.group(1) == "CPU families":
-                # Extract the content for this section
-                content = md[s1.end():s2.start()]
-                # Find the list entries
-                arches = [m.group(1) for m in re.finditer(r"^\| (\w+) +\|", content, re.MULTILINE)]
-                # Drop the header
-                arches = set(arches[1:])
-                self.assertEqual(arches, set(mesonbuild.environment.known_cpu_families))
+        sections = re.finditer(r"^## (.+)$", md, re.MULTILINE)
+        content = self._get_section_content("CPU families", sections, md)
+        # Find the list entries
+        arches = [m.group(1) for m in re.finditer(r"^\| (\w+) +\|", content, re.MULTILINE)]
+        # Drop the header
+        arches = set(arches[1:])
+        self.assertEqual(arches, set(mesonbuild.environment.known_cpu_families))
 
     def test_markdown_files_in_sitemap(self):
         '''
@@ -1323,43 +1440,6 @@
             res = re.search(r'syn keyword mesonBuiltin(\s+\\\s\w+)+', f.read(), re.MULTILINE)
             defined = set([a.strip() for a in res.group().split('\\')][1:])
             self.assertEqual(defined, set(chain(interp.funcs.keys(), interp.builtin.keys())))
-
-    @unittest.skipIf(is_pull(), 'Skipping because this is a pull request')
-    def test_json_grammar_syntax_highlighting(self):
-        '''
-        Ensure that syntax highlighting JSON grammar written by TingPing was
-        updated for new functions in the global namespace in build files.
-        https://github.com/TingPing/language-meson/
-        '''
-        env = get_fake_env()
-        interp = Interpreter(FakeBuild(env), mock=True)
-        url = 'https://raw.githubusercontent.com/TingPing/language-meson/master/grammars/meson.json'
-        try:
-            # Use a timeout to avoid blocking forever in case the network is
-            # slow or unavailable in a weird way
-            r = urllib.request.urlopen(url, timeout=URLOPEN_TIMEOUT)
-        except urllib.error.URLError as e:
-            # Skip test when network is not available, such as during packaging
-            # by a distro or Flatpak
-            if not isinstance(e, urllib.error.HTTPError):
-                raise unittest.SkipTest('Network unavailable')
-            # Don't fail the test if github is down, but do fail if 4xx
-            if e.code >= 500:
-                raise unittest.SkipTest('Server error ' + str(e.code))
-            raise e
-        # On Python 3.5, we must decode bytes to string. Newer versions don't require that.
-        grammar = json.loads(r.read().decode('utf-8', 'surrogatepass'))
-        for each in grammar['patterns']:
-            if 'name' in each and each['name'] == 'support.function.builtin.meson':
-                # The string is of the form: (?x)\\b(func1|func2|...\n)\\b\\s*(?=\\() and
-                # we convert that to [func1, func2, ...] without using regex to parse regex
-                funcs = set(each['match'].split('\\b(')[1].split('\n')[0].split('|'))
-            if 'name' in each and each['name'] == 'support.variable.meson':
-                # \\b(builtin1|builtin2...)\\b
-                builtin = set(each['match'].split('\\b(')[1].split(')\\b')[0].split('|'))
-        self.assertEqual(builtin, set(interp.builtin.keys()))
-        self.assertEqual(funcs, set(interp.funcs.keys()))
-
     def test_all_functions_defined_in_ast_interpreter(self):
         '''
         Ensure that the all functions defined in the Interpreter are also defined
@@ -1367,23 +1447,57 @@
         '''
         env = get_fake_env()
         interp = Interpreter(FakeBuild(env), mock=True)
-        astint = AstInterpreter('.', '')
+        astint = AstInterpreter('.', '', '')
         self.assertEqual(set(interp.funcs.keys()), set(astint.funcs.keys()))
 
+    def test_mesondata_is_up_to_date(self):
+        from mesonbuild.mesondata import mesondata
+        err_msg = textwrap.dedent('''
+
+            ###########################################################
+            ###        mesonbuild.mesondata is not up-to-date       ###
+            ###  Please regenerate it by running tools/gen_data.py  ###
+            ###########################################################
+
+        ''')
+
+        root_dir = Path(__file__).resolve().parent
+        mesonbuild_dir = root_dir / 'mesonbuild'
+
+        data_dirs = mesonbuild_dir.glob('**/data')
+        data_files = []  # type: T.List[T.Tuple[str, str]]
+
+        for i in data_dirs:
+            for p in i.iterdir():
+                data_files += [(p.relative_to(mesonbuild_dir).as_posix(), hashlib.sha256(p.read_bytes()).hexdigest())]
+
+        from pprint import pprint
+        current_files = set(mesondata.keys())
+        scanned_files = set([x[0] for x in data_files])
+
+        self.assertSetEqual(current_files, scanned_files, err_msg + 'Data files were added or removed\n')
+        errors = []
+        for i in data_files:
+            if mesondata[i[0]].sha256sum != i[1]:
+                errors += [i[0]]
+
+        self.assertListEqual(errors, [], err_msg + 'Files were changed')
 
 class BasePlatformTests(unittest.TestCase):
+    prefix = '/usr'
+    libdir = 'lib'
+
     def setUp(self):
         super().setUp()
         self.maxDiff = None
         src_root = os.path.dirname(__file__)
         src_root = os.path.join(os.getcwd(), src_root)
         self.src_root = src_root
-        self.prefix = '/usr'
-        self.libdir = 'lib'
         # Get the backend
         # FIXME: Extract this from argv?
         self.backend = getattr(Backend, os.environ.get('MESON_UNIT_TEST_BACKEND', 'ninja'))
         self.meson_args = ['--backend=' + self.backend.name]
+        self.meson_native_file = None
         self.meson_cross_file = None
         self.meson_command = python_command + [get_meson_script()]
         self.setup_command = self.meson_command + self.meson_args
@@ -1400,6 +1514,7 @@
         self.framework_test_dir = os.path.join(src_root, 'test cases/frameworks')
         self.unit_test_dir = os.path.join(src_root, 'test cases/unit')
         self.rewrite_test_dir = os.path.join(src_root, 'test cases/rewrite')
+        self.linuxlike_test_dir = os.path.join(src_root, 'test cases/linuxlike')
         # Misc stuff
         self.orig_env = os.environ.copy()
         if self.backend is Backend.ninja:
@@ -1481,7 +1596,8 @@
              extra_args=None,
              default_args=True,
              inprocess=False,
-             override_envvars=None):
+             override_envvars=None,
+             workdir=None):
         self.assertPathExists(srcdir)
         if extra_args is None:
             extra_args = []
@@ -1489,20 +1605,17 @@
             extra_args = [extra_args]
         args = [srcdir, self.builddir]
         if default_args:
-            args += ['--prefix', self.prefix,
-                     '--libdir', self.libdir]
+            args += ['--prefix', self.prefix]
+            if self.libdir:
+                args += ['--libdir', self.libdir]
+            if self.meson_native_file:
+                args += ['--native-file', self.meson_native_file]
             if self.meson_cross_file:
                 args += ['--cross-file', self.meson_cross_file]
         self.privatedir = os.path.join(self.builddir, 'meson-private')
         if inprocess:
             try:
-                if override_envvars is not None:
-                    old_envvars = os.environ.copy()
-                    os.environ.update(override_envvars)
-                (returncode, out, err) = run_configure_inprocess(self.meson_args + args + extra_args)
-                if override_envvars is not None:
-                    os.environ.clear()
-                    os.environ.update(old_envvars)
+                (returncode, out, err) = run_configure_inprocess(self.meson_args + args + extra_args, override_envvars)
                 if 'MESON_SKIP_TEST' in out:
                     raise unittest.SkipTest('Project requested skipping.')
                 if returncode != 0:
@@ -1522,7 +1635,7 @@
                 mesonbuild.mlog.log_file = None
         else:
             try:
-                out = self._run(self.setup_command + args + extra_args, override_envvars=override_envvars)
+                out = self._run(self.setup_command + args + extra_args, override_envvars=override_envvars, workdir=workdir)
             except unittest.SkipTest:
                 raise unittest.SkipTest('Project requested skipping: ' + srcdir)
             except Exception:
@@ -1546,15 +1659,8 @@
         if not inprocess:
             self._run(self.test_command, workdir=self.builddir, override_envvars=override_envvars)
         else:
-            if override_envvars is not None:
-                old_envvars = os.environ.copy()
-                os.environ.update(override_envvars)
-            try:
+            with mock.patch.dict(os.environ, override_envvars):
                 run_mtest_inprocess(['-C', self.builddir])
-            finally:
-                if override_envvars is not None:
-                    os.environ.clear()
-                    os.environ.update(old_envvars)
 
     def install(self, *, use_destdir=True, override_envvars=None):
         if self.backend is not Backend.ninja:
@@ -1627,6 +1733,15 @@
         cmds = [l[len(prefix):].split() for l in log if l.startswith(prefix)]
         return cmds
 
+    def get_meson_log_sanitychecks(self):
+        '''
+        Same as above, but for the sanity checks that were run
+        '''
+        log = self.get_meson_log()
+        prefix = 'Sanity check compiler command line:'
+        cmds = [l[len(prefix):].split() for l in log if l.startswith(prefix)]
+        return cmds
+
     def introspect(self, args):
         if isinstance(args, str):
             args = [args]
@@ -1670,20 +1785,43 @@
         path_basename = PurePath(path).parts[-1]
         self.assertEqual(PurePath(path_basename), PurePath(basename), msg)
 
+    def assertReconfiguredBuildIsNoop(self):
+        'Assert that we reconfigured and then there was nothing to do'
+        ret = self.build()
+        self.assertIn('The Meson build system', ret)
+        if self.backend is Backend.ninja:
+            for line in ret.split('\n'):
+                if line in self.no_rebuild_stdout:
+                    break
+            else:
+                raise AssertionError('build was reconfigured, but was not a no-op')
+        elif self.backend is Backend.vs:
+            # Ensure that some target said that no rebuild was done
+            # XXX: Note CustomBuild did indeed rebuild, because of the regen checker!
+            self.assertIn('ClCompile:\n  All outputs are up-to-date.', ret)
+            self.assertIn('Link:\n  All outputs are up-to-date.', ret)
+            # Ensure that no targets were built
+            self.assertNotRegex(ret, re.compile('ClCompile:\n [^\n]*cl', flags=re.IGNORECASE))
+            self.assertNotRegex(ret, re.compile('Link:\n [^\n]*link', flags=re.IGNORECASE))
+        elif self.backend is Backend.xcode:
+            raise unittest.SkipTest('Please help us fix this test on the xcode backend')
+        else:
+            raise RuntimeError('Invalid backend: {!r}'.format(self.backend.name))
+
     def assertBuildIsNoop(self):
         ret = self.build()
         if self.backend is Backend.ninja:
             self.assertIn(ret.split('\n')[-2], self.no_rebuild_stdout)
         elif self.backend is Backend.vs:
-            # Ensure that some target said that no rebuild was done
+            # Ensure that some target of each type said that no rebuild was done
+            # We always have at least one CustomBuild target for the regen checker
             self.assertIn('CustomBuild:\n  All outputs are up-to-date.', ret)
             self.assertIn('ClCompile:\n  All outputs are up-to-date.', ret)
             self.assertIn('Link:\n  All outputs are up-to-date.', ret)
             # Ensure that no targets were built
-            clre = re.compile('ClCompile:\n [^\n]*cl', flags=re.IGNORECASE)
-            linkre = re.compile('Link:\n [^\n]*link', flags=re.IGNORECASE)
-            self.assertNotRegex(ret, clre)
-            self.assertNotRegex(ret, linkre)
+            self.assertNotRegex(ret, re.compile('CustomBuild:\n [^\n]*cl', flags=re.IGNORECASE))
+            self.assertNotRegex(ret, re.compile('ClCompile:\n [^\n]*cl', flags=re.IGNORECASE))
+            self.assertNotRegex(ret, re.compile('Link:\n [^\n]*link', flags=re.IGNORECASE))
         elif self.backend is Backend.xcode:
             raise unittest.SkipTest('Please help us fix this test on the xcode backend')
         else:
@@ -1702,6 +1840,33 @@
         else:
             raise RuntimeError('Invalid backend: {!r}'.format(self.backend.name))
 
+    @staticmethod
+    def get_target_from_filename(filename):
+        base = os.path.splitext(filename)[0]
+        if base.startswith(('lib', 'cyg')):
+            return base[3:]
+        return base
+
+    def assertBuildRelinkedOnlyTarget(self, target):
+        ret = self.build()
+        if self.backend is Backend.ninja:
+            linked_targets = []
+            for line in ret.split('\n'):
+                if 'Linking target' in line:
+                    fname = line.rsplit('target ')[-1]
+                    linked_targets.append(self.get_target_from_filename(fname))
+            self.assertEqual(linked_targets, [target])
+        elif self.backend is Backend.vs:
+            # Ensure that this target was rebuilt
+            linkre = re.compile(r'Link:\n  [^\n]*link.exe[^\n]*/OUT:".\\([^"]*)"', flags=re.IGNORECASE)
+            matches = linkre.findall(ret)
+            self.assertEqual(len(matches), 1, msg=matches)
+            self.assertEqual(self.get_target_from_filename(matches[0]), target)
+        elif self.backend is Backend.xcode:
+            raise unittest.SkipTest('Please help us fix this test on the xcode backend')
+        else:
+            raise RuntimeError('Invalid backend: {!r}'.format(self.backend.name))
+
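get_target_from_filename together with the ninja branch of assertBuildRelinkedOnlyTarget above reduces a 'Linking target ...' status line to a bare target name; a quick standalone check of that reduction (the sample log line is invented):

    import os

    def get_target_from_filename(filename):
        base = os.path.splitext(filename)[0]
        if base.startswith(('lib', 'cyg')):
            return base[3:]
        return base

    line = 'Linking target libmylib.so'     # invented ninja status line
    fname = line.rsplit('target ')[-1]      # -> 'libmylib.so'
    print(get_target_from_filename(fname))  # -> 'mylib'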
     def assertPathExists(self, path):
         m = 'Path {!r} should exist'.format(path)
         self.assertTrue(os.path.exists(path), msg=m)
@@ -1745,6 +1910,54 @@
         self.assertEqual(conf_file('@VAR@\n@VAR@\n', confdata), 'foo\nfoo\n')
         self.assertEqual(conf_file('@VAR@\r\n@VAR@\r\n', confdata), 'foo\r\nfoo\r\n')
 
+    def test_do_conf_file_by_format(self):
+        def conf_str(in_data, confdata, vformat):
+            (result, missing_variables, confdata_useless) = mesonbuild.mesonlib.do_conf_str(in_data, confdata, variable_format = vformat)
+            return '\n'.join(result)
+
+        def check_formats(confdata, result):
+            self.assertEqual(conf_str(['#mesondefine VAR'], confdata, 'meson'), result)
+            self.assertEqual(conf_str(['#cmakedefine VAR ${VAR}'], confdata, 'cmake'), result)
+            self.assertEqual(conf_str(['#cmakedefine VAR @VAR@'], confdata, 'cmake@'), result)
+
+        confdata = ConfigurationData()
+        # Key error as the values do not exist yet
+        check_formats(confdata, '/* #undef VAR */\n')
+
+        # Check boolean
+        confdata.values = {'VAR': (False, 'description')}
+        check_formats(confdata, '#undef VAR\n')
+        confdata.values = {'VAR': (True, 'description')}
+        check_formats(confdata, '#define VAR\n')
+
+        # Check string
+        confdata.values = {'VAR': ('value', 'description')}
+        check_formats(confdata, '#define VAR value\n')
+
+        # Check integer
+        confdata.values = {'VAR': (10, 'description')}
+        check_formats(confdata, '#define VAR 10\n')
+
+        # Check multiple string with cmake formats
+        confdata.values = {'VAR': ('value', 'description')}
+        self.assertEqual(conf_str(['#cmakedefine VAR xxx @VAR@ yyy @VAR@'], confdata, 'cmake@'), '#define VAR xxx value yyy value\n')
+        self.assertEqual(conf_str(['#define VAR xxx @VAR@ yyy @VAR@'], confdata, 'cmake@'), '#define VAR xxx value yyy value')
+        self.assertEqual(conf_str(['#cmakedefine VAR xxx ${VAR} yyy ${VAR}'], confdata, 'cmake'), '#define VAR xxx value yyy value\n')
+        self.assertEqual(conf_str(['#define VAR xxx ${VAR} yyy ${VAR}'], confdata, 'cmake'), '#define VAR xxx value yyy value')
+
+        # Handles meson format exceptions
+        #   Unknown format
+        self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR xxx'], confdata, 'unknown_format')
+        #   More than 2 params in mesondefine
+        self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR xxx'], confdata, 'meson')
+        #   Mismatched line with format
+        self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#cmakedefine VAR'], confdata, 'meson')
+        self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR'], confdata, 'cmake')
+        self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR'], confdata, 'cmake@')
+        #   Dict value in confdata
+        confdata.values = {'VAR': (['value'], 'description')}
+        self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR'], confdata, 'meson')
+
     def test_absolute_prefix_libdir(self):
         '''
         Tests that setting absolute paths for --prefix and --libdir work. Can't
@@ -1753,7 +1966,8 @@
         https://github.com/mesonbuild/meson/issues/1345
         '''
         testdir = os.path.join(self.common_test_dir, '90 default options')
-        prefix = '/someabs'
+        # On Windows, /someabs is *not* an absolute path
+        prefix = 'x:/someabs' if is_windows() else '/someabs'
         libdir = 'libdir'
         extra_args = ['--prefix=' + prefix,
                       # This can just be a relative path, but we want to test
@@ -1774,16 +1988,25 @@
         '''
         testdir = os.path.join(self.common_test_dir, '1 trivial')
         # libdir being inside prefix is ok
-        args = ['--prefix', '/opt', '--libdir', '/opt/lib32']
+        if is_windows():
+            args = ['--prefix', 'x:/opt', '--libdir', 'x:/opt/lib32']
+        else:
+            args = ['--prefix', '/opt', '--libdir', '/opt/lib32']
         self.init(testdir, extra_args=args)
         self.wipe()
         # libdir not being inside prefix is not ok
-        args = ['--prefix', '/usr', '--libdir', '/opt/lib32']
+        if is_windows():
+            args = ['--prefix', 'x:/usr', '--libdir', 'x:/opt/lib32']
+        else:
+            args = ['--prefix', '/usr', '--libdir', '/opt/lib32']
         self.assertRaises(subprocess.CalledProcessError, self.init, testdir, extra_args=args)
         self.wipe()
         # libdir must be inside prefix even when set via mesonconf
         self.init(testdir)
-        self.assertRaises(subprocess.CalledProcessError, self.setconf, '-Dlibdir=/opt', False)
+        if is_windows():
+            self.assertRaises(subprocess.CalledProcessError, self.setconf, '-Dlibdir=x:/opt', False)
+        else:
+            self.assertRaises(subprocess.CalledProcessError, self.setconf, '-Dlibdir=/opt', False)
 
     def test_prefix_dependent_defaults(self):
         '''
@@ -2053,6 +2276,12 @@
         self.build()
         self.run_tests()
 
+    def test_force_fallback_for(self):
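+        # --force-fallback-for should make the listed dependencies use their
+        # subproject fallbacks.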
+        testdir = os.path.join(self.unit_test_dir, '31 forcefallback')
+        self.init(testdir, extra_args=['--force-fallback-for=zlib,foo'])
+        self.build()
+        self.run_tests()
+
     def test_env_ops_dont_stack(self):
         '''
         Test that env ops prepend/append do not stack, and that this usage issues a warning
@@ -2224,6 +2453,9 @@
         self.assertPathExists(exe2)
 
     def test_internal_include_order(self):
+        if mesonbuild.environment.detect_msys2_arch() and ('MESON_RSP_THRESHOLD' in os.environ):
+            raise unittest.SkipTest('Test does not yet support gcc rsp files on msys2')
+
         testdir = os.path.join(self.common_test_dir, '134 include order')
         self.init(testdir)
         execmd = fxecmd = None
@@ -2239,9 +2471,12 @@
         # Check include order for 'someexe'
         incs = [a for a in split_args(execmd) if a.startswith("-I")]
         self.assertEqual(len(incs), 9)
-        # target private dir
-        someexe_id = Target.construct_id_from_path("sub4", "someexe", "@exe")
-        self.assertPathEqual(incs[0], "-I" + os.path.join("sub4", someexe_id))
+        # Need to run the build so the private dir is created.
+        self.build()
+        pdirs = glob(os.path.join(self.builddir, 'sub4/someexe*.p'))
+        self.assertEqual(len(pdirs), 1)
+        privdir = pdirs[0][len(self.builddir)+1:]
+        self.assertPathEqual(incs[0], "-I" + privdir)
         # target build subdir
         self.assertPathEqual(incs[1], "-Isub4")
         # target source subdir
@@ -2262,7 +2497,10 @@
         incs = [a for a in split_args(fxecmd) if a.startswith('-I')]
         self.assertEqual(len(incs), 9)
         # target private dir
-        self.assertPathEqual(incs[0], '-Isomefxe@exe')
+        pdirs = glob(os.path.join(self.builddir, 'somefxe*.p'))
+        self.assertEqual(len(pdirs), 1)
+        privdir = pdirs[0][len(self.builddir)+1:]
+        self.assertPathEqual(incs[0], '-I' + privdir)
         # target build dir
         self.assertPathEqual(incs[1], '-I.')
         # target source dir
@@ -2337,6 +2575,8 @@
                 self.assertIsInstance(linker, ar)
                 if is_osx():
                     self.assertIsInstance(cc.linker, mesonbuild.linkers.AppleDynamicLinker)
+                elif is_sunos():
+                    self.assertIsInstance(cc.linker, (mesonbuild.linkers.SolarisDynamicLinker, mesonbuild.linkers.GnuLikeDynamicLinkerMixin))
                 else:
                     self.assertIsInstance(cc.linker, mesonbuild.linkers.GnuLikeDynamicLinkerMixin)
             if isinstance(cc, clangcl):
@@ -2495,6 +2735,23 @@
         meson_exe_dat2 = glob(os.path.join(self.privatedir, 'meson_exe*.dat'))
         self.assertListEqual(meson_exe_dat1, meson_exe_dat2)
 
+    def test_noop_changes_cause_no_rebuilds(self):
+        '''
+        Test that no-op changes to the build files such as mtime do not cause
+        a rebuild of anything.
+        '''
+        testdir = os.path.join(self.common_test_dir, '6 linkshared')
+        self.init(testdir)
+        self.build()
+        # Immediately rebuilding should not do anything
+        self.assertBuildIsNoop()
+        # Changing mtime of meson.build should not rebuild anything
+        self.utime(os.path.join(testdir, 'meson.build'))
+        self.assertReconfiguredBuildIsNoop()
+        # Changing mtime of libfile.c should rebuild the library, but not relink the executable
+        self.utime(os.path.join(testdir, 'libfile.c'))
+        self.assertBuildRelinkedOnlyTarget('mylib')
+
     def test_source_changes_cause_rebuild(self):
         '''
         Test that changes to sources and headers cause rebuilds, but not
@@ -2508,7 +2765,7 @@
         self.assertBuildIsNoop()
         # Changing mtime of header.h should rebuild everything
         self.utime(os.path.join(testdir, 'header.h'))
-        self.assertRebuiltTarget('prog')
+        self.assertBuildRelinkedOnlyTarget('prog')
 
     def test_custom_target_changes_cause_rebuild(self):
         '''
@@ -2524,7 +2781,7 @@
         # Changing mtime of these should rebuild everything
         for f in ('input.def', 'makeheader.py', 'somefile.txt'):
             self.utime(os.path.join(testdir, f))
-            self.assertRebuiltTarget('prog')
+            self.assertBuildRelinkedOnlyTarget('prog')
 
     def test_source_generator_program_cause_rebuild(self):
         '''
@@ -2570,9 +2827,25 @@
             # fails sometimes.
             pass
 
-    def test_dist_hg(self):
+    def has_working_hg(self):
         if not shutil.which('hg'):
-            raise unittest.SkipTest('Mercurial not found')
+            return False
+        try:
+            # This check should not be necessary, but
+            # CI under macOS passes the above test even
+            # though Mercurial is not installed.
+            if subprocess.call(['hg', '--version'],
+                               stdout=subprocess.DEVNULL,
+                               stderr=subprocess.DEVNULL) != 0:
+                return False
+            return True
+        except FileNotFoundError:
+            return False
+
+
+    def test_dist_hg(self):
+        if not self.has_working_hg():
+            raise unittest.SkipTest('Mercurial not found or broken.')
         if self.backend is not Backend.ninja:
             raise unittest.SkipTest('Dist is only supported with Ninja')
 
@@ -2624,20 +2897,22 @@
         # the source tree leads to all kinds of trouble.
         with tempfile.TemporaryDirectory() as project_dir:
             with open(os.path.join(project_dir, 'meson.build'), 'w') as ofile:
-                ofile.write('''project('disttest', 'c', version : '1.4.3')
-e = executable('distexe', 'distexe.c')
-test('dist test', e)
-subproject('vcssub', required : false)
-subproject('tarballsub', required : false)
-''')
+                ofile.write(textwrap.dedent('''\
+                    project('disttest', 'c', version : '1.4.3')
+                    e = executable('distexe', 'distexe.c')
+                    test('dist test', e)
+                    subproject('vcssub', required : false)
+                    subproject('tarballsub', required : false)
+                    '''))
             with open(os.path.join(project_dir, 'distexe.c'), 'w') as ofile:
-                ofile.write('''#include<stdio.h>
+                ofile.write(textwrap.dedent('''\
+                    #include<stdio.h>
 
-int main(int argc, char **argv) {
-    printf("I am a distribution test.\\n");
-    return 0;
-}
-''')
+                    int main(int argc, char **argv) {
+                        printf("I am a distribution test.\\n");
+                        return 0;
+                    }
+                    '''))
             xz_distfile = os.path.join(self.distdir, 'disttest-1.4.3.tar.xz')
             xz_checksumfile = xz_distfile + '.sha256sum'
             zip_distfile = os.path.join(self.distdir, 'disttest-1.4.3.zip')
@@ -2726,7 +3001,7 @@
         test. Needs to be a unit test because it accesses Meson internals.
         '''
         testdir = os.path.join(self.common_test_dir, '154 reserved targets')
-        targets = mesonbuild.coredata.forbidden_target_names
+        targets = mesonbuild.coredata.FORBIDDEN_TARGET_NAMES
         # We don't actually define a target with this name
         targets.pop('build.ninja')
         # Remove this to avoid multiple entries with the same name
@@ -2896,6 +3171,7 @@
                         os.unlink(fname)
 
     @skipIfNoPkgconfig
+    @mock.patch.dict(os.environ)
     def test_pkgconfig_gen_escaping(self):
         testdir = os.path.join(self.common_test_dir, '47 pkgconfig-gen')
         prefix = '/usr/with spaces'
@@ -2913,8 +3189,9 @@
         self.assertEqual(foo_dep.get_link_args(), link_args)
         # Ensure include args are properly quoted
         incdir = PurePath(prefix) / PurePath('include')
-        cargs = ['-I' + incdir.as_posix()]
-        self.assertEqual(foo_dep.get_compile_args(), cargs)
+        cargs = ['-I' + incdir.as_posix(), '-DLIBFOO']
+        # pkg-config and pkgconf do not emit the arguments in the same order
+        self.assertEqual(sorted(foo_dep.get_compile_args()), sorted(cargs))
 
     def test_array_option_change(self):
         def get_opt():
@@ -3008,6 +3285,58 @@
         self.setconf("-Dfree_array_opt=['a,b', 'c,d']", will_build=False)
         self.opt_has('free_array_opt', ['a,b', 'c,d'])
 
+    # On Travis macOS CI, file updates can happen so quickly that the
+    # timestamps do not get properly updated. Call this method before
+    # file operations where the timestamps matter.
+    def mac_ci_delay(self):
+        if is_osx() and is_ci():
+            import time
+            time.sleep(1)
+
+    def test_options_with_choices_changing(self) -> None:
+        """Detect when options like arrays or combos have their choices change."""
+        testdir = Path(os.path.join(self.unit_test_dir, '84 change option choices'))
+        options1 = str(testdir / 'meson_options.1.txt')
+        options2 = str(testdir / 'meson_options.2.txt')
+
+        # Test that old options are changed to the new defaults if they are not valid
+        real_options = str(testdir / 'meson_options.txt')
+        self.addCleanup(os.unlink, real_options)
+
+        shutil.copy(options1, real_options)
+        self.init(str(testdir))
+        self.mac_ci_delay()
+        shutil.copy(options2, real_options)
+
+        self.build()
+        opts = self.introspect('--buildoptions')
+        for item in opts:
+            if item['name'] == 'combo':
+                self.assertEqual(item['value'], 'b')
+                self.assertEqual(item['choices'], ['b', 'c', 'd'])
+            elif item['name'] == 'arr':
+                self.assertEqual(item['value'], ['b'])
+                self.assertEqual(item['choices'], ['b', 'c', 'd'])
+
+        self.wipe()
+        self.mac_ci_delay()
+
+        # When the old options are valid they should remain
+        shutil.copy(options1, real_options)
+        self.init(str(testdir), extra_args=['-Dcombo=c', '-Darray=b,c'])
+        self.mac_ci_delay()
+        shutil.copy(options2, real_options)
+        self.build()
+        opts = self.introspect('--buildoptions')
+        for item in opts:
+            if item['name'] == 'combo':
+                self.assertEqual(item['value'], 'c')
+                self.assertEqual(item['choices'], ['b', 'c', 'd'])
+            elif item['name'] == 'arr':
+                self.assertEqual(item['value'], ['b', 'c'])
+                self.assertEqual(item['choices'], ['b', 'c', 'd'])
+
     def test_subproject_promotion(self):
         testdir = os.path.join(self.unit_test_dir, '12 promote')
         workdir = os.path.join(self.builddir, 'work')
@@ -3016,7 +3345,9 @@
         s3dir = os.path.join(spdir, 's3')
         scommondir = os.path.join(spdir, 'scommon')
         self.assertFalse(os.path.isdir(s3dir))
-        subprocess.check_call(self.wrap_command + ['promote', 's3'], cwd=workdir)
+        subprocess.check_call(self.wrap_command + ['promote', 's3'],
+                              cwd=workdir,
+                              stdout=subprocess.DEVNULL)
         self.assertTrue(os.path.isdir(s3dir))
         self.assertFalse(os.path.isdir(scommondir))
         self.assertNotEqual(subprocess.call(self.wrap_command + ['promote', 'scommon'],
@@ -3063,6 +3394,50 @@
         ]:
             self.assertRegex(out, re.escape(expected))
 
+        for wd in [
+            self.src_root,
+            self.builddir,
+            os.getcwd(),
+        ]:
+            self.new_builddir()
+            out = self.init(tdir, workdir=wd)
+            expected = os.path.join(relpath(tdir, self.src_root), 'meson.build')
+            relwd = relpath(self.src_root, wd)
+            if relwd != '.':
+                expected = os.path.join(relwd, expected)
+                expected = '\n' + expected + ':'
+            self.assertIn(expected, out)
+
+    def test_error_location_path(self):
+        '''Test locations in meson errors contain correct paths'''
+        # this list contains errors from all the different steps in the
+        # lexer/parser/interpreter we have tests for.
+        for (t, f) in [
+            ('10 out of bounds', 'meson.build'),
+            ('18 wrong plusassign', 'meson.build'),
+            ('61 bad option argument', 'meson_options.txt'),
+            ('102 subdir parse error', os.path.join('subdir', 'meson.build')),
+            ('103 invalid option file', 'meson_options.txt'),
+        ]:
+            tdir = os.path.join(self.src_root, 'test cases', 'failing', t)
+
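+            # Configure from several working directories; the reported location
+            # must be relative to wherever meson was invoked from.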
+            for wd in [
+                self.src_root,
+                self.builddir,
+                os.getcwd(),
+            ]:
+                try:
+                    self.init(tdir, workdir=wd)
+                except subprocess.CalledProcessError as e:
+                    expected = os.path.join('test cases', 'failing', t, f)
+                    relwd = relpath(self.src_root, wd)
+                    if relwd != '.':
+                        expected = os.path.join(relwd, expected)
+                    expected = '\n' + expected + ':'
+                    self.assertIn(expected, e.output)
+                else:
+                    self.fail('configure unexpectedly succeeded')
+
     def test_permitted_method_kwargs(self):
         tdir = os.path.join(self.unit_test_dir, '25 non-permitted kwargs')
         out = self.init(tdir)
@@ -3085,15 +3460,28 @@
         except EnvironmentException:
             pass
         try:
+            env.detect_cs_compiler(MachineChoice.HOST)
+            langs.append('cs')
+        except EnvironmentException:
+            pass
+        try:
             env.detect_d_compiler(MachineChoice.HOST)
             langs.append('d')
         except EnvironmentException:
             pass
         try:
+            env.detect_java_compiler(MachineChoice.HOST)
+            langs.append('java')
+        except EnvironmentException:
+            pass
+        try:
+            env.detect_cuda_compiler(MachineChoice.HOST)
+            langs.append('cuda')
+        except EnvironmentException:
+            pass
+        try:
             env.detect_fortran_compiler(MachineChoice.HOST)
-            if is_windows() or platform.machine().lower() != 'e2k':
-                # Elbrus Fortran compiler can't generate debug information
-                langs.append('fortran')
+            langs.append('fortran')
         except EnvironmentException:
             pass
         try:
@@ -3101,7 +3489,18 @@
             langs.append('objc')
         except EnvironmentException:
             pass
+        try:
+            env.detect_objcpp_compiler(MachineChoice.HOST)
+            langs.append('objcpp')
+        except EnvironmentException:
+            pass
         # FIXME: omitting rust as Windows AppVeyor CI finds Rust but doesn't link correctly
+        if not is_windows():
+            try:
+                env.detect_rust_compiler(MachineChoice.HOST)
+                langs.append('rust')
+            except EnvironmentException:
+                pass
 
         for lang in langs:
             for target_type in ('executable', 'library'):
@@ -3114,72 +3513,16 @@
                     self._run(ninja,
                               workdir=os.path.join(tmpdir, 'builddir'))
             # test directory with existing code file
-            if lang in ('c', 'cpp'):
+            if lang in ('c', 'cpp', 'd'):
                 with tempfile.TemporaryDirectory() as tmpdir:
                     with open(os.path.join(tmpdir, 'foo.' + lang), 'w') as f:
                         f.write('int main(void) {}')
                     self._run(self.meson_command + ['init', '-b'], workdir=tmpdir)
-
-    # The test uses mocking and thus requires that
-    # the current process is the one to run the Meson steps.
-    # If we are using an external test executable (most commonly
-    # in Debian autopkgtests) then the mocking won't work.
-    @unittest.skipIf('MESON_EXE' in os.environ, 'MESON_EXE is defined, can not use mocking.')
-    def test_cross_file_system_paths(self):
-        if is_windows():
-            raise unittest.SkipTest('system crossfile paths not defined for Windows (yet)')
-        if is_sunos():
-            cc = 'gcc'
-        else:
-            cc = 'cc'
-
-        testdir = os.path.join(self.common_test_dir, '1 trivial')
-        cross_content = textwrap.dedent("""\
-            [binaries]
-            c = '/usr/bin/{}'
-            ar = '/usr/bin/ar'
-            strip = '/usr/bin/ar'
-
-            [properties]
-
-            [host_machine]
-            system = 'linux'
-            cpu_family = 'x86'
-            cpu = 'i686'
-            endian = 'little'
-            """.format(cc))
-
-        with tempfile.TemporaryDirectory() as d:
-            dir_ = os.path.join(d, 'meson', 'cross')
-            os.makedirs(dir_)
-            with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f:
-                f.write(cross_content)
-            name = os.path.basename(f.name)
-
-            with mock.patch.dict(os.environ, {'XDG_DATA_HOME': d}):
-                self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True)
-                self.wipe()
-
-            with mock.patch.dict(os.environ, {'XDG_DATA_DIRS': d}):
-                os.environ.pop('XDG_DATA_HOME', None)
-                self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True)
-                self.wipe()
-
-        with tempfile.TemporaryDirectory() as d:
-            dir_ = os.path.join(d, '.local', 'share', 'meson', 'cross')
-            os.makedirs(dir_)
-            with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f:
-                f.write(cross_content)
-            name = os.path.basename(f.name)
-
-            # If XDG_DATA_HOME is set in the environment running the
-            # tests this test will fail, os mock the environment, pop
-            # it, then test
-            with mock.patch.dict(os.environ):
-                os.environ.pop('XDG_DATA_HOME', None)
-                with mock.patch('mesonbuild.coredata.os.path.expanduser', lambda x: x.replace('~', d)):
-                    self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True)
-                    self.wipe()
+            elif lang in ('java',):
+                with tempfile.TemporaryDirectory() as tmpdir:
+                    with open(os.path.join(tmpdir, 'Foo.' + lang), 'w') as f:
+                        f.write('public class Foo { public static void main() {} }')
+                    self._run(self.meson_command + ['init', '-b'], workdir=tmpdir)
 
     def test_compiler_run_command(self):
         '''
@@ -3228,8 +3571,8 @@
         """
         tdir = os.path.join(self.unit_test_dir, '30 shared_mod linking')
         out = self.init(tdir)
-        msg = ('''WARNING: target links against shared modules. This is not
-recommended as it is not supported on some platforms''')
+        msg = ('WARNING: target links against shared modules. This is not '
+               'recommended as it is not supported on some platforms')
         self.assertIn(msg, out)
 
     def test_ndebug_if_release_disabled(self):
@@ -3360,8 +3703,9 @@
     def test_command_line(self):
         testdir = os.path.join(self.unit_test_dir, '34 command line')
 
-        # Verify default values when passing no args
-        self.init(testdir)
+        # Verify default values when passing no args that affect the
+        # configuration, and as a bonus, test that --profile-self works.
+        self.init(testdir, extra_args=['--profile-self'])
         obj = mesonbuild.coredata.load(self.builddir)
         self.assertEqual(obj.builtins['default_library'].value, 'static')
         self.assertEqual(obj.builtins['warning_level'].value, '1')
@@ -3445,11 +3789,11 @@
         # c_args value should be parsed with split_args
         self.init(testdir, extra_args=['-Dc_args=-Dfoo -Dbar "-Dthird=one two"'])
         obj = mesonbuild.coredata.load(self.builddir)
-        self.assertEqual(obj.compiler_options.host['c_args'].value, ['-Dfoo', '-Dbar', '-Dthird=one two'])
+        self.assertEqual(obj.compiler_options.host['c']['args'].value, ['-Dfoo', '-Dbar', '-Dthird=one two'])
 
         self.setconf('-Dc_args="foo bar" one two')
         obj = mesonbuild.coredata.load(self.builddir)
-        self.assertEqual(obj.compiler_options.host['c_args'].value, ['foo bar', 'one', 'two'])
+        self.assertEqual(obj.compiler_options.host['c']['args'].value, ['foo bar', 'one', 'two'])
         self.wipe()
 
         self.init(testdir, extra_args=['-Dset_percent_opt=myoption%'])
@@ -3467,7 +3811,7 @@
             self.assertEqual(obj.builtins['bindir'].value, 'bar')
             self.assertEqual(obj.builtins['buildtype'].value, 'release')
             self.assertEqual(obj.base_options['b_sanitize'].value, 'thread')
-            self.assertEqual(obj.compiler_options.host['c_args'].value, ['-Dbar'])
+            self.assertEqual(obj.compiler_options.host['c']['args'].value, ['-Dbar'])
             self.setconf(['--bindir=bar', '--bindir=foo',
                           '-Dbuildtype=release', '-Dbuildtype=plain',
                           '-Db_sanitize=thread', '-Db_sanitize=address',
@@ -3476,7 +3820,7 @@
             self.assertEqual(obj.builtins['bindir'].value, 'foo')
             self.assertEqual(obj.builtins['buildtype'].value, 'plain')
             self.assertEqual(obj.base_options['b_sanitize'].value, 'address')
-            self.assertEqual(obj.compiler_options.host['c_args'].value, ['-Dfoo'])
+            self.assertEqual(obj.compiler_options.host['c']['args'].value, ['-Dfoo'])
             self.wipe()
         except KeyError:
             # Ignore KeyError, it happens on CI for compilers that do not
@@ -3602,6 +3946,33 @@
         self.assertEqual(opts['debug'], True)
         self.assertEqual(opts['optimization'], '0')
 
+        # Command-line parsing of buildtype settings should be the same as
+        # setting with `meson configure`.
+        #
+        # Setting buildtype should set optimization/debug
+        self.new_builddir()
+        self.init(testdir, extra_args=['-Dbuildtype=debugoptimized'])
+        opts = self.get_opts_as_dict()
+        self.assertEqual(opts['debug'], True)
+        self.assertEqual(opts['optimization'], '2')
+        self.assertEqual(opts['buildtype'], 'debugoptimized')
+        # Setting optimization/debug should set buildtype
+        self.new_builddir()
+        self.init(testdir, extra_args=['-Doptimization=2', '-Ddebug=true'])
+        opts = self.get_opts_as_dict()
+        self.assertEqual(opts['debug'], True)
+        self.assertEqual(opts['optimization'], '2')
+        self.assertEqual(opts['buildtype'], 'debugoptimized')
+        # Setting both buildtype and debug on the command-line should work, and
+        # should warn not to do that. Also test that --debug is parsed as -Ddebug=true
+        self.new_builddir()
+        out = self.init(testdir, extra_args=['-Dbuildtype=debugoptimized', '--debug'])
+        self.assertRegex(out, 'Recommend using either.*buildtype.*debug.*redundant')
+        opts = self.get_opts_as_dict()
+        self.assertEqual(opts['debug'], True)
+        self.assertEqual(opts['optimization'], '2')
+        self.assertEqual(opts['buildtype'], 'debugoptimized')
+
     @skipIfNoPkgconfig
     @unittest.skipIf(is_windows(), 'Help needed with fixing this test on windows')
     def test_native_dep_pkgconfig(self):
@@ -3610,7 +3981,7 @@
         with tempfile.NamedTemporaryFile(mode='w', delete=False) as crossfile:
             crossfile.write(textwrap.dedent(
                 '''[binaries]
-                pkgconfig = r'{0}'
+                pkgconfig = '{0}'
 
                 [properties]
 
@@ -3629,6 +4000,34 @@
         self.wipe()
         self.init(testdir, extra_args=['-Dstart_native=true'], override_envvars=env)
 
+    @skipIfNoPkgconfig
+    @unittest.skipIf(is_windows(), 'Help needed with fixing this test on windows')
+    def test_pkg_config_libdir(self):
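+        # pkg_config_libdir from the cross file should steer host-machine
+        # pkg-config lookups, while PKG_CONFIG_LIBDIR in the environment
+        # applies to the native lookups.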
+        testdir = os.path.join(self.unit_test_dir,
+                               '46 native dep pkgconfig var')
+        with tempfile.NamedTemporaryFile(mode='w', delete=False) as crossfile:
+            crossfile.write(textwrap.dedent(
+                '''[binaries]
+                pkgconfig = 'pkg-config'
+
+                [properties]
+                pkg_config_libdir = ['{0}']
+
+                [host_machine]
+                system = 'linux'
+                cpu_family = 'arm'
+                cpu = 'armv7'
+                endian = 'little'
+                '''.format(os.path.join(testdir, 'cross_pkgconfig'))))
+            crossfile.flush()
+            self.meson_cross_file = crossfile.name
+
+        env = {'PKG_CONFIG_LIBDIR':  os.path.join(testdir,
+                                                  'native_pkgconfig')}
+        self.init(testdir, extra_args=['-Dstart_native=false'], override_envvars=env)
+        self.wipe()
+        self.init(testdir, extra_args=['-Dstart_native=true'], override_envvars=env)
+
     def __reconfigure(self, change_minor=False):
         # Set an older version to force a reconfigure from scratch
         filename = os.path.join(self.privatedir, 'coredata.dat')
@@ -3650,7 +4049,7 @@
         self.__reconfigure()
 
         out = self.init(testdir, extra_args=['--reconfigure', '-Dopt3=val3'])
-        self.assertRegex(out, 'WARNING:.*Regenerating configuration from scratch')
+        self.assertRegex(out, 'Regenerating configuration from scratch')
         self.assertRegex(out, 'opt1 val1')
         self.assertRegex(out, 'opt2 val2')
         self.assertRegex(out, 'opt3 val3')
@@ -3687,7 +4086,7 @@
         self.__reconfigure(change_minor=True)
 
         out = self.init(testdir, extra_args=['--reconfigure', '-Dopt3=val3'])
-        self.assertNotRegex(out, 'WARNING:.*Regenerating configuration from scratch')
+        self.assertNotRegex(out, 'Regenerating configuration from scratch')
         self.assertRegex(out, 'opt1 val1')
         self.assertRegex(out, 'opt2 val2')
         self.assertRegex(out, 'opt3 val3')
@@ -3745,11 +4144,32 @@
                 {
                     'descriptive_name': 'sub',
                     'name': 'sub',
+                    'version': '1.0'
+                },
+                {
+                    'descriptive_name': 'sub_implicit',
+                    'name': 'sub_implicit',
+                    'version': '1.0',
+                },
+                {
+                    'descriptive_name': 'sub-novar',
+                    'name': 'sub_novar',
+                    'version': '1.0',
+                },
+                {
+                    'descriptive_name': 'subsub',
+                    'name': 'subsub',
                     'version': 'undefined'
-                }
+                },
+                {
+                    'descriptive_name': 'subsubsub',
+                    'name': 'subsubsub',
+                    'version': 'undefined'
+                },
             ]
         }
-        self.assertDictEqual(res, expected)
+        res['subprojects'] = sorted(res['subprojects'], key=lambda i: i['name'])
+        self.assertDictEqual(expected, res)
 
     def test_introspection_target_subproject(self):
         testdir = os.path.join(self.common_test_dir, '45 subproject')
@@ -3816,9 +4236,11 @@
         if is_osx():
             raise unittest.SkipTest('Apple ships a broken clang-tidy that chokes on -pipe.')
         testdir = os.path.join(self.unit_test_dir, '70 clang-tidy')
+        dummydir = os.path.join(testdir, 'dummydir.h')
         self.init(testdir, override_envvars={'CXX': 'c++'})
         out = self.run_target('clang-tidy')
         self.assertIn('cttest.cpp:4:20', out)
+        self.assertNotIn(dummydir, out)
 
     def test_identity_cross(self):
         testdir = os.path.join(self.unit_test_dir, '71 cross')
@@ -3843,16 +4265,36 @@
         testdir = os.path.join(self.unit_test_dir, '59 introspect buildoptions')
         self._run(self.mconf_command + [testdir])
 
+    def test_introspect_buildoptions_cross_only(self):
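+        # When only a cross file is given, build-machine options (prefixed
+        # with 'build.') should not be reported.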
+        testdir = os.path.join(self.unit_test_dir, '83 cross only introspect')
+        testfile = os.path.join(testdir, 'meson.build')
+        res = self.introspect_directory(testfile, ['--buildoptions'] + self.meson_args)
+        optnames = [o['name'] for o in res]
+        self.assertIn('c_args', optnames)
+        self.assertNotIn('build.c_args', optnames)
+
     def test_introspect_json_dump(self):
         testdir = os.path.join(self.unit_test_dir, '57 introspection')
         self.init(testdir)
         infodir = os.path.join(self.builddir, 'meson-info')
         self.assertPathExists(infodir)
 
-        def assertKeyTypes(key_type_list, obj):
+        def assertKeyTypes(key_type_list, obj, strict: bool = True):
             for i in key_type_list:
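+                # A None in the expected type tuple marks the key as optional:
+                # skip the check when the key is missing or its value is None.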
+                if isinstance(i[1], (list, tuple)) and None in i[1]:
+                    i = (i[0], tuple([x for x in i[1] if x is not None]))
+                    if i[0] not in obj or obj[i[0]] is None:
+                        continue
                 self.assertIn(i[0], obj)
                 self.assertIsInstance(obj[i[0]], i[1])
+            if strict:
+                for k in obj.keys():
+                    found = False
+                    for i in key_type_list:
+                        if k == i[0]:
+                            found = True
+                            break
+                    self.assertTrue(found, 'Key "{}" not in expected list'.format(k))
 
         root_keylist = [
             ('benchmarks', list),
@@ -3873,6 +4315,9 @@
             ('suite', list),
             ('is_parallel', bool),
             ('protocol', str),
+            ('depends', list),
+            ('workdir', (str, None)),
+            ('priority', int),
         ]
 
         buildoptions_keylist = [
@@ -3881,6 +4326,8 @@
             ('type', str),
             ('description', str),
             ('machine', str),
+            ('choices', (list, None)),
+            ('value', (str, int, bool, list)),
         ]
 
         buildoptions_typelist = [
@@ -3909,6 +4356,9 @@
             ('filename', list),
             ('build_by_default', bool),
             ('target_sources', list),
+            ('extra_files', list),
+            ('subproject', (str, None)),
+            ('install_filename', (list, None)),
             ('installed', bool),
         ]
 
@@ -3930,12 +4380,28 @@
 
         assertKeyTypes(root_keylist, res)
 
+        # Match target ids to input and output files for ease of reference
+        src_to_id = {}
+        out_to_id = {}
+        for i in res['targets']:
+            print(json.dumps(i))
+            out_to_id.update({os.path.relpath(out, self.builddir): i['id']
+                              for out in i['filename']})
+            for group in i['target_sources']:
+                src_to_id.update({os.path.relpath(src, testdir): i['id']
+                                  for src in group['sources']})
+
         # Check Tests and benchmarks
         tests_to_find = ['test case 1', 'test case 2', 'benchmark 1']
+        deps_to_find = {'test case 1': [src_to_id['t1.cpp']],
+                        'test case 2': [src_to_id['t2.cpp'], src_to_id['t3.cpp']],
+                        'benchmark 1': [out_to_id['file2'], src_to_id['t3.cpp']]}
         for i in res['benchmarks'] + res['tests']:
             assertKeyTypes(test_keylist, i)
             if i['name'] in tests_to_find:
                 tests_to_find.remove(i['name'])
+            self.assertEqual(sorted(i['depends']),
+                             sorted(deps_to_find[i['name']]))
         self.assertListEqual(tests_to_find, [])
 
         # Check buildoptions
@@ -3946,7 +4412,7 @@
             for j in buildoptions_typelist:
                 if i['type'] == j[0]:
                     self.assertIsInstance(i['value'], j[1])
-                    assertKeyTypes(j[2], i)
+                    assertKeyTypes(j[2], i, strict=False)
                     valid_type = True
                     break
 
@@ -4076,6 +4542,7 @@
         res_nb = self.introspect_directory(testfile, ['--targets'] + self.meson_args)
 
         # Account for differences in output
+        res_wb = [i for i in res_wb if i['type'] != 'custom']
         for i in res_wb:
             i['filename'] = [os.path.relpath(x, self.builddir) for x in i['filename']]
             if 'install_filename' in i:
@@ -4095,6 +4562,83 @@
         self.maxDiff = None
         self.assertListEqual(res_nb, res_wb)
 
+    def test_introspect_ast_source(self):
+        testdir = os.path.join(self.unit_test_dir, '57 introspection')
+        testfile = os.path.join(testdir, 'meson.build')
+        res_nb = self.introspect_directory(testfile, ['--ast'] + self.meson_args)
+
+        node_counter = {}
+
+        def accept_node(json_node):
+            self.assertIsInstance(json_node, dict)
+            for i in ['lineno', 'colno', 'end_lineno', 'end_colno']:
+                self.assertIn(i, json_node)
+                self.assertIsInstance(json_node[i], int)
+            self.assertIn('node', json_node)
+            n = json_node['node']
+            self.assertIsInstance(n, str)
+            self.assertIn(n, nodes)
+            if n not in node_counter:
+                node_counter[n] = 0
+            node_counter[n] = node_counter[n] + 1
+            for nodeDesc in nodes[n]:
+                key = nodeDesc[0]
+                func = nodeDesc[1]
+                self.assertIn(key, json_node)
+                if func is None:
+                    tp = nodeDesc[2]
+                    self.assertIsInstance(json_node[key], tp)
+                    continue
+                func(json_node[key])
+
+        def accept_node_list(node_list):
+            self.assertIsInstance(node_list, list)
+            for i in node_list:
+                accept_node(i)
+
+        def accept_kwargs(kwargs):
+            self.assertIsInstance(kwargs, list)
+            for i in kwargs:
+                self.assertIn('key', i)
+                self.assertIn('val', i)
+                accept_node(i['key'])
+                accept_node(i['val'])
+
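+        # Expected keys for every AST node type: each entry is either
+        # (key, validator_callback) or (key, None, expected_type) for leaf values.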
+        nodes = {
+            'BooleanNode': [('value', None, bool)],
+            'IdNode': [('value', None, str)],
+            'NumberNode': [('value', None, int)],
+            'StringNode': [('value', None, str)],
+            'ContinueNode': [],
+            'BreakNode': [],
+            'ArgumentNode': [('positional', accept_node_list), ('kwargs', accept_kwargs)],
+            'ArrayNode': [('args', accept_node)],
+            'DictNode': [('args', accept_node)],
+            'EmptyNode': [],
+            'OrNode': [('left', accept_node), ('right', accept_node)],
+            'AndNode': [('left', accept_node), ('right', accept_node)],
+            'ComparisonNode': [('left', accept_node), ('right', accept_node), ('ctype', None, str)],
+            'ArithmeticNode': [('left', accept_node), ('right', accept_node), ('op', None, str)],
+            'NotNode': [('right', accept_node)],
+            'CodeBlockNode': [('lines', accept_node_list)],
+            'IndexNode': [('object', accept_node), ('index', accept_node)],
+            'MethodNode': [('object', accept_node), ('args', accept_node), ('name', None, str)],
+            'FunctionNode': [('args', accept_node), ('name', None, str)],
+            'AssignmentNode': [('value', accept_node), ('var_name', None, str)],
+            'PlusAssignmentNode': [('value', accept_node), ('var_name', None, str)],
+            'ForeachClauseNode': [('items', accept_node), ('block', accept_node), ('varnames', None, list)],
+            'IfClauseNode': [('ifs', accept_node_list), ('else', accept_node)],
+            'IfNode': [('condition', accept_node), ('block', accept_node)],
+            'UMinusNode': [('right', accept_node)],
+            'TernaryNode': [('condition', accept_node), ('true', accept_node), ('false', accept_node)],
+        }
+
+        accept_node(res_nb)
+
+        for n, c in [('ContinueNode', 2), ('BreakNode', 1), ('NotNode', 3)]:
+            self.assertIn(n, node_counter)
+            self.assertEqual(node_counter[n], c)
+
     def test_introspect_dependencies_from_source(self):
         testdir = os.path.join(self.unit_test_dir, '57 introspection')
         testfile = os.path.join(testdir, 'meson.build')
@@ -4174,7 +4718,7 @@
         self._run(self.mconf_command + [self.builddir])
 
     def test_summary(self):
-        testdir = os.path.join(self.unit_test_dir, '74 summary')
+        testdir = os.path.join(self.unit_test_dir, '73 summary')
         out = self.init(testdir)
         expected = textwrap.dedent(r'''
             Some Subproject 2.0
@@ -4192,14 +4736,19 @@
                          A list: string
                                  1
                                  True
-                     empty list: 
+                     empty list:
                        A number: 1
                             yes: YES
                              no: NO
+                      coma list: a, b, c
+
+              Plugins
+                 long coma list: alpha, alphacolor, apetag, audiofx, audioparsers, auparse,
+                                 autodetect, avi
 
               Subprojects
                             sub: YES
-                           sub2: NO
+                           sub2: NO Problem encountered: This subproject failed
             ''')
         expected_lines = expected.split('\n')[1:]
         out_start = out.find(expected_lines[0])
@@ -4211,6 +4760,463 @@
         else:
             self.assertEqual(expected_lines, out_lines)
 
+    def test_meson_compile(self):
+        """Test the meson compile command."""
+
+        def get_exe_name(basename: str) -> str:
+            if is_windows():
+                return '{}.exe'.format(basename)
+            else:
+                return basename
+
+        def get_shared_lib_name(basename: str) -> str:
+            if mesonbuild.environment.detect_msys2_arch():
+                return 'lib{}.dll'.format(basename)
+            elif is_windows():
+                return '{}.dll'.format(basename)
+            elif is_cygwin():
+                return 'cyg{}.dll'.format(basename)
+            elif is_osx():
+                return 'lib{}.dylib'.format(basename)
+            else:
+                return 'lib{}.so'.format(basename)
+
+        def get_static_lib_name(basename: str) -> str:
+            return 'lib{}.a'.format(basename)
+
+        # Base case (no targets or additional arguments)
+
+        testdir = os.path.join(self.common_test_dir, '1 trivial')
+        self.init(testdir)
+
+        self._run([*self.meson_command, 'compile', '-C', self.builddir])
+        self.assertPathExists(os.path.join(self.builddir, get_exe_name('trivialprog')))
+
+        # `--clean`
+
+        self._run([*self.meson_command, 'compile', '-C', self.builddir, '--clean'])
+        self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('trivialprog')))
+
+        # Target specified in a project with unique names
+
+        testdir = os.path.join(self.common_test_dir, '6 linkshared')
+        self.init(testdir, extra_args=['--wipe'])
+        # Multiple targets and target type specified
+        self._run([*self.meson_command, 'compile', '-C', self.builddir, 'mylib', 'mycpplib:shared_library'])
+        # Check that we have a shared lib but not an executable, i.e. that the target selection actually worked
+        self.assertPathExists(os.path.join(self.builddir, get_shared_lib_name('mylib')))
+        self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('prog')))
+        self.assertPathExists(os.path.join(self.builddir, get_shared_lib_name('mycpplib')))
+        self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('cppprog')))
+
+        # Target specified in a project with non unique names
+
+        testdir = os.path.join(self.common_test_dir, '190 same target name')
+        self.init(testdir, extra_args=['--wipe'])
+        self._run([*self.meson_command, 'compile', '-C', self.builddir, './foo'])
+        self.assertPathExists(os.path.join(self.builddir, get_static_lib_name('foo')))
+        self._run([*self.meson_command, 'compile', '-C', self.builddir, 'sub/foo'])
+        self.assertPathExists(os.path.join(self.builddir, 'sub', get_static_lib_name('foo')))
+
+        # run_target
+
+        testdir = os.path.join(self.common_test_dir, '54 run target')
+        self.init(testdir, extra_args=['--wipe'])
+        out = self._run([*self.meson_command, 'compile', '-C', self.builddir, 'py3hi'])
+        self.assertIn('I am Python3.', out)
+
+        # `--$BACKEND-args`
+
+        testdir = os.path.join(self.common_test_dir, '1 trivial')
+        if self.backend is Backend.ninja:
+            self.init(testdir, extra_args=['--wipe'])
+            # Dry run - should not create a program
+            self._run([*self.meson_command, 'compile', '-C', self.builddir, '--ninja-args=-n'])
+            self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('trivialprog')))
+        elif self.backend is Backend.vs:
+            self.init(testdir, extra_args=['--wipe'])
+            self._run([*self.meson_command, 'compile', '-C', self.builddir])
+            # Explicitly clean the target through msbuild interface
+            self._run([*self.meson_command, 'compile', '-C', self.builddir, '--vs-args=-t:{}:Clean'.format(re.sub(r'[\%\$\@\;\.\(\)\']', '_', get_exe_name('trivialprog')))])
+            self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('trivialprog')))
+
+    def test_spurious_reconfigure_built_dep_file(self):
+        testdir = os.path.join(self.unit_test_dir, '75 dep files')
+
+        # Regression test: Spurious reconfigure was happening when build
+        # directory is inside source directory.
+        # See https://gitlab.freedesktop.org/gstreamer/gst-build/-/issues/85.
+        srcdir = os.path.join(self.builddir, 'srctree')
+        shutil.copytree(testdir, srcdir)
+        builddir = os.path.join(srcdir, '_build')
+        self.change_builddir(builddir)
+
+        self.init(srcdir)
+        self.build()
+
+        # During the first configure the file did not exist, so no dependency
+        # should have been set. A rebuild should not trigger a reconfigure.
+        self.clean()
+        out = self.build()
+        self.assertNotIn('Project configured', out)
+
+        self.init(srcdir, extra_args=['--reconfigure'])
+
+        # During the reconfigure the file did exist, but it is inside the build
+        # directory, so no dependency should have been set. A rebuild should not
+        # trigger a reconfigure.
+        self.clean()
+        out = self.build()
+        self.assertNotIn('Project configured', out)
+
+    def _test_junit(self, case: str) -> None:
+        try:
+            import lxml.etree as et
+        except ImportError:
+            raise unittest.SkipTest('lxml required, but not found.')
+
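+        # Validate the generated JUnit log against the XML schema stored in
+        # the test data directory.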
+        schema = et.XMLSchema(et.parse(str(Path(__file__).parent / 'data' / 'schema.xsd')))
+
+        self.init(case)
+        self.run_tests()
+
+        junit = et.parse(str(Path(self.builddir) / 'meson-logs' / 'testlog.junit.xml'))
+        try:
+            schema.assertValid(junit)
+        except et.DocumentInvalid as e:
+            self.fail(e.error_log)
+
+    def test_junit_valid_tap(self):
+        self._test_junit(os.path.join(self.common_test_dir, '213 tap tests'))
+
+    def test_junit_valid_exitcode(self):
+        self._test_junit(os.path.join(self.common_test_dir, '44 test args'))
+
+    def test_junit_valid_gtest(self):
+        self._test_junit(os.path.join(self.framework_test_dir, '2 gtest'))
+
+    def test_link_language_linker(self):
+        # TODO: there should be some way to query how we're linking things
+        # without resorting to reading the build.ninja file
+        if self.backend is not Backend.ninja:
+            raise unittest.SkipTest('This test reads the ninja file')
+
+        testdir = os.path.join(self.common_test_dir, '232 link language')
+        self.init(testdir)
+
+        build_ninja = os.path.join(self.builddir, 'build.ninja')
+        with open(build_ninja, 'r', encoding='utf-8') as f:
+            contents = f.read()
+
+        self.assertRegex(contents, r'build main(\.exe)?.*: c_LINKER')
+        self.assertRegex(contents, r'build (lib|cyg)?mylib.*: c_LINKER')
+
+    def test_commands_documented(self):
+        '''
+        Test that all listed meson commands are documented in Commands.md.
+        '''
+
+        # The docs directory is not in release tarballs.
+        if not os.path.isdir('docs'):
+            raise unittest.SkipTest('Doc directory does not exist.')
+        doc_path = 'docs/markdown_dynamic/Commands.md'
+
+        md = None
+        with open(doc_path, encoding='utf-8') as f:
+            md = f.read()
+        self.assertIsNotNone(md)
+
+        ## Get command sections
+
+        section_pattern = re.compile(r'^### (.+)$', re.MULTILINE)
+        md_command_section_matches = [i for i in section_pattern.finditer(md)]
+        md_command_sections = dict()
+        for i, s in enumerate(md_command_section_matches):
+            section_end = len(md) if i == len(md_command_section_matches) - 1 else md_command_section_matches[i + 1].start()
+            md_command_sections[s.group(1)] = (s.start(), section_end)
+
+        ## Validate commands
+
+        md_commands = set(k for k,v in md_command_sections.items())
+
+        help_output = self._run(self.meson_command + ['--help'])
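+        # Extract the {cmd1,cmd2,...} group from the usage: line and split it
+        # into individual command names.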
+        help_commands = set(c.strip() for c in re.findall(r'usage:(?:.+)?{((?:[a-z]+,*)+?)}', help_output, re.MULTILINE|re.DOTALL)[0].split(','))
+
+        self.assertEqual(md_commands | {'help'}, help_commands, 'Doc file: `{}`'.format(doc_path))
+
+        ## Validate that each section has proper placeholders
+
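+        # Every command section must contain two fenced blocks holding the
+        # usage and arguments placeholders for that command.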
+        def get_data_pattern(command):
+            return re.compile(
+                r'^```[\r\n]'
+                r'{{ cmd_help\[\'' + command + r'\'\]\[\'usage\'\] }}[\r\n]'
+                r'^```[\r\n]'
+                r'.*?'
+                r'^```[\r\n]'
+                r'{{ cmd_help\[\'' + command + r'\'\]\[\'arguments\'\] }}[\r\n]'
+                r'^```',
+                flags = re.MULTILINE|re.DOTALL)
+
+        for command in md_commands:
+            m = get_data_pattern(command).search(md, pos=md_command_sections[command][0], endpos=md_command_sections[command][1])
+            self.assertIsNotNone(m, 'Command `{}` is missing placeholders for dynamic data. Doc file: `{}`'.format(command, doc_path))
+
+    def _check_coverage_files(self, types=('text', 'xml', 'html')):
+        covdir = Path(self.builddir) / 'meson-logs'
+        files = []
+        if 'text' in types:
+            files.append('coverage.txt')
+        if 'xml' in types:
+            files.append('coverage.xml')
+        if 'html' in types:
+            files.append('coveragereport/index.html')
+        for f in files:
+            self.assertTrue((covdir / f).is_file(), msg='{} is not a file'.format(f))
+
+    def test_coverage(self):
+        if mesonbuild.environment.detect_msys2_arch():
+            raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2')
+        gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr()
+        if not gcovr_exe:
+            raise unittest.SkipTest('gcovr not found, or too old')
+        testdir = os.path.join(self.common_test_dir, '1 trivial')
+        env = get_fake_env(testdir, self.builddir, self.prefix)
+        cc = env.detect_c_compiler(MachineChoice.HOST)
+        if cc.get_id() == 'clang':
+            if not mesonbuild.environment.detect_llvm_cov():
+                raise unittest.SkipTest('llvm-cov not found')
+        if cc.get_id() == 'msvc':
+            raise unittest.SkipTest('Test only applies to non-MSVC compilers')
+        self.init(testdir, extra_args=['-Db_coverage=true'])
+        self.build()
+        self.run_tests()
+        self.run_target('coverage')
+        self._check_coverage_files()
+
+    def test_coverage_complex(self):
+        if mesonbuild.environment.detect_msys2_arch():
+            raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2')
+        gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr()
+        if not gcovr_exe:
+            raise unittest.SkipTest('gcovr not found, or too old')
+        testdir = os.path.join(self.common_test_dir, '109 generatorcustom')
+        env = get_fake_env(testdir, self.builddir, self.prefix)
+        cc = env.detect_c_compiler(MachineChoice.HOST)
+        if cc.get_id() == 'clang':
+            if not mesonbuild.environment.detect_llvm_cov():
+                raise unittest.SkipTest('llvm-cov not found')
+        if cc.get_id() == 'msvc':
+            raise unittest.SkipTest('Test only applies to non-MSVC compilers')
+        self.init(testdir, extra_args=['-Db_coverage=true'])
+        self.build()
+        self.run_tests()
+        self.run_target('coverage')
+        self._check_coverage_files()
+
+    def test_coverage_html(self):
+        if mesonbuild.environment.detect_msys2_arch():
+            raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2')
+        gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr()
+        if not gcovr_exe:
+            raise unittest.SkipTest('gcovr not found, or too old')
+        testdir = os.path.join(self.common_test_dir, '1 trivial')
+        env = get_fake_env(testdir, self.builddir, self.prefix)
+        cc = env.detect_c_compiler(MachineChoice.HOST)
+        if cc.get_id() == 'clang':
+            if not mesonbuild.environment.detect_llvm_cov():
+                raise unittest.SkipTest('llvm-cov not found')
+        if cc.get_id() == 'msvc':
+            raise unittest.SkipTest('Test only applies to non-MSVC compilers')
+        self.init(testdir, extra_args=['-Db_coverage=true'])
+        self.build()
+        self.run_tests()
+        self.run_target('coverage-html')
+        self._check_coverage_files(['html'])
+
+    def test_coverage_text(self):
+        if mesonbuild.environment.detect_msys2_arch():
+            raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2')
+        gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr()
+        if not gcovr_exe:
+            raise unittest.SkipTest('gcovr not found, or too old')
+        testdir = os.path.join(self.common_test_dir, '1 trivial')
+        env = get_fake_env(testdir, self.builddir, self.prefix)
+        cc = env.detect_c_compiler(MachineChoice.HOST)
+        if cc.get_id() == 'clang':
+            if not mesonbuild.environment.detect_llvm_cov():
+                raise unittest.SkipTest('llvm-cov not found')
+        if cc.get_id() == 'msvc':
+            raise unittest.SkipTest('Test only applies to non-MSVC compilers')
+        self.init(testdir, extra_args=['-Db_coverage=true'])
+        self.build()
+        self.run_tests()
+        self.run_target('coverage-text')
+        self._check_coverage_files(['text'])
+
+    def test_coverage_xml(self):
+        if mesonbuild.environment.detect_msys2_arch():
+            raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2')
+        gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr()
+        if not gcovr_exe:
+            raise unittest.SkipTest('gcovr not found, or too old')
+        testdir = os.path.join(self.common_test_dir, '1 trivial')
+        env = get_fake_env(testdir, self.builddir, self.prefix)
+        cc = env.detect_c_compiler(MachineChoice.HOST)
+        if cc.get_id() == 'clang':
+            if not mesonbuild.environment.detect_llvm_cov():
+                raise unittest.SkipTest('llvm-cov not found')
+        if cc.get_id() == 'msvc':
+            raise unittest.SkipTest('Test only applies to non-MSVC compilers')
+        self.init(testdir, extra_args=['-Db_coverage=true'])
+        self.build()
+        self.run_tests()
+        self.run_target('coverage-xml')
+        self._check_coverage_files(['xml'])
+
+    def test_cross_file_constants(self):
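+        # Constants defined in an earlier machine file should be usable by
+        # sections and [constants] entries in later files.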
+        with temp_filename() as crossfile1, temp_filename() as crossfile2:
+            with open(crossfile1, 'w') as f:
+                f.write(textwrap.dedent(
+                    '''
+                    [constants]
+                    compiler = 'gcc'
+                    '''))
+            with open(crossfile2, 'w') as f:
+                f.write(textwrap.dedent(
+                    '''
+                    [constants]
+                    toolchain = '/toolchain/'
+                    common_flags = ['--sysroot=' + toolchain / 'sysroot']
+
+                    [properties]
+                    c_args = common_flags + ['-DSOMETHING']
+                    cpp_args = c_args + ['-DSOMETHING_ELSE']
+
+                    [binaries]
+                    c = toolchain / compiler
+                    '''))
+
+            values = mesonbuild.coredata.parse_machine_files([crossfile1, crossfile2])
+            self.assertEqual(values['binaries']['c'], '/toolchain/gcc')
+            self.assertEqual(values['properties']['c_args'],
+                             ['--sysroot=/toolchain/sysroot', '-DSOMETHING'])
+            self.assertEqual(values['properties']['cpp_args'],
+                             ['--sysroot=/toolchain/sysroot', '-DSOMETHING', '-DSOMETHING_ELSE'])
+
+    @unittest.skipIf(is_windows(), 'Directory cleanup fails for some reason')
+    def test_wrap_git(self):
+        with tempfile.TemporaryDirectory() as tmpdir:
+            srcdir = os.path.join(tmpdir, 'src')
+            shutil.copytree(os.path.join(self.unit_test_dir, '81 wrap-git'), srcdir)
+            upstream = os.path.join(srcdir, 'subprojects', 'wrap_git_upstream')
+            upstream_uri = Path(upstream).as_uri()
+            _git_init(upstream)
+            with open(os.path.join(srcdir, 'subprojects', 'wrap_git.wrap'), 'w') as f:
+                f.write(textwrap.dedent('''
+                  [wrap-git]
+                  url = {}
+                  patch_directory = wrap_git_builddef
+                  revision = master
+                '''.format(upstream_uri)))
+            self.init(srcdir)
+            self.build()
+            self.run_tests()
+
+    def test_multi_output_custom_target_no_warning(self):
+        testdir = os.path.join(self.common_test_dir, '235 custom_target source')
+
+        out = self.init(testdir)
+        self.assertNotRegex(out, 'WARNING:.*Using the first one.')
+        self.build()
+        self.run_tests()
+
+    @unittest.skipUnless(is_linux() and (re.search('^i.86$|^x86$|^x64$|^x86_64$|^amd64$', platform.processor()) is not None),
+        'Requires an ASM compiler for the x86 or x86_64 platform, currently only available on Linux CI runners')
+    def test_nostdlib(self):
+        testdir = os.path.join(self.unit_test_dir, '79 nostdlib')
+        machinefile = os.path.join(self.builddir, 'machine.txt')
+        with open(machinefile, 'w') as f:
+            f.write(textwrap.dedent('''
+                [properties]
+                c_stdlib = 'mylibc'
+                '''))
+
+        # Test native C stdlib
+        self.meson_native_file = machinefile
+        self.init(testdir)
+        self.build()
+
+        # Test cross C stdlib
+        self.new_builddir()
+        self.meson_native_file = None
+        self.meson_cross_file = machinefile
+        self.init(testdir)
+        self.build()
+
+    def test_meson_version_compare(self):
+        testdir = os.path.join(self.unit_test_dir, '82 meson version compare')
+        out = self.init(testdir)
+        self.assertNotRegex(out, r'WARNING')
+
+    def test_wrap_redirect(self):
+        redirect_wrap = os.path.join(self.builddir, 'redirect.wrap')
+        real_wrap = os.path.join(self.builddir, 'foo/subprojects/real.wrap')
+        os.makedirs(os.path.dirname(real_wrap))
+
+        # Invalid redirect, filename must have .wrap extension
+        with open(redirect_wrap, 'w') as f:
+            f.write(textwrap.dedent('''
+                [wrap-redirect]
+                filename = foo/subprojects/real.wrapper
+                '''))
+        with self.assertRaisesRegex(WrapException, 'wrap-redirect filename must be a .wrap file'):
+            PackageDefinition(redirect_wrap)
+
+        # Invalid redirect, filename cannot be in parent directory
+        with open(redirect_wrap, 'w') as f:
+            f.write(textwrap.dedent('''
+                [wrap-redirect]
+                filename = ../real.wrap
+                '''))
+        with self.assertRaisesRegex(WrapException, 'wrap-redirect filename cannot contain ".."'):
+            PackageDefinition(redirect_wrap)
+
+        # Invalid redirect, filename must be in foo/subprojects/real.wrap
+        with open(redirect_wrap, 'w') as f:
+            f.write(textwrap.dedent('''
+                [wrap-redirect]
+                filename = foo/real.wrap
+                '''))
+        with self.assertRaisesRegex(WrapException, 'wrap-redirect filename must be in the form foo/subprojects/bar.wrap'):
+            wrap = PackageDefinition(redirect_wrap)
+
+        # Correct redirect
+        with open(redirect_wrap, 'w') as f:
+            f.write(textwrap.dedent('''
+                [wrap-redirect]
+                filename = foo/subprojects/real.wrap
+                '''))
+        with open(real_wrap, 'w') as f:
+            f.write(textwrap.dedent('''
+                [wrap-git]
+                url = http://invalid
+                '''))
+        wrap = PackageDefinition(redirect_wrap)
+        self.assertEqual(wrap.get('url'), 'http://invalid')
+
+    @skip_if_no_cmake
+    def test_nested_cmake_rebuild(self) -> None:
+        # This checks for a bug where, if a non-Meson project is used as a
+        # third-level (or deeper) subproject, changes to that project's build
+        # files do not trigger a rebuild.
+        testdir = os.path.join(self.unit_test_dir, '85 nested subproject regenerate depends')
+        cmakefile = Path(testdir) / 'subprojects' / 'sub2' / 'CMakeLists.txt'
+        self.init(testdir)
+        self.build()
+        with cmakefile.open('a') as f:
+            os.utime(str(cmakefile))
+        self.assertReconfiguredBuildIsNoop()
+
 
 class FailureTests(BasePlatformTests):
     '''
@@ -4304,7 +5310,7 @@
             raise unittest.SkipTest('zlib not found with pkg-config')
         a = (("dependency('zlib', method : 'fail')", "'fail' is invalid"),
              ("dependency('zlib', static : '1')", "[Ss]tatic.*boolean"),
-             ("dependency('zlib', version : 1)", "[Vv]ersion.*string or list"),
+             ("dependency('zlib', version : 1)", "Item must be a list or one of "),
              ("dependency('zlib', required : 1)", "[Rr]equired.*boolean"),
              ("dependency('zlib', method : 1)", "[Mm]ethod.*string"),
              ("dependency('zlibfail')", self.dnf),)
@@ -4421,16 +5427,16 @@
            correct message when the fallback subproject is found but the
            variable inside it is not.
         4. A fallback dependency is found from the subproject parsed in (3)
-        5. The correct message is outputted when the .wrap file is missing for
-           a sub-subproject.
+        5. A wrap file from a subproject is used but fails because it does not
+           contain required keys.
         '''
         tdir = os.path.join(self.unit_test_dir, '20 subproj dep variables')
         out = self.init(tdir, inprocess=True)
-        self.assertRegex(out, r"Subproject directory not found and .*nosubproj.wrap.* file not found")
+        self.assertRegex(out, r"Neither a subproject directory nor a .*nosubproj.wrap.* file was found")
         self.assertRegex(out, r'Function does not take positional arguments.')
-        self.assertRegex(out, r'WARNING:.* Dependency .*subsubproject.* not found but it is available in a sub-subproject.')
-        self.assertRegex(out, r'Subproject directory not found and .*subsubproject.wrap.* file not found')
+        self.assertRegex(out, r'Dependency .*somenotfounddep.* from subproject .*subprojects/somesubproj.* found: .*NO.*')
         self.assertRegex(out, r'Dependency .*zlibproxy.* from subproject .*subprojects.*somesubproj.* found: .*YES.*')
+        self.assertRegex(out, r'Missing key .*source_filename.* in subsubproject.wrap')
 
     def test_exception_exit_status(self):
         '''
@@ -4508,6 +5514,30 @@
                                "}['a'] == 2)\n",
                                r"Assert failed: {k1 : 1}\['a'\] == 2")
 
+    def test_wrap_nofallback(self):
+        self.assertMesonRaises("dependency('notfound', fallback : ['foo', 'foo_dep'])",
+                               r"Dependency \'notfound\' not found and fallback is disabled",
+                               extra_args=['--wrap-mode=nofallback'])
+
+    def test_message(self):
+        self.assertMesonOutputs("message('Array:', ['a', 'b'])",
+                                r"Message:.* Array: \['a', 'b'\]")
+
+    def test_warning(self):
+        self.assertMesonOutputs("warning('Array:', ['a', 'b'])",
+                                r"WARNING:.* Array: \['a', 'b'\]")
+
+    def test_override_dependency_twice(self):
+        self.assertMesonRaises("meson.override_dependency('foo', declare_dependency())\n" +
+                               "meson.override_dependency('foo', declare_dependency())",
+                               """Tried to override dependency 'foo' which has already been resolved or overridden""")
+
+    @unittest.skipIf(is_windows(), 'zlib is not available on Windows')
+    def test_override_resolved_dependency(self):
+        self.assertMesonRaises("dependency('zlib')\n" +
+                               "meson.override_dependency('zlib', declare_dependency())",
+                               """Tried to override dependency 'zlib' which has already been resolved or overridden""")
+
 @unittest.skipUnless(is_windows() or is_cygwin(), "requires Windows (or Windows via Cygwin)")
 class WindowsTests(BasePlatformTests):
     '''
@@ -4519,6 +5549,7 @@
         self.platform_test_dir = os.path.join(self.src_root, 'test cases/windows')
 
     @unittest.skipIf(is_cygwin(), 'Test only applicable to Windows')
+    @mock.patch.dict(os.environ)
     def test_find_program(self):
         '''
         Test that Windows-specific edge-cases in find_program are functioning
@@ -4532,6 +5563,10 @@
         prog2 = ExternalProgram('cmd.exe')
         self.assertTrue(prog2.found(), msg='cmd.exe not found')
         self.assertPathEqual(prog1.get_path(), prog2.get_path())
+        # Find cmd.exe with args without searching
+        prog = ExternalProgram('cmd', command=['cmd', '/C'])
+        self.assertTrue(prog.found(), msg='cmd not found with args')
+        self.assertPathEqual(prog.get_command()[0], 'cmd')
         # Find cmd with an absolute path that's missing the extension
         cmd_path = prog2.get_path()[:-4]
         prog = ExternalProgram(cmd_path)
@@ -4544,9 +5579,9 @@
         self.assertTrue(prog.found(), msg='test-script-ext.py not found')
         # Finding a script in PATH
         os.environ['PATH'] += os.pathsep + testdir
-        # Finding a script in PATH w/o extension works and adds the interpreter
-        # (check only if `.PY` is in PATHEXT)
+        # If `.PY` is in PATHEXT, scripts can be found as programs
         if '.PY' in [ext.upper() for ext in os.environ['PATHEXT'].split(';')]:
+            # Finding a script in PATH w/o extension works and adds the interpreter
             prog = ExternalProgram('test-script-ext')
             self.assertTrue(prog.found(), msg='test-script-ext not found in PATH')
             self.assertPathEqual(prog.get_command()[0], python_command[0])
@@ -4556,6 +5591,18 @@
         self.assertTrue(prog.found(), msg='test-script-ext.py not found in PATH')
         self.assertPathEqual(prog.get_command()[0], python_command[0])
         self.assertPathBasenameEqual(prog.get_path(), 'test-script-ext.py')
+        # Using a script with an extension directly via command= works and adds the interpreter
+        prog = ExternalProgram('test-script-ext.py', command=[os.path.join(testdir, 'test-script-ext.py'), '--help'])
+        self.assertTrue(prog.found(), msg='test-script-ext.py with full path not picked up via command=')
+        self.assertPathEqual(prog.get_command()[0], python_command[0])
+        self.assertPathEqual(prog.get_command()[2], '--help')
+        self.assertPathBasenameEqual(prog.get_path(), 'test-script-ext.py')
+        # Using a script without an extension directly via command= works and adds the interpreter
+        prog = ExternalProgram('test-script', command=[os.path.join(testdir, 'test-script'), '--help'])
+        self.assertTrue(prog.found(), msg='test-script with full path not picked up via command=')
+        self.assertPathEqual(prog.get_command()[0], python_command[0])
+        self.assertPathEqual(prog.get_command()[2], '--help')
+        self.assertPathBasenameEqual(prog.get_path(), 'test-script')
         # Ensure that WindowsApps gets removed from PATH
         path = os.environ['PATH']
         if 'WindowsApps' not in path:
@@ -4648,27 +5695,55 @@
     def _check_ld(self, name: str, lang: str, expected: str) -> None:
         if not shutil.which(name):
             raise unittest.SkipTest('Could not find {}.'.format(name))
-        envvar = mesonbuild.envconfig.BinaryTable.evarMap['{}_ld'.format(lang)]
-        with mock.patch.dict(os.environ, {envvar: name}):
-            env = get_fake_env()
-            try:
-                comp = getattr(env, 'detect_{}_compiler'.format(lang))(MachineChoice.HOST)
-            except EnvironmentException:
-                raise unittest.SkipTest('Could not find a compiler for {}'.format(lang))
-            self.assertEqual(comp.linker.id, expected)
+        envvars = [mesonbuild.envconfig.BinaryTable.evarMap['{}_ld'.format(lang)]]
+
+        # Also test a deprecated variable if there is one.
+        if envvars[0] in mesonbuild.envconfig.BinaryTable.DEPRECATION_MAP:
+            envvars.append(
+                mesonbuild.envconfig.BinaryTable.DEPRECATION_MAP[envvars[0]])
+
+        for envvar in envvars:
+            with mock.patch.dict(os.environ, {envvar: name}):
+                env = get_fake_env()
+                try:
+                    comp = getattr(env, 'detect_{}_compiler'.format(lang))(MachineChoice.HOST)
+                except EnvironmentException:
+                    raise unittest.SkipTest('Could not find a compiler for {}'.format(lang))
+                self.assertEqual(comp.linker.id, expected)
 
     def test_link_environment_variable_lld_link(self):
+        env = get_fake_env()
+        comp = getattr(env, 'detect_c_compiler')(MachineChoice.HOST)
+        if isinstance(comp, mesonbuild.compilers.GnuLikeCompiler):
+            raise unittest.SkipTest('GCC cannot be used with link compatible linkers.')
         self._check_ld('lld-link', 'c', 'lld-link')
 
     def test_link_environment_variable_link(self):
+        env = get_fake_env()
+        comp = getattr(env, 'detect_c_compiler')(MachineChoice.HOST)
+        if isinstance(comp, mesonbuild.compilers.GnuLikeCompiler):
+            raise unittest.SkipTest('GCC cannot be used with link compatible linkers.')
         self._check_ld('link', 'c', 'link')
 
     def test_link_environment_variable_optlink(self):
+        env = get_fake_env()
+        comp = getattr(env, 'detect_c_compiler')(MachineChoice.HOST)
+        if isinstance(comp, mesonbuild.compilers.GnuLikeCompiler):
+            raise unittest.SkipTest('GCC cannot be used with link compatible linkers.')
         self._check_ld('optlink', 'c', 'optlink')
 
+    @skip_if_not_language('rust')
     def test_link_environment_variable_rust(self):
         self._check_ld('link', 'rust', 'link')
 
+    @skip_if_not_language('d')
+    def test_link_environment_variable_d(self):
+        env = get_fake_env()
+        comp = getattr(env, 'detect_d_compiler')(MachineChoice.HOST)
+        if comp.id == 'dmd':
+            raise unittest.SkipTest('meson cannot reliably make DMD use a different linker.')
+        self._check_ld('lld-link', 'd', 'lld-link')
+
     def test_pefile_checksum(self):
         try:
             import pefile
@@ -4677,7 +5752,7 @@
                 raise
             raise unittest.SkipTest('pefile module not found')
         testdir = os.path.join(self.common_test_dir, '6 linkshared')
-        self.init(testdir)
+        self.init(testdir, extra_args=['--buildtype=release'])
         self.build()
         # Test that binaries have a non-zero checksum
         env = get_fake_env()
@@ -4696,6 +5771,77 @@
                 # Verify that a valid checksum was written by all other compilers
                 self.assertTrue(pe.verify_checksum(), msg=msg)
 
+    def test_qt5dependency_vscrt(self):
+        '''
+        Test that qt5 dependencies use the debug module suffix when b_vscrt is
+        set to 'mdd'
+        '''
+        # Verify that the `b_vscrt` option is available
+        env = get_fake_env()
+        cc = env.detect_c_compiler(MachineChoice.HOST)
+        if 'b_vscrt' not in cc.base_options:
+            raise unittest.SkipTest('Compiler does not support setting the VS CRT')
+        # Verify that qmake is for Qt5
+        if not shutil.which('qmake-qt5'):
+            if not shutil.which('qmake') and not is_ci():
+                raise unittest.SkipTest('QMake not found')
+            output = subprocess.getoutput('qmake --version')
+            if 'Qt version 5' not in output and not is_ci():
+                raise unittest.SkipTest('Qmake found, but it is not for Qt 5.')
+        # Set up with /MDd
+        testdir = os.path.join(self.framework_test_dir, '4 qt')
+        self.init(testdir, extra_args=['-Db_vscrt=mdd'])
+        # Verify that we're linking to the debug versions of Qt DLLs
+        build_ninja = os.path.join(self.builddir, 'build.ninja')
+        with open(build_ninja, 'r', encoding='utf-8') as f:
+            contents = f.read()
+            m = re.search('build qt5core.exe: cpp_LINKER.*Qt5Cored.lib', contents)
+        self.assertIsNotNone(m, msg=contents)
+
+    def test_compiler_checks_vscrt(self):
+        '''
+        Test that the correct VS CRT is used when running compiler checks
+        '''
+        # Verify that the `b_vscrt` option is available
+        env = get_fake_env()
+        cc = env.detect_c_compiler(MachineChoice.HOST)
+        if 'b_vscrt' not in cc.base_options:
+            raise unittest.SkipTest('Compiler does not support setting the VS CRT')
+
+        def sanitycheck_vscrt(vscrt):
+            checks = self.get_meson_log_sanitychecks()
+            self.assertTrue(len(checks) > 0)
+            for check in checks:
+                self.assertIn(vscrt, check)
+
+        testdir = os.path.join(self.common_test_dir, '1 trivial')
+        self.init(testdir)
+        sanitycheck_vscrt('/MDd')
+
+        self.new_builddir()
+        self.init(testdir, extra_args=['-Dbuildtype=debugoptimized'])
+        sanitycheck_vscrt('/MD')
+
+        self.new_builddir()
+        self.init(testdir, extra_args=['-Dbuildtype=release'])
+        sanitycheck_vscrt('/MD')
+
+        self.new_builddir()
+        self.init(testdir, extra_args=['-Db_vscrt=md'])
+        sanitycheck_vscrt('/MD')
+
+        self.new_builddir()
+        self.init(testdir, extra_args=['-Db_vscrt=mdd'])
+        sanitycheck_vscrt('/MDd')
+
+        self.new_builddir()
+        self.init(testdir, extra_args=['-Db_vscrt=mt'])
+        sanitycheck_vscrt('/MT')
+
+        self.new_builddir()
+        self.init(testdir, extra_args=['-Db_vscrt=mtd'])
+        sanitycheck_vscrt('/MTd')
+
 
 @unittest.skipUnless(is_osx(), "requires Darwin")
 class DarwinTests(BasePlatformTests):
@@ -4798,7 +5944,7 @@
 
     def test_removing_unused_linker_args(self):
         testdir = os.path.join(self.common_test_dir, '108 has arg')
-        env = {'CFLAGS': '-L/tmp -L /var/tmp -headerpad_max_install_names -Wl,-export_dynamic'}
+        env = {'CFLAGS': '-L/tmp -L /var/tmp -headerpad_max_install_names -Wl,-export_dynamic -framework Foundation'}
         self.init(testdir, override_envvars=env)
 
 
@@ -4855,6 +6001,7 @@
         compdb = self.get_compdb()
         self.assertNotIn('-fPIC', compdb[0]['command'])
 
+    @mock.patch.dict(os.environ)
     def test_pkgconfig_gen(self):
         '''
         Test that generated pkg-config files can be found and have the correct
@@ -4874,6 +6021,12 @@
         self.assertEqual(foo_dep.get_pkgconfig_variable('foo', {}), 'bar')
         self.assertPathEqual(foo_dep.get_pkgconfig_variable('datadir', {}), '/usr/data')
 
+        libhello_nolib = PkgConfigDependency('libhello_nolib', env, kwargs)
+        self.assertTrue(libhello_nolib.found())
+        self.assertEqual(libhello_nolib.get_link_args(), [])
+        self.assertEqual(libhello_nolib.get_compile_args(), [])
+        self.assertEqual(libhello_nolib.get_pkgconfig_variable('foo', {}), 'bar')
+
     def test_pkgconfig_gen_deps(self):
         '''
         Test that generated pkg-config files correctly handle dependencies
@@ -4887,7 +6040,6 @@
         self.init(testdir, override_envvars={'PKG_CONFIG_LIBDIR': privatedir1})
         privatedir2 = self.privatedir
 
-        os.environ
         env = {
             'PKG_CONFIG_LIBDIR': os.pathsep.join([privatedir1, privatedir2]),
             'PKG_CONFIG_SYSTEM_LIBRARY_PATH': '/usr/lib',
@@ -4935,6 +6087,36 @@
         out = self._run(cmd + ['--libs'], override_envvars=env).strip().split()
         self.assertEqual(out, ['-llibmain2', '-llibinternal'])
 
+        # See common/47 pkgconfig-gen/meson.build for a description of the case this tests.
+        with open(os.path.join(privatedir1, 'simple2.pc')) as f:
+            content = f.read()
+            self.assertIn('Libs: -L${libdir} -lsimple2 -lsimple1', content)
+            self.assertIn('Libs.private: -lz', content)
+
+        with open(os.path.join(privatedir1, 'simple3.pc')) as f:
+            content = f.read()
+            self.assertEqual(1, content.count('-lsimple3'))
+
+        with open(os.path.join(privatedir1, 'simple5.pc')) as f:
+            content = f.read()
+            self.assertNotIn('-lstat2', content)
+
+    @mock.patch.dict(os.environ)
+    def test_pkgconfig_uninstalled(self):
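+        # Build a project that generates .pc files, then point pkg-config at
+        # the builddir's meson-uninstalled directory and build a second
+        # project against the uninstalled libraries.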
+        testdir = os.path.join(self.common_test_dir, '47 pkgconfig-gen')
+        self.init(testdir)
+        self.build()
+
+        os.environ['PKG_CONFIG_LIBDIR'] = os.path.join(self.builddir, 'meson-uninstalled')
+        if is_cygwin():
+            os.environ['PATH'] += os.pathsep + self.builddir
+
+        self.new_builddir()
+        testdir = os.path.join(self.common_test_dir, '47 pkgconfig-gen', 'dependencies')
+        self.init(testdir)
+        self.build()
+        self.run_tests()
+
     def test_pkg_unfound(self):
         testdir = os.path.join(self.unit_test_dir, '23 unfound pkgconfig')
         self.init(testdir)
@@ -5029,6 +6211,10 @@
         self.assertRegex('\n'.join(mesonlog),
                          r'Run-time dependency qt5 \(modules: Core\) found: YES .* \((qmake|qmake-qt5)\)\n')
 
+    def glob_sofiles_without_privdir(self, g):
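+        # Like glob(), but ignore Meson's private '*.p' target directories.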
+        files = glob(g)
+        return [f for f in files if not f.endswith('.p')]
+
     def _test_soname_impl(self, libpath, install):
         if is_cygwin() or is_osx():
             raise unittest.SkipTest('Test only applicable to ELF and linuxlike sonames')
@@ -5044,28 +6230,28 @@
         self.assertPathExists(nover)
         self.assertFalse(os.path.islink(nover))
         self.assertEqual(get_soname(nover), 'libnover.so')
-        self.assertEqual(len(glob(nover[:-3] + '*')), 1)
+        self.assertEqual(len(self.glob_sofiles_without_privdir(nover[:-3] + '*')), 1)
 
         # File with version set
         verset = os.path.join(libpath, 'libverset.so')
         self.assertPathExists(verset + '.4.5.6')
         self.assertEqual(os.readlink(verset), 'libverset.so.4')
         self.assertEqual(get_soname(verset), 'libverset.so.4')
-        self.assertEqual(len(glob(verset[:-3] + '*')), 3)
+        self.assertEqual(len(self.glob_sofiles_without_privdir(verset[:-3] + '*')), 3)
 
         # File with soversion set
         soverset = os.path.join(libpath, 'libsoverset.so')
         self.assertPathExists(soverset + '.1.2.3')
         self.assertEqual(os.readlink(soverset), 'libsoverset.so.1.2.3')
         self.assertEqual(get_soname(soverset), 'libsoverset.so.1.2.3')
-        self.assertEqual(len(glob(soverset[:-3] + '*')), 2)
+        self.assertEqual(len(self.glob_sofiles_without_privdir(soverset[:-3] + '*')), 2)
 
         # File with version and soversion set to same values
         settosame = os.path.join(libpath, 'libsettosame.so')
         self.assertPathExists(settosame + '.7.8.9')
         self.assertEqual(os.readlink(settosame), 'libsettosame.so.7.8.9')
         self.assertEqual(get_soname(settosame), 'libsettosame.so.7.8.9')
-        self.assertEqual(len(glob(settosame[:-3] + '*')), 2)
+        self.assertEqual(len(self.glob_sofiles_without_privdir(settosame[:-3] + '*')), 2)
 
         # File with version and soversion set to different values
         bothset = os.path.join(libpath, 'libbothset.so')
@@ -5073,7 +6259,7 @@
         self.assertEqual(os.readlink(bothset), 'libbothset.so.1.2.3')
         self.assertEqual(os.readlink(bothset + '.1.2.3'), 'libbothset.so.4.5.6')
         self.assertEqual(get_soname(bothset), 'libbothset.so.1.2.3')
-        self.assertEqual(len(glob(bothset[:-3] + '*')), 3)
+        self.assertEqual(len(self.glob_sofiles_without_privdir(bothset[:-3] + '*')), 3)
 
     def test_soname(self):
         self._test_soname_impl(self.builddir, False)
@@ -5111,8 +6297,6 @@
             self.assertEqual(Oargs, [Oflag, '-O0'])
 
     def _test_stds_impl(self, testdir, compiler, p: str):
-        lang_std = p + '_std'
-
         has_cpp17 = (compiler.get_id() not in {'clang', 'gcc'} or
                      compiler.get_id() == 'clang' and _clang_at_least(compiler, '>=5.0.0', '>=9.1') or
                      compiler.get_id() == 'gcc' and version_compare(compiler.version, '>=5.0.0'))
@@ -5125,7 +6309,8 @@
         # Check that all the listed -std=xxx options for this compiler work just fine when used
         # https://en.wikipedia.org/wiki/Xcode#Latest_versions
         # https://www.gnu.org/software/gcc/projects/cxx-status.html
-        for v in compiler.get_options()[lang_std].choices:
+        for v in compiler.get_options()['std'].choices:
+            lang_std = p + '_std'
             # we do it like this to handle gnu++17,c++17 and gnu17,c17 cleanly
             # thus, C++ first
             if '++17' in v and not has_cpp17:
@@ -5194,10 +6379,12 @@
     def test_unity_subproj(self):
         testdir = os.path.join(self.common_test_dir, '45 subproject')
         self.init(testdir, extra_args='--unity=subprojects')
-        simpletest_id = Target.construct_id_from_path('subprojects/sublib', 'simpletest', '@exe')
-        self.assertPathExists(os.path.join(self.builddir, 'subprojects/sublib', simpletest_id, 'simpletest-unity.c'))
-        sublib_id = Target.construct_id_from_path('subprojects/sublib', 'sublib', '@sha')
-        self.assertPathExists(os.path.join(self.builddir, 'subprojects/sublib', sublib_id, 'sublib-unity.c'))
+        pdirs = glob(os.path.join(self.builddir, 'subprojects/sublib/simpletest*.p'))
+        self.assertEqual(len(pdirs), 1)
+        self.assertPathExists(os.path.join(pdirs[0], 'simpletest-unity0.c'))
+        sdirs = glob(os.path.join(self.builddir, 'subprojects/sublib/*sublib*.p'))
+        self.assertEqual(len(sdirs), 1)
+        self.assertPathExists(os.path.join(sdirs[0], 'sublib-unity0.c'))
         self.assertPathDoesNotExist(os.path.join(self.builddir, 'user@exe/user-unity.c'))
         self.build()
 
@@ -5469,6 +6656,51 @@
         self.assertIsInstance(docbook_target, dict)
         self.assertEqual(os.path.basename(t['filename'][0]), 'generated-gdbus-doc-' + os.path.basename(t['target_sources'][0]['sources'][0]))
 
+    def test_introspect_installed(self):
+        testdir = os.path.join(self.linuxlike_test_dir, '7 library versions')
+        self.init(testdir)
+
+        install = self.introspect('--installed')
+        install = {os.path.basename(k): v for k, v in install.items()}
+        print(install)
+        if is_osx():
+            the_truth = {
+                'libmodule.dylib': '/usr/lib/libmodule.dylib',
+                'libnoversion.dylib': '/usr/lib/libnoversion.dylib',
+                'libonlysoversion.5.dylib': '/usr/lib/libonlysoversion.5.dylib',
+                'libonlysoversion.dylib': '/usr/lib/libonlysoversion.dylib',
+                'libonlyversion.1.dylib': '/usr/lib/libonlyversion.1.dylib',
+                'libonlyversion.dylib': '/usr/lib/libonlyversion.dylib',
+                'libsome.0.dylib': '/usr/lib/libsome.0.dylib',
+                'libsome.dylib': '/usr/lib/libsome.dylib',
+            }
+            the_truth_2 = {'/usr/lib/libsome.dylib',
+                           '/usr/lib/libsome.0.dylib',
+            }
+        else:
+            the_truth = {
+                'libmodule.so': '/usr/lib/libmodule.so',
+                'libnoversion.so': '/usr/lib/libnoversion.so',
+                'libonlysoversion.so': '/usr/lib/libonlysoversion.so',
+                'libonlysoversion.so.5': '/usr/lib/libonlysoversion.so.5',
+                'libonlyversion.so': '/usr/lib/libonlyversion.so',
+                'libonlyversion.so.1': '/usr/lib/libonlyversion.so.1',
+                'libonlyversion.so.1.4.5': '/usr/lib/libonlyversion.so.1.4.5',
+                'libsome.so': '/usr/lib/libsome.so',
+                'libsome.so.0': '/usr/lib/libsome.so.0',
+                'libsome.so.1.2.3': '/usr/lib/libsome.so.1.2.3',
+            }
+            the_truth_2 = {'/usr/lib/libsome.so',
+                           '/usr/lib/libsome.so.0',
+                           '/usr/lib/libsome.so.1.2.3'}
+        self.assertDictEqual(install, the_truth)
+
+        targets = self.introspect('--targets')
+        for t in targets:
+            if t['name'] != 'some':
+                continue
+            self.assertSetEqual(the_truth_2, set(t['install_filename']))
+
     def test_build_rpath(self):
         if is_cygwin():
             raise unittest.SkipTest('Windows PE/COFF binaries do not use RPATH')
@@ -5488,34 +6720,67 @@
         install_rpath = get_rpath(os.path.join(self.installdir, 'usr/bin/progcxx'))
         self.assertEqual(install_rpath, 'baz')
 
-    @skip_if_not_base_option('b_sanitize')
-    def test_pch_with_address_sanitizer(self):
+    def test_global_rpath(self):
         if is_cygwin():
-            raise unittest.SkipTest('asan not available on Cygwin')
-        if is_openbsd():
-            raise unittest.SkipTest('-fsanitize=address is not supported on OpenBSD')
+            raise unittest.SkipTest('Windows PE/COFF binaries do not use RPATH')
+        if is_osx():
+            raise unittest.SkipTest('Global RPATHs via LDFLAGS not yet supported on MacOS (does anybody need it?)')
 
-        testdir = os.path.join(self.common_test_dir, '13 pch')
-        self.init(testdir, extra_args=['-Db_sanitize=address', '-Db_lundef=false'])
+        testdir = os.path.join(self.unit_test_dir, '80 global-rpath')
+        oldinstalldir = self.installdir
+
+        # Build and install an external library without DESTDIR.
+        # The external library generates a .pc file without an rpath.
+        yonder_dir = os.path.join(testdir, 'yonder')
+        yonder_prefix = os.path.join(oldinstalldir, 'yonder')
+        yonder_libdir = os.path.join(yonder_prefix, self.libdir)
+        self.prefix = yonder_prefix
+        self.installdir = yonder_prefix
+        self.init(yonder_dir)
         self.build()
-        compdb = self.get_compdb()
-        for i in compdb:
-            self.assertIn("-fsanitize=address", i["command"])
+        self.install(use_destdir=False)
 
-    def test_coverage(self):
-        gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr()
-        if not gcovr_exe:
-            raise unittest.SkipTest('gcovr not found')
-        if not shutil.which('genhtml') and not gcovr_new_rootdir:
-            raise unittest.SkipTest('genhtml not found and gcovr is too old')
-        if 'clang' in os.environ.get('CC', ''):
-            # We need to use llvm-cov instead of gcovr with clang
-            raise unittest.SkipTest('Coverage does not work with clang right now, help wanted!')
-        testdir = os.path.join(self.common_test_dir, '1 trivial')
-        self.init(testdir, extra_args=['-Db_coverage=true'])
+        # Since rpath has multiple valid formats we need to
+        # test that they are all properly used.
+        rpath_formats = [
+            ('-Wl,-rpath=', False),
+            ('-Wl,-rpath,', False),
+            ('-Wl,--just-symbols=', True),
+            ('-Wl,--just-symbols,', True),
+            ('-Wl,-R', False),
+            ('-Wl,-R,', False)
+        ]
+        for rpath_format, exception in rpath_formats:
+            # Build an app that uses that installed library.
+            # Supply the rpath to the installed library via LDFLAGS
+            # (as systems like buildroot and guix are wont to do)
+            # and verify install preserves that rpath.
+            self.new_builddir()
+            env = {'LDFLAGS': rpath_format + yonder_libdir,
+                   'PKG_CONFIG_PATH': os.path.join(yonder_libdir, 'pkgconfig')}
+            if exception:
+                with self.assertRaises(subprocess.CalledProcessError):
+                    self.init(testdir, override_envvars=env)
+                continue
+            self.init(testdir, override_envvars=env)
+            self.build()
+            self.install(use_destdir=False)
+            got_rpath = get_rpath(os.path.join(yonder_prefix, 'bin/rpathified'))
+            self.assertEqual(got_rpath, yonder_libdir, rpath_format)
+
+    @skip_if_not_base_option('b_sanitize')
+    def test_pch_with_address_sanitizer(self):
+        if is_cygwin():
+            raise unittest.SkipTest('asan not available on Cygwin')
+        if is_openbsd():
+            raise unittest.SkipTest('-fsanitize=address is not supported on OpenBSD')
+
+        testdir = os.path.join(self.common_test_dir, '13 pch')
+        self.init(testdir, extra_args=['-Db_sanitize=address', '-Db_lundef=false'])
         self.build()
-        self.run_tests()
-        self.run_target('coverage-html')
+        compdb = self.get_compdb()
+        for i in compdb:
+            self.assertIn("-fsanitize=address", i["command"])
 
     def test_cross_find_program(self):
         testdir = os.path.join(self.unit_test_dir, '11 cross prog')
@@ -5666,7 +6931,7 @@
         Check that Meson produces valid static archives with --strip enabled
         '''
         with tempfile.TemporaryDirectory() as tempdirname:
-            testdirbase = os.path.join(self.unit_test_dir, '68 static archive stripping')
+            testdirbase = os.path.join(self.unit_test_dir, '67 static archive stripping')
 
             # build lib
             self.new_builddir()
@@ -5760,7 +7025,7 @@
         '''
         Test that we produce the correct dependencies when a program is overridden with an executable.
         '''
-        testdir = os.path.join(self.common_test_dir, '201 override with exe')
+        testdir = os.path.join(self.src_root, 'test cases', 'native', '201 override with exe')
         self.init(testdir)
         with open(os.path.join(self.builddir, 'build.ninja')) as bfile:
             for line in bfile:
@@ -5799,13 +7064,15 @@
         self.build(override_envvars=env)
         # test uninstalled
         self.run_tests(override_envvars=env)
-        if not is_osx():
-            # Rest of the workflow only works on macOS
+        if not (is_osx() or is_linux()):
             return
         # test running after installation
         self.install(use_destdir=False)
         prog = os.path.join(self.installdir, 'bin', 'prog')
         self._run([prog])
+        if not is_osx():
+            # Rest of the workflow only works on macOS
+            return
         out = self._run(['otool', '-L', prog])
         self.assertNotIn('@rpath', out)
         ## New builddir for testing that DESTDIR is not added to install_name
@@ -5822,6 +7089,57 @@
             # Ensure that the otool output does not contain self.installdir
             self.assertNotRegex(out, self.installdir + '.*dylib ')
 
+    @skipIfNoPkgconfig
+    def test_usage_pkgconfig_prefixes(self):
+        '''
+        Build and install two external libraries, to different prefixes,
+        then build and install a client program that finds them via pkgconfig,
+        and verify the installed client program runs.
+        '''
+        oldinstalldir = self.installdir
+
+        # Build and install both external libraries without DESTDIR
+        val1dir = os.path.join(self.unit_test_dir, '77 pkgconfig prefixes', 'val1')
+        val1prefix = os.path.join(oldinstalldir, 'val1')
+        self.prefix = val1prefix
+        self.installdir = val1prefix
+        self.init(val1dir)
+        self.build()
+        self.install(use_destdir=False)
+        self.new_builddir()
+
+        env1 = {}
+        env1['PKG_CONFIG_PATH'] = os.path.join(val1prefix, self.libdir, 'pkgconfig')
+        val2dir = os.path.join(self.unit_test_dir, '77 pkgconfig prefixes', 'val2')
+        val2prefix = os.path.join(oldinstalldir, 'val2')
+        self.prefix = val2prefix
+        self.installdir = val2prefix
+        self.init(val2dir, override_envvars=env1)
+        self.build()
+        self.install(use_destdir=False)
+        self.new_builddir()
+
+        # Build, install, and run the client program
+        env2 = {}
+        env2['PKG_CONFIG_PATH'] = os.path.join(val2prefix, self.libdir, 'pkgconfig')
+        testdir = os.path.join(self.unit_test_dir, '77 pkgconfig prefixes', 'client')
+        testprefix = os.path.join(oldinstalldir, 'client')
+        self.prefix = testprefix
+        self.installdir = testprefix
+        self.init(testdir, override_envvars=env2)
+        self.build()
+        self.install(use_destdir=False)
+        prog = os.path.join(self.installdir, 'bin', 'client')
+        env3 = {}
+        if is_cygwin():
+            env3['PATH'] = os.path.join(val1prefix, 'bin') + \
+                os.pathsep + \
+                os.path.join(val2prefix, 'bin') + \
+                os.pathsep + os.environ['PATH']
+        out = self._run([prog], override_envvars=env3).strip()
+        # Expected output is val1 + val2 = 3
+        self.assertEqual(out, '3')
+
     def install_subdir_invalid_symlinks(self, testdir, subdir_path):
         '''
         Test that installation of broken symlinks works fine.
@@ -5829,25 +7147,23 @@
         '''
         testdir = os.path.join(self.common_test_dir, testdir)
         subdir = os.path.join(testdir, subdir_path)
-        curdir = os.getcwd()
-        os.chdir(subdir)
-        # Can't distribute broken symlinks in the source tree because it breaks
-        # the creation of zipapps. Create it dynamically and run the test by
-        # hand.
-        src = '../../nonexistent.txt'
-        os.symlink(src, 'invalid-symlink.txt')
-        try:
-            self.init(testdir)
-            self.build()
-            self.install()
-            install_path = subdir_path.split(os.path.sep)[-1]
-            link = os.path.join(self.installdir, 'usr', 'share', install_path, 'invalid-symlink.txt')
-            self.assertTrue(os.path.islink(link), msg=link)
-            self.assertEqual(src, os.readlink(link))
-            self.assertFalse(os.path.isfile(link), msg=link)
-        finally:
-            os.remove(os.path.join(subdir, 'invalid-symlink.txt'))
-            os.chdir(curdir)
+        with chdir(subdir):
+            # Can't distribute broken symlinks in the source tree because it breaks
+            # the creation of zipapps. Create it dynamically and run the test by
+            # hand.
+            src = '../../nonexistent.txt'
+            os.symlink(src, 'invalid-symlink.txt')
+            try:
+                self.init(testdir)
+                self.build()
+                self.install()
+                install_path = subdir_path.split(os.path.sep)[-1]
+                link = os.path.join(self.installdir, 'usr', 'share', install_path, 'invalid-symlink.txt')
+                self.assertTrue(os.path.islink(link), msg=link)
+                self.assertEqual(src, os.readlink(link))
+                self.assertFalse(os.path.isfile(link), msg=link)
+            finally:
+                os.remove(os.path.join(subdir, 'invalid-symlink.txt'))
 
     def test_install_subdir_symlinks(self):
         self.install_subdir_invalid_symlinks('62 install subdir', os.path.join('sub', 'sub1'))
@@ -5863,6 +7179,11 @@
         testdir = os.path.join(self.unit_test_dir, '52 ldflagdedup')
         if is_cygwin() or is_osx():
             raise unittest.SkipTest('Not applicable on Cygwin or OSX.')
+        env = get_fake_env()
+        cc = env.detect_c_compiler(MachineChoice.HOST)
+        linker = cc.linker
+        if not linker.export_dynamic_args(env):
+            raise unittest.SkipTest('Not applicable for linkers without --export-dynamic')
         self.init(testdir)
         build_ninja = os.path.join(self.builddir, 'build.ninja')
         max_count = 0
@@ -5898,11 +7219,36 @@
 
     def test_identity_cross(self):
         testdir = os.path.join(self.unit_test_dir, '61 identity cross')
+
+        nativefile = tempfile.NamedTemporaryFile(mode='w')
+        nativefile.write(textwrap.dedent('''\
+            [binaries]
+            c = ['{0}']
+            '''.format(os.path.join(testdir, 'build_wrapper.py'))))
+        nativefile.flush()
+        self.meson_native_file = nativefile.name
+
+        crossfile = tempfile.NamedTemporaryFile(mode='w')
+        crossfile.write(textwrap.dedent('''\
+            [binaries]
+            c = ['{0}']
+            '''.format(os.path.join(testdir, 'host_wrapper.py'))))
+        crossfile.flush()
+        self.meson_cross_file = crossfile.name
+
+        # TODO should someday be explicit about build platform only here
+        self.init(testdir)
+
+    def test_identity_cross_env(self):
+        testdir = os.path.join(self.unit_test_dir, '61 identity cross')
+        env = {
+            'CC_FOR_BUILD': '"' + os.path.join(testdir, 'build_wrapper.py') + '"',
+        }
         crossfile = tempfile.NamedTemporaryFile(mode='w')
-        env = {'CC': '"' + os.path.join(testdir, 'build_wrapper.py') + '"'}
-        crossfile.write('''[binaries]
-c = ['{0}']
-'''.format(os.path.join(testdir, 'host_wrapper.py')))
+        crossfile.write(textwrap.dedent('''\
+            [binaries]
+            c = ['{0}']
+            '''.format(os.path.join(testdir, 'host_wrapper.py'))))
         crossfile.flush()
         self.meson_cross_file = crossfile.name
         # TODO should someday be explicit about build platform only here
@@ -5914,7 +7260,7 @@
             raise unittest.SkipTest("Cygwin doesn't support LD_LIBRARY_PATH.")
 
         # Build some libraries and install them
-        testdir = os.path.join(self.unit_test_dir, '69 static link/lib')
+        testdir = os.path.join(self.unit_test_dir, '68 static link/lib')
         libdir = os.path.join(self.installdir, self.libdir)
         oldprefix = self.prefix
         self.prefix = self.installdir
@@ -5926,7 +7272,7 @@
         self.prefix = oldprefix
         meson_args = ['-Dc_link_args=-L{}'.format(libdir),
                       '--fatal-meson-warnings']
-        testdir = os.path.join(self.unit_test_dir, '69 static link')
+        testdir = os.path.join(self.unit_test_dir, '68 static link')
         env = {'PKG_CONFIG_LIBDIR': os.path.join(libdir, 'pkgconfig')}
         self.init(testdir, extra_args=meson_args, override_envvars=env)
         self.build()
@@ -5937,14 +7283,26 @@
             raise unittest.SkipTest('Solaris currently cannot override the linker.')
         if not shutil.which(check):
             raise unittest.SkipTest('Could not find {}.'.format(check))
-        envvar = mesonbuild.envconfig.BinaryTable.evarMap['{}_ld'.format(lang)]
-        with mock.patch.dict(os.environ, {envvar: name}):
-            env = get_fake_env()
-            comp = getattr(env, 'detect_{}_compiler'.format(lang))(MachineChoice.HOST)
-            if lang != 'rust' and comp.use_linker_args('foo') == []:
-                raise unittest.SkipTest(
-                    'Compiler {} does not support using alternative linkers'.format(comp.id))
-            self.assertEqual(comp.linker.id, expected)
+        envvars = [mesonbuild.envconfig.BinaryTable.evarMap['{}_ld'.format(lang)]]
+
+        # Also test a deprecated variable if there is one.
+        if envvars[0] in mesonbuild.envconfig.BinaryTable.DEPRECATION_MAP:
+            envvars.append(
+                mesonbuild.envconfig.BinaryTable.DEPRECATION_MAP[envvars[0]])
+
+        for envvar in envvars:
+            with mock.patch.dict(os.environ, {envvar: name}):
+                env = get_fake_env()
+                comp = getattr(env, 'detect_{}_compiler'.format(lang))(MachineChoice.HOST)
+                if isinstance(comp, (mesonbuild.compilers.AppleClangCCompiler,
+                                     mesonbuild.compilers.AppleClangCPPCompiler,
+                                     mesonbuild.compilers.AppleClangObjCCompiler,
+                                     mesonbuild.compilers.AppleClangObjCPPCompiler)):
+                    raise unittest.SkipTest('AppleClang is currently only supported with ld64')
+                if lang != 'rust' and comp.use_linker_args('bfd') == []:
+                    raise unittest.SkipTest(
+                        'Compiler {} does not support using alternative linkers'.format(comp.id))
+                self.assertEqual(comp.linker.id, expected)
 
     def test_ld_environment_variable_bfd(self):
         self._check_ld('ld.bfd', 'bfd', 'c', 'ld.bfd')
@@ -5955,29 +7313,38 @@
     def test_ld_environment_variable_lld(self):
         self._check_ld('ld.lld', 'lld', 'c', 'ld.lld')
 
-    @skipIfNoExecutable('rustc')
+    @skip_if_not_language('rust')
+    @skipIfNoExecutable('ld.gold')  # need an additional check here because _check_ld checks for gcc
     def test_ld_environment_variable_rust(self):
-        self._check_ld('ld.gold', 'gold', 'rust', 'ld.gold')
+        self._check_ld('gcc', 'gcc -fuse-ld=gold', 'rust', 'ld.gold')
 
     def test_ld_environment_variable_cpp(self):
         self._check_ld('ld.gold', 'gold', 'cpp', 'ld.gold')
 
+    @skip_if_not_language('objc')
     def test_ld_environment_variable_objc(self):
         self._check_ld('ld.gold', 'gold', 'objc', 'ld.gold')
 
+    @skip_if_not_language('objcpp')
     def test_ld_environment_variable_objcpp(self):
         self._check_ld('ld.gold', 'gold', 'objcpp', 'ld.gold')
 
-    @skipIfNoExecutable('gfortran')
+    @skip_if_not_language('fortran')
     def test_ld_environment_variable_fortran(self):
         self._check_ld('ld.gold', 'gold', 'fortran', 'ld.gold')
 
+    @skip_if_not_language('d')
+    def test_ld_environment_variable_d(self):
+        # At least for me, ldc defaults to gold, and gdc defaults to bfd, so
+        # let's pick lld, which isn't the default for either (currently)
+        self._check_ld('ld.lld', 'lld', 'd', 'ld.lld')
+
     def compute_sha256(self, filename):
         with open(filename, 'rb') as f:
             return hashlib.sha256(f.read()).hexdigest()
 
     def test_wrap_with_file_url(self):
-        testdir = os.path.join(self.unit_test_dir, '73 wrap file url')
+        testdir = os.path.join(self.unit_test_dir, '74 wrap file url')
         source_filename = os.path.join(testdir, 'subprojects', 'foo.tar.xz')
         patch_filename = os.path.join(testdir, 'subprojects', 'foo-patch.tar.xz')
         wrap_filename = os.path.join(testdir, 'subprojects', 'foo.wrap')
@@ -5987,11 +7354,13 @@
             [wrap-file]
             directory = foo
 
-            source_url = file://{}
+            source_url = http://server.invalid/foo
+            source_fallback_url = file://{}
             source_filename = foo.tar.xz
             source_hash = {}
 
-            patch_url = file://{}
+            patch_url = http://server.invalid/foo
+            patch_fallback_url = file://{}
             patch_filename = foo-patch.tar.xz
             patch_hash = {}
             """.format(source_filename, source_hash, patch_filename, patch_hash))
@@ -6005,12 +7374,53 @@
         windows_proof_rmtree(os.path.join(testdir, 'subprojects', 'foo'))
         os.unlink(wrap_filename)
 
+    def test_no_rpath_for_static(self):
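+        # A statically linked program should not get a build rpath.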
+        testdir = os.path.join(self.common_test_dir, '5 linkstatic')
+        self.init(testdir)
+        self.build()
+        build_rpath = get_rpath(os.path.join(self.builddir, 'prog'))
+        self.assertIsNone(build_rpath)
+
+    def test_lookup_system_after_broken_fallback(self):
+        # Just to generate libfoo.pc so we can test system dependency lookup.
+        testdir = os.path.join(self.common_test_dir, '47 pkgconfig-gen')
+        self.init(testdir)
+        privatedir = self.privatedir
+
+        # Write a test project where the first dependency() returns not-found
+        # because the 'broken' subproject does not exist, but that should not
+        # prevent the second dependency() from being looked up on the system.
+        self.new_builddir()
+        with tempfile.TemporaryDirectory() as d:
+            with open(os.path.join(d, 'meson.build'), 'w') as f:
+                f.write(textwrap.dedent('''\
+                    project('test')
+                    dependency('notfound', fallback: 'broken', required: false)
+                    dependency('libfoo', fallback: 'broken', required: true)
+                    '''))
+            self.init(d, override_envvars={'PKG_CONFIG_LIBDIR': privatedir})
+
+    def test_as_link_whole(self):
+        testdir = os.path.join(self.unit_test_dir, '79 as link whole')
+        self.init(testdir)
+        with open(os.path.join(self.privatedir, 'bar1.pc')) as f:
+            content = f.read()
+            self.assertIn('-lfoo', content)
+        with open(os.path.join(self.privatedir, 'bar2.pc')) as f:
+            content = f.read()
+            self.assertNotIn('-lfoo', content)
+
+class BaseLinuxCrossTests(BasePlatformTests):
+    # Don't pass --libdir when cross-compiling. We have tests that
+    # check whether meson auto-detects it correctly.
+    libdir = None
+
 
 def should_run_cross_arm_tests():
     return shutil.which('arm-linux-gnueabihf-gcc') and not platform.machine().lower().startswith('arm')
 
 @unittest.skipUnless(not is_windows() and should_run_cross_arm_tests(), "requires ability to cross compile to ARM")
-class LinuxCrossArmTests(BasePlatformTests):
+class LinuxCrossArmTests(BaseLinuxCrossTests):
     '''
     Tests that cross-compilation to Linux/ARM works
     '''
@@ -6057,6 +7467,17 @@
                 return
         self.assertTrue(False, 'Option libdir not in introspect data.')
 
+    def test_cross_libdir_subproject(self):
+        # Guard against a regression where calling "subproject"
+        # would reset the value of libdir to its default value.
+        testdir = os.path.join(self.unit_test_dir, '78 subdir libdir')
+        self.init(testdir, extra_args=['--libdir=fuf'])
+        for i in self.introspect('--buildoptions'):
+            if i['name'] == 'libdir':
+                self.assertEqual(i['value'], 'fuf')
+                return
+        self.assertTrue(False, 'Libdir specified on command line gets reset.')
+
     def test_std_remains(self):
         # C_std defined in project options must be in effect also when cross compiling.
         testdir = os.path.join(self.unit_test_dir, '51 noncross options')
@@ -6080,7 +7501,7 @@
     return shutil.which('x86_64-w64-mingw32-gcc') and not (is_windows() or is_cygwin())
 
 @unittest.skipUnless(not is_windows() and should_run_cross_mingw_tests(), "requires ability to cross compile with MinGW")
-class LinuxCrossMingwTests(BasePlatformTests):
+class LinuxCrossMingwTests(BaseLinuxCrossTests):
     '''
     Tests that cross-compilation to Windows/MinGW works
     '''
@@ -6541,7 +7962,12 @@
             for section, entries in values.items():
                 f.write('[{}]\n'.format(section))
                 for k, v in entries.items():
-                    f.write("{}='{}'\n".format(k, v))
+                    if isinstance(v, (bool, int, float)):
+                        f.write("{}={}\n".format(k, v))
+                    elif isinstance(v, list):
+                        f.write("{}=[{}]\n".format(k, ', '.join(["'{}'".format(w) for w in v])))
+                    else:
+                        f.write("{}='{}'\n".format(k, v))
         return filename
 
     def helper_create_binary_wrapper(self, binary, dir_=None, extra_args=None, **kwargs):
@@ -6650,9 +8076,9 @@
             '--native-file', config, '--native-file', config2,
             '-Dcase=find_program'])
 
-    def _simple_test(self, case, binary):
+    def _simple_test(self, case, binary, entry=None):
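+        # 'entry' overrides the key written to the [binaries] section when it
+        # should differ from the wrapped binary's name.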
         wrapper = self.helper_create_binary_wrapper(binary, version='12345')
-        config = self.helper_create_native_file({'binaries': {binary: wrapper}})
+        config = self.helper_create_native_file({'binaries': {entry or binary: wrapper}})
         self.init(self.testcase, extra_args=['--native-file', config, '-Dcase={}'.format(case)])
 
     def test_find_program(self):
@@ -6675,16 +8101,21 @@
             # python module breaks. This is fine on other OSes because they
             # don't need the extra indirection.
             raise unittest.SkipTest('bat indirection breaks internal sanity checks.')
-        if os.path.exists('/etc/debian_version'):
-            rc = subprocess.call(['pkg-config', '--cflags', 'python2'],
-                                 stdout=subprocess.DEVNULL,
-                                 stderr=subprocess.DEVNULL)
-            if rc != 0:
-                # Python 2 will be removed in Debian Bullseye, thus we must
-                # remove the build dependency on python2-dev. Keep the tests
-                # but only run them if dev packages are available.
+        elif is_osx():
+            binary = 'python'
+        else:
+            binary = 'python2'
+
+            # We may not have python2, so check for it.
+            for v in ['2', '2.7', '-2.7']:
+                rc = subprocess.call(['pkg-config', '--cflags', 'python{}'.format(v)],
+                                     stdout=subprocess.DEVNULL,
+                                     stderr=subprocess.DEVNULL)
+                if rc == 0:
+                    break
+            else:
                 raise unittest.SkipTest('Not running Python 2 tests because dev packages not installed.')
-        self._simple_test('python', 'python')
+        self._simple_test('python', binary, entry='python')
 
     @unittest.skipIf(is_windows(), 'Setting up multiple compilers on windows is hard')
     @skip_if_env_set('CC')
@@ -6860,15 +8291,375 @@
         self.init(testcase, extra_args=['--native-file', config])
         self.build()
 
+    def test_user_options(self):
+        testcase = os.path.join(self.common_test_dir, '43 options')
+        for opt, value in [('testoption', 'some other val'), ('other_one', True),
+                           ('combo_opt', 'one'), ('array_opt', ['two']),
+                           ('integer_opt', 0),
+                           ('CaseSenSiTivE', 'SOME other Value'),
+                           ('CASESENSITIVE', 'some other Value')]:
+            config = self.helper_create_native_file({'project options': {opt: value}})
+            with self.assertRaises(subprocess.CalledProcessError) as cm:
+                self.init(testcase, extra_args=['--native-file', config])
+                self.assertRegex(cm.exception.stdout, r'Incorrect value to [a-z]+ option')
+
+    def test_user_options_command_line_overrides(self):
+        testcase = os.path.join(self.common_test_dir, '43 options')
+        config = self.helper_create_native_file({'project options': {'other_one': True}})
+        self.init(testcase, extra_args=['--native-file', config, '-Dother_one=false'])
+
+    def test_user_options_subproject(self):
+        testcase = os.path.join(self.unit_test_dir, '79 user options for subproject')
+
+        s = os.path.join(testcase, 'subprojects')
+        if not os.path.exists(s):
+            os.mkdir(s)
+        s = os.path.join(s, 'sub')
+        if not os.path.exists(s):
+            sub = os.path.join(self.common_test_dir, '43 options')
+            shutil.copytree(sub, s)
+
+        for opt, value in [('testoption', 'some other val'), ('other_one', True),
+                           ('combo_opt', 'one'), ('array_opt', ['two']),
+                           ('integer_opt', 0)]:
+            config = self.helper_create_native_file({'sub:project options': {opt: value}})
+            with self.assertRaises(subprocess.CalledProcessError) as cm:
+                self.init(testcase, extra_args=['--native-file', config])
+                self.assertRegex(cm.exception.stdout, r'Incorrect value to [a-z]+ option')
+
+    def test_option_bool(self):
+        # Bools are allowed to be unquoted
+        testcase = os.path.join(self.common_test_dir, '1 trivial')
+        config = self.helper_create_native_file({'built-in options': {'werror': True}})
+        self.init(testcase, extra_args=['--native-file', config])
+        configuration = self.introspect('--buildoptions')
+        for each in configuration:
+            # Test that non-per-subproject options are inherited from the parent
+            if 'werror' in each['name']:
+                self.assertEqual(each['value'], True)
+                break
+        else:
+            self.fail('Did not find werror in build options?')
+
+    def test_option_integer(self):
+        # Integers are allowed to be unquoted
+        testcase = os.path.join(self.common_test_dir, '1 trivial')
+        config = self.helper_create_native_file({'built-in options': {'unity_size': 100}})
+        self.init(testcase, extra_args=['--native-file', config])
+        configuration = self.introspect('--buildoptions')
+        for each in configuration:
+            # Test that non-per-subproject options are inherited from the parent
+            if 'unity_size' in each['name']:
+                self.assertEqual(each['value'], 100)
+                break
+        else:
+            self.fail('Did not find unity_size in build options?')
+
+    def test_builtin_options(self):
+        testcase = os.path.join(self.common_test_dir, '2 cpp')
+        config = self.helper_create_native_file({'built-in options': {'cpp_std': 'c++14'}})
+
+        self.init(testcase, extra_args=['--native-file', config])
+        configuration = self.introspect('--buildoptions')
+        for each in configuration:
+            if each['name'] == 'cpp_std':
+                self.assertEqual(each['value'], 'c++14')
+                break
+        else:
+            self.fail('Did not find cpp_std in build options?')
+
+    def test_builtin_options_conf_overrides_env(self):
+        testcase = os.path.join(self.common_test_dir, '2 cpp')
+        config = self.helper_create_native_file({'built-in options': {'pkg_config_path': '/foo'}})
+
+        self.init(testcase, extra_args=['--native-file', config], override_envvars={'PKG_CONFIG_PATH': '/bar'})
+        configuration = self.introspect('--buildoptions')
+        for each in configuration:
+            if each['name'] == 'pkg_config_path':
+                self.assertEqual(each['value'], ['/foo'])
+                break
+        else:
+            self.fail('Did not find pkg_config_path in build options?')
+
+    def test_builtin_options_subprojects(self):
+        testcase = os.path.join(self.common_test_dir, '102 subproject subdir')
+        config = self.helper_create_native_file({'built-in options': {'default_library': 'both', 'c_args': ['-Dfoo']}, 'sub:built-in options': {'default_library': 'static'}})
+
+        self.init(testcase, extra_args=['--native-file', config])
+        configuration = self.introspect('--buildoptions')
+        found = 0
+        for each in configuration:
+            # Test that non-per-subproject options are inherited from the parent
+            if 'c_args' in each['name']:
+                # This path will be hit twice, once for build and once for host.
+                self.assertEqual(each['value'], ['-Dfoo'])
+                found += 1
+            elif each['name'] == 'default_library':
+                self.assertEqual(each['value'], 'both')
+                found += 1
+            elif each['name'] == 'sub:default_library':
+                self.assertEqual(each['value'], 'static')
+                found += 1
+        self.assertEqual(found, 4, 'Did not find all four expected option values')
+
+    def test_builtin_options_subprojects_overrides_buildfiles(self):
+        # If the buildfile says subproject(... default_library: shared), ensure that's overwritten
+        testcase = os.path.join(self.common_test_dir, '230 persubproject options')
+        config = self.helper_create_native_file({'sub2:built-in options': {'default_library': 'shared'}})
+
+        with self.assertRaises(subprocess.CalledProcessError) as cm:
+            self.init(testcase, extra_args=['--native-file', config])
+        self.assertIn('Parent should override default_library', cm.exception.stdout)
+
+    def test_builtin_options_subprojects_dont_inherits_parent_override(self):
+        # The parent's default_library override must not be forced onto subprojects that set their own value
+        testcase = os.path.join(self.common_test_dir, '230 persubproject options')
+        config = self.helper_create_native_file({'built-in options': {'default_library': 'both'}})
+        self.init(testcase, extra_args=['--native-file', config])
+
+    def test_builtin_options_compiler_properties(self):
+        # the properties section can have lang_args, and those need to be
+        # overwritten by the built-in options
+        testcase = os.path.join(self.common_test_dir, '1 trivial')
+        config = self.helper_create_native_file({
+            'built-in options': {'c_args': ['-DFOO']},
+            'properties': {'c_args': ['-DBAR']},
+        })
+
+        self.init(testcase, extra_args=['--native-file', config])
+        configuration = self.introspect('--buildoptions')
+        for each in configuration:
+            if each['name'] == 'c_args':
+                self.assertEqual(each['value'], ['-DFOO'])
+                break
+        else:
+            self.fail('Did not find c_args in build options?')
+
+    def test_builtin_options_compiler_properties_legacy(self):
+        # The legacy placement in [properties] is still honoured when a 'built-in
+        # options' section is present but does not itself set the lang_args
+        testcase = os.path.join(self.common_test_dir, '1 trivial')
+        config = self.helper_create_native_file({
+            'built-in options': {'default_library': 'static'},
+            'properties': {'c_args': ['-DBAR']},
+        })
+
+        self.init(testcase, extra_args=['--native-file', config])
+        configuration = self.introspect('--buildoptions')
+        for each in configuration:
+            if each['name'] == 'c_args':
+                self.assertEqual(each['value'], ['-DBAR'])
+                break
+        else:
+            self.fail('Did not find c_args in build options?')
+
+    def test_builtin_options_paths(self):
+        # the legacy paths section can set bindir, and that value must be
+        # overridden by the built-in options
+        testcase = os.path.join(self.common_test_dir, '1 trivial')
+        config = self.helper_create_native_file({
+            'built-in options': {'bindir': 'foo'},
+            'paths': {'bindir': 'bar'},
+        })
+
+        self.init(testcase, extra_args=['--native-file', config])
+        configuration = self.introspect('--buildoptions')
+        for each in configuration:
+            if each['name'] == 'bindir':
+                self.assertEqual(each['value'], 'foo')
+                break
+        else:
+            self.fail('Did not find bindir in build options?')
+
+    def test_builtin_options_paths_legacy(self):
+        testcase = os.path.join(self.common_test_dir, '1 trivial')
+        config = self.helper_create_native_file({
+            'built-in options': {'default_library': 'static'},
+            'paths': {'bindir': 'bar'},
+        })
+
+        self.init(testcase, extra_args=['--native-file', config])
+        configuration = self.introspect('--buildoptions')
+        for each in configuration:
+            if each['name'] == 'bindir':
+                self.assertEqual(each['value'], 'bar')
+                break
+        else:
+            self.fail('Did not find bindir in build options?')
+
 
 class CrossFileTests(BasePlatformTests):
 
-    """Tests for cross file functioality not directly related to
+    """Tests for cross file functionality not directly related to
     cross compiling.
 
     This is mainly aimed at testing overrides from cross files.
     """
 
+    def setUp(self):
+        super().setUp()
+        self.current_config = 0
+        self.current_wrapper = 0
+
+    def _cross_file_generator(self, *, needs_exe_wrapper: bool = False,
+                              exe_wrapper: T.Optional[T.List[str]] = None) -> str:
+        if is_windows():
+            raise unittest.SkipTest('Cannot run this test on non-mingw/non-cygwin windows')
+        if is_sunos():
+            cc = 'gcc'
+        else:
+            cc = 'cc'
+
+        return textwrap.dedent("""\
+            [binaries]
+            c = '/usr/bin/{}'
+            ar = '/usr/bin/ar'
+            strip = '/usr/bin/ar'
+            {}
+
+            [properties]
+            needs_exe_wrapper = {}
+
+            [host_machine]
+            system = 'linux'
+            cpu_family = 'x86'
+            cpu = 'i686'
+            endian = 'little'
+            """.format(cc,
+                       'exe_wrapper = {}'.format(str(exe_wrapper)) if exe_wrapper is not None else '',
+                       needs_exe_wrapper))
+
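As a rough illustration only (the wrapper path below is a made-up placeholder, not part of the patch), the generator above would render text along these lines for needs_exe_wrapper=True and exe_wrapper=['/tmp/wrapper.py']:

# Sketch of the rendered cross file; values follow the template in
# _cross_file_generator() above, with the list repr of exe_wrapper inlined.
example_cross_file = """\
[binaries]
c = '/usr/bin/cc'
ar = '/usr/bin/ar'
strip = '/usr/bin/ar'
exe_wrapper = ['/tmp/wrapper.py']

[properties]
needs_exe_wrapper = True

[host_machine]
system = 'linux'
cpu_family = 'x86'
cpu = 'i686'
endian = 'little'
"""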
+    def _stub_exe_wrapper(self) -> str:
+        return textwrap.dedent('''\
+            #!/usr/bin/env python3
+            import subprocess
+            import sys
+
+            sys.exit(subprocess.run(sys.argv[1:]).returncode)
+            ''')
+
+    def test_needs_exe_wrapper_true(self):
+        testdir = os.path.join(self.unit_test_dir, '72 cross test passed')
+        with tempfile.TemporaryDirectory() as d:
+            p = Path(d) / 'crossfile'
+            with p.open('wt') as f:
+                f.write(self._cross_file_generator(needs_exe_wrapper=True))
+            self.init(testdir, extra_args=['--cross-file=' + str(p)])
+            out = self.run_target('test')
+            self.assertRegex(out, r'Skipped:\s*1\s*\n')
+
+    def test_needs_exe_wrapper_false(self):
+        testdir = os.path.join(self.unit_test_dir, '72 cross test passed')
+        with tempfile.TemporaryDirectory() as d:
+            p = Path(d) / 'crossfile'
+            with p.open('wt') as f:
+                f.write(self._cross_file_generator(needs_exe_wrapper=False))
+            self.init(testdir, extra_args=['--cross-file=' + str(p)])
+            out = self.run_target('test')
+            self.assertNotRegex(out, r'Skipped:\s*1\n')
+
+    def test_needs_exe_wrapper_true_wrapper(self):
+        testdir = os.path.join(self.unit_test_dir, '72 cross test passed')
+        with tempfile.TemporaryDirectory() as d:
+            s = Path(d) / 'wrapper.py'
+            with s.open('wt') as f:
+                f.write(self._stub_exe_wrapper())
+            s.chmod(0o774)
+            p = Path(d) / 'crossfile'
+            with p.open('wt') as f:
+                f.write(self._cross_file_generator(
+                    needs_exe_wrapper=True,
+                    exe_wrapper=[str(s)]))
+
+            self.init(testdir, extra_args=['--cross-file=' + str(p), '-Dexpect=true'])
+            out = self.run_target('test')
+            self.assertRegex(out, r'Ok:\s*3\s*\n')
+
+    def test_cross_exe_passed_no_wrapper(self):
+        testdir = os.path.join(self.unit_test_dir, '72 cross test passed')
+        with tempfile.TemporaryDirectory() as d:
+            p = Path(d) / 'crossfile'
+            with p.open('wt') as f:
+                f.write(self._cross_file_generator(needs_exe_wrapper=True))
+
+            self.init(testdir, extra_args=['--cross-file=' + str(p)])
+            self.build()
+            out = self.run_target('test')
+            self.assertRegex(out, r'Skipped:\s*1\s*\n')
+
+    # The test uses mocking and thus requires that the current process is the
+    # one to run the Meson steps. If we are using an external test executable
+    # (most commonly in Debian autopkgtests) then the mocking won't work.
+    @unittest.skipIf('MESON_EXE' in os.environ, 'MESON_EXE is defined, cannot use mocking.')
+    def test_cross_file_system_paths(self):
+        if is_windows():
+            raise unittest.SkipTest('system crossfile paths not defined for Windows (yet)')
+
+        testdir = os.path.join(self.common_test_dir, '1 trivial')
+        cross_content = self._cross_file_generator()
+        with tempfile.TemporaryDirectory() as d:
+            dir_ = os.path.join(d, 'meson', 'cross')
+            os.makedirs(dir_)
+            with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f:
+                f.write(cross_content)
+            name = os.path.basename(f.name)
+
+            with mock.patch.dict(os.environ, {'XDG_DATA_HOME': d}):
+                self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True)
+                self.wipe()
+
+            with mock.patch.dict(os.environ, {'XDG_DATA_DIRS': d}):
+                os.environ.pop('XDG_DATA_HOME', None)
+                self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True)
+                self.wipe()
+
+        with tempfile.TemporaryDirectory() as d:
+            dir_ = os.path.join(d, '.local', 'share', 'meson', 'cross')
+            os.makedirs(dir_)
+            with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f:
+                f.write(cross_content)
+            name = os.path.basename(f.name)
+
+            # If XDG_DATA_HOME is set in the environment running the
+            # tests, this test will fail; so mock the environment, pop
+            # the variable, then test
+            with mock.patch.dict(os.environ):
+                os.environ.pop('XDG_DATA_HOME', None)
+                with mock.patch('mesonbuild.coredata.os.path.expanduser', lambda x: x.replace('~', d)):
+                    self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True)
+                    self.wipe()
+
+    def helper_create_cross_file(self, values):
+        """Create a config file as a temporary file.
+
+        values should be a nested dictionary structure of {section: {key:
+        value}}
+        """
+        filename = os.path.join(self.builddir, 'generated{}.config'.format(self.current_config))
+        self.current_config += 1
+        with open(filename, 'wt') as f:
+            for section, entries in values.items():
+                f.write('[{}]\n'.format(section))
+                for k, v in entries.items():
+                    f.write("{}='{}'\n".format(k, v))
+        return filename
+
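A minimal sketch (illustrative, not part of the patch) of how the helper above maps its nested dict onto the INI-style machine file it writes; every value is written single-quoted, exactly as in the loop above:

# Given values = {'built-in options': {'cpp_std': 'c++14'}}, the helper writes
# a generatedN.config file in the build directory equivalent to:
example_machine_file = "[built-in options]\ncpp_std='c++14'\n"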
     def test_cross_file_dirs(self):
         testcase = os.path.join(self.unit_test_dir, '60 native file override')
         self.init(testcase, default_args=False,
@@ -6925,6 +8716,89 @@
                               '-Ddef_sharedstatedir=sharedstatebar',
                               '-Ddef_sysconfdir=sysconfbar'])
 
+    def test_user_options(self):
+        # This is just a smoke test for the cross file path, since the implementation
+        # shares code after loading from the files
+        testcase = os.path.join(self.common_test_dir, '43 options')
+        config = self.helper_create_cross_file({'project options': {'testoption': 'some other value'}})
+        with self.assertRaises(subprocess.CalledProcessError) as cm:
+            self.init(testcase, extra_args=['--cross-file', config])
+        self.assertRegex(cm.exception.stdout, r'Incorrect value to [a-z]+ option')
+
+    def test_builtin_options(self):
+        testcase = os.path.join(self.common_test_dir, '2 cpp')
+        config = self.helper_create_cross_file({'built-in options': {'cpp_std': 'c++14'}})
+
+        self.init(testcase, extra_args=['--cross-file', config])
+        configuration = self.introspect('--buildoptions')
+        for each in configuration:
+            if each['name'] == 'cpp_std':
+                self.assertEqual(each['value'], 'c++14')
+                break
+        else:
+            self.fail('No c++ standard set?')
+
+    def test_builtin_options_per_machine(self):
+        """Test options that are allowed to be set on a per-machine basis.
+
+        Such options could be passed twice, once for the build machine, and
+        once for the host machine. pkg_config_path is used here, but any
+        option that can be set for both machines would do.
+        """
+        testcase = os.path.join(self.common_test_dir, '2 cpp')
+        cross = self.helper_create_cross_file({'built-in options': {'pkg_config_path': '/cross/path', 'cpp_std': 'c++17'}})
+        native = self.helper_create_cross_file({'built-in options': {'pkg_config_path': '/native/path', 'cpp_std': 'c++14'}})
+
+        # Ensure that PKG_CONFIG_PATH is not set in the environment
+        with mock.patch.dict('os.environ'):
+            for k in ['PKG_CONFIG_PATH', 'PKG_CONFIG_PATH_FOR_BUILD']:
+                try:
+                    del os.environ[k]
+                except KeyError:
+                    pass
+            self.init(testcase, extra_args=['--cross-file', cross, '--native-file', native])
+
+        configuration = self.introspect('--buildoptions')
+        found = 0
+        for each in configuration:
+            if each['name'] == 'pkg_config_path':
+                self.assertEqual(each['value'], ['/cross/path'])
+                found += 1
+            elif each['name'] == 'cpp_std':
+                self.assertEqual(each['value'], 'c++17')
+                found += 1
+            elif each['name'] == 'build.pkg_config_path':
+                self.assertEqual(each['value'], ['/native/path'])
+                found += 1
+            elif each['name'] == 'build.cpp_std':
+                self.assertEqual(each['value'], 'c++14')
+                found += 1
+
+            if found == 4:
+                break
+        self.assertEqual(found, 4, 'Did not find all sections.')
+
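The test above relies on the build-machine copies of per-machine options showing up with a 'build.' prefix in the introspection output. A small sketch of separating the two (the option dicts are a made-up subset of what --buildoptions returns, using the values from the test above):

options = [
    {'name': 'pkg_config_path', 'value': ['/cross/path']},
    {'name': 'build.pkg_config_path', 'value': ['/native/path']},
]
# Host-machine options keep their plain names; build-machine ones carry the prefix.
host = {o['name']: o['value'] for o in options if not o['name'].startswith('build.')}
build = {o['name'][len('build.'):]: o['value'] for o in options if o['name'].startswith('build.')}
assert host['pkg_config_path'] == ['/cross/path']
assert build['pkg_config_path'] == ['/native/path']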
+    def test_builtin_options_conf_overrides_env(self):
+        testcase = os.path.join(self.common_test_dir, '2 cpp')
+        config = self.helper_create_cross_file({'built-in options': {'pkg_config_path': '/native'}})
+        cross = self.helper_create_cross_file({'built-in options': {'pkg_config_path': '/cross'}})
+
+        self.init(testcase, extra_args=['--native-file', config, '--cross-file', cross],
+                  override_envvars={'PKG_CONFIG_PATH': '/bar', 'PKG_CONFIG_PATH_FOR_BUILD': '/dir'})
+        configuration = self.introspect('--buildoptions')
+        found = 0
+        for each in configuration:
+            if each['name'] == 'pkg_config_path':
+                self.assertEqual(each['value'], ['/cross'])
+                found += 1
+            elif each['name'] == 'build.pkg_config_path':
+                self.assertEqual(each['value'], ['/native'])
+                found += 1
+            if found == 2:
+                break
+        self.assertEqual(found, 2, 'Did not find all sections.')
+
+
 class TAPParserTests(unittest.TestCase):
     def assert_test(self, events, **kwargs):
         if 'explanation' not in kwargs:
@@ -7197,6 +9071,196 @@
         self.assert_test(events, number=2, name='', result=TestResult.FAIL)
         self.assert_last(events)
 
+class SubprojectsCommandTests(BasePlatformTests):
+    def setUp(self):
+        super().setUp()
+        self.root_dir = Path(self.builddir)
+
+        self.project_dir = self.root_dir / 'src'
+        self._create_project(self.project_dir)
+
+        self.subprojects_dir = self.project_dir / 'subprojects'
+        os.makedirs(str(self.subprojects_dir))
+
+    def _create_project(self, path, project_name='dummy'):
+        os.makedirs(str(path), exist_ok=True)
+        with open(str(path / 'meson.build'), 'w') as f:
+            f.write("project('{}')".format(project_name))
+
+    def _git(self, cmd, workdir):
+        return git(cmd, str(workdir), check=True)[1].strip()
+
+    def _git_config(self, workdir):
+        self._git(['config', 'user.name', 'Meson Test'], workdir)
+        self._git(['config', 'user.email', 'meson.test@example.com'], workdir)
+
+    def _git_remote(self, cmd, name):
+        return self._git(cmd, self.root_dir / name)
+
+    def _git_local(self, cmd, name):
+        return self._git(cmd, self.subprojects_dir / name)
+
+    def _git_local_branch(self, name):
+        # Same as `git branch --show-current` but compatible with older git versions
+        branch = self._git_local(['rev-parse', '--abbrev-ref', 'HEAD'], name)
+        return branch if branch != 'HEAD' else ''
+
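A short aside on the helper above: `git rev-parse --abbrev-ref HEAD` prints the literal string HEAD when the checkout is detached, which is why the helper maps that case to an empty string; test_git_update below relies on this after checking out tags and raw commit ids. A sketch of the mapping (branch names are hypothetical):

def normalize_branch(abbrev_ref: str) -> str:
    # 'newbranch' -> 'newbranch'; 'HEAD' (detached checkout) -> ''
    return abbrev_ref if abbrev_ref != 'HEAD' else ''

assert normalize_branch('newbranch') == 'newbranch'
assert normalize_branch('HEAD') == ''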
+    def _git_local_commit(self, name, ref='HEAD'):
+        return self._git_local(['rev-parse', ref], name)
+
+    def _git_remote_commit(self, name, ref='HEAD'):
+        return self._git_remote(['rev-parse', ref], name)
+
+    def _git_create_repo(self, path):
+        self._create_project(path)
+        self._git(['init'], path)
+        self._git_config(path)
+        self._git(['add', '.'], path)
+        self._git(['commit', '-m', 'Initial commit'], path)
+
+    def _git_create_remote_repo(self, name):
+        self._git_create_repo(self.root_dir / name)
+
+    def _git_create_local_repo(self, name):
+        self._git_create_repo(self.subprojects_dir / name)
+
+    def _git_create_remote_commit(self, name, branch):
+        self._git_remote(['checkout', branch], name)
+        self._git_remote(['commit', '--allow-empty', '-m', 'initial {} commit'.format(branch)], name)
+
+    def _git_create_remote_branch(self, name, branch):
+        self._git_remote(['checkout', '-b', branch], name)
+        self._git_remote(['commit', '--allow-empty', '-m', 'initial {} commit'.format(branch)], name)
+
+    def _git_create_remote_tag(self, name, tag):
+        self._git_remote(['commit', '--allow-empty', '-m', 'tag {} commit'.format(tag)], name)
+        self._git_remote(['tag', tag], name)
+
+    def _wrap_create_git(self, name, revision='master'):
+        path = self.root_dir / name
+        with open(str((self.subprojects_dir / name).with_suffix('.wrap')), 'w') as f:
+            f.write(textwrap.dedent(
+                '''
+                [wrap-git]
+                url={}
+                revision={}
+                '''.format(os.path.abspath(str(path)), revision)))
+
+    def _wrap_create_file(self, name, tarball='dummy.tar.gz'):
+        path = self.root_dir / tarball
+        with open(str((self.subprojects_dir / name).with_suffix('.wrap')), 'w') as f:
+            f.write(textwrap.dedent(
+                '''
+                [wrap-file]
+                source_url={}
+                '''.format(os.path.abspath(str(path)))))
+
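For orientation, roughly the wrap file that _wrap_create_git() above produces (a sketch; the absolute repository path is machine-specific and shown as a placeholder):

# Sketch of subprojects/sub1.wrap after _wrap_create_git('sub1', 'newbranch');
# the dedented template starts with a blank line, kept here for fidelity.
example_wrap = """
[wrap-git]
url=/path/to/builddir/sub1
revision=newbranch
"""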
+    def _subprojects_cmd(self, args):
+        return self._run(self.meson_command + ['subprojects'] + args, workdir=str(self.project_dir))
+
+    def test_git_update(self):
+        subp_name = 'sub1'
+
+        # Create a fake remote git repository and a wrap file. Checks that
+        # "meson subprojects download" works.
+        self._git_create_remote_repo(subp_name)
+        self._wrap_create_git(subp_name)
+        self._subprojects_cmd(['download'])
+        self.assertPathExists(str(self.subprojects_dir / subp_name))
+        self._git_config(self.subprojects_dir / subp_name)
+
+        # Create a new remote branch and update the wrap file. Checks that
+        # "meson subprojects update --reset" checks out the new branch.
+        self._git_create_remote_branch(subp_name, 'newbranch')
+        self._wrap_create_git(subp_name, 'newbranch')
+        self._subprojects_cmd(['update', '--reset'])
+        self.assertEqual(self._git_local_branch(subp_name), 'newbranch')
+        self.assertEqual(self._git_local_commit(subp_name), self._git_remote_commit(subp_name, 'newbranch'))
+
+        # Update remote newbranch. Checks that the new commit is pulled into the
+        # existing local newbranch. Make sure it does not print a spurious 'git stash' message.
+        self._git_create_remote_commit(subp_name, 'newbranch')
+        out = self._subprojects_cmd(['update', '--reset'])
+        self.assertNotIn('No local changes to save', out)
+        self.assertEqual(self._git_local_branch(subp_name), 'newbranch')
+        self.assertEqual(self._git_local_commit(subp_name), self._git_remote_commit(subp_name, 'newbranch'))
+
+        # Update remote newbranch and switch to another branch. Checks that it
+        # switches the current branch back to newbranch and pulls the latest commit.
+        self._git_local(['checkout', 'master'], subp_name)
+        self._git_create_remote_commit(subp_name, 'newbranch')
+        self._subprojects_cmd(['update', '--reset'])
+        self.assertEqual(self._git_local_branch(subp_name), 'newbranch')
+        self.assertEqual(self._git_local_commit(subp_name), self._git_remote_commit(subp_name, 'newbranch'))
+
+        # Stage some local changes, then update. Checks that the local changes get
+        # stashed.
+        self._create_project(self.subprojects_dir / subp_name, 'new_project_name')
+        self._git_local(['add', '.'], subp_name)
+        self._git_create_remote_commit(subp_name, 'newbranch')
+        self._subprojects_cmd(['update', '--reset'])
+        self.assertEqual(self._git_local_branch(subp_name), 'newbranch')
+        self.assertEqual(self._git_local_commit(subp_name), self._git_remote_commit(subp_name, 'newbranch'))
+        self.assertTrue(self._git_local(['stash', 'list'], subp_name))
+
+        # Create a new remote tag and update the wrap file. Checks that
+        # "meson subprojects update --reset" checks out the new tag in detached mode.
+        self._git_create_remote_tag(subp_name, 'newtag')
+        self._wrap_create_git(subp_name, 'newtag')
+        self._subprojects_cmd(['update', '--reset'])
+        self.assertEqual(self._git_local_branch(subp_name), '')
+        self.assertEqual(self._git_local_commit(subp_name), self._git_remote_commit(subp_name, 'newtag'))
+
+        # Create a new remote commit and update the wrap file with the commit id.
+        # Checks that "meson subprojects update --reset" checks out the new commit
+        # in detached mode.
+        self._git_local(['checkout', 'master'], subp_name)
+        self._git_create_remote_commit(subp_name, 'newbranch')
+        new_commit = self._git_remote(['rev-parse', 'HEAD'], subp_name)
+        self._wrap_create_git(subp_name, new_commit)
+        self._subprojects_cmd(['update', '--reset'])
+        self.assertEqual(self._git_local_branch(subp_name), '')
+        self.assertEqual(self._git_local_commit(subp_name), new_commit)
+
+        # Create a local project that is not in a git repository, then update it
+        # with a git wrap. Without --reset it should print an error message and
+        # fail. With --reset it should delete the existing project and clone the
+        # new one.
+        subp_name = 'sub2'
+        self._create_project(self.subprojects_dir / subp_name)
+        self._git_create_remote_repo(subp_name)
+        self._wrap_create_git(subp_name)
+        with self.assertRaises(subprocess.CalledProcessError) as cm:
+            self._subprojects_cmd(['update'])
+        self.assertIn('Not a git repository', cm.exception.output)
+        self._subprojects_cmd(['update', '--reset'])
+        self.assertEqual(self._git_local_commit(subp_name), self._git_remote_commit(subp_name))
+
+    @skipIfNoExecutable('true')
+    def test_foreach(self):
+        self._create_project(self.subprojects_dir / 'sub_file')
+        self._wrap_create_file('sub_file')
+        self._git_create_local_repo('sub_git')
+        self._wrap_create_git('sub_git')
+        self._git_create_local_repo('sub_git_no_wrap')
+
+        def ran_in(s):
+            ret = []
+            prefix = 'Executing command in '
+            for l in s.splitlines():
+                if l.startswith(prefix):
+                    ret.append(l[len(prefix):])
+            return sorted(ret)
+
+        dummy_cmd = ['true']
+        out = self._subprojects_cmd(['foreach'] + dummy_cmd)
+        self.assertEqual(ran_in(out), sorted(['subprojects/sub_file', 'subprojects/sub_git', 'subprojects/sub_git_no_wrap']))
+        out = self._subprojects_cmd(['foreach', '--types', 'git,file'] + dummy_cmd)
+        self.assertEqual(ran_in(out), sorted(['subprojects/sub_file', 'subprojects/sub_git']))
+        out = self._subprojects_cmd(['foreach', '--types', 'file'] + dummy_cmd)
+        self.assertEqual(ran_in(out), ['subprojects/sub_file'])
+        out = self._subprojects_cmd(['foreach', '--types', 'git'] + dummy_cmd)
+        self.assertEqual(ran_in(out), ['subprojects/sub_git'])
 
 def _clang_at_least(compiler, minver: str, apple_minver: str) -> bool:
     """
@@ -7237,6 +9301,9 @@
     test_list = []
     for arg in argv:
         if arg.startswith('-'):
+            if arg in ('-f', '--failfast'):
+                arg = '--exitfirst'
+            pytest_args.append(arg)
             continue
         # ClassName.test_name => 'ClassName and test_name'
         if '.' in arg:
@@ -7246,29 +9313,54 @@
         pytest_args += ['-k', ' or '.join(test_list)]
     return pytest_args
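Based on the comment above and the --failfast handling, convert_args() is expected to behave roughly as follows (a sketch; the elided middle of the function is not reproduced, and the test name is made up):

# i.e. convert_args(['-f', 'AllPlatformTests.test_install']) should produce
# `expected`, assuming the elided branch turns 'Class.test' into 'Class and test'
# as the comment above states.
expected = ['--exitfirst', '-k', 'AllPlatformTests and test_install']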
 
+def running_single_tests(argv, cases):
+    '''
+    Check whether we only got arguments for running individual tests, not
+    entire testcases, and not all testcases (no test args).
+    '''
+    got_test_arg = False
+    for arg in argv:
+        if arg.startswith('-'):
+            continue
+        for case in cases:
+            if not arg.startswith(case):
+                continue
+            if '.' not in arg:
+                # Got a testcase, done
+                return False
+            got_test_arg = True
+    return got_test_arg
+
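A small behaviour sketch for running_single_tests() (illustrative only; the argument lists and the trimmed cases list are made up):

cases_sketch = ['InternalTests', 'AllPlatformTests']
# No test args at all -> not running single tests.
assert not running_single_tests([], cases_sketch)
# A whole testcase name -> not running single tests.
assert not running_single_tests(['AllPlatformTests'], cases_sketch)
# Only ClassName.test_name style args -> running single tests.
assert running_single_tests(['-v', 'AllPlatformTests.test_foo'], cases_sketch)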
 def main():
     unset_envs()
+    cases = ['InternalTests', 'DataTests', 'AllPlatformTests', 'FailureTests',
+             'PythonTests', 'NativeFileTests', 'RewriterTests', 'CrossFileTests',
+             'TAPParserTests', 'SubprojectsCommandTests',
+
+             'LinuxlikeTests', 'LinuxCrossArmTests', 'LinuxCrossMingwTests',
+             'WindowsTests', 'DarwinTests']
+
     try:
         import pytest # noqa: F401
         # Need pytest-xdist for `-n` arg
         import xdist # noqa: F401
-        if sys.version_info.major <= 3 and sys.version_info.minor <= 5:
-            raise ImportError('pytest with python <= 3.5 is causing issues on the CI')
-        pytest_args = ['-n', 'auto', './run_unittests.py']
+        pytest_args = []
+        # Don't use pytest-xdist when running single unit tests, since in that
+        # case it wastes time spawning a lot of worker processes.
+        if not running_single_tests(sys.argv, cases):
+            pytest_args += ['-n', 'auto']
+        pytest_args += ['./run_unittests.py']
         pytest_args += convert_args(sys.argv[1:])
         return subprocess.run(python_command + ['-m', 'pytest'] + pytest_args).returncode
     except ImportError:
         print('pytest-xdist not found, using unittest instead')
-        pass
-    # All attempts at locating pytest failed, fall back to plain unittest.
-    cases = ['InternalTests', 'DataTests', 'AllPlatformTests', 'FailureTests',
-             'PythonTests', 'NativeFileTests', 'RewriterTests', 'CrossFileTests',
-             'TAPParserTests',
-
-             'LinuxlikeTests', 'LinuxCrossArmTests', 'LinuxCrossMingwTests',
-             'WindowsTests', 'DarwinTests']
-
+    # Fall back to plain unittest.
     return unittest.main(defaultTest=cases, buffer=True)
 
 if __name__ == '__main__':
-    raise SystemExit(main())
+    print('Meson build system', mesonbuild.coredata.version, 'Unit Tests')
+    start = time.monotonic()
+    try:
+        raise SystemExit(main())
+    finally:
+        print('Total time: {:.3f} seconds'.format(time.monotonic() - start))
diff -Nru meson-0.53.2/setup.cfg meson-0.57.0+really0.56.2/setup.cfg
--- meson-0.53.2/setup.cfg	2020-02-25 16:02:10.000000000 +0000
+++ meson-0.57.0+really0.56.2/setup.cfg	2021-01-10 12:49:51.051664400 +0000
@@ -30,6 +30,8 @@
 
 [options]
 python_requires = >= 3.5.2
+setup_requires = 
+	setuptools
 
 [options.extras_require]
 progress = 
diff -Nru meson-0.53.2/setup.py meson-0.57.0+really0.56.2/setup.py
--- meson-0.53.2/setup.py	2020-02-25 18:00:47.000000000 +0000
+++ meson-0.57.0+really0.56.2/setup.py	2021-01-06 10:39:48.000000000 +0000
@@ -37,10 +37,6 @@
             'mesonbuild.scripts',
             'mesonbuild.templates',
             'mesonbuild.wrap']
-package_data = {
-    'mesonbuild.dependencies': ['data/CMakeLists.txt', 'data/CMakeListsLLVM.txt', 'data/CMakePathInfo.txt'],
-    'mesonbuild.cmake': ['data/run_ctgt.py', 'data/preload.cmake'],
-}
 data_files = []
 if sys.platform != 'win32':
     # Only useful on UNIX-like systems
@@ -51,6 +47,5 @@
     setup(name='meson',
           version=version,
           packages=packages,
-          package_data=package_data,
           entry_points=entries,
           data_files=data_files,)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/10 header only/main.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/10 header only/main.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/10 header only/main.cpp"	2019-12-04 18:45:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/10 header only/main.cpp"	2020-08-15 16:27:05.000000000 +0000
@@ -3,8 +3,14 @@
 
 using namespace std;
 
+#define EXPECTED "Hello World compDef 42"
+
 int main(void) {
   cmModClass obj("Hello");
   cout << obj.getStr() << endl;
+  if (obj.getStr() != EXPECTED) {
+    cerr << "Expected: '" << EXPECTED << "'" << endl;
+    return 1;
+  }
   return 0;
 }
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/10 header only/subprojects/cmMod/CMakeLists.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/10 header only/subprojects/cmMod/CMakeLists.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/10 header only/subprojects/cmMod/CMakeLists.txt"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/10 header only/subprojects/cmMod/CMakeLists.txt"	2020-08-15 16:27:05.000000000 +0000
@@ -9,3 +9,4 @@
 set_target_properties(cmModLib PROPERTIES INTERFACE_COMPILE_OPTIONS "-DCMAKE_FLAG_MUST_BE_PRESENT")
 target_include_directories(cmModLib INTERFACE "${CMAKE_CURRENT_SOURCE_DIR}" "${CMAKE_CURRENT_SOURCE_DIR}/include")
 target_compile_definitions(cmModLib INTERFACE -DCMAKE_COMPILER_DEFINE_STR="compDef")
+target_compile_definitions(cmModLib INTERFACE MESON_MAGIC_FLAG=42)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/10 header only/subprojects/cmMod/include/cmMod.hpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/10 header only/subprojects/cmMod/include/cmMod.hpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/10 header only/subprojects/cmMod/include/cmMod.hpp"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/10 header only/subprojects/cmMod/include/cmMod.hpp"	2020-08-15 16:27:05.000000000 +0000
@@ -6,6 +6,9 @@
 #error "The flag CMAKE_FLAG_MUST_BE_PRESENT was not set"
 #endif
 
+#define xstr(s) str(s)
+#define str(s) #s
+
 class cmModClass {
   private:
     std::string str;
@@ -13,6 +16,8 @@
     cmModClass(std::string foo) {
       str = foo + " World ";
       str += CMAKE_COMPILER_DEFINE_STR;
+      str += ' ';
+      str += xstr(MESON_MAGIC_FLAG);
     }
 
     inline std::string getStr() const { return str; }
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/11 cmake_module_path/cmake/FindSomethingLikePython.cmake" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/11 cmake_module_path/cmake/FindSomethingLikePython.cmake"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/11 cmake_module_path/cmake/FindSomethingLikePython.cmake"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/11 cmake_module_path/cmake/FindSomethingLikePython.cmake"	2020-08-15 16:27:05.000000000 +0000
@@ -1,24 +1,9 @@
 cmake_policy(VERSION 3.7)
 
-if(CMAKE_VERSION VERSION_GREATER_EQUAL 3.12)
-  find_package(Python COMPONENTS Interpreter)
-else()
-  find_package(PythonInterp)
-endif()
-
+find_package(Python COMPONENTS Interpreter)
 if(Python_FOUND OR PYTHONINTERP_FOUND)
   set(SomethingLikePython_FOUND      ON)
   set(SomethingLikePython_EXECUTABLE ${Python_EXECUTABLE})
-
-  if(NOT DEFINED Python_VERSION)
-    set(Python_VERSION ${Python_VERSION_STRING})
-  endif()
-  if(NOT TARGET Python::Interpreter)
-    add_executable(Python::Interpreter IMPORTED)
-    set_target_properties(Python::Interpreter PROPERTIES
-                          IMPORTED_LOCATION ${Python_EXECUTABLE}
-                          VERSION ${Python_VERSION})
-  endif()
 else()
   set(SomethingLikePython_FOUND OFF)
 endif()
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/11 cmake_module_path/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/11 cmake_module_path/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/11 cmake_module_path/meson.build"	2020-01-07 21:05:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/11 cmake_module_path/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -1,7 +1,7 @@
 # We use Python3 as it's the only thing guaranteed to be available on any platform Meson can run on (unlike Zlib in linuxlike/13 cmake dependency).
 
-project('user CMake find_package module using cmake_module_path',
-  meson_version: '>= 0.50.0')
+project('user CMake find_package module using cmake_module_path', ['c', 'cpp'],
+  meson_version: '>= 0.55.0')
 
 if not find_program('cmake', required: false).found()
   error('MESON_SKIP_TEST cmake binary not available.')
@@ -15,3 +15,11 @@
 dependency('SomethingLikePython', required : true, method : 'cmake', cmake_module_path : 'cmake', modules: 'Python::Interpreter')
 
 dependency('SomethingLikePython', method : 'cmake', cmake_module_path : ['doesNotExist', 'cmake'], modules: 'Python::Interpreter')
+
+# Test a custom target with Python::Interpreter in COMMAND
+cm = import('cmake')
+op = cm.subproject_options()
+op.add_cmake_defines({'CMAKE_MODULE_PATH': meson.source_root() / 'cmake'})
+sp = cm.subproject('cmMod', options: op)
+main = sp.target('main')
+test('main', main)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/11 cmake_module_path/subprojects/cmMod/CMakeLists.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/11 cmake_module_path/subprojects/cmMod/CMakeLists.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/11 cmake_module_path/subprojects/cmMod/CMakeLists.txt"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/11 cmake_module_path/subprojects/cmMod/CMakeLists.txt"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,15 @@
+cmake_minimum_required(VERSION 3.5)
+
+project(cmMod)
+
+message(STATUS "CMAKE_MODULE_PATH: '${CMAKE_MODULE_PATH}'")
+
+find_package(SomethingLikePython REQUIRED)
+
+add_custom_command(
+  OUTPUT            "${CMAKE_CURRENT_BINARY_DIR}/main.c"
+  COMMAND           Python::Interpreter "${CMAKE_CURRENT_SOURCE_DIR}/gen.py"
+  WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}"
+)
+
+add_executable(main "${CMAKE_CURRENT_BINARY_DIR}/main.c")
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/11 cmake_module_path/subprojects/cmMod/gen.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/11 cmake_module_path/subprojects/cmMod/gen.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/11 cmake_module_path/subprojects/cmMod/gen.py"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/11 cmake_module_path/subprojects/cmMod/gen.py"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,9 @@
+with open('main.c', 'w') as fp:
+  print('''
+#include <stdio.h>
+
+int main(void) {
+  printf(\"Hello World\");
+  return 0;
+}
+''', file=fp)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/11 cmake_module_path/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/11 cmake_module_path/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/11 cmake_module_path/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/11 cmake_module_path/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,5 @@
+{
+  "tools": {
+    "cmake": ">=3.12"
+  }
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/16 threads/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/16 threads/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/16 threads/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/16 threads/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,11 @@
+{
+  "matrix": {
+    "options": {
+      "use_pthread": [
+        { "val": "ON"      },
+        { "val": "OFF"     },
+        { "val": "NOT_SET" }
+      ]
+    }
+  }
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/16 threads/test_matrix.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/16 threads/test_matrix.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/16 threads/test_matrix.json"	2020-02-25 18:00:47.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/16 threads/test_matrix.json"	1970-01-01 00:00:00.000000000 +0000
@@ -1,9 +0,0 @@
-{
-  "options": {
-    "use_pthread": [
-      { "val": "ON"      },
-      { "val": "OFF"     },
-      { "val": "NOT_SET" }
-    ]
-  }
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/main.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/main.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/main.cpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/main.cpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,10 @@
+#include <cmMod.hpp>
+#include <iostream>
+
+using namespace std;
+
+int main(void) {
+  cmModClass obj("Hello");
+  cout << obj.getStr() << endl;
+  return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,9 @@
+project('include_path_order', ['c', 'cpp'])
+
+cm = import('cmake')
+
+sub_pro = cm.subproject('cmMod')
+sub_dep = sub_pro.dependency('cmModLib++')
+
+exe1 = executable('main', ['main.cpp'], dependencies: [sub_dep])
+test('test1', exe1)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/CMakeLists.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/CMakeLists.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/CMakeLists.txt"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/CMakeLists.txt"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,34 @@
+cmake_minimum_required(VERSION 3.5)
+
+project(cmMod)
+set (CMAKE_CXX_STANDARD 14)
+
+include_directories(
+  ${CMAKE_CURRENT_BINARY_DIR}
+
+  # The one and only correct include dir
+  ${CMAKE_CURRENT_SOURCE_DIR}/incG
+
+  # All of these are traps
+  ${CMAKE_CURRENT_SOURCE_DIR}/incL
+  ${CMAKE_CURRENT_SOURCE_DIR}/incM
+  ${CMAKE_CURRENT_SOURCE_DIR}/incO
+  ${CMAKE_CURRENT_SOURCE_DIR}/incF
+  ${CMAKE_CURRENT_SOURCE_DIR}/incI
+  ${CMAKE_CURRENT_SOURCE_DIR}/incE
+  ${CMAKE_CURRENT_SOURCE_DIR}/incD
+  ${CMAKE_CURRENT_SOURCE_DIR}/incH
+  ${CMAKE_CURRENT_SOURCE_DIR}/incN
+  ${CMAKE_CURRENT_SOURCE_DIR}/incA
+  ${CMAKE_CURRENT_SOURCE_DIR}/incB
+  ${CMAKE_CURRENT_SOURCE_DIR}/incJ
+  ${CMAKE_CURRENT_SOURCE_DIR}/incP
+  ${CMAKE_CURRENT_SOURCE_DIR}/incC
+  ${CMAKE_CURRENT_SOURCE_DIR}/incK
+)
+
+add_definitions("-DDO_NOTHING_JUST_A_FLAG=1")
+
+add_library(cmModLib++ SHARED cmMod.cpp)
+include(GenerateExportHeader)
+generate_export_header(cmModLib++)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/cmMod.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/cmMod.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/cmMod.cpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/cmMod.cpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,11 @@
+#include "cmMod.hpp"
+
+using namespace std;
+
+cmModClass::cmModClass(string foo) {
+  str = foo + " World";
+}
+
+string cmModClass::getStr() const {
+  return str;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incA/cmMod.hpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incA/cmMod.hpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incA/cmMod.hpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incA/cmMod.hpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+// cmMod.hpp (A)
+#pragma once
+
+#error "cmMod.hpp in incA must not be included"
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incB/cmMod.hpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incB/cmMod.hpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incB/cmMod.hpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incB/cmMod.hpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+// cmMod.hpp (B)
+#pragma once
+
+#error "cmMod.hpp in incB must not be included"
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incC/cmMod.hpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incC/cmMod.hpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incC/cmMod.hpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incC/cmMod.hpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+// cmMod.hpp (C)
+#pragma once
+
+#error "cmMod.hpp in incC must not be included"
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incD/cmMod.hpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incD/cmMod.hpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incD/cmMod.hpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incD/cmMod.hpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+// cmMod.hpp (D)
+#pragma once
+
+#error "cmMod.hpp in incD must not be included"
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incE/cmMod.hpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incE/cmMod.hpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incE/cmMod.hpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incE/cmMod.hpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+// cmMod.hpp (E)
+#pragma once
+
+#error "cmMod.hpp in incE must not be included"
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incF/cmMod.hpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incF/cmMod.hpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incF/cmMod.hpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incF/cmMod.hpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+// cmMod.hpp (F)
+#pragma once
+
+#error "cmMod.hpp in incF must not be included"
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incG/cmMod.hpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incG/cmMod.hpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incG/cmMod.hpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incG/cmMod.hpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,14 @@
+#pragma once
+
+#include "cmmodlib++_export.h"
+#include <string>
+
+class CMMODLIB___EXPORT cmModClass {
+private:
+  std::string str;
+
+public:
+  cmModClass(std::string foo);
+
+  std::string getStr() const;
+};
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incH/cmMod.hpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incH/cmMod.hpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incH/cmMod.hpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incH/cmMod.hpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+// cmMod.hpp (H)
+#pragma once
+
+#error "cmMod.hpp in incH must not be included"
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incI/cmMod.hpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incI/cmMod.hpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incI/cmMod.hpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incI/cmMod.hpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+// cmMod.hpp (I)
+#pragma once
+
+#error "cmMod.hpp in incI must not be included"
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incJ/cmMod.hpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incJ/cmMod.hpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incJ/cmMod.hpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incJ/cmMod.hpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+// cmMod.hpp (J)
+#pragma once
+
+#error "cmMod.hpp in incJ must not be included"
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incL/cmMod.hpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incL/cmMod.hpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incL/cmMod.hpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incL/cmMod.hpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+// cmMod.hpp (L)
+#pragma once
+
+#error "cmMod.hpp in incL must not be included"
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incM/cmMod.hpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incM/cmMod.hpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incM/cmMod.hpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incM/cmMod.hpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+// cmMod.hpp (M)
+#pragma once
+
+#error "cmMod.hpp in incM must not be included"
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incN/cmMod.hpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incN/cmMod.hpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incN/cmMod.hpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incN/cmMod.hpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+// cmMod.hpp (N)
+#pragma once
+
+#error "cmMod.hpp in incN must not be included"
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incO/cmMod.hpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incO/cmMod.hpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incO/cmMod.hpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incO/cmMod.hpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+// cmMod.hpp (O)
+#pragma once
+
+#error "cmMod.hpp in incO must not be included"
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incP/cmMod.hpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incP/cmMod.hpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/17 include path order/subprojects/cmMod/incP/cmMod.hpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/17 include path order/subprojects/cmMod/incP/cmMod.hpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+// cmMod.hpp (P)
+#pragma once
+
+#error "cmMod.hpp in incP must not be included"
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/18 skip include files/main.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/18 skip include files/main.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/18 skip include files/main.cpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/18 skip include files/main.cpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,10 @@
+#include <cmMod.hpp>
+#include <iostream>
+
+using namespace std;
+
+int main(void) {
+  cmModClass obj("Hello");
+  cout << obj.getStr() << endl;
+  return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/18 skip include files/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/18 skip include files/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/18 skip include files/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/18 skip include files/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,9 @@
+project('cmakeSubTest', ['c', 'cpp'])
+
+cm = import('cmake')
+
+sub_pro = cm.subproject('cmMod')
+sub_dep = sub_pro.dependency('cmModLib++')
+
+exe1 = executable('main', ['main.cpp'], dependencies: [sub_dep])
+test('test1', exe1)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/18 skip include files/subprojects/cmMod/CMakeLists.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/18 skip include files/subprojects/cmMod/CMakeLists.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/18 skip include files/subprojects/cmMod/CMakeLists.txt"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/18 skip include files/subprojects/cmMod/CMakeLists.txt"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,15 @@
+cmake_minimum_required(VERSION 3.5)
+
+project(cmMod)
+set (CMAKE_CXX_STANDARD 14)
+
+include_directories(${CMAKE_CURRENT_BINARY_DIR})
+
+add_definitions("-DDO_NOTHING_JUST_A_FLAG=1")
+
+set(SRCS
+  ${CMAKE_CURRENT_LIST_DIR}/cmMod.hpp
+  ${CMAKE_CURRENT_LIST_DIR}/cmMod.cpp
+)
+
+add_subdirectory(fakeInc)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/18 skip include files/subprojects/cmMod/cmMod.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/18 skip include files/subprojects/cmMod/cmMod.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/18 skip include files/subprojects/cmMod/cmMod.cpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/18 skip include files/subprojects/cmMod/cmMod.cpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,10 @@
+#include "cmMod.hpp"
+
+using namespace std;
+
+#define MESON_INCLUDE_IMPL
+#include "fakeInc/cmModInc1.cpp"
+#include "fakeInc/cmModInc2.cpp"
+#include "fakeInc/cmModInc3.cpp"
+#include "fakeInc/cmModInc4.cpp"
+#undef MESON_INCLUDE_IMPL
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/18 skip include files/subprojects/cmMod/cmMod.hpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/18 skip include files/subprojects/cmMod/cmMod.hpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/18 skip include files/subprojects/cmMod/cmMod.hpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/18 skip include files/subprojects/cmMod/cmMod.hpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,16 @@
+#pragma once
+
+#include "cmmodlib++_export.h"
+#include <string>
+
+class CMMODLIB___EXPORT cmModClass {
+private:
+  std::string str;
+
+  std::string getStr1() const;
+  std::string getStr2() const;
+public:
+  cmModClass(std::string foo);
+
+  std::string getStr() const;
+};
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/CMakeLists.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/CMakeLists.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/CMakeLists.txt"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/CMakeLists.txt"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,30 @@
+list(APPEND SRCS
+  cmModInc1.cpp
+  cmModInc2.cpp
+  cmModInc3.cpp
+  cmModInc4.cpp
+)
+
+set(SRC_A
+  cmModInc1.cpp
+  ${CMAKE_CURRENT_LIST_DIR}/cmModInc2.cpp
+)
+
+set_property(
+  SOURCE ${SRC_A}
+  PROPERTY
+    HEADER_FILE_ONLY ON
+)
+
+set_source_files_properties(
+  cmModInc3.cpp
+  ${CMAKE_CURRENT_LIST_DIR}/cmModInc4.cpp
+  PROPERTIES
+    LABELS "CMake;Lists;are;fun"
+    HEADER_FILE_ONLY ON
+)
+
+include_directories(${CMAKE_CURRENT_BINARY_DIR})
+add_library(cmModLib++ SHARED ${SRCS})
+include(GenerateExportHeader)
+generate_export_header(cmModLib++)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc1.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc1.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc1.cpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc1.cpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+#ifndef MESON_INCLUDE_IMPL
+#error "MESON_INCLUDE_IMPL is not defined"
+#endif // !MESON_INCLUDE_IMPL
+
+cmModClass::cmModClass(string foo) {
+  str = foo + " World";
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc2.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc2.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc2.cpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc2.cpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+#ifndef MESON_INCLUDE_IMPL
+#error "MESON_INCLUDE_IMPL is not defined"
+#endif // !MESON_INCLUDE_IMPL
+
+string cmModClass::getStr() const {
+  return getStr2();
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc3.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc3.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc3.cpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc3.cpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+#ifndef MESON_INCLUDE_IMPL
+#error "MESON_INCLUDE_IMPL is not defined"
+#endif // !MESON_INCLUDE_IMPL
+
+string cmModClass::getStr1() const {
+  return getStr2();
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc4.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc4.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc4.cpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc4.cpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+#ifndef MESON_INCLUDE_IMPL
+#error "MESON_INCLUDE_IMPL is not defined"
+#endif // !MESON_INCLUDE_IMPL
+
+string cmModClass::getStr2() const {
+  return str;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/19 advanced options/main.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/19 advanced options/main.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/19 advanced options/main.cpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/19 advanced options/main.cpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,18 @@
+#include <iostream>
+#include <cmMod.hpp>
+#include <cmTest.hpp>
+
+using namespace std;
+
+int main(void) {
+  cmModClass obj("Hello");
+  cout << obj.getStr() << endl;
+
+  int v1 = obj.getInt();
+  int v2 = getTestInt();
+  if (v1 != ((1 + v2) * 2)) {
+    cerr << "Number test failed" << endl;
+    return 1;
+  }
+  return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/19 advanced options/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/19 advanced options/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/19 advanced options/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/19 advanced options/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,29 @@
+project('cmake_set_opt', ['c', 'cpp'])
+
+comp = meson.get_compiler('cpp')
+if comp.get_argument_syntax() == 'msvc'
+  error('MESON_SKIP_TEST: MSVC is not supported because it does not support C++11')
+endif
+
+cm   = import('cmake')
+opts = cm.subproject_options()
+
+opts.add_cmake_defines({'SOME_CMAKE_VAR': 'something', 'SOME_OTHER_VAR': true})
+
+opts.set_override_option('cpp_std', 'c++11')                        # Global is C++11
+opts.set_override_option('cpp_std', 'c++14', target: 'cmModLib++')  # Override it with C++14 for cmModLib++
+
+opts.append_compile_args('cpp', '-DMESON_GLOBAL_FLAG=1')
+opts.append_compile_args('cpp', ['-DMESON_SPECIAL_FLAG1=1', ['-DMESON_SPECIAL_FLAG2=1']], target: 'cmModLib++')
+opts.append_compile_args('cpp', '-DMESON_MAGIC_INT=42',                                   target: 'cmModLib++')
+opts.append_compile_args('cpp', [[[['-DMESON_MAGIC_INT=20']]]],                           target: 'cmTestLib')
+
+opts.set_install(false)
+opts.set_install(true, target: 'testEXE')
+
+sp   = cm.subproject('cmOpts', options: opts)
+dep1 = sp.dependency('cmModLib++')
+dep2 = sp.dependency('cmTestLib')
+
+exe1 = executable('main', ['main.cpp'], dependencies: [dep1, dep2])
+test('test1', exe1)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/19 advanced options/subprojects/cmOpts/CMakeLists.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/19 advanced options/subprojects/cmOpts/CMakeLists.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/19 advanced options/subprojects/cmOpts/CMakeLists.txt"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/19 advanced options/subprojects/cmOpts/CMakeLists.txt"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,18 @@
+cmake_minimum_required(VERSION 3.7)
+
+project(CmOpts)
+
+set(CMAKE_CXX_STANDARD 98)
+set(CMAKE_CXX_STANDARD_REQUIRED ON)
+
+if(NOT "${SOME_CMAKE_VAR}" STREQUAL "something")
+  message(FATAL_ERROR "Setting the CMake var failed")
+endif()
+
+add_library(cmModLib++ STATIC cmMod.cpp)
+add_library(cmTestLib  STATIC cmTest.cpp)
+add_executable(testEXE main.cpp)
+
+target_link_libraries(testEXE cmModLib++)
+
+install(TARGETS cmTestLib ARCHIVE DESTINATION lib RUNTIME DESTINATION bin)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.cpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.cpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,31 @@
+#include "cmMod.hpp"
+
+using namespace std;
+
+#if __cplusplus < 201402L
+#error "At least C++14 is required"
+#endif
+
+#ifndef MESON_GLOBAL_FLAG
+#error "MESON_GLOBAL_FLAG was not set"
+#endif
+
+#ifndef MESON_SPECIAL_FLAG1
+#error "MESON_SPECIAL_FLAG1 was not set"
+#endif
+
+#ifndef MESON_SPECIAL_FLAG2
+#error "MESON_SPECIAL_FLAG2 was not set"
+#endif
+
+cmModClass::cmModClass(string foo) {
+  str = foo + " World";
+}
+
+string cmModClass::getStr() const {
+  return str;
+}
+
+int cmModClass::getInt() const {
+  return MESON_MAGIC_INT;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.hpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.hpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.hpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.hpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,14 @@
+#pragma once
+
+#include <string>
+
+class cmModClass {
+private:
+  std::string str;
+
+public:
+  cmModClass(std::string foo);
+
+  std::string getStr() const;
+  int getInt() const;
+};
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.cpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.cpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,25 @@
+#include "cmTest.hpp"
+
+#if __cplusplus < 201103L
+#error "At least C++11 is required"
+#endif
+
+#if __cplusplus >= 201402L
+#error "At most C++11 is required"
+#endif
+
+#ifndef MESON_GLOBAL_FLAG
+#error "MESON_GLOBAL_FLAG was not set"
+#endif
+
+#ifdef MESON_SPECIAL_FLAG1
+#error "MESON_SPECIAL_FLAG1 *was* set"
+#endif
+
+#ifdef MESON_SPECIAL_FLAG2
+#error "MESON_SPECIAL_FLAG2 *was* set"
+#endif
+
+int getTestInt() {
+  return MESON_MAGIC_INT;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.hpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.hpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.hpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.hpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,3 @@
+#pragma once
+
+int getTestInt();
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/19 advanced options/subprojects/cmOpts/main.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/19 advanced options/subprojects/cmOpts/main.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/19 advanced options/subprojects/cmOpts/main.cpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/19 advanced options/subprojects/cmOpts/main.cpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,10 @@
+#include <iostream>
+#include "cmMod.hpp"
+
+using namespace std;
+
+int main(void) {
+  cmModClass obj("Hello (LIB TEST)");
+  cout << obj.getStr() << endl;
+  return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/19 advanced options/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/19 advanced options/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/19 advanced options/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/19 advanced options/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,8 @@
+{
+  "installed": [
+    {"type": "exe", "file": "usr/bin/cm_testEXE"}
+  ],
+  "tools": {
+    "cmake": ">=3.11"
+  }
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/1 basic/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/1 basic/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/1 basic/meson.build"	2020-02-25 18:00:47.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/1 basic/meson.build"	2020-10-18 21:29:13.000000000 +0000
@@ -3,11 +3,12 @@
 cm = import('cmake')
 
 sub_pro = cm.subproject('cmMod')
-sub_dep = sub_pro.dependency('cmModLib++')
+sub_dep = sub_pro.dependency('cmModLib++', include_type: 'system')
 
 assert(sub_pro.found(), 'found() method reports not found, but should be found')
 assert(sub_pro.target_list() == ['cmModLib++'], 'There should be exactly one target')
 assert(sub_pro.target_type('cmModLib++') == 'shared_library', 'Target type should be shared_library')
+assert(sub_dep.include_type() == 'system', 'the include_type kwarg of dependency() works')
 
 exe1 = executable('main', ['main.cpp'], dependencies: [sub_dep])
 test('test1', exe1)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/1 basic/subprojects/cmMod/CMakeLists.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/1 basic/subprojects/cmMod/CMakeLists.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/1 basic/subprojects/cmMod/CMakeLists.txt"	2020-02-25 18:00:47.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/1 basic/subprojects/cmMod/CMakeLists.txt"	2020-10-26 11:18:42.000000000 +0000
@@ -8,5 +8,13 @@
 add_definitions("-DDO_NOTHING_JUST_A_FLAG=1")
 
 add_library(cmModLib++ SHARED cmMod.cpp)
+target_compile_definitions(cmModLib++ PRIVATE MESON_MAGIC_FLAG=21)
+target_compile_definitions(cmModLib++ INTERFACE MESON_MAGIC_FLAG=42)
+
+# Test PCH support
+if(${CMAKE_VERSION} VERSION_GREATER_EQUAL "3.16.0")
+  target_precompile_headers(cmModLib++ PRIVATE "cpp_pch.hpp")
+endif()
+
 include(GenerateExportHeader)
 generate_export_header(cmModLib++)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/1 basic/subprojects/cmMod/cmMod.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/1 basic/subprojects/cmMod/cmMod.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/1 basic/subprojects/cmMod/cmMod.cpp"	2019-06-16 18:54:18.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/1 basic/subprojects/cmMod/cmMod.cpp"	2020-08-15 16:27:05.000000000 +0000
@@ -2,6 +2,10 @@
 
 using namespace std;
 
+#if MESON_MAGIC_FLAG != 21
+#error "Invalid MESON_MAGIC_FLAG (private)"
+#endif
+
 cmModClass::cmModClass(string foo) {
   str = foo + " World";
 }
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/1 basic/subprojects/cmMod/cmMod.hpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/1 basic/subprojects/cmMod/cmMod.hpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/1 basic/subprojects/cmMod/cmMod.hpp"	2020-02-25 18:00:47.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/1 basic/subprojects/cmMod/cmMod.hpp"	2020-08-15 16:27:05.000000000 +0000
@@ -3,6 +3,10 @@
 #include "cmmodlib++_export.h"
 #include <string>
 
+#if MESON_MAGIC_FLAG != 42 && MESON_MAGIC_FLAG != 21
+#error "Invalid MESON_MAGIC_FLAG"
+#endif
+
 class CMMODLIB___EXPORT cmModClass {
 private:
   std::string str;
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/1 basic/subprojects/cmMod/cpp_pch.hpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/1 basic/subprojects/cmMod/cpp_pch.hpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/1 basic/subprojects/cmMod/cpp_pch.hpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/1 basic/subprojects/cmMod/cpp_pch.hpp"	2020-10-26 11:18:42.000000000 +0000
@@ -0,0 +1,2 @@
+#include <vector>
+#include <string>
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/20 cmake file/foolib.cmake.in" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/20 cmake file/foolib.cmake.in"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/20 cmake file/foolib.cmake.in"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/20 cmake file/foolib.cmake.in"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1 @@
+@foo@
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/20 cmake file/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/20 cmake file/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/20 cmake file/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/20 cmake file/meson.build"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,14 @@
+project(
+  'cmake config file',
+)
+
+cmake = import('cmake')
+
+cmake_conf = configuration_data()
+cmake_conf.set_quoted('foo', 'bar')
+cmake.configure_package_config_file(
+  name : 'foolib',
+  input : 'foolib.cmake.in',
+  install_dir : get_option('libdir') / 'cmake',
+  configuration : cmake_conf,
+)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/20 cmake file/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/20 cmake file/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/20 cmake file/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/20 cmake file/test.json"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,5 @@
+{
+  "installed": [
+    {"file": "usr/lib/cmake/foolibConfig.cmake", "type": "file"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/21 shared module/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/21 shared module/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/21 shared module/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/21 shared module/meson.build"	2020-09-10 16:39:24.000000000 +0000
@@ -0,0 +1,13 @@
+project('cmakeSharedModule', ['c', 'cpp'])
+
+cm = import('cmake')
+
+sub_pro = cm.subproject('cmMod')
+sub_dep = sub_pro.dependency('myMod')
+
+dl = meson.get_compiler('c').find_library('dl', required: false)
+
+l = shared_library('runtime', 'runtime.c')
+e = executable('prog', ['prog.c'], link_with: l, dependencies: [sub_dep, dl])
+m = sub_pro.target('myMod')
+test('test1', e, args : m)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/21 shared module/prog.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/21 shared module/prog.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/21 shared module/prog.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/21 shared module/prog.c"	2020-09-10 16:39:24.000000000 +0000
@@ -0,0 +1,108 @@
+
+#include <stdio.h>
+#include "module.h"
+
+#if SPECIAL_MAGIC_DEFINE != 42
+#error "SPECIAL_MAGIC_DEFINE is not defined"
+#endif
+
+int func_from_language_runtime(void);
+typedef int (*fptr) (void);
+
+#ifdef _WIN32
+
+#include <windows.h>
+
+static wchar_t*
+win32_get_last_error (void)
+{
+    wchar_t *msg = NULL;
+
+    FormatMessageW (FORMAT_MESSAGE_ALLOCATE_BUFFER
+                    | FORMAT_MESSAGE_IGNORE_INSERTS
+                    | FORMAT_MESSAGE_FROM_SYSTEM,
+                    NULL, GetLastError (), 0,
+                    (LPWSTR) &msg, 0, NULL);
+    return msg;
+}
+
+int main(int argc, char **argv)
+{
+    HINSTANCE handle;
+    fptr importedfunc;
+    int expected, actual;
+    int ret = 1;
+    if(argc==0) {};
+
+    handle = LoadLibraryA (argv[1]);
+    if (!handle) {
+        wchar_t *msg = win32_get_last_error ();
+        printf ("Could not open %s: %S\n", argv[1], msg);
+        goto nohandle;
+    }
+
+    importedfunc = (fptr) GetProcAddress (handle, "func");
+    if (importedfunc == NULL) {
+        wchar_t *msg = win32_get_last_error ();
+        printf ("Could not find 'func': %S\n", msg);
+        goto out;
+    }
+
+    actual = importedfunc ();
+    expected = func_from_language_runtime ();
+    if (actual != expected) {
+        printf ("Got %i instead of %i\n", actual, expected);
+        goto out;
+    }
+
+    ret = 0;
+out:
+    FreeLibrary (handle);
+nohandle:
+    return ret;
+}
+
+#else
+
+#include<dlfcn.h>
+#include<assert.h>
+
+int main(int argc, char **argv) {
+    void *dl;
+    fptr importedfunc;
+    int expected, actual;
+    char *error;
+    int ret = 1;
+    if(argc==0) {};
+
+    dlerror();
+    dl = dlopen(argv[1], RTLD_LAZY);
+    error = dlerror();
+    if(error) {
+        printf("Could not open %s: %s\n", argv[1], error);
+        goto nodl;
+    }
+
+    importedfunc = (fptr) dlsym(dl, "func");
+    if (importedfunc == NULL) {
+        printf ("Could not find 'func'\n");
+        goto out;
+    }
+
+    assert(importedfunc != func_from_language_runtime);
+
+    actual = (*importedfunc)();
+    expected = func_from_language_runtime ();
+    if (actual != expected) {
+        printf ("Got %i instead of %i\n", actual, expected);
+        goto out;
+    }
+
+    ret = 0;
+out:
+    dlclose(dl);
+nodl:
+    return ret;
+}
+
+#endif
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/21 shared module/runtime.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/21 shared module/runtime.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/21 shared module/runtime.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/21 shared module/runtime.c"	2020-09-10 16:39:24.000000000 +0000
@@ -0,0 +1,19 @@
+#if defined _WIN32 || defined __CYGWIN__
+  #define DLL_PUBLIC __declspec(dllexport)
+#else
+  #if defined __GNUC__
+    #define DLL_PUBLIC __attribute__ ((visibility("default")))
+  #else
+    #pragma message ("Compiler does not support symbol visibility.")
+    #define DLL_PUBLIC
+  #endif
+#endif
+
+/*
+ * This file pretends to be a language runtime that supports extension
+ * modules.
+ */
+
+int DLL_PUBLIC func_from_language_runtime(void) {
+    return 86;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/21 shared module/subprojects/cmMod/CMakeLists.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/21 shared module/subprojects/cmMod/CMakeLists.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/21 shared module/subprojects/cmMod/CMakeLists.txt"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/21 shared module/subprojects/cmMod/CMakeLists.txt"	2020-09-10 16:39:24.000000000 +0000
@@ -0,0 +1,7 @@
+cmake_minimum_required(VERSION 3.5)
+
+project(cmModule)
+
+include_directories("${CMAKE_CURRENT_SOURCE_DIR}/module")
+
+add_library(myMod MODULE "${CMAKE_CURRENT_SOURCE_DIR}/module/module.c")
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/21 shared module/subprojects/cmMod/module/module.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/21 shared module/subprojects/cmMod/module/module.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/21 shared module/subprojects/cmMod/module/module.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/21 shared module/subprojects/cmMod/module/module.c"	2020-09-10 16:39:24.000000000 +0000
@@ -0,0 +1,96 @@
+#if defined _WIN32 || defined __CYGWIN__
+  #define DLL_PUBLIC __declspec(dllexport)
+#else
+  #if defined __GNUC__
+    #define DLL_PUBLIC __attribute__ ((visibility("default")))
+  #else
+    #pragma message ("Compiler does not support symbol visibility.")
+    #define DLL_PUBLIC
+  #endif
+#endif
+
+#if defined(_WIN32) || defined(__CYGWIN__)
+
+#include <stdio.h>
+
+typedef int (*fptr) (void);
+
+#ifdef __CYGWIN__
+
+#include <dlfcn.h>
+
+fptr find_any_f (const char *name) {
+    return (fptr) dlsym(RTLD_DEFAULT, name);
+}
+#else /* _WIN32 */
+
+#include <windows.h>
+#include <tlhelp32.h>
+
+static wchar_t*
+win32_get_last_error (void)
+{
+    wchar_t *msg = NULL;
+
+    FormatMessageW (FORMAT_MESSAGE_ALLOCATE_BUFFER
+                    | FORMAT_MESSAGE_IGNORE_INSERTS
+                    | FORMAT_MESSAGE_FROM_SYSTEM,
+                    NULL, GetLastError (), 0,
+                    (LPWSTR) &msg, 0, NULL);
+    return msg;
+}
+
+/* Unlike Linux and OS X, when a library is loaded, all the symbols aren't
+ * loaded into a single namespace. You must fetch the symbol by iterating over
+ * all loaded modules. Code for finding the function from any of the loaded
+ * modules is taken from gmodule.c in glib */
+fptr find_any_f (const char *name) {
+    fptr f;
+    HANDLE snapshot;
+    MODULEENTRY32 me32;
+
+    snapshot = CreateToolhelp32Snapshot (TH32CS_SNAPMODULE, 0);
+    if (snapshot == (HANDLE) -1) {
+        wchar_t *msg = win32_get_last_error();
+        printf("Could not get snapshot: %S\n", msg);
+        return 0;
+    }
+
+    me32.dwSize = sizeof (me32);
+
+    f = NULL;
+    if (Module32First (snapshot, &me32)) {
+        do {
+            if ((f = (fptr) GetProcAddress (me32.hModule, name)) != NULL)
+                break;
+        } while (Module32Next (snapshot, &me32));
+    }
+
+    CloseHandle (snapshot);
+    return f;
+}
+#endif
+
+int DLL_PUBLIC func(void) {
+    fptr f;
+
+    f = find_any_f ("func_from_language_runtime");
+    if (f != NULL)
+        return f();
+    printf ("Could not find function\n");
+    return 1;
+}
+
+#else
+/*
+ * Shared modules often have references to symbols that are not defined
+ * at link time, but which will be provided from deps of the executable that
+ * dlopens it. We need to make sure that this works, i.e. that we do
+ * not pass -Wl,--no-undefined when linking modules.
+ */
+int func_from_language_runtime(void);
+
+int DLL_PUBLIC func(void) {
+    return func_from_language_runtime();
+}
+#endif
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/21 shared module/subprojects/cmMod/module/module.h" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/21 shared module/subprojects/cmMod/module/module.h"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/21 shared module/subprojects/cmMod/module/module.h"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/21 shared module/subprojects/cmMod/module/module.h"	2020-09-10 16:39:24.000000000 +0000
@@ -0,0 +1,3 @@
+#pragma once
+
+#define SPECIAL_MAGIC_DEFINE 42
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/22 cmake module/cmake_project/CMakeLists.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/22 cmake module/cmake_project/CMakeLists.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/22 cmake module/cmake_project/CMakeLists.txt"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/22 cmake module/cmake_project/CMakeLists.txt"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,4 @@
+cmake_minimum_required(VERSION 2.8)
+project(cmakeMeson C)
+
+find_package(cmakeModule REQUIRED)
\ No newline at end of file
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/22 cmake module/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/22 cmake module/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/22 cmake module/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/22 cmake module/meson.build"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,31 @@
+project('cmakeModule', 'c', version: '1.0.0')
+
+if build_machine.system() == 'cygwin'
+  error('MESON_SKIP_TEST CMake is broken on Cygwin.')
+endif
+
+cmake_bin = find_program('cmake', required: false)
+if not cmake_bin.found()
+  error('MESON_SKIP_TEST CMake not installed.')
+endif
+
+cc = meson.get_compiler('c')
+if cc.get_id() == 'clang-cl' and meson.backend() == 'ninja' and build_machine.system() == 'windows'
+    error('MESON_SKIP_TEST CMake installation not operational for vs2017 clangclx64ninja')
+endif
+
+cmake = import('cmake')
+
+cmake.write_basic_package_version_file(version: '0.0.1',
+   name: 'cmakeModule',
+)
+
+conf = configuration_data()
+conf.set('MYVAR', 'my variable value')
+conf.set_quoted('MYQUOTEDVAR', 'my quoted variable value')
+
+cmake.configure_package_config_file(
+    input: 'projectConfig.cmake.in',
+    name: 'cmakeModule',
+    configuration: conf,
+)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/22 cmake module/projectConfig.cmake.in" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/22 cmake module/projectConfig.cmake.in"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/22 cmake module/projectConfig.cmake.in"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/22 cmake module/projectConfig.cmake.in"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,4 @@
+@PACKAGE_INIT@
+
+set(MYVAR "@MYVAR@")
+set(MYQUOTEDVAR @MYQUOTEDVAR@)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/22 cmake module/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/22 cmake module/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/22 cmake module/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/22 cmake module/test.json"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,6 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/lib/cmake/cmakeModule/cmakeModuleConfig.cmake"},
+    {"type": "file", "file": "usr/lib/cmake/cmakeModule/cmakeModuleConfigVersion.cmake"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/23 cmake toolchain/CMakeToolchain.cmake" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/23 cmake toolchain/CMakeToolchain.cmake"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/23 cmake toolchain/CMakeToolchain.cmake"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/23 cmake toolchain/CMakeToolchain.cmake"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1 @@
+set(MESON_TEST_VAR2 VAR2)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/23 cmake toolchain/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/23 cmake toolchain/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/23 cmake toolchain/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/23 cmake toolchain/meson.build"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,9 @@
+project('cmake toolchain test', ['c', 'cpp'])
+
+if meson.is_cross_build()
+  error('MESON_SKIP_TEST: skip this on cross builds')
+endif
+
+cm = import('cmake')
+
+sub_pro = cm.subproject('cmMod')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/23 cmake toolchain/nativefile.ini.in" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/23 cmake toolchain/nativefile.ini.in"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/23 cmake toolchain/nativefile.ini.in"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/23 cmake toolchain/nativefile.ini.in"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,8 @@
+[properties]
+
+cmake_toolchain_file = '@MESON_TEST_ROOT@/CMakeToolchain.cmake'
+
+[cmake]
+
+MESON_TEST_VAR1 = 'VAR1 space'
+MESON_TEST_VAR2 = 'VAR2 error'
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/23 cmake toolchain/subprojects/cmMod/CMakeLists.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/23 cmake toolchain/subprojects/cmMod/CMakeLists.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/23 cmake toolchain/subprojects/cmMod/CMakeLists.txt"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/23 cmake toolchain/subprojects/cmMod/CMakeLists.txt"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,11 @@
+cmake_minimum_required(VERSION 3.5)
+
+project(cmMod)
+
+if(NOT "${MESON_TEST_VAR1}" STREQUAL "VAR1 space")
+  message(FATAL_ERROR "MESON_TEST_VAR1 -- '${MESON_TEST_VAR1}' != 'VAR1 space'")
+endif()
+
+if(NOT "${MESON_TEST_VAR2}" STREQUAL "VAR2")
+  message(FATAL_ERROR "MESON_TEST_VAR2 -- '${MESON_TEST_VAR2}' != 'VAR2'")
+endif()
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/24 mixing languages/main.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/24 mixing languages/main.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/24 mixing languages/main.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/24 mixing languages/main.c"	2021-01-05 23:18:35.000000000 +0000
@@ -0,0 +1,5 @@
+#include <cmTest.h>
+
+int main(void) {
+  return doStuff();
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/24 mixing languages/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/24 mixing languages/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/24 mixing languages/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/24 mixing languages/meson.build"	2021-01-05 23:18:35.000000000 +0000
@@ -0,0 +1,13 @@
+project('CMake mix', ['c', 'cpp'])
+
+if not add_languages('objc', required : false)
+  error('MESON_SKIP_TEST: No ObjC compiler')
+endif
+
+cm = import('cmake')
+
+sub_pro = cm.subproject('cmTest')
+sub_dep = sub_pro.dependency('cmTest', include_type: 'system')
+
+exe1 = executable('exe1', ['main.c'], dependencies: [sub_dep])
+test('test1', exe1)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/24 mixing languages/subprojects/cmTest/CMakeLists.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/24 mixing languages/subprojects/cmTest/CMakeLists.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/24 mixing languages/subprojects/cmTest/CMakeLists.txt"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/24 mixing languages/subprojects/cmTest/CMakeLists.txt"	2021-01-05 23:18:35.000000000 +0000
@@ -0,0 +1,8 @@
+cmake_minimum_required(VERSION 3.5)
+
+project(cmTest)
+
+include_directories(${CMAKE_CURRENT_BINARY_DIR})
+
+add_library(cmTest STATIC cmTest.c cmTest.m)
+target_compile_definitions(cmTest PUBLIC SOME_MAGIC_DEFINE=42)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/24 mixing languages/subprojects/cmTest/cmTest.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/24 mixing languages/subprojects/cmTest/cmTest.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/24 mixing languages/subprojects/cmTest/cmTest.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/24 mixing languages/subprojects/cmTest/cmTest.c"	2021-01-05 23:18:35.000000000 +0000
@@ -0,0 +1,13 @@
+#include "cmTest.h"
+#include <stdio.h>
+
+#if SOME_MAGIC_DEFINE != 42
+#error "SOME_MAGIC_DEFINE != 42"
+#endif
+
+int foo(int x);
+
+int doStuff(void) {
+  printf("Hello World\n");
+  return foo(42);
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/24 mixing languages/subprojects/cmTest/cmTest.h" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/24 mixing languages/subprojects/cmTest/cmTest.h"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/24 mixing languages/subprojects/cmTest/cmTest.h"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/24 mixing languages/subprojects/cmTest/cmTest.h"	2021-01-05 23:18:35.000000000 +0000
@@ -0,0 +1,3 @@
+#pragma once
+
+int doStuff(void);
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/24 mixing languages/subprojects/cmTest/cmTest.m" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/24 mixing languages/subprojects/cmTest/cmTest.m"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/24 mixing languages/subprojects/cmTest/cmTest.m"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/24 mixing languages/subprojects/cmTest/cmTest.m"	2021-01-05 23:18:35.000000000 +0000
@@ -0,0 +1,7 @@
+#if SOME_MAGIC_DEFINE != 42
+#error "SOME_MAGIC_DEFINE != 42"
+#endif
+
+int foo(int x) {
+  return 42 - x;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/2 advanced/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/2 advanced/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/2 advanced/installed_files.txt"	2019-12-04 18:45:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/2 advanced/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-usr/?lib/libcm_cmModLib?so
-?cygwin:usr/lib/libcm_cmModLib?implib
-?!cygwin:usr/bin/libcm_cmModLib?implib
-usr/bin/cm_testEXE?exe
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/2 advanced/subprojects/cmMod/CMakeLists.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/2 advanced/subprojects/cmMod/CMakeLists.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/2 advanced/subprojects/cmMod/CMakeLists.txt"	2019-12-29 22:47:27.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/2 advanced/subprojects/cmMod/CMakeLists.txt"	2021-01-04 10:54:09.000000000 +0000
@@ -20,9 +20,19 @@
 add_executable(testEXE main.cpp)
 
 target_link_libraries(cmModLib       ZLIB::ZLIB)
-target_link_libraries(cmModLibStatic ZLIB::ZLIB)
+target_link_libraries(cmModLibStatic ;ZLIB::ZLIB;)
 target_link_libraries(testEXE cmModLib)
 
+if(APPLE)
+  find_library(COREFOUNDATION_FRAMEWORK "CoreFoundation")
+  if(NOT COREFOUNDATION_FRAMEWORK)
+    message(FATAL_ERROR "CoreFoundation framework not found")
+  endif()
+
+  target_link_libraries(cmModLibStatic "${COREFOUNDATION_FRAMEWORK}")
+  target_compile_definitions(cmModLibStatic PUBLIC USE_FRAMEWORK)
+endif()
+
 target_compile_definitions(cmModLibStatic PUBLIC CMMODLIB_STATIC_DEFINE)
 
-install(TARGETS cmModLib testEXE LIBRARY DESTINATION lib RUNTIME DESTINATION bin)
+install(TARGETS testEXE LIBRARY DESTINATION lib RUNTIME DESTINATION bin)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/2 advanced/subprojects/cmMod/lib/cmMod.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/2 advanced/subprojects/cmMod/lib/cmMod.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/2 advanced/subprojects/cmMod/lib/cmMod.cpp"	2019-06-16 18:54:18.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/2 advanced/subprojects/cmMod/lib/cmMod.cpp"	2021-01-04 10:54:09.000000000 +0000
@@ -6,10 +6,19 @@
 #error "Invalid value of CONFIG_OPT"
 #endif
 
+#ifdef USE_FRAMEWORK
+#include <CoreFoundation/CoreFoundation.h>
+#endif
+
 using namespace std;
 
 cmModClass::cmModClass(string foo) {
   str = foo + " World " + zlibVersion();
+
+#ifdef USE_FRAMEWORK
+  CFStringRef ref = CFStringCreateWithCString(NULL, str.c_str(), kCFStringEncodingUTF8);
+  CFRelease(ref);
+#endif
 }
 
 string cmModClass::getStr() const {
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/2 advanced/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/2 advanced/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/2 advanced/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/2 advanced/test.json"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,8 @@
+{
+  "installed": [
+    {"type": "exe", "file": "usr/bin/cm_testEXE"}
+  ],
+  "tools": {
+    "cmake": ">=3.11"
+  }
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/3 advanced no dep/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/3 advanced no dep/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/3 advanced no dep/installed_files.txt"	2019-12-04 18:45:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/3 advanced no dep/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,6 +0,0 @@
-usr/?lib/libcm_cmModLib?so
-?cygwin:usr/lib/libcm_cmModLib?implib
-?!cygwin:usr/bin/libcm_cmModLib?implib
-?msvc:usr/bin/cm_cmModLib.pdb
-?msvc:usr/bin/cm_testEXE.pdb
-usr/bin/cm_testEXE?exe
\ No newline at end of file
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/3 advanced no dep/subprojects/cmMod/CMakeLists.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/3 advanced no dep/subprojects/cmMod/CMakeLists.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/3 advanced no dep/subprojects/cmMod/CMakeLists.txt"	2019-12-29 22:47:27.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/3 advanced no dep/subprojects/cmMod/CMakeLists.txt"	2020-10-18 21:29:13.000000000 +0000
@@ -16,9 +16,11 @@
 set_target_properties(cmModLib PROPERTIES VERSION 1.0.1)
 
 add_executable(testEXE main.cpp)
+add_executable(testEXE2 main.cpp)
 
 target_link_libraries(testEXE cmModLib)
+target_link_libraries(testEXE2 cmModLib)
 
 target_compile_definitions(cmModLibStatic PUBLIC CMMODLIB_STATIC_DEFINE)
 
-install(TARGETS cmModLib testEXE LIBRARY DESTINATION lib RUNTIME DESTINATION bin)
+install(TARGETS testEXE testEXE2 LIBRARY DESTINATION lib RUNTIME DESTINATION bin)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/3 advanced no dep/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/3 advanced no dep/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/3 advanced no dep/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/3 advanced no dep/test.json"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,11 @@
+{
+  "installed": [
+    {"type": "pdb", "file": "usr/bin/cm_testEXE"},
+    {"type": "exe", "file": "usr/bin/cm_testEXE"},
+    {"type": "pdb", "file": "usr/bin/cm_testEXE2"},
+    {"type": "exe", "file": "usr/bin/cm_testEXE2"}
+  ],
+  "tools": {
+    "cmake": ">=3.11"
+  }
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/4 code gen/subprojects/cmCodeGen/CMakeLists.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/4 code gen/subprojects/cmCodeGen/CMakeLists.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/4 code gen/subprojects/cmCodeGen/CMakeLists.txt"	2019-06-16 18:54:18.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/4 code gen/subprojects/cmCodeGen/CMakeLists.txt"	2020-10-18 21:29:13.000000000 +0000
@@ -1,5 +1,6 @@
 cmake_minimum_required(VERSION 3.7)
 
+project(CMCodeGen)
 set(CMAKE_CXX_STANDARD 14)
 
 add_executable(genA main.cpp)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/7 cmake options/subprojects/cmOpts/CMakeLists.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/7 cmake options/subprojects/cmOpts/CMakeLists.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/7 cmake options/subprojects/cmOpts/CMakeLists.txt"	2019-06-16 18:54:18.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/7 cmake options/subprojects/cmOpts/CMakeLists.txt"	2020-08-15 16:27:05.000000000 +0000
@@ -1,5 +1,10 @@
 cmake_minimum_required(VERSION 3.7)
+project(testPro)
 
 if(NOT "${SOME_CMAKE_VAR}" STREQUAL "something")
   message(FATAL_ERROR "Setting the CMake var failed")
 endif()
+
+if(NOT "${CMAKE_PREFIX_PATH}" STREQUAL "val1;val2")
+  message(FATAL_ERROR "Setting the CMAKE_PREFIX_PATH failed '${CMAKE_PREFIX_PATH}'")
+endif()
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/7 cmake options/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/7 cmake options/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/7 cmake options/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/7 cmake options/test.json"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,12 @@
+{
+  "matrix": {
+    "options": {
+      "cmake_prefix_path": [
+        { "val": ["val1", "val2"] }
+      ],
+      "build.cmake_prefix_path": [
+        { "val": ["val1", "val2"] }
+      ]
+    }
+  }
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/8 custom command/subprojects/cmMod/CMakeLists.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/8 custom command/subprojects/cmMod/CMakeLists.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/8 custom command/subprojects/cmMod/CMakeLists.txt"	2020-02-25 18:00:47.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/8 custom command/subprojects/cmMod/CMakeLists.txt"	2020-09-10 16:39:24.000000000 +0000
@@ -7,6 +7,9 @@
 include_directories(${CMAKE_CURRENT_BINARY_DIR})
 add_definitions("-DDO_NOTHING_JUST_A_FLAG=1")
 
+add_executable(genMain genMain.cpp)
+add_custom_command(OUTPUT main.cpp COMMAND genMain > main.cpp)
+
 add_executable(gen main.cpp)
 add_executable(mycpy cp.cpp)
 
@@ -16,9 +19,15 @@
   COMMAND gen ARGS genTest
 )
 
+set(CMD_PART)
+list(APPEND CMD_PART COMMAND mycpy cpyBase.cpp.in     cpyBase.cpp.in.gen)
+list(APPEND CMD_PART COMMAND mycpy cpyBase.cpp.in.gen cpyBase.cpp.out)
+list(APPEND CMD_PART COMMAND mycpy cpyBase.cpp.out    cpyBase.cpp.something)
+
 add_custom_command(
   OUTPUT cpyBase.cpp
   COMMAND mycpy "${CMAKE_CURRENT_SOURCE_DIR}/cpyBase.cpp.am" cpyBase.cpp.in
+  ${CMD_PART}
   COMMAND mycpy cpyBase.cpp.in                               cpyBase.cpp.something
   COMMAND mycpy cpyBase.cpp.something                        cpyBase.cpp.IAmRunningOutOfIdeas
   COMMAND mycpy cpyBase.cpp.IAmRunningOutOfIdeas             cpyBase.cpp
@@ -111,7 +120,14 @@
 
 add_subdirectory(cpyTest ccppyyTTeesstt)
 
-add_library(cmModLib SHARED cmMod.cpp genTest.cpp cpyBase.cpp cpyBase.hpp cpyNext.cpp cpyNext.hpp cpyTest.cpp cpyTest.hpp cpyTest2.hpp cpyTest3.hpp)
+add_custom_command(
+  OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/cpyTest/some/directory/cpyTest5.hpp"
+  COMMAND ${CMAKE_COMMAND} -E copy "${CMAKE_CURRENT_SOURCE_DIR}/cpyTest/cpyTest5.hpp" "${CMAKE_CURRENT_BINARY_DIR}/cpyTest/some/directory/cpyTest5.hpp"
+  DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/cpyTest/cpyTest5.hpp"
+)
+include_directories("${CMAKE_CURRENT_BINARY_DIR}/cpyTest/some")
+
+add_library(cmModLib SHARED cmMod.cpp genTest.cpp cpyBase.cpp cpyBase.hpp cpyNext.cpp cpyNext.hpp cpyTest.cpp cpyTest.hpp cpyTest2.hpp cpyTest3.hpp cpyTest/some/directory/cpyTest5.hpp)
 include(GenerateExportHeader)
 generate_export_header(cmModLib)
 
@@ -125,5 +141,19 @@
 )
 add_custom_target(macro_name_cmd COMMAND macro_name)
 
+if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
+  message(STATUS "Running the -include test case on macro_name")
+  add_custom_command(
+    OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/cpyInc.hpp"
+    COMMAND mycpy "${CMAKE_CURRENT_SOURCE_DIR}/cpyInc.hpp.am" "${CMAKE_CURRENT_BINARY_DIR}/cpyInc.hpp"
+    DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/cpyInc.hpp.am"
+  )
+  target_compile_options(macro_name PUBLIC -DTEST_CMD_INCLUDE -include "${CMAKE_CURRENT_BINARY_DIR}/cpyInc.hpp")
+endif()
+
+# Only executable targets are replaced in the command
+# all other target names are kept as is
+add_custom_target(clang-format COMMAND clang-format -i cmMod.cpp)
+
 add_dependencies(cmModLib args_test_cmd tgtCpyTest4)
 add_dependencies(args_test_cmd macro_name_cmd;gen;mycpy)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/8 custom command/subprojects/cmMod/cpyInc.hpp.am" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/8 custom command/subprojects/cmMod/cpyInc.hpp.am"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/8 custom command/subprojects/cmMod/cpyInc.hpp.am"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/8 custom command/subprojects/cmMod/cpyInc.hpp.am"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,3 @@
+#pragma once
+
+#define CPY_INC_WAS_INCLUDED 1
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest5.hpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest5.hpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest5.hpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest5.hpp"	2020-09-10 16:39:24.000000000 +0000
@@ -0,0 +1,3 @@
+#pragma once
+
+#define CPY_TEST_STR_5 " test"
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest.cpp"	2020-02-25 18:00:47.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest.cpp"	2020-09-10 16:39:24.000000000 +0000
@@ -2,7 +2,8 @@
 #include "cpyTest2.hpp"
 #include "cpyTest3.hpp"
 #include "ccppyyTTeesstt/cpyTest4.hpp"
+#include "directory/cpyTest5.hpp"
 
 std::string getStrCpyTest() {
-  return CPY_TEST_STR_2 CPY_TEST_STR_3 CPY_TEST_STR_4;
+  return CPY_TEST_STR_2 CPY_TEST_STR_3 CPY_TEST_STR_4 CPY_TEST_STR_5;
 }
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/8 custom command/subprojects/cmMod/genMain.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/8 custom command/subprojects/cmMod/genMain.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/8 custom command/subprojects/cmMod/genMain.cpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/8 custom command/subprojects/cmMod/genMain.cpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,40 @@
+#include <iostream>
+
+using namespace std;
+
+int main() {
+  cout << R"asd(
+#include <iostream>
+#include <fstream>
+
+using namespace std;
+
+int main(int argc, const char *argv[]) {
+  if(argc < 2) {
+    cerr << argv[0] << " requires an output file!" << endl;
+    return 1;
+  }
+  ofstream out1(string(argv[1]) + ".hpp");
+  ofstream out2(string(argv[1]) + ".cpp");
+  out1 << R"(
+#pragma once
+
+#include <string>
+
+std::string getStr();
+)";
+
+  out2 << R"(
+#include ")" << argv[1] << R"(.hpp"
+
+std::string getStr() {
+  return "Hello World";
+}
+)";
+
+  return 0;
+}
+)asd";
+
+  return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/8 custom command/subprojects/cmMod/macro_name.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/8 custom command/subprojects/cmMod/macro_name.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/8 custom command/subprojects/cmMod/macro_name.cpp"	2019-12-04 18:45:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/8 custom command/subprojects/cmMod/macro_name.cpp"	2020-08-15 16:27:05.000000000 +0000
@@ -5,6 +5,12 @@
 
 using namespace std;
 
+#ifdef TEST_CMD_INCLUDE
+#if CPY_INC_WAS_INCLUDED != 1
+#error "cpyInc.hpp was not included"
+#endif
+#endif
+
 int main() {
   this_thread::sleep_for(chrono::seconds(1));
   ofstream out1("macro_name.txt");
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/8 custom command/subprojects/cmMod/main.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/8 custom command/subprojects/cmMod/main.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cmake/8 custom command/subprojects/cmMod/main.cpp"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cmake/8 custom command/subprojects/cmMod/main.cpp"	1970-01-01 00:00:00.000000000 +0000
@@ -1,30 +0,0 @@
-#include <iostream>
-#include <fstream>
-
-using namespace std;
-
-int main(int argc, const char *argv[]) {
-  if(argc < 2) {
-    cerr << argv[0] << " requires an output file!" << endl;
-    return 1;
-  }
-  ofstream out1(string(argv[1]) + ".hpp");
-  ofstream out2(string(argv[1]) + ".cpp");
-  out1 << R"(
-#pragma once
-
-#include <string>
-
-std::string getStr();
-)";
-
-  out2 << R"(
-#include ")" << argv[1] << R"(.hpp"
-
-std::string getStr() {
-  return "Hello World";
-}
-)";
-
-  return 0;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/102 subproject subdir/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/102 subproject subdir/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/102 subproject subdir/meson.build"	2020-01-07 21:07:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/102 subproject subdir/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -4,3 +4,64 @@
 
 exe = executable('prog', 'prog.c', dependencies: libSub)
 test('subproject subdir', exe)
+
+# Verify the subproject has placed dependency override.
+dependency('sub-1.0')
+
+# Verify we can now take 'sub' dependency without fallback, but only version 1.0.
+dependency('sub')
+d = dependency('sub', version : '>=2.0', required : false)
+assert(not d.found(), 'version should not match')
+
+# Verify that not-found does not get cached; we can still fallback afterward.
+dependency('sub2', required : false)
+d = dependency('sub2', fallback: ['sub', 'libSub'])
+assert(d.found(), 'Should fallback even if a previous call returned not-found')
+
+# Verify we can get a fallback dependency without specifying the variable name,
+# because the subproject overrode 'sub-novar'.
+dependency('sub-novar', fallback : 'sub_novar')
+
+# Verify a subproject can force a dependency to be not-found
+d = dependency('sub-notfound', fallback : 'sub_novar', required : false)
+assert(not d.found(), 'Dependency should be not-found')
+
+# Verify that implicit fallback works because subprojects/sub_implicit directory exists
+d = dependency('sub_implicit')
+assert(d.found(), 'Should implicitly fallback')
+
+# Verify that implicit fallback works because sub_implicit.wrap has
+# `dependency_names=sub_implicit_provide1` and the subproject overrides sub_implicit_provide1.
+d = dependency('sub_implicit_provide1')
+assert(d.found(), 'Should implicitly fallback')
+
+# Verify that implicit fallback works because sub_implicit.wrap has
+# `sub_implicit_provide2=sub_implicit_provide2_dep` and does not override
+# sub_implicit_provide2.
+d = dependency('sub_implicit_provide2')
+assert(d.found(), 'Should implicitly fallback')
+
+# sub_implicit.wrap provides glib-2.0 and we already configured that subproject,
+# so we must not return the system dependency here. Using glib-2.0 here because
+# some CI runners have it installed.
+d = dependency('glib-2.0', required : false)
+assert(d.found())
+assert(d.type_name() == 'internal')
+
+# sub_implicit.wrap provides gobject-2.0 and we already configured that subproject,
+# so we must not return the system dependency here. But since the subproject did
+# not override that dependency and it's not required, not-found should be returned.
+# Using gobject-2.0 here because some CI runners have it installed.
+d = dependency('gobject-2.0', required : false)
+assert(not d.found())
+
+# Verify that implicit fallback works because subprojects/sub_implicit/subprojects/subsub
+# directory exists.
+d = dependency('subsub')
+assert(d.found(), 'Should be able to fallback to sub-subproject')
+
+# Verify that implicit fallback works because
+# subprojects/sub_implicit/subprojects/subsub/subprojects/subsubsub.wrap
+# file exists.
+d = dependency('subsubsub')
+assert(d.found(), 'Should be able to fallback to sub-sub-subproject')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/102 subproject subdir/subprojects/sub/lib/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/102 subproject subdir/subprojects/sub/lib/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/102 subproject subdir/subprojects/sub/lib/meson.build"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/102 subproject subdir/subprojects/sub/lib/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -1,2 +1,3 @@
 lib = static_library('sub', 'sub.c')
 libSub = declare_dependency(include_directories: include_directories('.'), link_with: lib)
+meson.override_dependency('sub-1.0', libSub)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/102 subproject subdir/subprojects/sub/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/102 subproject subdir/subprojects/sub/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/102 subproject subdir/subprojects/sub/meson.build"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/102 subproject subdir/subprojects/sub/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -1,2 +1,2 @@
-project('sub', 'c')
+project('sub', 'c', version : '1.0')
 subdir('lib')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/102 subproject subdir/subprojects/sub_implicit/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/102 subproject subdir/subprojects/sub_implicit/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/102 subproject subdir/subprojects/sub_implicit/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/102 subproject subdir/subprojects/sub_implicit/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,11 @@
+project('sub_implicit', 'c', version : '1.0')
+
+dep = declare_dependency()
+meson.override_dependency('sub_implicit', dep)
+meson.override_dependency('sub_implicit_provide1', dep)
+
+# This one is not overridden, but the wrap file specifies the variable name to use.
+sub_implicit_provide2_dep = dep
+
+# This one is not overridden, but the wrap file specifies the variable name to use.
+glib_dep = dep
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/102 subproject subdir/subprojects/sub_implicit/subprojects/subsub/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/102 subproject subdir/subprojects/sub_implicit/subprojects/subsub/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/102 subproject subdir/subprojects/sub_implicit/subprojects/subsub/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/102 subproject subdir/subprojects/sub_implicit/subprojects/subsub/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,3 @@
+project('subsub')
+
+meson.override_dependency('subsub', declare_dependency())
Binary files /tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/102 subproject subdir/subprojects/sub_implicit/subprojects/subsub/subprojects/packagefiles/subsubsub-1.0.zip and /tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/102 subproject subdir/subprojects/sub_implicit/subprojects/subsub/subprojects/packagefiles/subsubsub-1.0.zip differ
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/102 subproject subdir/subprojects/sub_implicit/subprojects/subsub/subprojects/subsubsub.wrap" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/102 subproject subdir/subprojects/sub_implicit/subprojects/subsub/subprojects/subsubsub.wrap"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/102 subproject subdir/subprojects/sub_implicit/subprojects/subsub/subprojects/subsubsub.wrap"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/102 subproject subdir/subprojects/sub_implicit/subprojects/subsub/subprojects/subsubsub.wrap"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,4 @@
+[wrap-file]
+directory = subsubsub-1.0
+source_filename = subsubsub-1.0.zip
+source_hash = c073a96b7251937e53216578f6f03d91b84816618a0f1ce3ecfb867beddf1498
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/102 subproject subdir/subprojects/sub_implicit.wrap" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/102 subproject subdir/subprojects/sub_implicit.wrap"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/102 subproject subdir/subprojects/sub_implicit.wrap"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/102 subproject subdir/subprojects/sub_implicit.wrap"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,6 @@
+[wrap-file]
+
+[provide]
+glib-2.0 = glib_dep
+dependency_names = sub_implicit_provide1, gobject-2.0
+sub_implicit_provide2 = sub_implicit_provide2_dep
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/102 subproject subdir/subprojects/sub_novar/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/102 subproject subdir/subprojects/sub_novar/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/102 subproject subdir/subprojects/sub_novar/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/102 subproject subdir/subprojects/sub_novar/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,4 @@
+project('sub-novar', 'c', version : '1.0')
+
+meson.override_dependency('sub-novar', declare_dependency())
+meson.override_dependency('sub-notfound', dependency('', required : false))
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/104 postconf with args/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/104 postconf with args/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/104 postconf with args/meson.build"	2020-01-07 21:07:51.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/104 postconf with args/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -1,5 +1,10 @@
 project('postconf script', 'c')
 
-meson.add_postconf_script('postconf.py', '5', '33')
+conf = configure_file(
+  configuration : configuration_data(),
+  output : 'out'
+)
+
+meson.add_postconf_script(find_program('postconf.py'), '5', '33', conf)
 
 test('post', executable('prog', 'prog.c'))
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/105 testframework options/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/105 testframework options/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/105 testframework options/meson.build"	2020-01-07 21:07:53.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/105 testframework options/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -1,3 +1,6 @@
+# normally run only from run_tests.py or run_project_tests.py
+# otherwise configure manually with something like:
+# meson build '-Dtestoption=A string with spaces' -Dother_one=true -Dcombo_opt=one -Dprefix=/usr -Dlibdir=lib -Dbackend=ninja -Dwerror=True
 project('options', 'c')
 
 assert(get_option('testoption') == 'A string with spaces', 'Incorrect value for testoption option.')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/105 testframework options/test_args.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/105 testframework options/test_args.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/105 testframework options/test_args.txt"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/105 testframework options/test_args.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-# This file is not read by meson itself, but by the test framework.
-# It is not possible to pass arguments to meson from a file.
-['--werror', '-D', 'testoption=A string with spaces', '-D', 'other_one=true', \
- '-D', 'combo_opt=one']
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/105 testframework options/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/105 testframework options/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/105 testframework options/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/105 testframework options/test.json"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,10 @@
+{
+  "matrix": {
+    "options": {
+      "testoption": [{ "val": "A string with spaces" }],
+      "other_one":  [{ "val": "true"                 }],
+      "combo_opt":  [{ "val": "one"                  }],
+      "werror":     [{ "val": "true"                 }]
+    }
+  }
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/109 generatorcustom/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/109 generatorcustom/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/109 generatorcustom/meson.build"	2020-01-07 21:07:56.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/109 generatorcustom/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -14,5 +14,7 @@
     output : 'alltogether.h',
     command : [catter, '@INPUT@', '@OUTPUT@'])
 
-executable('proggie', 'main.c', allinone)
+proggie = executable('proggie', 'main.c', allinone)
+
+test('proggie', proggie)
 
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/10 man install/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/10 man install/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/10 man install/installed_files.txt"	2018-12-09 14:27:16.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/10 man install/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,5 +0,0 @@
-usr/share/man/man1/foo.1
-usr/share/man/man2/bar.2
-usr/share/man/man1/vanishing.1
-usr/share/man/man2/vanishing.2
-usr/share/man/man1/baz.1
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/10 man install/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/10 man install/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/10 man install/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/10 man install/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,9 @@
+{
+  "installed": [
+    { "type": "file", "file": "usr/share/man/man1/foo.1"       },
+    { "type": "file", "file": "usr/share/man/man2/bar.2"       },
+    { "type": "file", "file": "usr/share/man/man1/vanishing.1" },
+    { "type": "file", "file": "usr/share/man/man2/vanishing.2" },
+    { "type": "file", "file": "usr/share/man/man1/baz.1"       }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/113 custom target capture/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/113 custom target capture/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/113 custom target capture/installed_files.txt"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/113 custom target capture/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1 +0,0 @@
-usr/subdir/data.dat
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/113 custom target capture/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/113 custom target capture/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/113 custom target capture/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/113 custom target capture/test.json"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,5 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/subdir/data.dat"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/121 shared module/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/121 shared module/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/121 shared module/installed_files.txt"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/121 shared module/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-usr/lib/modules/libnosyms?so
-usr/lib/modules/libnosyms?implibempty
-?msvc:usr/lib/modules/nosyms.pdb
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/121 shared module/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/121 shared module/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/121 shared module/meson.build"	2020-01-07 21:08:10.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/121 shared module/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -1,6 +1,24 @@
 project('shared module', 'c')
 
-dl = meson.get_compiler('c').find_library('dl', required : false)
+c = meson.get_compiler('c')
+
+# Windows UWP doesn't support the ToolHelp API we use in this test to emulate
+# runtime symbol resolution.
+if host_machine.system() == 'windows'
+   if not c.compiles('''
+#include <windows.h>
+#include <tlhelp32.h>
+
+HANDLE func(void)
+{
+ return CreateToolhelp32Snapshot(TH32CS_SNAPMODULE, 0);
+}
+''')
+     error('MESON_SKIP_TEST Windows UWP does not support this test.')
+   endif
+endif
+
+dl = c.find_library('dl', required : false)
 l = shared_library('runtime', 'runtime.c')
 # Do NOT link the module with the runtime library. This
 # is a common approach for plugins that are only used
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/121 shared module/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/121 shared module/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/121 shared module/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/121 shared module/test.json"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "installed": [
+    {"type": "expr", "file": "usr/lib/modules/libnosyms?so"},
+    {"type": "implibempty", "file": "usr/lib/modules/libnosyms"},
+    {"type": "pdb", "file": "usr/lib/modules/nosyms"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/122 llvm ir and assembly/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/122 llvm ir and assembly/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/122 llvm ir and assembly/meson.build"	2020-01-07 21:08:10.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/122 llvm ir and assembly/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -1,7 +1,7 @@
 project('llvm-ir', 'c', 'cpp')
 
 cpu = host_machine.cpu_family()
-supported_cpus = ['arm', 'x86', 'x86_64']
+supported_cpus = ['arm', 'aarch64', 'x86', 'x86_64']
 
 foreach lang : ['c', 'cpp']
   cc = meson.get_compiler(lang)
@@ -26,19 +26,24 @@
   square_base = 'square-' + cpu
   square_impl = square_base + '.S'
   # MSVC cannot directly compile assembly files, so we pass it through the
-  # cl.exe pre-processor first and then assemble it with the ml.exe assembler.
-  # Then we can link it into the executable.
+  # cl.exe pre-processor first and then assemble it with ml.exe or armasm.exe
+  # assembler.  Then we can link it into the executable.
   if cc.get_argument_syntax() == 'msvc'
     cl = cc.cmd_array()
     if cpu == 'x86'
-      ml = find_program('ml', required: false)
+      asmcmd = 'ml'
     elif cpu == 'x86_64'
-      ml = find_program('ml64', required: false)
+      asmcmd = 'ml64'
+    elif cpu == 'aarch64'
+      asmcmd = 'armasm64'
+    elif cpu == 'arm'
+      asmcmd = 'armasm'
     else
       error('Unsupported cpu family: "' + cpu + '"')
     endif
+    ml = find_program(asmcmd, required: false)
     if not ml.found()
-      error('MESON_SKIP_TEST: ML (masm) not found')
+      error('MESON_SKIP_TEST: Microsoft assembler (ml/armasm) not found')
     endif
     # Preprocess file (ml doesn't support pre-processing)
     # Force the input to be C (/Tc) because ICL otherwise assumes it's an object (.obj) file
@@ -48,10 +53,17 @@
         output : preproc_name,
         command : [cl, '/nologo', '/EP', '/P', '/Fi' + preproc_name, '/Tc', '@INPUT@'] + uscore_args)
     # Use assembled object file instead of the original .S assembly source
-    square_impl = custom_target(lang + square_impl,
-        input : square_preproc,
-        output : lang + square_base + '.obj',
-        command : [ml, '/nologo', '/safeseh', '/Fo', '@OUTPUT@', '/c', '@INPUT@'])
+    if asmcmd.startswith('armasm')
+      square_impl = custom_target(lang + square_impl,
+          input : square_preproc,
+          output : lang + square_base + '.obj',
+          command : [ml, '-nologo', '-o', '@OUTPUT@', '@INPUT@'])
+    else
+      square_impl = custom_target(lang + square_impl,
+          input : square_preproc,
+          output : lang + square_base + '.obj',
+          command : [ml, '/nologo', '/safeseh', '/Fo', '@OUTPUT@', '/c', '@INPUT@'])
+    endif
   endif
   if supported_cpus.contains(cpu)
     e = executable('square_asm_' + lang, square_impl, 'main.' + lang,
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/122 llvm ir and assembly/square-aarch64.S" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/122 llvm ir and assembly/square-aarch64.S"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/122 llvm ir and assembly/square-aarch64.S"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/122 llvm ir and assembly/square-aarch64.S"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,29 @@
+#include "symbol-underscore.h"
+
+#ifdef _MSC_VER
+
+    AREA _TEXT, ARM64, CODE, READONLY
+
+    EXPORT SYMBOL_NAME(square_unsigned)
+SYMBOL_NAME(square_unsigned) PROC
+    mul x1, x0, x0
+    mov x0, x1
+    ret
+SYMBOL_NAME(square_unsigned) ENDP
+
+    END
+
+#else
+
+.text
+.globl SYMBOL_NAME(square_unsigned)
+# ifdef __linux__
+.type square_unsigned, %function
+#endif
+
+SYMBOL_NAME(square_unsigned):
+    mul x1, x0, x0
+    mov x0, x1
+    ret
+
+#endif
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/122 llvm ir and assembly/square-arm.S" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/122 llvm ir and assembly/square-arm.S"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/122 llvm ir and assembly/square-arm.S"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/122 llvm ir and assembly/square-arm.S"	2021-01-06 10:39:48.000000000 +0000
@@ -1,5 +1,20 @@
 #include "symbol-underscore.h"
 
+#ifdef _MSC_VER
+
+    AREA _TEXT, ARM, CODE, READONLY
+
+    EXPORT SYMBOL_NAME(square_unsigned)
+SYMBOL_NAME(square_unsigned) PROC
+    mul r1, r0, r0
+    mov r0, r1
+    mov pc, lr
+SYMBOL_NAME(square_unsigned) ENDP
+
+    END
+
+#else
+
 .text
 .globl SYMBOL_NAME(square_unsigned)
 # ifdef __linux__
@@ -10,3 +25,5 @@
     mul r1, r0, r0
     mov r0, r1
     mov pc, lr
+
+#endif
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/123 cpp and asm/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/123 cpp and asm/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/123 cpp and asm/meson.build"	2020-01-07 21:08:12.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/123 cpp and asm/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -1,4 +1,5 @@
-project('c++ and assembly test', 'cpp')
+project('c++ and assembly test')
+add_languages('cpp')
 
 cpp = meson.get_compiler('cpp')
 cpu = host_machine.cpu_family()
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/125 object only target/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/125 object only target/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/125 object only target/installed_files.txt"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/125 object only target/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,2 +0,0 @@
-usr/bin/prog?exe
-?msvc:usr/bin/prog.pdb
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/125 object only target/obj_generator.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/125 object only target/obj_generator.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/125 object only target/obj_generator.py"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/125 object only target/obj_generator.py"	2021-01-06 10:39:48.000000000 +0000
@@ -13,6 +13,8 @@
     ofile = sys.argv[3]
     if compiler.endswith('cl'):
         cmd = [compiler, '/nologo', '/MDd', '/Fo' + ofile, '/c', ifile]
+    elif sys.platform == 'sunos5':
+        cmd = [compiler, '-fpic', '-c', ifile, '-o', ofile]
     else:
         cmd = [compiler, '-c', ifile, '-o', ofile]
     sys.exit(subprocess.call(cmd))
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/125 object only target/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/125 object only target/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/125 object only target/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/125 object only target/test.json"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,6 @@
+{
+  "installed": [
+    {"type": "exe", "file": "usr/bin/prog"},
+    {"type": "pdb", "file": "usr/bin/prog"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/127 custom target directory install/docgen.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/127 custom target directory install/docgen.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/127 custom target directory install/docgen.py"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/127 custom target directory install/docgen.py"	2021-01-06 10:39:48.000000000 +0000
@@ -5,7 +5,10 @@
 
 out = sys.argv[1]
 
-os.mkdir(out)
+try:
+    os.mkdir(out)
+except FileExistsError:
+    pass
 
 for name in ('a', 'b', 'c'):
     with open(os.path.join(out, name + '.html'), 'w') as f:
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/127 custom target directory install/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/127 custom target directory install/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/127 custom target directory install/installed_files.txt"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/127 custom target directory install/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-usr/share/doc/testpkgname/html/a.html
-usr/share/doc/testpkgname/html/b.html
-usr/share/doc/testpkgname/html/c.html
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/127 custom target directory install/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/127 custom target directory install/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/127 custom target directory install/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/127 custom target directory install/test.json"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/share/doc/testpkgname/html/a.html"},
+    {"type": "file", "file": "usr/share/doc/testpkgname/html/b.html"},
+    {"type": "file", "file": "usr/share/doc/testpkgname/html/c.html"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/12 data/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/12 data/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/12 data/installed_files.txt"	2018-08-25 08:05:43.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/12 data/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,11 +0,0 @@
-usr/share/progname/datafile.dat
-usr/share/progname/fileobject_datafile.dat
-usr/share/progname/vanishing.dat
-usr/share/progname/vanishing2.dat
-usr/share/data install test/renamed file.txt
-usr/share/data install test/somefile.txt
-usr/share/data install test/some/nested/path.txt
-usr/share/renamed/renamed 2.txt
-usr/share/renamed/renamed 3.txt
-etc/etcfile.dat
-usr/bin/runscript.sh
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/12 data/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/12 data/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/12 data/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/12 data/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,15 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/share/progname/datafile.dat"},
+    {"type": "file", "file": "usr/share/progname/fileobject_datafile.dat"},
+    {"type": "file", "file": "usr/share/progname/vanishing.dat"},
+    {"type": "file", "file": "usr/share/progname/vanishing2.dat"},
+    {"type": "file", "file": "usr/share/data install test/renamed file.txt"},
+    {"type": "file", "file": "usr/share/data install test/somefile.txt"},
+    {"type": "file", "file": "usr/share/data install test/some/nested/path.txt"},
+    {"type": "file", "file": "usr/share/renamed/renamed 2.txt"},
+    {"type": "file", "file": "usr/share/renamed/renamed 3.txt"},
+    {"type": "file", "file": "etc/etcfile.dat"},
+    {"type": "file", "file": "usr/bin/runscript.sh"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/144 custom target multiple outputs/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/144 custom target multiple outputs/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/144 custom target multiple outputs/installed_files.txt"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/144 custom target multiple outputs/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,6 +0,0 @@
-usr/include/diff.h
-usr/include/first.h
-usr/bin/diff.sh
-usr/bin/second.sh
-opt/same.h
-opt/same.sh
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/144 custom target multiple outputs/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/144 custom target multiple outputs/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/144 custom target multiple outputs/meson.build"	2020-01-07 21:08:35.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/144 custom target multiple outputs/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -21,8 +21,19 @@
   install : true,
   install_dir : [join_paths(get_option('prefix'), get_option('includedir')), false])
 
-custom_target('only-install-second',
+targets = custom_target('only-install-second',
   output : ['second.h', 'second.sh'],
   command : [gen, 'second', '@OUTDIR@'],
   install : true,
   install_dir : [false, join_paths(get_option('prefix'), get_option('bindir'))])
+
+paths = []
+foreach i : targets.to_list()
+  paths += i.full_path()
+endforeach
+
+# Skip on Windows because paths are not identical, '/' VS '\'.
+if host_machine.system() != 'windows'
+  assert(paths == [meson.current_build_dir() / 'second.h',
+                   meson.current_build_dir() / 'second.sh'])
+endif
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/144 custom target multiple outputs/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/144 custom target multiple outputs/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/144 custom target multiple outputs/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/144 custom target multiple outputs/test.json"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,10 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/include/diff.h"},
+    {"type": "file", "file": "usr/include/first.h"},
+    {"type": "file", "file": "usr/bin/diff.sh"},
+    {"type": "file", "file": "usr/bin/second.sh"},
+    {"type": "file", "file": "opt/same.h"},
+    {"type": "file", "file": "opt/same.sh"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/145 special characters/arg-char-test.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/145 special characters/arg-char-test.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/145 special characters/arg-char-test.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/145 special characters/arg-char-test.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,10 @@
+#include <assert.h>
+#include <stdio.h>
+
+int main(int argc, char **argv) {
+  char c = CHAR;
+  assert(argc == 2);
+  if (c != argv[1][0])
+    fprintf(stderr, "Expected %x, got %x\n", (unsigned int) c, (unsigned int) argv[1][0]);
+  assert(c == argv[1][0]);
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/145 special characters/arg-string-test.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/145 special characters/arg-string-test.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/145 special characters/arg-string-test.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/145 special characters/arg-string-test.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,12 @@
+#include <assert.h>
+#include <stdio.h>
+#include <string.h>
+
+int main(int argc, char **argv) {
+  const char *s = CHAR;
+  assert(argc == 2);
+  assert(strlen(s) == 1);
+  if (s[0] != argv[1][0])
+    fprintf(stderr, "Expected %x, got %x\n", (unsigned int) s[0], (unsigned int) argv[1][0]);
+  assert(s[0] == argv[1][0]);
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/145 special characters/arg-unquoted-test.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/145 special characters/arg-unquoted-test.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/145 special characters/arg-unquoted-test.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/145 special characters/arg-unquoted-test.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,17 @@
+#include <assert.h>
+#include <stdio.h>
+#include <string.h>
+
+#define Q(x) #x
+#define QUOTE(x) Q(x)
+
+int main(int argc, char **argv) {
+  const char *s = QUOTE(CHAR);
+  assert(argc == 2);
+  assert(strlen(s) == 1);
+  if (s[0] != argv[1][0])
+    fprintf(stderr, "Expected %x, got %x\n", (unsigned int) s[0], (unsigned int) argv[1][0]);
+  assert(s[0] == argv[1][0]);
+  // There is no way to convert a macro argument into a character constant.
+  // Otherwise we'd test that as well
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/145 special characters/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/145 special characters/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/145 special characters/installed_files.txt"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/145 special characters/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,2 +0,0 @@
-usr/share/result
-usr/share/result2
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/145 special characters/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/145 special characters/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/145 special characters/meson.build"	2020-01-07 21:08:36.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/145 special characters/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -35,3 +35,41 @@
   output : 'result2',
   install : true,
   install_dir : get_option('datadir'))
+
+# Test that we can pass these special characters in compiler arguments
+#
+# (this part of the test is crafted so we don't try to use these special
+# characters in filenames or target names)
+#
+# TODO: similar tests needed for languages other than C
+# TODO: add similar test for quote, doublequote, and hash, carefully
+# Re hash, see
+# https://docs.microsoft.com/en-us/cpp/build/reference/d-preprocessor-definitions
+
+special = [
+  ['amp', '&'],
+  ['at', '@'],
+  ['backslash', '\\'],
+  ['dollar', '$'],
+  ['gt', '>'],
+  ['lt', '<'],
+  ['slash', '/'],
+]
+
+cc = meson.get_compiler('c')
+
+foreach s : special
+  args = '-DCHAR="@0@"'.format(s[1])
+  e = executable('arg-string-' + s[0], 'arg-string-test.c', c_args: args)
+  test('arg-string-' + s[0], e, args: s[1])
+
+  args = '-DCHAR=@0@'.format(s[1])
+  e = executable('arg-unquoted-' + s[0], 'arg-unquoted-test.c', c_args: args)
+  test('arg-unquoted-' + s[0], e, args: s[1])
+endforeach
+
+foreach s : special
+  args = '-DCHAR=\'@0@\''.format(s[1])
+  e = executable('arg-char-' + s[0], 'arg-char-test.c', c_args: args)
+  test('arg-char-' + s[0], e, args: s[1])
+endforeach
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/145 special characters/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/145 special characters/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/145 special characters/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/145 special characters/test.json"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,6 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/share/result"},
+    {"type": "file", "file": "usr/share/result2"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/149 recursive linking/3rdorderdeps/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/149 recursive linking/3rdorderdeps/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/149 recursive linking/3rdorderdeps/meson.build"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/149 recursive linking/3rdorderdeps/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -41,7 +41,7 @@
       main_c = configure_file(input : 'main.c.in',
                               output : name + '-main.c',
                               configuration : cdata)
-      dep3_bin = executable(name, main_c, link_with : dep3_lib,
+      dep3_bin = executable(name + '_test', main_c, link_with : dep3_lib,
                             c_args : build_args)
       test(name + 'test', dep3_bin)
     endforeach
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/14 configure file/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/14 configure file/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/14 configure file/installed_files.txt"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/14 configure file/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-usr/share/appdir/config2.h
-usr/share/appdir/config2b.h
-usr/share/appdireh/config2-1.h
-usr/share/appdirok/config2-2.h
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/14 configure file/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/14 configure file/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/14 configure file/meson.build"	2020-01-07 21:06:24.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/14 configure file/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -185,6 +185,12 @@
 if ret.returncode() != 0
   error('Error running command: @0@\n@1@'.format(ret.stdout(), ret.stderr()))
 endif
+# Now the same, but using a File object as an argument.
+inf2 = files('invalid-utf8.bin.in')[0]
+ret = run_command(check_file, inf2, outf)
+if ret.returncode() != 0
+  error('Error running command: @0@\n@1@'.format(ret.stdout(), ret.stderr()))
+endif
 
 # Test copy of a binary file
 outf = configure_file(input : inf,
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/14 configure file/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/14 configure file/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/14 configure file/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/14 configure file/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,8 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/share/appdir/config2.h"},
+    {"type": "file", "file": "usr/share/appdir/config2b.h"},
+    {"type": "file", "file": "usr/share/appdireh/config2-1.h"},
+    {"type": "file", "file": "usr/share/appdirok/config2-2.h"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/meson.build"	2020-01-07 21:08:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -3,8 +3,14 @@
 )
 
 subproject('zlib')
-subproject('foo')
+foo = subproject('foo')
+bar = subproject('bar')
+
+libfoo = foo.get_variable('libfoo')
+libbar = bar.get_variable('libbar')
 
 executable('grabprog', files('src/subprojects/prog.c'))
 executable('grabprog2', files('src/subprojects/foo/prog2.c'))
 subdir('src')
+
+subproject('patchdir')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/src/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/src/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/src/meson.build"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/src/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -1,2 +1,6 @@
 executable('grabprog3', files('subprojects/prog.c'))
 executable('grabprog4', files('subprojects/foo/prog2.c'))
+
+texe = executable('testexe', files('test.c'), link_with: [libfoo, libbar])
+
+test('t1', texe)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/src/test.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/src/test.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/src/test.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/src/test.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,9 @@
+#include <stdio.h>
+
+int bar_dummy_func(void);
+int dummy_func(void);
+
+int main(void) {
+    printf("Hello world %d\n", bar_dummy_func() + dummy_func());
+    return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/subprojects/bar-1.0/bar.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/subprojects/bar-1.0/bar.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/subprojects/bar-1.0/bar.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/subprojects/bar-1.0/bar.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,3 @@
+int bar_dummy_func(void) {
+    return 42;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/subprojects/bar-1.0/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/subprojects/bar-1.0/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/subprojects/bar-1.0/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/subprojects/bar-1.0/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,2 @@
+project('static lib bar', 'c')
+libbar = static_library('bar', 'bar.c')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/subprojects/bar.wrap" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/subprojects/bar.wrap"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/subprojects/bar.wrap"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/subprojects/bar.wrap"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,8 @@
+[wrap-file]
+directory = bar-1.0
+lead_directory_missing = true
+
+source_filename = bar-1.0.tar.xz
+source_hash     = f0f61948530dc0d33e3028cd71a9f8ee869f6b3665960d8f41d715cf4aed6467
+
+patch_filename  = bar-1.0-patch.tar.xz
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/subprojects/foo-1.0/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/subprojects/foo-1.0/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/subprojects/foo-1.0/meson.build"	2019-10-06 17:01:35.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/subprojects/foo-1.0/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -1,2 +1,2 @@
-project('shared lib', 'c')
-libfoo = shared_library('foo', 'foo.c')
+project('static lib', 'c')
+libfoo = static_library('foo', 'foo.c')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/subprojects/foo-1.0-patchdir/foo.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/subprojects/foo-1.0-patchdir/foo.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/subprojects/foo-1.0-patchdir/foo.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/subprojects/foo-1.0-patchdir/foo.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,3 @@
+int dummy_func(void) {
+    return 42;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/subprojects/foo-1.0-patchdir/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/subprojects/foo-1.0-patchdir/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/subprojects/foo-1.0-patchdir/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/subprojects/foo-1.0-patchdir/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,2 @@
+project('static lib patchdir', 'c')
+libfoo = static_library('foo', 'foo.c')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/subprojects/foo.wrap" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/subprojects/foo.wrap"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/subprojects/foo.wrap"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/subprojects/foo.wrap"	2021-01-06 10:39:48.000000000 +0000
@@ -3,9 +3,9 @@
 
 source_url = http://something.invalid
 source_filename = foo-1.0.tar.xz
-source_hash = ae5fc03185654f76b459db16ca25809703f8821aeb39a433902244bb479c4b79
+source_hash = 9ed8f67d75e43d3be161efb6eddf30dd01995a958ca83951ea64234bac8908c1
 lead_directory_missing = true
 
 patch_url = https://something.invalid/patch
 patch_filename = foo-1.0-patch.tar.xz
-patch_hash = 8f2e286a4b190228d4e0c25ddc91195449cfb5e5c52006355838964b244037da
+patch_hash = d0ddc5e60fdb27d808552f5ac8d0bb603ea2cba306538b4427b985535b26c9c5
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/subprojects/.gitignore" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/subprojects/.gitignore"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/subprojects/.gitignore"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/subprojects/.gitignore"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,3 @@
+/foo-1.0
+/bar-1.0
+/foo-1.0-patchdir
Binary files /tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/subprojects/packagecache/foo-1.0-patch.tar.xz and /tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/subprojects/packagecache/foo-1.0-patch.tar.xz differ
Binary files /tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/subprojects/packagecache/foo-1.0.tar.xz and /tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/subprojects/packagecache/foo-1.0.tar.xz differ
Binary files /tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/subprojects/packagefiles/bar-1.0-patch.tar.xz and /tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/subprojects/packagefiles/bar-1.0-patch.tar.xz differ
Binary files /tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/subprojects/packagefiles/bar-1.0.tar.xz and /tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/subprojects/packagefiles/bar-1.0.tar.xz differ
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/subprojects/packagefiles/foo-1.0/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/subprojects/packagefiles/foo-1.0/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/subprojects/packagefiles/foo-1.0/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/subprojects/packagefiles/foo-1.0/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,2 @@
+project('static lib patchdir', 'c')
+libfoo = static_library('foo', 'foo.c')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/subprojects/patchdir.wrap" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/subprojects/patchdir.wrap"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/157 wrap file should not failed/subprojects/patchdir.wrap"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/157 wrap file should not failed/subprojects/patchdir.wrap"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,9 @@
+[wrap-file]
+directory = foo-1.0-patchdir
+
+source_url = http://something.invalid
+source_filename = foo-1.0.tar.xz
+source_hash = 9ed8f67d75e43d3be161efb6eddf30dd01995a958ca83951ea64234bac8908c1
+lead_directory_missing = true
+
+patch_directory = foo-1.0
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/162 external program shebang parsing/input.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/162 external program shebang parsing/input.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/162 external program shebang parsing/input.txt"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/162 external program shebang parsing/input.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1 +0,0 @@
-some stuff here
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/162 external program shebang parsing/main.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/162 external program shebang parsing/main.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/162 external program shebang parsing/main.c"	2019-09-16 21:20:45.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/162 external program shebang parsing/main.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,72 +0,0 @@
-#include <stdio.h>
-#include <fcntl.h>
-#include <errno.h>
-#include <string.h>
-#include <stdlib.h>
-#include <sys/types.h>
-
-#ifdef _WIN32
- #include <io.h>
- #include <windows.h>
-#else
- #include <unistd.h>
-#endif
-
-/* Who cares about stack sizes in test programs anyway */
-#define LINE_LENGTH 4096
-
-static int
-intrp_copyfile (char * src, char * dest)
-{
-#ifdef _WIN32
-  if (!CopyFile (src, dest, FALSE))
-    return 1;
-  return 0;
-#else
-  return execlp ("cp", "cp", src, dest, NULL);
-#endif
-}
-
-static void
-parser_get_line (FILE * f, char line[LINE_LENGTH])
-{
-  if (!fgets (line, LINE_LENGTH, f))
-    fprintf (stderr, "%s\n", strerror (errno));
-}
-
-int
-main (int argc, char * argv[])
-{
-  FILE *f = NULL;
-  char line[LINE_LENGTH];
-
-  if (argc != 4) {
-    fprintf (stderr, "Invalid number of arguments: %i\n", argc);
-    goto err;
-  }
-
-  if ((f = fopen (argv[1], "r")) == NULL) {
-    fprintf (stderr, "%s\n", strerror (errno));
-    goto err;
-  }
-
-  parser_get_line (f, line);
-
-  if (!line || line[0] != '#' || line[1] != '!') {
-    fprintf (stderr, "Invalid script\n");
-    goto err;
-  }
-
-  parser_get_line (f, line);
-
-  if (!line || strncmp (line, "copy", 4) != 0) {
-    fprintf (stderr, "Syntax error: %s\n", line);
-    goto err;
-  }
-
-  return intrp_copyfile (argv[2], argv[3]);
-
-err:
-  fclose (f);
-  return 1;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/162 external program shebang parsing/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/162 external program shebang parsing/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/162 external program shebang parsing/meson.build"	2020-01-07 21:08:55.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/162 external program shebang parsing/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,21 +0,0 @@
-project('shebang parsing', 'c')
-
-interpreter = executable('aninterp', 'main.c', native : true)
-
-cdata = configuration_data()
-cdata.set('INTRP', interpreter.full_path())
-
-f = configure_file(input : 'script.int.in',
-                   output : 'script.int',
-                   configuration : cdata)
-
-# Test that parsing a shebang with spaces works properly. See `man execve`,
-# specifically the section on "Interpreter scripts" and the one under "NOTES".
-script = find_program(f)
-
-custom_target('interpthis',
-  input : 'input.txt',
-  output : 'output.txt',
-  depends : interpreter,
-  command : [script, '@INPUT@', '@OUTPUT@'],
-  build_by_default : true)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/162 external program shebang parsing/script.int.in" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/162 external program shebang parsing/script.int.in"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/162 external program shebang parsing/script.int.in"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/162 external program shebang parsing/script.int.in"	1970-01-01 00:00:00.000000000 +0000
@@ -1,2 +0,0 @@
-#!/usr/bin/env @INTRP@
-copy
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/163 disabler/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/163 disabler/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/163 disabler/meson.build"	2020-01-07 21:08:58.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/163 disabler/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -9,6 +9,7 @@
 d3 = (d == d2)
 d4 = d + 0
 d5 = d2 or true
+set_variable('d6', disabler())
 
 has_not_changed = false
 if is_disabler(d)
@@ -23,12 +24,14 @@
 assert(is_disabler(d3), 'Disabler comparison should yield disabler.')
 assert(is_disabler(d4), 'Disabler addition should yield disabler.')
 assert(is_disabler(d5), 'Disabler logic op should yield disabler.')
+assert(is_disabler(d6), 'set_variable with a disabler should set variable to disabler.')
 
 assert(d, 'Disabler did not cause this to be skipped.')
 assert(d2, 'Function laundered disabler did not cause this to be skipped.')
 assert(d3, 'Disabler comparison should yield disabler and thus this would not be called.')
 assert(d4, 'Disabler addition should yield disabler and thus this would not be called.')
 assert(d5, 'Disabler logic op should yield disabler and thus this would not be called.')
+assert(d6, 'set_variable with a disabler did not cause this to be skipped.')
 
 number = 0
 
@@ -80,6 +83,31 @@
 endif
 assert(has_not_changed, 'App has changed.')
 
+assert(not is_disabler(is_variable('d6')), 'is_variable should not return a disabler')
+assert(is_variable('d6'), 'is_variable for a disabler should return true')
+
+if_is_not_disabled = false
+if is_variable('d6')
+    if_is_not_disabled = true
+else
+    if_is_not_disabled = true
+endif
+assert(if_is_not_disabled, 'Disabler in is_variable should not skip blocks')
+
+get_d = get_variable('d6')
+assert(is_disabler(get_d), 'get_variable should yield a disabler')
+
+get_fallback_d = get_variable('nonexistant', disabler())
+assert(is_disabler(get_fallback_d), 'get_variable fallback should yield a disabler')
+
+var_true = true
+get_no_fallback_d = get_variable('var_true', disabler())
+assert(not is_disabler(get_no_fallback_d), 'get_variable should not fallback to disabler')
+assert(get_no_fallback_d, 'get_variable should yield true')
+
+assert(is_disabler(get_variable(disabler())), 'get_variable should yield a disabler')
+assert(is_disabler(get_variable(disabler(), var_true)), 'get_variable should yield a disabler')
+
 if_is_disabled = true
 if disabler()
   if_is_disabled = false
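The hunk above extends the disabler test to set_variable() and get_variable(). As a rough sketch of the behaviour being asserted (a meson.build fragment, not part of the patch itself):

    d = disabler()
    set_variable('maybe', d)                  # the stored value stays a disabler
    assert(is_disabler(get_variable('maybe')), 'still a disabler')
    assert(is_disabler(get_variable('missing', disabler())), 'fallback can be a disabler')
    # is_variable() deliberately returns a plain boolean, so it never
    # skips the surrounding if-block the way a bare disabler would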
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/176 initial c_args/test_args.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/176 initial c_args/test_args.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/176 initial c_args/test_args.txt"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/176 initial c_args/test_args.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-# This file is not read by meson itself, but by the test framework.
-# It is not possible to pass arguments to meson from a file.
-['-Dc_args=-march=native', '-Dc_args=-funroll-loops',
- '-Dc_link_args=-Dtest_harmless_but_useless_link_arg']
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/176 initial c_args/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/176 initial c_args/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/176 initial c_args/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/176 initial c_args/test.json"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,8 @@
+{
+  "matrix": {
+    "options": {
+      "c_args":      [{ "val": "-funroll-loops" }],
+      "c_link_args": [{ "val": "-Dtest_harmless_but_useless_link_arg"   }]
+    }
+  }
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/187 find override/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/187 find override/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/187 find override/meson.build"	2020-01-07 21:09:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/187 find override/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -10,3 +10,6 @@
 endif
 
 subdir('otherdir')
+
+tool = find_program('sometool')
+assert(tool.found())
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/187 find override/subprojects/sub/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/187 find override/subprojects/sub/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/187 find override/subprojects/sub/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/187 find override/subprojects/sub/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,4 @@
+project('tools')
+
+exe = find_program('gencodegen')
+meson.override_find_program('sometool', exe)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/187 find override/subprojects/sub.wrap" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/187 find override/subprojects/sub.wrap"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/187 find override/subprojects/sub.wrap"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/187 find override/subprojects/sub.wrap"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,5 @@
+[wrap-file]
+directory = sub
+
+[provide]
+program_names = sometool
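Together the two new files let find_program('sometool') succeed in the parent project even though no such binary exists on the system: the wrap's [provide] entry names the program and the subproject registers its own executable for it. In meson.build terms, roughly:

    # subprojects/sub/meson.build
    exe = find_program('gencodegen')
    meson.override_find_program('sometool', exe)

    # parent meson.build
    tool = find_program('sometool')           # resolved through the override
    assert(tool.found())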
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/192 args flattening/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/192 args flattening/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/192 args flattening/meson.build"	2020-01-07 21:09:25.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/192 args flattening/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -1,29 +1,31 @@
 project('args flattening')
 
 arr = get_variable('does-not-exist', ['bar', 'baz'])
-
 assert(arr == ['bar', 'baz'], 'get_variable with array fallback is broken')
 
 set_variable('arr', ['bar', 'baz'])
-
 assert(arr == ['bar', 'baz'], 'set_variable(array) is broken')
 
 conf = configuration_data()
-
 conf.set('foo', ['bar', 'baz'])
-
 assert(conf.get('foo') == ['bar', 'baz'], 'configuration_data.set(array) is broken')
 
 arr = conf.get('does-not-exist', ['bar', 'baz'])
-
 assert(arr == ['bar', 'baz'], 'configuration_data.get with array fallback is broken')
 
 arr = meson.get_cross_property('does-not-exist', ['bar', 'baz'])
-
 assert(arr == ['bar', 'baz'], 'meson.get_cross_property with array fallback is broken')
 
+arr = meson.get_external_property('does-not-exist', ['bar', 'baz'])
+assert(arr == ['bar', 'baz'], 'meson.get_external_property with array fallback is broken')
+
+arr = meson.get_external_property('does-not-exist', ['bar', 'baz'], native: true)
+assert(arr == ['bar', 'baz'], 'meson.get_external_property native:true with array fallback is broken')
+
+arr = meson.get_external_property('does-not-exist', ['bar', 'baz'], native: false)
+assert(arr == ['bar', 'baz'], 'meson.get_external_property native:false with array fallback is broken')
+
 # Test deprecated behaviour
 
 conf.set(['foo', 'bar'])
-
 message(conf.get('foo'))
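The added assertions cover meson.get_external_property(), the machine-aware successor to meson.get_cross_property(). A condensed sketch of the fallback behaviour being tested:

    # returns the property from the cross/native file, or the fallback when absent
    arr = meson.get_external_property('does-not-exist', ['bar', 'baz'])
    # native : true consults the build machine, native : false the host machine
    val = meson.get_external_property('does-not-exist', 'fallback', native : false)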
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/195 install_mode/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/195 install_mode/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/195 install_mode/installed_files.txt"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/195 install_mode/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,10 +0,0 @@
-usr/bin/runscript.sh
-usr/bin/trivialprog?exe
-?msvc:usr/bin/trivialprog.pdb
-usr/include/config.h
-usr/include/rootdir.h
-usr/libtest/libstat.a
-usr/share/man/man1/foo.1
-usr/share/sub1/second.dat
-usr/share/sub2/stub
-usr/subdir/data.dat
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/195 install_mode/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/195 install_mode/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/195 install_mode/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/195 install_mode/test.json"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,15 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/bin/runscript.sh"},
+    {"type": "exe", "file": "usr/bin/trivialprog"},
+    {"type": "pdb", "file": "usr/bin/trivialprog"},
+    {"type": "file", "file": "usr/include/config.h"},
+    {"type": "file", "file": "usr/include/rootdir.h"},
+    {"type": "file", "file": "usr/libtest/libstat.a"},
+    {"type": "file", "file": "usr/share/man/man1/foo.1"},
+    {"type": "file", "file": "usr/share/sub1/second.dat"},
+    {"type": "file", "file": "usr/share/sub2/stub"},
+    {"type": "file", "file": "usr/subdir/data.dat"}
+  ],
+  "do_not_set_opts": ["libdir"]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/1 trivial/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/1 trivial/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/1 trivial/meson.build"	2020-01-07 21:06:11.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/1 trivial/meson.build"	2020-09-17 22:00:44.000000000 +0000
@@ -1,7 +1,7 @@
 # Comment on the first line
 project('trivial test',
   # Comment inside a function call + array for language list
-  ['c'],
+  ['c'], default_options: ['buildtype=debug'],
   meson_version : '>=0.52.0')
 #this is a comment
 sources = 'trivial.c'
@@ -14,13 +14,8 @@
   add_project_arguments('/Qdiag-error:10159', language : 'c')
 endif
 
-if meson.is_cross_build()
-  native_exe = executable('native-trivialprog', sources : sources, native : true)
-  test('native exe in cross build', native_exe)
-endif
-
 exe = executable('trivialprog', sources : sources)
-
+assert(exe.name() == 'trivialprog')
 test('runtest', exe) # This is a comment
 
 has_not_changed = false
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/201 override with exe/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/201 override with exe/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/201 override with exe/meson.build"	2020-01-07 21:09:34.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/201 override with exe/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,15 +0,0 @@
-project('myexe', 'c')
-sub = subproject('sub')
-prog = find_program('foobar')
-custom1 = custom_target('custom1',
-                        build_by_default : true,
-                        input : [],
-                        output : 'main1.c',
-                        command : [prog, '@OUTPUT@'])
-gen = generator(prog,
-                output : '@BASENAME@.c',
-                arguments : ['@OUTPUT@'])
-custom2 = gen.process('main2.input')
-
-executable('e1', custom1)
-executable('e2', custom2)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/201 override with exe/subprojects/sub/foobar.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/201 override with exe/subprojects/sub/foobar.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/201 override with exe/subprojects/sub/foobar.c"	2019-12-04 18:45:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/201 override with exe/subprojects/sub/foobar.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,13 +0,0 @@
-#include <assert.h>
-#include <stdio.h>
-
-int main(int argc, char* argv[]) {
-  assert(argc == 2);
-  FILE *f = fopen(argv[1], "w");
-  const char msg[] = "int main(void) {return 0;}\n";
-  size_t w = fwrite(msg, 1, sizeof(msg) - 1, f);
-  assert(w == sizeof(msg) - 1);
-  int r = fclose(f);
-  assert(r == 0);
-  return 0;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/201 override with exe/subprojects/sub/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/201 override with exe/subprojects/sub/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/201 override with exe/subprojects/sub/meson.build"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/201 override with exe/subprojects/sub/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-project('sub', 'c')
-foobar = executable('foobar', 'foobar.c',  native : true)
-meson.override_find_program('foobar', foobar)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/203 function attributes/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/203 function attributes/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/203 function attributes/meson.build"	2020-01-07 21:09:40.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/203 function attributes/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -31,7 +31,6 @@
 #    figure that out except by running the code we're trying to test.
 attributes = [
   'aligned',
-  'alloc_size',
   'always_inline',
   'cold',
   'const',
@@ -65,6 +64,7 @@
 if host_machine.system() != 'darwin'
   attributes += 'alias'
   attributes += 'visibility'
+  attributes += 'alloc_size'
 endif
 
 if ['gcc', 'intel'].contains(c.get_id())
@@ -82,30 +82,30 @@
   endif
 endif
 
-
-foreach a : attributes
-  x = c.has_function_attribute(a)
-  assert(x == expected_result, '@0@: @1@'.format(c.get_id(), a))
-  x = cpp.has_function_attribute(a)
-  assert(x == expected_result, '@0@: @1@'.format(cpp.get_id(), a))
-endforeach
-
-win_expect = ['windows', 'cygwin'].contains(host_machine.system())
-foreach a : ['dllexport', 'dllimport']
-  assert(c.has_function_attribute(a) == win_expect,
-         '@0@: @1@'.format(c.get_id(), a))
-  assert(cpp.has_function_attribute(a) == win_expect,
-         '@0@: @1@'.format(cpp.get_id(), a))
-endforeach
-
-message('checking get_supported_function_attributes')
-if not ['msvc', 'clang-cl', 'intel-cl'].contains(c.get_id())
-  multi_expected = attributes
+if get_option('mode') == 'single'
+  foreach a : attributes
+    x = c.has_function_attribute(a)
+    assert(x == expected_result, '@0@: @1@'.format(c.get_id(), a))
+    x = cpp.has_function_attribute(a)
+    assert(x == expected_result, '@0@: @1@'.format(cpp.get_id(), a))
+  endforeach
+
+  win_expect = ['windows', 'cygwin'].contains(host_machine.system())
+  foreach a : ['dllexport', 'dllimport']
+    assert(c.has_function_attribute(a) == win_expect,
+          '@0@: @1@'.format(c.get_id(), a))
+    assert(cpp.has_function_attribute(a) == win_expect,
+          '@0@: @1@'.format(cpp.get_id(), a))
+  endforeach
 else
-  multi_expected = []
-endif
+  if not ['msvc', 'clang-cl', 'intel-cl'].contains(c.get_id())
+    multi_expected = attributes
+  else
+    multi_expected = []
+  endif
 
-multi_check = c.get_supported_function_attributes(attributes)
-assert(multi_check == multi_expected, 'get_supported_function_arguments works (C)')
-multi_check = cpp.get_supported_function_attributes(attributes)
-assert(multi_check == multi_expected, 'get_supported_function_arguments works (C++)')
+  multi_check = c.get_supported_function_attributes(attributes)
+  assert(multi_check == multi_expected, 'get_supported_function_arguments works (C)')
+  multi_check = cpp.get_supported_function_attributes(attributes)
+  assert(multi_check == multi_expected, 'get_supported_function_arguments works (C++)')
+endif
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/203 function attributes/meson_options.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/203 function attributes/meson_options.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/203 function attributes/meson_options.txt"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/203 function attributes/meson_options.txt"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,7 @@
+option(
+    'mode',
+    type : 'combo',
+    choices : ['single', 'parallel'],
+    value : 'single',
+    description : 'Test the one at a time function or many at a time function.'
+)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/203 function attributes/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/203 function attributes/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/203 function attributes/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/203 function attributes/test.json"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,10 @@
+{
+  "matrix": {
+    "options": {
+      "mode": [
+        { "val": "single" },
+        { "val": "parallel" }
+      ]
+    }
+  }
+}
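The new 'mode' option splits the attribute checks into two matrix entries so that the per-attribute and the batched compiler methods are exercised separately. The two paths correspond roughly to:

    cc = meson.get_compiler('c')
    if get_option('mode') == 'single'
      ok = cc.has_function_attribute('cold')                       # one at a time
    else
      supported = cc.get_supported_function_attributes(['cold', 'const'])
    endif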
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/206 install name_prefix name_suffix/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/206 install name_prefix name_suffix/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/206 install name_prefix name_suffix/installed_files.txt"	2019-12-04 18:45:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/206 install name_prefix name_suffix/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,12 +0,0 @@
-?msvc:usr/bin/baz.pdb
-?msvc:usr/bin/bowcorge.pdb
-?msvc:usr/bin/foo.pdb
-usr/?lib/bowcorge.stern
-usr/lib/?libbaz.cheese
-usr/lib/bar.a
-usr/lib/bowcorge?implib
-usr/lib/bowgrault.stern
-usr/lib/foo?implib
-usr/lib/foo?so
-usr/lib/libbaz?implib
-usr/lib/libqux.cheese
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/206 install name_prefix name_suffix/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/206 install name_prefix name_suffix/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/206 install name_prefix name_suffix/meson.build"	2020-01-07 21:09:38.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/206 install name_prefix name_suffix/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -8,3 +8,6 @@
 
 shared_library('corge', 'libfile.c', name_prefix: 'bow', name_suffix: 'stern', install : true)
 static_library('grault', 'libfile.c', name_prefix: 'bow', name_suffix: 'stern', install : true)
+
+# exercise default name_prefix and name_suffix
+shared_library('garply', 'libfile.c', name_prefix: [], name_suffix: [], install : true)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/206 install name_prefix name_suffix/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/206 install name_prefix name_suffix/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/206 install name_prefix name_suffix/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/206 install name_prefix name_suffix/test.json"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,19 @@
+{
+  "installed": [
+    {"type": "pdb", "file": "usr/bin/baz"},
+    {"type": "pdb", "file": "usr/bin/bowcorge"},
+    {"type": "pdb", "file": "usr/bin/foo"},
+    {"type": "expr", "file": "usr/?lib/bowcorge.stern"},
+    {"type": "expr", "file": "usr/lib/?libbaz.cheese"},
+    {"type": "file", "file": "usr/lib/bar.a"},
+    {"type": "implib", "file": "usr/lib/bowcorge"},
+    {"type": "file", "file": "usr/lib/bowgrault.stern"},
+    {"type": "implib", "file": "usr/lib/foo"},
+    {"type": "expr", "file": "usr/lib/foo?so"},
+    {"type": "implib", "file": "usr/lib/libbaz"},
+    {"type": "file", "file": "usr/lib/libqux.cheese"},
+    {"type": "expr", "file": "usr/?lib/libgarply?so"},
+    {"type": "implib", "file": "usr/lib/libgarply"},
+    {"type": "pdb", "file": "usr/bin/garply"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/207 kwarg entry/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/207 kwarg entry/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/207 kwarg entry/installed_files.txt"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/207 kwarg entry/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,2 +0,0 @@
-usr/bin/prog?exe
-?msvc:usr/bin/prog.pdb
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/207 kwarg entry/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/207 kwarg entry/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/207 kwarg entry/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/207 kwarg entry/test.json"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,6 @@
+{
+  "installed": [
+    {"type": "exe", "file": "usr/bin/prog"},
+    {"type": "pdb", "file": "usr/bin/prog"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/208 custom target build by default/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/208 custom target build by default/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/208 custom target build by default/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/208 custom target build by default/test.json"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,5 @@
+{
+  "installed": [
+
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/211 cmake module/cmake_project/CMakeLists.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/211 cmake module/cmake_project/CMakeLists.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/211 cmake module/cmake_project/CMakeLists.txt"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/211 cmake module/cmake_project/CMakeLists.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-cmake_minimum_required(VERSION 2.8)
-project(cmakeMeson C)
-
-find_package(cmakeModule REQUIRED)
\ No newline at end of file
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/211 cmake module/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/211 cmake module/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/211 cmake module/installed_files.txt"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/211 cmake module/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,2 +0,0 @@
-usr/lib/cmake/cmakeModule/cmakeModuleConfig.cmake
-usr/lib/cmake/cmakeModule/cmakeModuleConfigVersion.cmake
\ No newline at end of file
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/211 cmake module/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/211 cmake module/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/211 cmake module/meson.build"	2020-01-07 21:09:46.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/211 cmake module/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,31 +0,0 @@
-project('cmakeModule', 'c', version: '1.0.0')
-
-if build_machine.system() == 'cygwin'
-  error('MESON_SKIP_TEST CMake is broken on Cygwin.')
-endif
-
-cmake_bin = find_program('cmake', required: false)
-if not cmake_bin.found()
-  error('MESON_SKIP_TEST CMake not installed.')
-endif
-
-cc = meson.get_compiler('c')
-if cc.get_id() == 'clang-cl' and meson.backend() == 'ninja' and build_machine.system() == 'windows'
-    error('MESON_SKIP_TEST CMake installation nor operational for vs2017 clangclx64ninja')
-endif
-
-cmake = import('cmake')
-
-cmake.write_basic_package_version_file(version: '0.0.1',
-   name: 'cmakeModule',
-)
-
-conf = configuration_data()
-conf.set('MYVAR', 'my variable value')
-conf.set_quoted('MYQUOTEDVAR', 'my quoted variable value')
-
-cmake.configure_package_config_file(
-    input: 'projectConfig.cmake.in',
-    name: 'cmakeModule',
-    configuration: conf,
-)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/211 cmake module/projectConfig.cmake.in" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/211 cmake module/projectConfig.cmake.in"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/211 cmake module/projectConfig.cmake.in"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/211 cmake module/projectConfig.cmake.in"	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-@PACKAGE_INIT@
-
-set(MYVAR "@MYVAR@")
-set(MYQUOTEDVAR @MYQUOTEDVAR@)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/212 native file path override/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/212 native file path override/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/212 native file path override/installed_files.txt"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/212 native file path override/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,2 +0,0 @@
-usr/custom_bindir/main?exe
-?msvc:usr/custom_bindir/main.pdb
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/212 native file path override/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/212 native file path override/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/212 native file path override/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/212 native file path override/test.json"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,6 @@
+{
+  "installed": [
+    {"type": "exe", "file": "usr/custom_bindir/main"},
+    {"type": "pdb", "file": "usr/custom_bindir/main"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/213 tap tests/cat.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/213 tap tests/cat.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/213 tap tests/cat.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/213 tap tests/cat.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,26 @@
+#include <errno.h>
+#include <stdio.h>
+
+int main(int argc, char **argv) {
+    char buf[1024];
+    size_t len;
+    FILE *fh;
+
+    if (argc != 2) {
+        fprintf(stderr, "Incorrect number of arguments, got %i\n", argc);
+        return 1;
+    }
+    fh = fopen(argv[1], "r");
+    if (fh == NULL) {
+        fprintf(stderr, "Opening %s: errno=%i\n", argv[1], errno);
+        return 1;
+    }
+    do {
+        len = fread(buf, 1, sizeof(buf), fh);
+        if (len > 0) {
+            fwrite(buf, 1, len, stdout);
+        }
+    } while (len > 0);
+    fclose(fh);
+    return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/213 tap tests/issue7515.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/213 tap tests/issue7515.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/213 tap tests/issue7515.txt"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/213 tap tests/issue7515.txt"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,27 @@
+1..26
+ok 1 Gtk overrides UI template sets up internal and public template children
+ok 2 Gtk overrides UI template sets up public template children with the correct widgets
+ok 3 Gtk overrides UI template sets up internal template children with the correct widgets
+ok 4 Gtk overrides UI template connects template callbacks to the correct handler
+ok 5 Gtk overrides UI template binds template callbacks to the correct object
+ok 6 Gtk overrides UI template from resource sets up internal and public template children
+ok 7 Gtk overrides UI template from resource sets up public template children with the correct widgets
+ok 8 Gtk overrides UI template from resource sets up internal template children with the correct widgets
+ok 9 Gtk overrides UI template from resource connects template callbacks to the correct handler
+ok 10 Gtk overrides UI template from resource binds template callbacks to the correct object
+ok 11 Gtk overrides UI template from file sets up internal and public template children
+ok 12 Gtk overrides UI template from file sets up public template children with the correct widgets
+ok 13 Gtk overrides UI template from file sets up internal template children with the correct widgets
+ok 14 Gtk overrides UI template from file connects template callbacks to the correct handler
+ok 15 Gtk overrides UI template from file binds template callbacks to the correct object
+ok 16 Gtk overrides Class inheriting from template class sets up internal and public template children # SKIP pending
+ok 17 Gtk overrides Class inheriting from template class sets up public template children with the correct widgets # SKIP pending
+ok 18 Gtk overrides Class inheriting from template class sets up internal template children with the correct widgets # SKIP pending
+ok 19 Gtk overrides Class inheriting from template class connects template callbacks to the correct handler # SKIP pending
+ok 20 Gtk overrides Class inheriting from template class binds template callbacks to the correct object # SKIP pending
+ok 21 Gtk overrides sets CSS names on classes
+ok 22 Gtk overrides avoid crashing when GTK vfuncs are called in garbage collection
+ok 23 Gtk overrides accepts string in place of GdkAtom
+ok 24 Gtk overrides accepts null in place of GdkAtom as GDK_NONE
+ok 25 Gtk overrides uses the correct GType for null child properties
+ok 26 Gtk overrides can create a Gtk.TreeIter with accessible stamp field
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/213 tap tests/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/213 tap tests/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/213 tap tests/meson.build"	2020-01-07 21:09:44.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/213 tap tests/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -1,10 +1,14 @@
 project('test features', 'c')
 
 tester = executable('tester', 'tester.c')
+cat = executable('cat', 'cat.c')
 test('pass', tester, args : ['ok'], protocol: 'tap')
 test('fail', tester, args : ['not ok'], should_fail: true, protocol: 'tap')
 test('xfail', tester, args : ['not ok # todo'], protocol: 'tap')
 test('xpass', tester, args : ['ok # todo'], should_fail: true, protocol: 'tap')
 test('skip', tester, args : ['ok # skip'], protocol: 'tap')
+test('partially skipped', tester, args : ['ok 1\nok 2 # skip'], protocol: 'tap')
+test('partially skipped (real-world example)', cat, args : [files('issue7515.txt')], protocol: 'tap')
+test('skip comment', tester, args : ['ok # Skipped: with a comment'], protocol: 'tap')
 test('skip failure', tester, args : ['not ok # skip'], should_fail: true, protocol: 'tap')
 test('no tests', tester, args : ['1..0 # skip'], protocol: 'tap')
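The new cases exercise partially skipped TAP suites; cat.c simply replays the captured TAP stream in issue7515.txt. In general a TAP-emitting test is declared by setting the protocol keyword, roughly:

    tap_prog = executable('tester', 'tester.c')
    test('tap results', tap_prog, args : ['ok 1\nok 2 # skip'], protocol : 'tap')
    # lines ending in '# skip' count as skipped subtests, not failures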
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/215 link custom/custom_stlib.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/215 link custom/custom_stlib.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/215 link custom/custom_stlib.py"	2019-12-04 18:45:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/215 link custom/custom_stlib.py"	2021-01-06 10:39:48.000000000 +0000
@@ -18,7 +18,7 @@
 
 def get_pic_args():
     platname = platform.system().lower()
-    if platname in ['windows', 'mingw', 'darwin'] or platname.startswith('cygwin'):
+    if platname in ['windows', 'darwin'] or sys.platform == 'cygwin':
         return []
     return ['-fPIC']
 
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/215 link custom/custom_target.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/215 link custom/custom_target.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/215 link custom/custom_target.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/215 link custom/custom_target.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,6 @@
+void outer_lib_func(void);
+
+int main(void) {
+    outer_lib_func();
+    return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/215 link custom/custom_target.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/215 link custom/custom_target.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/215 link custom/custom_target.py"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/215 link custom/custom_target.py"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,6 @@
+#!/usr/bin/env python3
+
+import shutil, sys
+
+if __name__ == '__main__':
+    shutil.copyfile(sys.argv[1], sys.argv[2])
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/215 link custom/dummy.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/215 link custom/dummy.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/215 link custom/dummy.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/215 link custom/dummy.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1 @@
+void inner_lib_func(void) {}
\ No newline at end of file
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/215 link custom/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/215 link custom/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/215 link custom/meson.build"	2020-01-07 21:09:48.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/215 link custom/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -57,3 +57,22 @@
 
 exe4_i = executable('prog4_i', 'prog.c', dependencies: d2_i)
 test('linkwhole2_i', exe2_i)
+
+# Link with custom target
+
+dummy = static_library('dummy', 'dummy.c')
+
+custom_prog = find_program('custom_target.py')
+t = custom_target('custom', input: dummy, output: 'libcustom.a', command: [custom_prog, '@INPUT@', '@OUTPUT@'])
+
+dep1 = declare_dependency(link_with: t)
+dep2 = declare_dependency(link_with: t[0])
+
+lib1 = static_library('lib1', 'outerlib.c', dependencies: dep1)
+lib2 = static_library('lib2', 'outerlib.c', dependencies: dep2)
+
+exe1 = executable('exe1', 'custom_target.c', link_with: lib1)
+test('custom_target_1', exe1)
+
+exe1_2 = executable('exe1_2', 'custom_target.c', link_with: lib2)
+test('custom_target_2', exe2)
\ No newline at end of file
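The appended block checks that the output of a custom_target() can be linked like an ordinary static archive, both directly and through declare_dependency(). Stripped to its essentials:

    dummy = static_library('dummy', 'dummy.c')
    copier = find_program('custom_target.py')
    libcustom = custom_target('custom',
      input : dummy, output : 'libcustom.a',
      command : [copier, '@INPUT@', '@OUTPUT@'])
    dep = declare_dependency(link_with : libcustom)
    executable('user', 'custom_target.c', dependencies : dep)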
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/215 link custom/outerlib.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/215 link custom/outerlib.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/215 link custom/outerlib.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/215 link custom/outerlib.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,3 @@
+void inner_lib_func(void);
+
+void outer_lib_func(void) { inner_lib_func(); }
\ No newline at end of file
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/218 dependency get_variable method/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/218 dependency get_variable method/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/218 dependency get_variable method/meson.build"	2020-01-07 21:09:53.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/218 dependency get_variable method/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -47,6 +47,18 @@
         'cmake config-tool got default when we shouldn\'t have.')
 endif
 
+idep = declare_dependency(variables : {'foo' : 'value'})
+assert(idep.get_variable(pkgconfig : 'foo', cmake : 'foo', configtool : 'foo',
+                         internal : 'foo', default_value : default) == 'value',
+       'internal got default when it shouldn\'t have.')
+assert(idep.get_variable(pkgconfig : 'foo', cmake : 'foo', configtool : 'foo',
+                         internal : 'bar', default_value : default) == default,
+       'internal didn\'t default when it should have.')
+
 idep = declare_dependency()
-assert(idep.get_variable(pkgconfig : 'foo', cmake : 'foo', configtool : 'foo', default_value : default) == default,
-       'Got something other than default from an internal dependency')
+assert(idep.get_variable(pkgconfig : 'foo', cmake : 'foo', configtool : 'foo',
+                         default_value : default) == default,
+       'something went wrong with an InternalDependency with no variables.')
+
+idep = declare_dependency(variables : ['foo=value'])
+assert(idep.get_variable(internal: 'foo') == 'value')
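declare_dependency() can now carry its own variables, which dependency.get_variable() reads via the internal keyword; a missing key falls back to default_value instead of raising. A condensed example of what the new assertions check:

    idep = declare_dependency(variables : {'foo' : 'value'})
    assert(idep.get_variable(internal : 'foo') == 'value')
    assert(idep.get_variable(internal : 'bar', default_value : 'dflt') == 'dflt')
    idep2 = declare_dependency(variables : ['foo=value'])   # list form is accepted too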
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/219 source set configuration_data/all.h" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/219 source set configuration_data/all.h"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/219 source set configuration_data/all.h"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/219 source set configuration_data/all.h"	2021-01-06 10:39:48.000000000 +0000
@@ -3,5 +3,7 @@
 extern void h(void);
 extern void undefined(void);
 
-/* No extern here to get a common symbol */
-void (*p)(void);
+/* Defined in nope.c and f.c,
+ * value depends on the source set and configuration used.
+ */
+extern void (*p)(void);
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/219 source set configuration_data/f.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/219 source set configuration_data/f.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/219 source set configuration_data/f.c"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/219 source set configuration_data/f.c"	2021-01-06 10:39:48.000000000 +0000
@@ -1,5 +1,7 @@
 #include "all.h"
 
+void (*p)(void) = (void *)0x12AB34CD;
+
 void f(void)
 {
 }
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/21 global arg/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/21 global arg/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/21 global arg/meson.build"	2020-01-07 21:06:31.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/21 global arg/meson.build"	2020-09-17 22:00:44.000000000 +0000
@@ -1,23 +1,16 @@
 project('global arg test', 'cpp', 'c')
 
-add_global_arguments('-DMYTHING', language : 'c', native : true)
-add_global_arguments('-DMYTHING', language : 'c', native : false)
-add_global_arguments('-DMYCPPTHING', language : 'cpp', native : true)
-add_global_arguments('-DMYCPPTHING', language : 'cpp', native : false)
-
-add_global_arguments('-DGLOBAL_BUILD', language : 'c', native : true)
-add_global_arguments('-DGLOBAL_HOST', language : 'c', native : false)
+add_global_arguments('-DMYTHING', language : 'c')
+add_global_arguments('-DMYCPPTHING', language : 'cpp')
+add_global_arguments('-DGLOBAL_HOST', language : 'c')
 
 build_c_args = ['-DARG_BUILD']
 c_args = ['-DARG_HOST']
 
-add_global_arguments('-DMYCANDCPPTHING', language: ['c', 'cpp'], native: true)
-add_global_arguments('-DMYCANDCPPTHING', language: ['c', 'cpp'], native: false)
+add_global_arguments('-DMYCANDCPPTHING', language: ['c', 'cpp'])
 
-exe1 = executable('prog1', 'prog.c', c_args : build_c_args, native : true)
-exe2 = executable('prog2', 'prog.c', c_args : c_args, native : false)
+exe2 = executable('prog2', 'prog.c', c_args : c_args)
 exe3 = executable('prog3', 'prog.cc')
 
-test('prog1', exe1)
 test('prog2', exe2)
 test('prog3', exe3)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/220 source set dictionary/all.h" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/220 source set dictionary/all.h"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/220 source set dictionary/all.h"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/220 source set dictionary/all.h"	2021-01-06 10:39:48.000000000 +0000
@@ -3,5 +3,7 @@
 extern void h(void);
 extern void undefined(void);
 
-/* No extern here to get a common symbol */
-void (*p)(void);
+/* Defined in nope.c and f.c,
+ * value depends on the source set and configuration used.
+ */
+extern void (*p)(void);
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/220 source set dictionary/f.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/220 source set dictionary/f.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/220 source set dictionary/f.c"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/220 source set dictionary/f.c"	2021-01-06 10:39:48.000000000 +0000
@@ -1,5 +1,7 @@
 #include "all.h"
 
+void (*p)(void) = (void *)0x1234ABCD;
+
 void f(void)
 {
 }
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/222 source set realistic example/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/222 source set realistic example/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/222 source set realistic example/meson.build"	2020-01-23 21:41:11.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/222 source set realistic example/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -1,4 +1,4 @@
-# a sort-of realistic example that combines the sourceset and kconfig
+# a sort-of realistic example that combines the sourceset and keyval
 # modules, inspired by QEMU's build system
 
 project('sourceset-example', 'cpp', default_options: ['cpp_std=c++11'])
@@ -9,7 +9,7 @@
 endif
 
 ss = import('sourceset')
-kconfig = import('unstable-kconfig')
+keyval = import('keyval')
 
 zlib = declare_dependency(compile_args: '-DZLIB=1')
 another = declare_dependency(compile_args: '-DANOTHER=1')
@@ -39,7 +39,7 @@
 target_dirs = { 'arm' : 'arm', 'aarch64' : 'arm', 'x86': 'x86' }
 
 foreach x : targets
-  config = kconfig.load('config' / x)
+  config = keyval.load('config' / x)
   target_specific = specific.apply(config, strict: false)
   target_common = common.apply(config, strict: false)
   target_deps = target_specific.dependencies() + target_common.dependencies()
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/226 include_type dependency/main.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/226 include_type dependency/main.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/226 include_type dependency/main.cpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/226 include_type dependency/main.cpp"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,8 @@
+#include <iostream>
+#include <boost/graph/filtered_graph.hpp>
+
+using namespace std;
+
+int main(void) {
+  return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/226 include_type dependency/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/226 include_type dependency/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/226 include_type dependency/meson.build"	2020-01-07 21:10:03.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/226 include_type dependency/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -4,10 +4,16 @@
 )
 
 dep = dependency('zlib', method: 'pkg-config', required : false)
+boost_dep = dependency('boost', modules: ['graph'], include_type : 'system', required: false)
+
 if not dep.found()
   error('MESON_SKIP_TEST zlib was not found')
 endif
 
+if not boost_dep.found()
+  error('MESON_SKIP_TEST boost was not found')
+endif
+
 assert(dep.include_type() == 'preserve', 'include_type must default to "preserve"')
 
 dep_sys = dep.as_system()
@@ -26,3 +32,10 @@
 sp_dep_sys = sp_dep.as_system('system')
 assert(sp_dep_sys.include_type() == 'system', 'changing include_type works')
 assert(sp_dep.include_type() == 'preserve', 'as_system must not mutate the original object')
+
+fallback = dependency('sdffgagf_does_not_exist', include_type: 'system', fallback: ['subDep', 'subDep_dep'])
+assert(fallback.include_type() == 'system', 'include_type works with dependency fallback')
+
+# Check that PCH works with `include_type : 'system'` See https://github.com/mesonbuild/meson/issues/7167
+main_exe = executable('main_exe', 'main.cpp', cpp_pch: 'pch/test.hpp', dependencies: boost_dep)
+test('main_test', main_exe)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/226 include_type dependency/pch/test.hpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/226 include_type dependency/pch/test.hpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/226 include_type dependency/pch/test.hpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/226 include_type dependency/pch/test.hpp"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1 @@
+#include <boost/graph/filtered_graph.hpp>
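Here the boost dependency is requested with include_type : 'system' and fed into a C++ PCH, mirroring the regression in mesonbuild/meson issue 7167. The include_type machinery itself behaves roughly like this:

    dep = dependency('zlib', method : 'pkg-config')
    assert(dep.include_type() == 'preserve')          # the default
    dep_sys = dep.as_system('system')                 # include dirs become -isystem style
    assert(dep_sys.include_type() == 'system')
    assert(dep.include_type() == 'preserve')          # the original object is not mutated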
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/227 fs module/a_symlink" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/227 fs module/a_symlink"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/227 fs module/a_symlink"	2020-01-07 21:10:02.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/227 fs module/a_symlink"	1970-01-01 00:00:00.000000000 +0000
@@ -1,87 +0,0 @@
-project('fs module test')
-
-is_windows = build_machine.system() == 'windows'
-
-fs = import('fs')
-
-assert(fs.exists('meson.build'), 'Existing file reported as missing.')
-assert(not fs.exists('nonexisting'), 'Nonexisting file was found.')
-
-# When one creates a source release with sdist, Python
-# does not store symlinks in the archive as native symlinks.
-# Thus the extracted archive does not contain them either.
-# Sadly this means that we can only execute the symlink test when
-# running from a git checkout because otherwise we'd need to
-# do postprocessing on the generated archive before actual release.
-# That is both nonstandard an error prone and having symlinks in
-# the archive would probably break on Windows anyway.
-is_git_checkout = fs.exists('../../../.git')
-
-if not is_windows and build_machine.system() != 'cygwin' and is_git_checkout
-  assert(fs.is_symlink('a_symlink'), 'Symlink not detected.')
-  assert(not fs.is_symlink('meson.build'), 'Regular file detected as symlink.')
-endif
-
-assert(fs.is_file('meson.build'), 'File not detected as a file.')
-assert(not fs.is_file('subprojects'), 'Directory detected as a file.')
-assert(not fs.is_file('nonexisting'), 'Bad path detected as a file.')
-
-assert(fs.is_dir('subprojects'), 'Dir not detected correctly.')
-assert(not fs.is_dir('meson.build'), 'File detected as a dir.')
-assert(not fs.is_dir('nonexisting'), 'Bad path detected as a dir.')
-
-assert(fs.is_dir('~'), 'expanduser not working')
-assert(not fs.is_file('~'), 'expanduser not working')
-
-original = 'foo.txt'
-new = fs.replace_suffix(original, '.ini')
-assert(new == 'foo.ini', 'replace_suffix failed')
-
-original = 'foo'
-new = fs.replace_suffix(original, '.ini')
-assert(new == 'foo.ini', 'replace_suffix did not add suffix to suffixless file')
-
-original = 'foo.dll.a'
-new = fs.replace_suffix(original, '.so')
-assert(new == 'foo.dll.so', 'replace_suffix did not only modify last suffix')
-
-original = 'foo.dll'
-new = fs.replace_suffix(original, '')
-assert(new == 'foo',  'replace_suffix did not only delete last suffix')
-
-# `/` on windows is interpreted like `.drive` which in general may not be `c:/`
-# the files need not exist for fs.replace_suffix()
-original = is_windows ? 'j:/foo/bar.txt' : '/foo/bar.txt'
-new_check = is_windows ? 'j:\\foo\\bar.ini' : '/foo/bar.ini'
-
-new = fs.replace_suffix(original, '.ini')
-assert(new == new_check, 'absolute path replace_suffix failed')
-
-# -- hash
-
-md5 = fs.hash('subdir/subdirfile.txt', 'md5')
-sha256 = fs.hash('subdir/subdirfile.txt', 'sha256')
-assert(md5 == 'd0795db41614d25affdd548314b30b3b', 'md5sum did not match')
-assert(sha256 == 'be2170b0dae535b73f6775694fffa3fd726a43b5fabea11b7342f0605917a42a', 'sha256sum did not match')
-
-# -- size
-
-size = fs.size('subdir/subdirfile.txt')
-assert(size == 19, 'file size not found correctly')
-
-# -- are filenames referring to the same file?
-f1 = 'meson.build'
-f2 = 'subdir/../meson.build'
-assert(fs.is_samepath(f1, f2), 'is_samepath not detercting same files')
-assert(fs.is_samepath(meson.source_root(), 'subdir/..'), 'is_samepath not detecting same directory')
-assert(not fs.is_samepath(f1, 'subdir/subdirfile.txt'), 'is_samepath known bad comparison')
-assert(not fs.is_samepath('not-a-path', f2), 'is_samepath should not error if path(s) do not exist')
-
-if not is_windows and build_machine.system() != 'cygwin' and is_git_checkout
-  assert(fs.is_samepath('a_symlink', 'meson.build'), 'symlink is_samepath fail')
-endif
-
-assert(fs.parent('foo/bar') == 'foo', 'failed to get dirname')
-assert(fs.name('foo/bar') == 'bar', 'failed to get basename')
-
-subdir('subdir')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/227 fs module/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/227 fs module/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/227 fs module/meson.build"	2020-01-07 21:10:02.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/227 fs module/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -7,18 +7,14 @@
 assert(fs.exists('meson.build'), 'Existing file reported as missing.')
 assert(not fs.exists('nonexisting'), 'Nonexisting file was found.')
 
-# When one creates a source release with sdist, Python
-# does not store symlinks in the archive as native symlinks.
-# Thus the extracted archive does not contain them either.
-# Sadly this means that we can only execute the symlink test when
-# running from a git checkout because otherwise we'd need to
-# do postprocessing on the generated archive before actual release.
-# That is both nonstandard an error prone and having symlinks in
-# the archive would probably break on Windows anyway.
-is_git_checkout = fs.exists('../../../.git')
-
-if not is_windows and build_machine.system() != 'cygwin' and is_git_checkout
-  assert(fs.is_symlink('a_symlink'), 'Symlink not detected.')
+if not is_windows and build_machine.system() != 'cygwin'
+  # Symlinks on Windows have specific requirements including:
+  # * Meson running under Python >= 3.8
+  # * Windows user permissions to create symlinks, and/or Windows in Developer mode
+  # so at this time the symlink test is skipped for Windows.
+  symlink = meson.current_build_dir() / 'a_symlink'
+  run_command('ln', '-s', meson.current_source_dir() / 'meson.build', symlink)
+  assert(fs.is_symlink(symlink), 'Symlink not detected.')
   assert(not fs.is_symlink('meson.build'), 'Regular file detected as symlink.')
 endif
 
@@ -30,8 +26,34 @@
 assert(not fs.is_dir('meson.build'), 'File detected as a dir.')
 assert(not fs.is_dir('nonexisting'), 'Bad path detected as a dir.')
 
-assert(fs.is_dir('~'), 'expanduser not working')
-assert(not fs.is_file('~'), 'expanduser not working')
+assert(fs.is_dir('~'), 'home directory not detected')
+assert(not fs.is_file('~'), 'home directory detected as file')
+
+# -- expanduser
+assert(fs.expanduser('~') != '~','expanduser failed')
+assert(fs.expanduser('~/foo').endswith('foo'), 'expanduser with tail failed')
+
+# -- as_posix
+assert(fs.as_posix('/') == '/', 'as_posix idempotent')
+assert(fs.as_posix('\\') == '/', 'as_posix simple')
+assert(fs.as_posix('\\\\') == '/', 'as_posix simple')
+assert(fs.as_posix('foo\\bar/baz') == 'foo/bar/baz', 'as_posix mixed slash')
+
+# -- is_absolute
+winabs = 'q:/foo'
+unixabs = '/foo'
+if is_windows
+  assert(fs.is_absolute(winabs), 'is_absolute windows not detected')
+  assert(not fs.is_absolute(unixabs), 'is_absolute unix false positive')
+else
+  assert(fs.is_absolute(unixabs), 'is_absolute unix not detected')
+  assert(not fs.is_absolute(winabs), 'is_absolute windows false positive')
+endif
+
+# -- replace_suffix
+
+original = 'foo'
+assert(fs.replace_suffix(original, '') == original, 'replace_suffix idempotent')
 
 original = 'foo.txt'
 new = fs.replace_suffix(original, '.ini')
@@ -74,14 +96,23 @@
 f2 = 'subdir/../meson.build'
 assert(fs.is_samepath(f1, f2), 'is_samepath not detercting same files')
 assert(fs.is_samepath(meson.source_root(), 'subdir/..'), 'is_samepath not detecting same directory')
+assert(fs.is_samepath(meson.project_source_root(), 'subdir/..'), 'is_samepath not detecting same directory')
+# This fails with python3.5. It can be uncommented when we depend on python >= 3.6
+#assert(fs.is_samepath(meson.project_build_root(), meson.current_build_dir() / 'subdir/..'), 'is_samepath not detecting same directory')
 assert(not fs.is_samepath(f1, 'subdir/subdirfile.txt'), 'is_samepath known bad comparison')
 assert(not fs.is_samepath('not-a-path', f2), 'is_samepath should not error if path(s) do not exist')
 
-if not is_windows and build_machine.system() != 'cygwin' and is_git_checkout
-  assert(fs.is_samepath('a_symlink', 'meson.build'), 'symlink is_samepath fail')
+if not is_windows and build_machine.system() != 'cygwin'
+  assert(fs.is_samepath(symlink, 'meson.build'), 'symlink is_samepath fail')
 endif
 
+# parts of path
 assert(fs.parent('foo/bar') == 'foo', 'failed to get dirname')
 assert(fs.name('foo/bar') == 'bar', 'failed to get basename')
+assert(fs.name('foo/bar/baz.dll.a') == 'baz.dll.a', 'failed to get basename with compound suffix')
+assert(fs.stem('foo/bar/baz.dll') == 'baz', 'failed to get stem with suffix')
+assert(fs.stem('foo/bar/baz.dll.a') == 'baz.dll', 'failed to get stem with compound suffix')
 
 subdir('subdir')
+
+subproject('subbie')
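The fs module test now creates its own symlink at configure time and covers the newer path helpers. Roughly, the helpers added here behave as follows:

    fs = import('fs')
    assert(fs.name('foo/bar/baz.dll.a') == 'baz.dll.a')     # final path component
    assert(fs.stem('foo/bar/baz.dll.a') == 'baz.dll')       # strips only the last suffix
    assert(fs.as_posix('foo\\bar/baz') == 'foo/bar/baz')    # backslashes to forward slashes
    assert(fs.expanduser('~/x').endswith('x'))              # '~' expands to the home directory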
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/227 fs module/subdir/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/227 fs module/subdir/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/227 fs module/subdir/meson.build"	2019-12-04 18:45:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/227 fs module/subdir/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -1 +1,3 @@
 assert(fs.exists('subdirfile.txt'), 'Subdir file lookup is broken.')
+assert(fs.is_samepath(meson.project_source_root(), '..'), 'is_samepath not detecting same directory')
+assert(fs.is_samepath(meson.project_build_root(), meson.current_build_dir() / '..'), 'is_samepath not detecting same directory')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/227 fs module/subprojects/subbie/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/227 fs module/subprojects/subbie/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/227 fs module/subprojects/subbie/meson.build"	2019-12-04 18:45:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/227 fs module/subprojects/subbie/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -3,7 +3,7 @@
 fs = import('fs')
 
 assert(fs.exists('subprojectfile.txt'), 'Subproject root file not found.')
+assert(fs.is_samepath(meson.project_source_root(), meson.current_source_dir()), 'is_samepath not detecting same directory')
+assert(fs.is_samepath(meson.project_build_root(), meson.current_build_dir()), 'is_samepath not detecting same directory')
 
 subdir('subsub')
-
-subproject('subbie')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/227 fs module/subprojects/subbie/subsub/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/227 fs module/subprojects/subbie/subsub/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/227 fs module/subprojects/subbie/subsub/meson.build"	2019-12-04 18:45:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/227 fs module/subprojects/subbie/subsub/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -1 +1,3 @@
 assert(fs.exists('subsubfile.txt'), 'Subproject subdir lookup failed.')
+assert(fs.is_samepath(meson.project_source_root(), meson.current_source_dir() / '..'), 'is_samepath not detecting same directory')
+assert(fs.is_samepath(meson.project_build_root(), meson.current_build_dir() / '..'), 'is_samepath not detecting same directory')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/228 zlib/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/228 zlib/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/228 zlib/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/228 zlib/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,23 @@
+project('zlib system dependency', 'c')
+
+if not ['darwin', 'freebsd', 'dragonfly', 'windows'].contains(host_machine.system())
+  error('MESON_SKIP_TEST only applicable on macOS, FreeBSD, DragonflyBSD, and Windows.')
+endif
+
+cc = meson.get_compiler('c')
+
+if host_machine.system() == 'darwin' and cc.get_id() != 'clang'
+  # This will only work on macOS when using Apple's clang compiler, but there is
+  # no way at the Meson source level to differentiate Apple clang from LLVM clang.
+  # In the Meson CI only Apple clang is tested.
+  error('MESON_SKIP_TEST on macOS only clang is supported.')
+endif
+
+if not (cc.find_library('z', required: false).found() or
+        cc.find_library('zlib', required : false).found() or
+        cc.find_library('zlib1', required : false).found())
+  error('MESON_SKIP_TEST Cannot seem to find zlib via find_library, this test will probably fail.')
+endif
+
+z = dependency('zlib', method : 'system')
+assert(z.version().version_compare('>= 1.2'), 'Version does not seem to have been detected correctly.')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/229 native prop/crossfile.ini" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/229 native prop/crossfile.ini"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/229 native prop/crossfile.ini"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/229 native prop/crossfile.ini"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,3 @@
+[properties]
+astring = 'cross'
+anarray = ['one', 'two']
\ No newline at end of file
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/229 native prop/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/229 native prop/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/229 native prop/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/229 native prop/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,25 @@
+project('get prop')
+
+x = meson.get_external_property('astring')
+ref = meson.is_cross_build() ? 'cross' : 'mystring'
+assert(x==ref, 'did not get native property string. did you use "meson setup --native-file native.txt"')
+
+x = meson.get_external_property('astring', native: true)
+assert(x=='mystring', 'did not get native property with native:true and non-cross build.')
+
+x = meson.get_external_property('astring', 'fallback', native: false)
+assert(x==ref, 'did not get native property with native:false and non-cross build.')
+
+
+x = meson.get_external_property('notexist', 'fallback')
+assert(x=='fallback', 'fallback did not work')
+
+x = meson.get_external_property('notexist', 'fallback', native: true)
+assert(x=='fallback', 'fallback native:true did not work')
+
+x = meson.get_external_property('notexist', 'fallback', native: false)
+assert(x=='fallback', 'fallback native:false did not work')
+
+
+x = meson.get_external_property('anarray')
+assert(x==['one', 'two'], 'array did not work')
\ No newline at end of file
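
The fallback behaviour exercised in this test is roughly analogous to dict.get()
in Python (an analogy for illustration, not how Meson implements it):

    props = {'astring': 'mystring', 'anarray': ['one', 'two']}  # the [properties] section
    assert props.get('astring', 'fallback') == 'mystring'
    assert props.get('notexist', 'fallback') == 'fallback'
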
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/229 native prop/nativefile.ini" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/229 native prop/nativefile.ini"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/229 native prop/nativefile.ini"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/229 native prop/nativefile.ini"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,3 @@
+[properties]
+astring = 'mystring'
+anarray = ['one', 'two']
\ No newline at end of file
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/230 persubproject options/foo.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/230 persubproject options/foo.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/230 persubproject options/foo.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/230 persubproject options/foo.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,5 @@
+int foo(void);
+
+int foo(void) {
+  return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/230 persubproject options/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/230 persubproject options/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/230 persubproject options/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/230 persubproject options/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,15 @@
+project('persubproject options', 'c',
+  default_options : ['default_library=both',
+                     'werror=true',
+                     'warning_level=3'])
+
+assert(get_option('default_library') == 'both', 'Parent default_library should be "both"')
+assert(get_option('werror'))
+assert(get_option('warning_level') == '3')
+
+# Check that it builds both by calling a method that only both_libraries targets implement
+lib = library('lib1', 'foo.c')
+lib.get_static_lib()
+
+subproject('sub1')
+subproject('sub2', default_options : ['default_library=static'])
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/230 persubproject options/subprojects/sub1/foo.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/230 persubproject options/subprojects/sub1/foo.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/230 persubproject options/subprojects/sub1/foo.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/230 persubproject options/subprojects/sub1/foo.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,8 @@
+int foo(void);
+
+int foo(void) {
+  /* This is built with -Werror, it would error if warning_level=3 was inherited
+   * from main project and not overridden by this subproject's default_options. */
+  int x;
+  return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/230 persubproject options/subprojects/sub1/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/230 persubproject options/subprojects/sub1/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/230 persubproject options/subprojects/sub1/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/230 persubproject options/subprojects/sub1/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,9 @@
+project('sub1', 'c',
+ default_options : ['warning_level=0'])
+
+assert(get_option('default_library') == 'both', 'Should inherit parent project default_library')
+assert(get_option('warning_level') == '0')
+
+# Check that it builds both by calling a method that only both_libraries targets implement
+lib = library('lib1', 'foo.c')
+lib.get_static_lib()
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/230 persubproject options/subprojects/sub2/foo.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/230 persubproject options/subprojects/sub2/foo.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/230 persubproject options/subprojects/sub2/foo.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/230 persubproject options/subprojects/sub2/foo.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,9 @@
+int foo(void);
+
+#ifdef __GNUC__
+#warning This should not produce error
+#endif
+
+int foo(void) {
+  return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/230 persubproject options/subprojects/sub2/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/230 persubproject options/subprojects/sub2/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/230 persubproject options/subprojects/sub2/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/230 persubproject options/subprojects/sub2/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,10 @@
+project('sub2', 'c',
+ default_options : ['default_library=shared',
+                    'werror=false'])
+
+assert(get_option('default_library') == 'static', 'Parent should override default_library')
+assert(not get_option('werror'))
+
+# If it did not build only a static library, the target names would clash.
+library('lib1', 'foo.c')
+shared_library('lib1', 'foo.c')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/231 arithmetic operators/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/231 arithmetic operators/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/231 arithmetic operators/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/231 arithmetic operators/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,8 @@
+project('arithmetic operators')
+assert(5 - 3 - 1 == 1)
+assert(5 - (3 - 1) == 3)
+assert(5 - 1 * 3 - 3 == -1)
+assert(420 - 300 - 51 == 69)
+assert(1000 / 2 / 2 / 2 == 125)
+assert(4 * 9 / 3 % 8 - 3 - 10 / 2 == -4)
+assert(94 - 30 + (2 - (40 - 6 + 7) - 9) - 10 == 6)
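
For reference, the same chains evaluate identically in Python when // stands in
for Meson's integer division (a quick sanity check, not part of the test case):

    assert 1000 // 2 // 2 // 2 == 125
    assert 4 * 9 // 3 % 8 - 3 - 10 // 2 == -4   # 36//3=12, 12%8=4, 4-3-5=-4
    assert 94 - 30 + (2 - (40 - 6 + 7) - 9) - 10 == 6
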
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/232 link language/c_linkage.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/232 link language/c_linkage.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/232 link language/c_linkage.cpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/232 link language/c_linkage.cpp"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,5 @@
+extern "C" {
+    int makeInt(void) {
+        return 0;
+    }
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/232 link language/c_linkage.h" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/232 link language/c_linkage.h"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/232 link language/c_linkage.h"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/232 link language/c_linkage.h"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,10 @@
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+int makeInt(void);
+
+#ifdef __cplusplus
+}
+#endif
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/232 link language/lib.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/232 link language/lib.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/232 link language/lib.cpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/232 link language/lib.cpp"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,5 @@
+extern "C" {
+    int makeInt(void) {
+        return 1;
+    }
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/232 link language/main.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/232 link language/main.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/232 link language/main.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/232 link language/main.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,5 @@
+#include "c_linkage.h"
+
+int main(void) {
+    return makeInt();
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/232 link language/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/232 link language/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/232 link language/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/232 link language/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,18 @@
+project(
+  'link_language',
+  ['c', 'cpp'],
+)
+
+exe = executable(
+  'main',
+  ['main.c', 'c_linkage.cpp'],
+  link_language : 'c',
+)
+
+lib = library(
+  'mylib',
+  ['lib.cpp'],
+  link_language : 'c',
+)
+
+test('main', exe)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/233 link depends indexed custom target/check_arch.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/233 link depends indexed custom target/check_arch.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/233 link depends indexed custom target/check_arch.py"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/233 link depends indexed custom target/check_arch.py"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,32 @@
+#!/usr/bin/env python3
+
+import re
+import sys
+import shutil
+import subprocess
+
+exepath = sys.argv[1]
+want_arch = sys.argv[2]
+dummy_output = sys.argv[3]
+
+with open(dummy_output, 'w') as f:
+    f.write('')
+
+if not shutil.which('dumpbin'):
+    print('dumpbin not found, skipping')
+    sys.exit(0)
+
+out = subprocess.check_output(['dumpbin', '/HEADERS', exepath],
+                              universal_newlines=True)
+for line in out.split('\n'):
+    m = re.match(r'.* machine \(([A-Za-z0-9]+)\)$', line)
+    if m:
+        arch = m.groups()[0].lower()
+
+if arch == 'arm64':
+    arch = 'aarch64'
+elif arch == 'x64':
+    arch = 'x86_64'
+
+if arch != want_arch:
+    raise RuntimeError('Wanted arch {} but exe uses {}'.format(want_arch, arch))
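
A quick illustration of the regex above against a typical 'dumpbin /HEADERS'
machine line (the sample line is an assumption about dumpbin's output format,
not captured output):

    import re
    sample = '            8664 machine (x64)'
    m = re.match(r'.* machine \(([A-Za-z0-9]+)\)$', sample)
    assert m is not None and m.group(1).lower() == 'x64'   # later normalised to 'x86_64'
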
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/233 link depends indexed custom target/foo.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/233 link depends indexed custom target/foo.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/233 link depends indexed custom target/foo.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/233 link depends indexed custom target/foo.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,15 @@
+#include <stdio.h>
+
+int main(void) {
+  const char *fn = DEPFILE;
+  FILE *f = fopen(fn, "r");
+  if (!f) {
+    printf("could not open %s", fn);
+    return 1;
+  }
+  else {
+    printf("successfully opened %s", fn);
+  }
+
+  return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/233 link depends indexed custom target/make_file.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/233 link depends indexed custom target/make_file.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/233 link depends indexed custom target/make_file.py"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/233 link depends indexed custom target/make_file.py"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,8 @@
+#!/usr/bin/env python3
+import sys
+
+with open(sys.argv[1], 'w') as f:
+    print('# this file does nothing', file=f)
+
+with open(sys.argv[2], 'w') as f:
+    print('# this file does nothing', file=f)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/233 link depends indexed custom target/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/233 link depends indexed custom target/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/233 link depends indexed custom target/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/233 link depends indexed custom target/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,25 @@
+project('link_depends_indexed_custom_target', 'c')
+
+if meson.backend().startswith('vs')
+  # FIXME: Broken on the VS backends
+  error('MESON_SKIP_TEST see https://github.com/mesonbuild/meson/issues/1799')
+endif
+
+cmd = find_program('make_file.py')
+
+dep_files = custom_target('gen_dep',
+        command: [cmd, '@OUTPUT@'],
+        output: ['dep_file1', 'dep_file2'])
+
+exe = executable('foo', 'foo.c',
+        link_depends: dep_files[1],
+        c_args: ['-DDEPFILE="' + dep_files[0].full_path()+ '"'])
+
+check_arch = find_program('check_arch.py')
+custom_target('check-arch',
+        command: [check_arch, exe, host_machine.cpu_family(), '@OUTPUT@'],
+        build_by_default: true,
+        output: 'dummy.txt')
+
+# check that dep_file1 exists, which means that link_depends target ran
+test('runtest', exe)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/234 very long commmand line/codegen.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/234 very long commmand line/codegen.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/234 very long commmand line/codegen.py"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/234 very long commmand line/codegen.py"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,7 @@
+#!/usr/bin/env python3
+
+import sys
+from pathlib import Path
+
+Path(sys.argv[2]).write_text(
+    'int func{n}(void) {{ return {n}; }}'.format(n=sys.argv[1]))
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/234 very long commmand line/main.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/234 very long commmand line/main.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/234 very long commmand line/main.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/234 very long commmand line/main.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1 @@
+int main(void) { return 0; }
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/234 very long commmand line/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/234 very long commmand line/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/234 very long commmand line/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/234 very long commmand line/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,49 @@
+project('very long command lines', 'c')
+
+# Get the current system's commandline length limit.
+if build_machine.system() == 'windows'
+  # Various limits on windows:
+  # cmd.exe: 8kb
+  # CreateProcess: 32kb
+  limit = 32767
+  # NOTE: filename limit is 260 characters unless
+  # 1. Python >= 3.6 is being used
+  # 2. Windows 10 registry has been edited to enable long pathnames
+  # ninja backend uses absolute filenames, so we ensure they don't exceed 260.
+elif build_machine.system() == 'cygwin'
+  # cygwin-to-win32: see above
+  # cygwin-to-cygwin: no limit?
+  # Cygwin is slow, so only test it lightly here.
+  limit = 8192
+else
+  # ninja passes whole line as a single argument, for which
+  # the limit is 128k as of Linux 2.6.23.  See MAX_ARG_STRLEN.
+  # BSD seems similar, see https://www.in-ulm.de/~mascheck/various/argmax
+  limit = 131072
+endif
+# Now exceed that limit, but not so far that the test takes too long.
+namelen = 260
+nfiles = 50 + limit / namelen
+message('Expected link commandline length is approximately ' + '@0@'.format((nfiles * (namelen+28))))
+
+seq = run_command('name_gen.py', nfiles.to_string(), meson.build_root()).stdout().strip().split('\n')
+
+sources = []
+codegen = find_program('codegen.py')
+
+i=0
+foreach name : seq
+  sources += custom_target('codegen' + i.to_string(),
+                           command: [codegen, i.to_string(), '@OUTPUT@'],
+                           output: name + '.c')
+  i+=1
+endforeach
+
+shared_library('sharedlib', sources)
+static_library('staticlib', sources)
+executable('app', 'main.c', sources)
+
+# Also test short commandlines to make sure that doesn't regress
+shared_library('sharedlib0', sources[0])
+static_library('staticlib0', sources[0])
+executable('app0', 'main.c', sources[0])
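
Working the arithmetic above through for the Linux case (a rough illustration;
the exact totals depend on the build directory path and backend):

    limit = 131072                       # MAX_ARG_STRLEN on Linux
    namelen = 260
    nfiles = 50 + limit // namelen       # 50 + 504 = 554 generated source files
    approx = nfiles * (namelen + 28)     # ~159,552 characters, well past the limit
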
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/234 very long commmand line/name_gen.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/234 very long commmand line/name_gen.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/234 very long commmand line/name_gen.py"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/234 very long commmand line/name_gen.py"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,23 @@
+#!/usr/bin/env python3
+"""
+generate a sequence of filenames that do not exceed MAX_LEN=260,
+for Python < 3.6 and Windows without a modified registry
+"""
+
+import sys
+import string
+
+name_len = 260 - len(sys.argv[2]) - 4 - 39 - 4 - 2
+if name_len < 1:
+    raise ValueError('The meson build directory pathname is so long '
+                     'that we cannot generate filenames within 260 characters.')
+# Leave room for suffixes and file separators, and for Meson-generated text
+# such as ".c.obj.d" and other decorators that Meson adds at configure time
+# for intermediate files.
+
+base = string.ascii_letters * 5  # 260 characters
+max_num_len = len(str(sys.argv[1]))
+base = base[: name_len - max_num_len]
+
+for i in range(int(sys.argv[1])):
+    print("{base}{i:0{max_num_len}d}".format(base=base, max_num_len=max_num_len, i=i))
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/235 custom_target source/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/235 custom_target source/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/235 custom_target source/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/235 custom_target source/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,5 @@
+project('a', ['c'])
+
+x = find_program('x.py')
+outs = custom_target('foo', output: ['x.c', 'y'], input: 'a', command: [x])
+executable('testprog', outs[0])
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/235 custom_target source/x.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/235 custom_target source/x.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/235 custom_target source/x.py"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/235 custom_target source/x.py"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,5 @@
+#! /usr/bin/env python3
+with open('x.c', 'w') as f:
+    print('int main(void) { return 0; }', file=f)
+with open('y', 'w'):
+    pass
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/235 disabler array addition/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/235 disabler array addition/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/235 disabler array addition/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/235 disabler array addition/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,9 @@
+project('disabler_inside_array', 'c')
+
+exes = []
+
+exes += library('a', 'test.c')
+
+exes += library('b', 'test.c', dependencies : disabler())
+
+exes += library('c', 'test.c')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/235 disabler array addition/test.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/235 disabler array addition/test.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/235 disabler array addition/test.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/235 disabler array addition/test.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1 @@
+int stub(void) { return 0; }
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/236 external project/app.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/236 external project/app.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/236 external project/app.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/236 external project/app.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,7 @@
+#include <libfoo.h>
+
+int main(void)
+{
+    return call_foo() == 42 ? 0 : 1;
+}
+
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/236 external project/func.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/236 external project/func.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/236 external project/func.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/236 external project/func.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,7 @@
+#include "func.h"
+
+int func(void)
+{
+    return 1;
+}
+
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/236 external project/func.h" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/236 external project/func.h"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/236 external project/func.h"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/236 external project/func.h"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1 @@
+int func(void);
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/236 external project/libfoo/configure" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/236 external project/libfoo/configure"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/236 external project/libfoo/configure"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/236 external project/libfoo/configure"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,44 @@
+#! /bin/sh
+
+srcdir=$(dirname "$0")
+
+for i in "$@"
+do
+case $i in
+    --prefix=*)
+    PREFIX="${i#*=}"
+    shift
+    ;;
+    --libdir=*)
+    LIBDIR="${i#*=}"
+    shift
+    ;;
+    --includedir=*)
+    INCDIR="${i#*=}"
+    shift
+    ;;
+    --libext=*)
+    LIBEXT="${i#*=}"
+    shift
+    ;;
+    *)
+    shift
+    ;;
+esac
+done
+
+DEP_ARGS=$(pkg-config somelib --cflags --libs)
+
+cat > Makefile << EOL
+all: libfoo.$LIBEXT
+
+libfoo.$LIBEXT:
+	$CC "$srcdir/libfoo.c" -shared -fPIC $DEP_ARGS -o \$@
+
+install: libfoo.$LIBEXT
+	mkdir -p "\$(DESTDIR)$LIBDIR";
+	mkdir -p "\$(DESTDIR)$LIBDIR/pkgconfig";
+	mkdir -p "\$(DESTDIR)$INCDIR";
+	cp \$< "\$(DESTDIR)$LIBDIR";
+	cp "$srcdir/libfoo.h" "\$(DESTDIR)$INCDIR";
+EOL
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/236 external project/libfoo/libfoo.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/236 external project/libfoo/libfoo.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/236 external project/libfoo/libfoo.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/236 external project/libfoo/libfoo.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,8 @@
+#include "libfoo.h"
+
+int func(void);
+
+int call_foo()
+{
+  return func() == 1 ? 42 : 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/236 external project/libfoo/libfoo.h" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/236 external project/libfoo/libfoo.h"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/236 external project/libfoo/libfoo.h"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/236 external project/libfoo/libfoo.h"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,3 @@
+#pragma once
+
+int call_foo(void);
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/236 external project/libfoo/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/236 external project/libfoo/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/236 external project/libfoo/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/236 external project/libfoo/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,22 @@
+mod = import('unstable_external_project')
+
+target_system = target_machine.system()
+if target_system in ['windows', 'cygwin']
+  libext = 'dll'
+elif target_system == 'darwin'
+  libext = 'dylib'
+else
+  libext = 'so'
+endif
+
+p = mod.add_project('configure',
+  configure_options : [
+    '--prefix=@PREFIX@',
+    '--libdir=@PREFIX@/@LIBDIR@',
+    '--includedir=@PREFIX@/@INCLUDEDIR@',
+    '--libext=' + libext,
+  ],
+)
+
+libfoo_dep = declare_dependency(link_with : somelib,
+  dependencies : p.dependency('foo'))
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/236 external project/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/236 external project/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/236 external project/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/236 external project/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,27 @@
+project('test external project', 'c')
+
+if not find_program('pkg-config', required: false).found()
+  error('MESON_SKIP_TEST: pkg-config not found')
+endif
+
+if not find_program('make', required : false).found()
+  error('MESON_SKIP_TEST: make not found')
+endif
+
+if host_machine.system() == 'windows'
+  error('MESON_SKIP_TEST: The fake configure script is too dumb to work on Windows')
+endif
+
+if meson.is_cross_build()
+  # CI uses PKG_CONFIG_SYSROOT_DIR which breaks -uninstalled.pc usage.
+  error('MESON_SKIP_TEST: Cross build support is too limited for this test')
+endif
+
+pkg = import('pkgconfig')
+
+somelib = library('somelib', 'func.c')
+pkg.generate(somelib)
+
+subdir('libfoo')
+
+executable('test-find-library', 'app.c', dependencies : libfoo_dep)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/236 external project/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/236 external project/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/236 external project/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/236 external project/test.json"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "installed": [
+    { "type": "shared_lib",  "file": "usr/lib/foo" },
+    { "type": "file",  "file": "usr/include/libfoo.h" },
+    { "type": "file", "file": "usr/lib/pkgconfig/somelib.pc" }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/237 subdir files/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/237 subdir files/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/237 subdir files/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/237 subdir files/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,3 @@
+project('subdir files test', 'c')
+subdir('subdir')
+executable('prog', sources: subdir_sources)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/237 subdir files/subdir/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/237 subdir files/subdir/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/237 subdir files/subdir/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/237 subdir files/subdir/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1 @@
+subdir_sources = files(['prog.c'])
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/237 subdir files/subdir/prog.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/237 subdir files/subdir/prog.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/237 subdir files/subdir/prog.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/237 subdir files/subdir/prog.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1 @@
+int main(void) { return 0; }
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/239 dependency allow_fallback/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/239 dependency allow_fallback/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/239 dependency allow_fallback/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/239 dependency allow_fallback/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,12 @@
+project('subproject fallback', 'c')
+
+foob_dep = dependency('foob', allow_fallback: true, required: false)
+assert(foob_dep.found())
+
+# Careful!  Once a subproject has been triggered and it has
+# overridden the dependency, it sticks.
+foob_dep = dependency('foob', allow_fallback: false, required: false)
+assert(foob_dep.found())
+
+foob3_dep = dependency('foob3', allow_fallback: false, required: false)
+assert(not foob3_dep.found())
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/239 dependency allow_fallback/subprojects/foob/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/239 dependency allow_fallback/subprojects/foob/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/239 dependency allow_fallback/subprojects/foob/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/239 dependency allow_fallback/subprojects/foob/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,2 @@
+project('foob', 'c')
+meson.override_dependency('foob', declare_dependency())
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/239 dependency allow_fallback/subprojects/foob3/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/239 dependency allow_fallback/subprojects/foob3/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/239 dependency allow_fallback/subprojects/foob3/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/239 dependency allow_fallback/subprojects/foob3/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,2 @@
+project('foob3', 'c')
+# Note that there is no override_dependency here
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/23 object extraction/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/23 object extraction/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/23 object extraction/meson.build"	2020-01-07 21:06:31.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/23 object extraction/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -9,12 +9,15 @@
   obj1 = lib1.extract_objects('src/lib.c')
   obj2 = lib2.extract_objects(['lib.c'])
   obj3 = lib2.extract_objects(files('lib.c'))
+  obj4 = lib2.extract_objects(['lib.c', 'lib.c'])
 
   e1 = executable('main1', 'main.c', objects : obj1)
   e2 = executable('main2', 'main.c', objects : obj2)
   e3 = executable('main3', 'main.c', objects : obj3)
+  e4 = executable('main4', 'main.c', objects : obj4)
 
   test('extraction test 1', e1)
   test('extraction test 2', e2)
   test('extraction test 3', e3)
+  test('extraction test 4', e4)
 endif
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/25 library versions/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/25 library versions/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/25 library versions/installed_files.txt"	2018-12-09 14:27:16.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/25 library versions/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-usr/lib/prefixsomelib.suffix
-usr/lib/prefixsomelib?implib
-?msvc:usr/lib/prefixsomelib.pdb
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/25 library versions/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/25 library versions/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/25 library versions/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/25 library versions/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/lib/prefixsomelib.suffix"},
+    {"type": "implib", "file": "usr/lib/prefixsomelib"},
+    {"type": "pdb", "file": "usr/lib/prefixsomelib"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/27 pipeline/depends/copyrunner.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/27 pipeline/depends/copyrunner.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/27 pipeline/depends/copyrunner.py"	2019-05-22 21:53:22.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/27 pipeline/depends/copyrunner.py"	1970-01-01 00:00:00.000000000 +0000
@@ -1,7 +0,0 @@
-#!/usr/bin/env python3
-
-import sys, subprocess
-
-prog, infile, outfile = sys.argv[1:]
-
-subprocess.check_call([prog, infile, outfile])
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/27 pipeline/depends/filecopier.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/27 pipeline/depends/filecopier.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/27 pipeline/depends/filecopier.c"	2019-12-04 18:45:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/27 pipeline/depends/filecopier.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,23 +0,0 @@
-#include<stdio.h>
-#include<assert.h>
-
-#define BUFSIZE 1024
-
-int main(int argc, char **argv) {
-    char buffer[BUFSIZE];
-    size_t num_read;
-    size_t num_written;
-    FILE *fin = fopen(argv[1], "rb");
-    FILE *fout;
-    assert(argc>0);
-    assert(fin);
-    num_read = fread(buffer, 1, BUFSIZE, fin);
-    assert(num_read > 0);
-    fclose(fin);
-    fout = fopen(argv[2], "wb");
-    assert(fout);
-    num_written = fwrite(buffer, 1, num_read, fout);
-    assert(num_written == num_read);
-    fclose(fout);
-    return 0;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/27 pipeline/depends/libsrc.c.in" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/27 pipeline/depends/libsrc.c.in"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/27 pipeline/depends/libsrc.c.in"	2019-12-04 18:45:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/27 pipeline/depends/libsrc.c.in"	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-int func(void) {
-    return 42;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/27 pipeline/depends/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/27 pipeline/depends/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/27 pipeline/depends/meson.build"	2019-05-22 21:53:22.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/27 pipeline/depends/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,11 +0,0 @@
-runner = find_program('copyrunner.py')
-
-copier = executable('copier', 'filecopier.c', native: true)
-
-cg = generator(runner,
-    output: ['@BASENAME@.c'],
-    arguments: [copier.full_path(), '@INPUT@', '@OUTPUT@'],
-    depends: copier)
-
-test('generatordep',
-    executable('gd', 'prog.c', cg.process('libsrc.c.in')))
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/27 pipeline/depends/prog.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/27 pipeline/depends/prog.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/27 pipeline/depends/prog.c"	2019-12-04 18:45:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/27 pipeline/depends/prog.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,5 +0,0 @@
-int func(void);
-
-int main(void) {
-    return func() != 42;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/27 pipeline/input_src.dat" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/27 pipeline/input_src.dat"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/27 pipeline/input_src.dat"	2019-12-04 18:45:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/27 pipeline/input_src.dat"	1970-01-01 00:00:00.000000000 +0000
@@ -1 +0,0 @@
-int func(void) { return 0; }
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/27 pipeline/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/27 pipeline/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/27 pipeline/meson.build"	2020-01-07 21:06:35.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/27 pipeline/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,23 +0,0 @@
-project('pipeline test', 'c')
-
-# We need to run this executable locally so build it with
-# the host compiler.
-e1 = executable('srcgen', 'srcgen.c', native : true)
-
-# Generate a source file that needs to be included in the build.
-gen = generator(e1, \
-  depfile : '@BASENAME@.d',
-  output  : '@BASENAME@.c', # Line continuation inside arguments should work without needing a "\".
-  arguments : ['@INPUT@', '@OUTPUT@', '@DEPFILE@'])
-
-generated = gen.process(['input_src.dat'])
-
-e2 = executable('prog', 'prog.c', generated)
-
-test('pipelined', e2)
-
-# This is in a subdirectory to make sure
-# we write proper subdir paths to output.
-subdir('src')
-
-subdir('depends')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/27 pipeline/prog.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/27 pipeline/prog.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/27 pipeline/prog.c"	2019-12-04 18:45:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/27 pipeline/prog.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,5 +0,0 @@
-int func(void);
-
-int main(void) {
-    return func();
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/27 pipeline/src/input_src.dat" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/27 pipeline/src/input_src.dat"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/27 pipeline/src/input_src.dat"	2019-05-22 21:53:22.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/27 pipeline/src/input_src.dat"	1970-01-01 00:00:00.000000000 +0000
@@ -1 +0,0 @@
-#include<stdio.h>
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/27 pipeline/src/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/27 pipeline/src/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/27 pipeline/src/meson.build"	2019-05-22 21:53:22.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/27 pipeline/src/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,12 +0,0 @@
-e1 = executable('srcgen', 'srcgen.c', native : true)
-
-# Generate a header file that needs to be included.
-gen = generator(e1,
- output  : '@BASENAME@.h',
- arguments : ['@INPUT@', '@OUTPUT@'])
-
-generated = gen.process('input_src.dat')
-
-e2 = executable('prog', 'prog.c', generated)
-
-test('pipelined', e2)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/27 pipeline/src/prog.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/27 pipeline/src/prog.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/27 pipeline/src/prog.c"	2019-12-04 18:45:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/27 pipeline/src/prog.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,9 +0,0 @@
-#include"input_src.h"
-
-int main(void) {
-    void *foo = printf;
-    if(foo) {
-        return 0;
-    }
-    return 1;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/27 pipeline/src/srcgen.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/27 pipeline/src/srcgen.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/27 pipeline/src/srcgen.c"	2019-05-22 21:53:22.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/27 pipeline/src/srcgen.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,40 +0,0 @@
-#include<stdio.h>
-#include<assert.h>
-
-#define ARRSIZE 80
-
-int main(int argc, char **argv) {
-    char arr[ARRSIZE];
-    char *ifilename;
-    char *ofilename;
-    FILE *ifile;
-    FILE *ofile;
-    size_t bytes;
-
-    if(argc != 3) {
-        fprintf(stderr, "%s  \n", argv[0]);
-        return 1;
-    }
-    ifilename = argv[1];
-    ofilename = argv[2];
-    printf("%s\n", ifilename);
-    ifile = fopen(ifilename, "r");
-    if(!ifile) {
-        fprintf(stderr, "Could not open source file %s.\n", ifilename);
-        return 1;
-    }
-    ofile = fopen(ofilename, "w");
-    if(!ofile) {
-        fprintf(stderr, "Could not open target file %s\n", ofilename);
-        fclose(ifile);
-        return 1;
-    }
-    bytes = fread(arr, 1, ARRSIZE, ifile);
-    assert(bytes < 80);
-    assert(bytes > 0);
-    fwrite(arr, 1, bytes, ofile);
-
-    fclose(ifile);
-    fclose(ofile);
-    return 0;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/27 pipeline/srcgen.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/27 pipeline/srcgen.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/27 pipeline/srcgen.c"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/27 pipeline/srcgen.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,69 +0,0 @@
-#include<stdio.h>
-#include<assert.h>
-#include<string.h>
-
-#define ARRSIZE 80
-
-int main(int argc, char **argv) {
-    char arr[ARRSIZE];
-    char *ofilename;
-    char *ifilename;
-    char *dfilename;
-    FILE *ifile;
-    FILE *ofile;
-    FILE *depfile;
-    size_t bytes;
-    int i;
-
-    if(argc != 4) {
-        fprintf(stderr, "%s   \n", argv[0]);
-        return 1;
-    }
-    ifilename = argv[1];
-    ofilename = argv[2];
-    dfilename = argv[3];
-    ifile = fopen(argv[1], "r");
-    if(!ifile) {
-        fprintf(stderr, "Could not open source file %s.\n", argv[1]);
-        return 1;
-    }
-    ofile = fopen(ofilename, "w");
-    if(!ofile) {
-        fprintf(stderr, "Could not open target file %s\n", ofilename);
-        fclose(ifile);
-        return 1;
-    }
-    bytes = fread(arr, 1, ARRSIZE, ifile);
-    assert(bytes < 80);
-    assert(bytes > 0);
-    fwrite(arr, 1, bytes, ofile);
-
-    depfile = fopen(dfilename, "w");
-    if(!depfile) {
-        fprintf(stderr, "Could not open depfile %s\n", ofilename);
-        fclose(ifile);
-        fclose(ofile);
-        return 1;
-    }
-    for(i=0; i
+
 int main(void) {
   int num = 1;
   printf("%d\n", num);
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/36 tryrun/error.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/36 tryrun/error.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/36 tryrun/error.c"	2019-12-04 18:45:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/36 tryrun/error.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-int main(void) {
-  return 1;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/36 tryrun/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/36 tryrun/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/36 tryrun/meson.build"	2020-01-07 21:06:46.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/36 tryrun/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,78 +0,0 @@
-project('tryrun', 'c', 'cpp')
-
-# Complex to exercise all code paths.
-if meson.is_cross_build()
-  if meson.has_exe_wrapper()
-    compilers = [meson.get_compiler('c', native : false), meson.get_compiler('cpp', native : false)]
-  else
-    compilers = [meson.get_compiler('c', native : true), meson.get_compiler('cpp', native : true)]
-  endif
-else
-  compilers = [meson.get_compiler('c'), meson.get_compiler('cpp')]
-endif
-
-ok_code = '''#include<stdio.h>
-int main(void) {
-  printf("%s\n", "stdout");
-  fprintf(stderr, "%s\n", "stderr");
-  return 0;
-}
-'''
-
-error_code = '''int main(void) {
-  return 1;
-}
-'''
-
-no_compile_code = '''int main(void) {
-'''
-
-INPUTS = [
-  ['String', ok_code, error_code, no_compile_code],
-  ['File', files('ok.c'), files('error.c'), files('no_compile.c')],
-]
-
-foreach cc : compilers
-  foreach input : INPUTS
-    type = input[0]
-    ok = cc.run(input[1], name : type + ' should succeed')
-    err = cc.run(input[2], name : type + ' should fail')
-    noc = cc.run(input[3], name : type + ' does not compile')
-
-    if noc.compiled()
-      error(type + ' compilation fail test failed.')
-    else
-      message(type + ' fail detected properly.')
-    endif
-
-    if ok.compiled()
-      message(type + ' compilation worked.')
-    else
-      error(type + ' compilation did not work.')
-    endif
-
-    if ok.returncode() == 0
-      message(type + ' return code ok.')
-    else
-      error(type + ' return code fail')
-    endif
-
-    if err.returncode() == 1
-      message(type + ' bad return code ok.')
-    else
-      error(type + ' bad return code fail.')
-    endif
-
-    if ok.stdout().strip() == 'stdout'
-      message(type + ' stdout ok.')
-    else
-      message(type + ' bad stdout.')
-    endif
-
-    if ok.stderr().strip() == 'stderr'
-      message(type + ' stderr ok.')
-    else
-      message(type + ' bad stderr.')
-    endif
-  endforeach
-endforeach
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/36 tryrun/no_compile.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/36 tryrun/no_compile.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/36 tryrun/no_compile.c"	2019-12-04 18:45:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/36 tryrun/no_compile.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1 +0,0 @@
-int main(void) {
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/36 tryrun/ok.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/36 tryrun/ok.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/36 tryrun/ok.c"	2019-12-04 18:45:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/36 tryrun/ok.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,7 +0,0 @@
-#include<stdio.h>
-
-int main(void) {
-  printf("%s\n", "stdout");
-  fprintf(stderr, "%s\n", "stderr");
-  return 0;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/38 string operations/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/38 string operations/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/38 string operations/meson.build"	2020-01-07 21:06:45.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/38 string operations/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -101,3 +101,18 @@
 assert('\\\\\n' == bs_bs_nl, 'Five backslash broken before n')
 assert('\\\\' == bs_bs, 'Double-backslash broken')
 assert('\\' == bs, 'Backslash broken')
+
+mysubstring='foobarbaz'
+assert(mysubstring.substring() == 'foobarbaz', 'substring is broken')
+assert(mysubstring.substring(0) == 'foobarbaz', 'substring is broken')
+assert(mysubstring.substring(1) == 'oobarbaz', 'substring is broken')
+assert(mysubstring.substring(-5) == 'arbaz', 'substring is broken')
+assert(mysubstring.substring(1, 4) == 'oob', 'substring is broken')
+assert(mysubstring.substring(1,-5) == 'oob', 'substring is broken')
+assert(mysubstring.substring(1, 0) == '', 'substring is broken')
+assert(mysubstring.substring(0, 100) == 'foobarbaz', 'substring is broken')
+assert(mysubstring.substring(-1, -5) == '', 'substring is broken')
+assert(mysubstring.substring(10, -25) == '', 'substring is broken')
+assert(mysubstring.substring(-4, 2) == '', 'substring is broken')
+assert(mysubstring.substring(10, 9) == '', 'substring is broken')
+assert(mysubstring.substring(8, 10) == 'z', 'substring is broken')
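
The substring() cases above behave like Python slicing, including the clamping of
out-of-range indices (illustrative cross-check, not Meson's implementation):

    s = 'foobarbaz'
    assert s[1:4] == 'oob' and s[1:-5] == 'oob'
    assert s[-5:] == 'arbaz' and s[0:100] == 'foobarbaz'
    assert s[-1:-5] == '' and s[10:-25] == '' and s[-4:2] == '' and s[10:9] == ''
    assert s[8:10] == 'z'
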
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/39 has function/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/39 has function/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/39 has function/meson.build"	2020-01-07 21:06:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/39 has function/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -54,15 +54,23 @@
                             args : unit_test_args),
             'couldn\'t detect "poll" when defined by a header')
     lchmod_prefix = '#include <sys/stat.h>\n#include <unistd.h>'
+    has_lchmod = cc.has_function('lchmod', prefix : lchmod_prefix, args : unit_test_args)
+
     if host_system == 'linux'
-      assert (not cc.has_function('lchmod', prefix : lchmod_prefix,
-                                  args : unit_test_args),
-              '"lchmod" check should have failed')
+      glibc_major = cc.get_define('__GLIBC__', prefix: '#include ', args: unit_test_args)
+      glibc_minor = cc.get_define('__GLIBC_MINOR__', prefix: '#include ', args: unit_test_args)
+      glibc_vers = '@0@.@1@'.format(glibc_major, glibc_minor)
+      message('GLIBC version:', glibc_vers)
+
+      # lchmod was implemented in glibc 2.32 (https://sourceware.org/pipermail/libc-announce/2020/000029.html)
+      if glibc_vers.version_compare('<2.32')
+        assert (not has_lchmod, '"lchmod" check should have failed')
+      else
+        assert (has_lchmod, '"lchmod" check should have succeeded')
+      endif
     else
       # macOS and *BSD have lchmod
-      assert (cc.has_function('lchmod', prefix : lchmod_prefix,
-                                  args : unit_test_args),
-              '"lchmod" check should have succeeded')
+      assert (has_lchmod, '"lchmod" check should have succeeded')
     endif
     # Check that built-ins are found properly both with and without headers
     assert(cc.has_function('alloca', args : unit_test_args),
@@ -88,4 +96,12 @@
     assert (cc.has_function('sendmmsg', args : unit_test_args),
             'Failed to detect function "sendmmsg" (should always exist).')
   endif
+
+  # We should be able to find GCC and Clang __builtin functions
+  if ['gcc', 'clang'].contains(cc.get_id())
+    # __builtin_constant_p is documented to exist at least as far back as
+    # GCC 2.95.3
+    assert(cc.has_function('__builtin_constant_p', args : unit_test_args),
+           '__builtin_constant_p must be found under gcc and clang')
+  endif
 endforeach
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/3 static/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/3 static/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/3 static/meson.build"	2020-01-07 21:06:11.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/3 static/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -2,7 +2,7 @@
 
 lib = static_library('mylib', get_option('source'),
   link_args : '-THISMUSTNOBEUSED') # Static linker needs to ignore all link args.
-
+assert(lib.name() == 'mylib')
 has_not_changed = false
 if is_disabler(lib)
     has_not_changed = true
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/42 library chain/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/42 library chain/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/42 library chain/installed_files.txt"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/42 library chain/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,2 +0,0 @@
-usr/bin/prog?exe
-?msvc:usr/bin/prog.pdb
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/42 library chain/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/42 library chain/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/42 library chain/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/42 library chain/test.json"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,6 @@
+{
+  "installed": [
+    {"type": "exe", "file": "usr/bin/prog"},
+    {"type": "pdb", "file": "usr/bin/prog"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/43 options/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/43 options/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/43 options/meson.build"	2020-01-07 21:06:51.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/43 options/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -30,4 +30,16 @@
   error('Incorrect value in integer option.')
 endif
 
+if get_option('neg_int_opt') != -3
+  error('Incorrect value in negative integer option.')
+endif
+
+if get_option('CaseSenSiTivE') != 'Some CAPS'
+  error('Incorrect value in mixed caps option.')
+endif
+
+if get_option('CASESENSITIVE') != 'ALL CAPS'
+  error('Incorrect value in all caps option.')
+endif
+
 assert(get_option('wrap_mode') == 'default', 'Wrap mode option is broken.')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/43 options/meson_options.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/43 options/meson_options.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/43 options/meson_options.txt"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/43 options/meson_options.txt"	2021-01-06 10:39:48.000000000 +0000
@@ -1,6 +1,9 @@
-option('testoption', type : 'string', value : 'optval', description : 'An option to do something')
-option('other_one', type : 'boolean', value : false)
-option('combo_opt', type : 'combo', choices : ['one', 'two', 'combo'], value : 'combo')
+option('testoption', type : 'string', value : 'optval', description : 'An option ' + 'to do something')
+option('other_one', type : 'boolean', value : not (not (not (not false))))
+option('combo_opt', type : 'co' + 'mbo', choices : ['one', 'two', 'combo'], value : 'combo')
 option('array_opt', type : 'array', choices : ['one', 'two', 'three'], value : ['one', 'two'])
 option('free_array_opt', type : 'array')
-option('integer_opt', type : 'integer', min : 0, max : 5, value : 3)
+option('integer_opt', type : 'integer', min : 0, max : -(-5), value : 3)
+option('neg' + '_' + 'int' + '_' + 'opt', type : 'integer', min : -5, max : 5, value : -3)
+option('CaseSenSiTivE', type : 'string', value: 'Some CAPS', description : 'An option with mixed capitalization')
+option('CASESENSITIVE', type : 'string', value: 'ALL CAPS', description : 'An option with all caps')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/45 subproject/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/45 subproject/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/45 subproject/installed_files.txt"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/45 subproject/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-usr/bin/user?exe
-?msvc:usr/bin/user.pdb
-usr/share/sublib/sublib.depmf
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/45 subproject/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/45 subproject/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/45 subproject/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/45 subproject/test.json"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "installed": [
+    {"type": "exe", "file": "usr/bin/user"},
+    {"type": "pdb", "file": "usr/bin/user"},
+    {"type": "file", "file": "usr/share/sublib/sublib.depmf"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/47 pkgconfig-gen/dependencies/main.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/47 pkgconfig-gen/dependencies/main.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/47 pkgconfig-gen/dependencies/main.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/47 pkgconfig-gen/dependencies/main.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,10 @@
+#include <simple.h>
+
+#ifndef LIBFOO
+#error LIBFOO should be defined in pkgconfig cflags
+#endif
+
+int main(int argc, char *argv[])
+{
+  return simple_function() == 42 ? 0 : 1;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/47 pkgconfig-gen/dependencies/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/47 pkgconfig-gen/dependencies/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/47 pkgconfig-gen/dependencies/meson.build"	2019-09-28 23:52:33.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/47 pkgconfig-gen/dependencies/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -18,6 +18,9 @@
 custom_dep = declare_dependency(link_with : custom_lib, compile_args : ['-DCUSTOM'])
 custom2_dep = declare_dependency(link_args : ['-lcustom2'], compile_args : ['-DCUSTOM2'])
 
+exe = executable('test1', 'main.c', dependencies : [pc_dep])
+test('Test1', exe)
+
 # Generate a PC file:
 # - Having libmain in libraries should pull implicitly libexposed and libinternal in Libs.private
 # - Having libexposed in libraries should remove it from Libs.private
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/47 pkgconfig-gen/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/47 pkgconfig-gen/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/47 pkgconfig-gen/installed_files.txt"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/47 pkgconfig-gen/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-usr/include/simple.h
-usr/lib/pkgconfig/simple.pc
-usr/lib/pkgconfig/libfoo.pc
-usr/lib/pkgconfig/libhello.pc
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/47 pkgconfig-gen/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/47 pkgconfig-gen/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/47 pkgconfig-gen/meson.build"	2020-01-07 21:06:55.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/47 pkgconfig-gen/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -1,7 +1,13 @@
 project('pkgconfig-gen', 'c')
 
-# First check we have pkg-config >= 0.29
+# Some CI runners do not have zlib; just skip them, as we need some common
+# external dependency.
+cc = meson.get_compiler('c')
+if not cc.find_library('z', required: false).found()
+  error('MESON_SKIP_TEST: zlib missing')
+endif
 
+# First check we have pkg-config >= 0.29
 pkgconfig = find_program('pkg-config', required: false)
 if not pkgconfig.found()
   error('MESON_SKIP_TEST: pkg-config not found')
@@ -44,7 +50,8 @@
   name : 'libfoo',
   version : libver,
   description : 'A foo library.',
-  variables : ['foo=bar', 'datadir=${prefix}/data']
+  variables : ['foo=bar', 'datadir=${prefix}/data'],
+  extra_cflags : ['-DLIBFOO'],
 )
 
 pkgg.generate(
@@ -52,3 +59,40 @@
   description : 'A minimalistic pkgconfig file.',
   version : libver,
 )
+
+pkgg.generate(
+  name : 'libhello_nolib',
+  description : 'A minimalistic pkgconfig file.',
+  version : libver,
+  dataonly: true,
+  variables : {'foo': 'bar'},
+)
+
+# Regression test for 2 cases:
+# - link_whole from InternalDependency used to be ignored, but we should still
+#   recurse to add libraries they link to. In this case it must add `-lsimple1`
+#   in the generated pc file.
+# - dependencies from InternalDependency used to be ignored. In this case it
+#   must add `-lz` in the generated pc file.
+simple1 = shared_library('simple1', 'simple.c')
+stat1 = static_library('stat1', 'simple.c', link_with: simple1)
+dep = declare_dependency(link_whole: stat1, dependencies: cc.find_library('z'))
+simple2 = library('simple2', 'simple.c')
+pkgg.generate(simple2, libraries: dep)
+
+# Regression test: as_system() does a deepcopy() of the InternalDependency object
+# which caused `-lsimple3` to be duplicated because the generator used to compare
+# Target instances instead of their ids.
+simple3 = shared_library('simple3', 'simple.c')
+dep1 = declare_dependency(link_with: simple3)
+dep2 = dep1.as_system()
+pkgg.generate(libraries: [dep1, dep2],
+  name: 'simple3',
+  description: 'desc')
+
+# Regression test: stat2 is both link_with and link_whole; it should not appear
+# in the generated pc file.
+stat2 = static_library('stat2', 'simple.c', install: true)
+simple4 = library('simple4', 'simple.c', link_with: stat2)
+simple5 = library('simple5', 'simple5.c', link_with: simple4, link_whole: stat2)
+pkgg.generate(simple5)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/47 pkgconfig-gen/simple5.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/47 pkgconfig-gen/simple5.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/47 pkgconfig-gen/simple5.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/47 pkgconfig-gen/simple5.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,6 @@
+int simple5(void);
+
+int simple5(void)
+{
+    return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/47 pkgconfig-gen/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/47 pkgconfig-gen/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/47 pkgconfig-gen/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/47 pkgconfig-gen/test.json"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,13 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/include/simple.h"},
+    {"type": "file", "file": "usr/lib/libstat2.a"},
+    {"type": "file", "file": "usr/lib/pkgconfig/simple.pc"},
+    {"type": "file", "file": "usr/lib/pkgconfig/libfoo.pc"},
+    {"type": "file", "file": "usr/lib/pkgconfig/libhello.pc"},
+    {"type": "file", "file": "usr/lib/pkgconfig/libhello_nolib.pc"},
+    {"type": "file", "file": "usr/lib/pkgconfig/simple2.pc"},
+    {"type": "file", "file": "usr/lib/pkgconfig/simple3.pc"},
+    {"type": "file", "file": "usr/lib/pkgconfig/simple5.pc"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/48 custom install dirs/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/48 custom install dirs/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/48 custom install dirs/installed_files.txt"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/48 custom install dirs/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,12 +0,0 @@
-usr/dib/dab/dub/prog?exe
-?msvc:usr/dib/dab/dub/prog.pdb
-usr/dib/dab/dub2/prog2?exe
-?msvc:usr/dib/dab/dub2/prog2.pdb
-usr/some/dir/sample.h
-usr/some/dir2/sample.h
-usr/woman/prog.1
-usr/woman2/prog.1
-usr/meow/datafile.cat
-usr/meow2/datafile.cat
-usr/woof/subdir/datafile.dog
-usr/woof2/subdir/datafile.dog
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/48 custom install dirs/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/48 custom install dirs/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/48 custom install dirs/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/48 custom install dirs/test.json"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,16 @@
+{
+  "installed": [
+    {"type": "exe", "file": "usr/dib/dab/dub/prog"},
+    {"type": "pdb", "file": "usr/dib/dab/dub/prog"},
+    {"type": "exe", "file": "usr/dib/dab/dub2/prog2"},
+    {"type": "pdb", "file": "usr/dib/dab/dub2/prog2"},
+    {"type": "file", "file": "usr/some/dir/sample.h"},
+    {"type": "file", "file": "usr/some/dir2/sample.h"},
+    {"type": "file", "file": "usr/woman/prog.1"},
+    {"type": "file", "file": "usr/woman2/prog.1"},
+    {"type": "file", "file": "usr/meow/datafile.cat"},
+    {"type": "file", "file": "usr/meow2/datafile.cat"},
+    {"type": "file", "file": "usr/woof/subdir/datafile.dog"},
+    {"type": "file", "file": "usr/woof2/subdir/datafile.dog"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/52 custom target/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/52 custom target/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/52 custom target/installed_files.txt"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/52 custom target/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1 +0,0 @@
-usr/subdir/data.dat
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/52 custom target/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/52 custom target/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/52 custom target/meson.build"	2020-01-07 21:07:01.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/52 custom target/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -9,7 +9,7 @@
 # Code will not be rebuilt if it changes.
 comp = '@0@/@1@'.format(meson.current_source_dir(), 'my_compiler.py')
 # Test that files() in command: works. The compiler just discards it.
-useless = files('installed_files.txt')
+useless = files('test.json')
 
 mytarget = custom_target('bindat',
 output : 'data.dat',
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/52 custom target/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/52 custom target/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/52 custom target/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/52 custom target/test.json"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,5 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/subdir/data.dat"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/53 custom target chain/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/53 custom target chain/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/53 custom target chain/installed_files.txt"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/53 custom target chain/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,2 +0,0 @@
-usr/subdir/data2.dat
-usr/subdir/data3.dat
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/53 custom target chain/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/53 custom target chain/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/53 custom target chain/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/53 custom target chain/test.json"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,6 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/subdir/data2.dat"},
+    {"type": "file", "file": "usr/subdir/data3.dat"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/56 install script/customtarget.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/56 install script/customtarget.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/56 install script/customtarget.py"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/56 install script/customtarget.py"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,19 @@
+#!/usr/bin/env python3
+
+import argparse
+import os
+
+
+def main() -> None:
+    parser = argparse.ArgumentParser()
+    parser.add_argument('dirname')
+    args = parser.parse_args()
+
+    with open(os.path.join(args.dirname, '1.txt'), 'w') as f:
+        f.write('')
+    with open(os.path.join(args.dirname, '2.txt'), 'w') as f:
+        f.write('')
+
+
+if __name__ == "__main__":
+    main()
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/56 install script/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/56 install script/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/56 install script/installed_files.txt"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/56 install script/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,5 +0,0 @@
-usr/bin/prog?exe
-?msvc:usr/bin/prog.pdb
-usr/diiba/daaba/file.dat
-usr/this/should/also-work.dat
-usr/this/does/something-different.dat.in
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/56 install script/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/56 install script/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/56 install script/meson.build"	2020-01-07 21:07:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/56 install script/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -5,3 +5,23 @@
 meson.add_install_script('myinstall.py', 'this/should', 'also-work.dat')
 
 subdir('src')
+
+meson.add_install_script('myinstall.py', 'dir', afile, '--mode=copy')
+
+data = configuration_data()
+data.set10('foo', true)
+conf = configure_file(
+  configuration : data,
+  output : 'conf.txt'
+)
+
+meson.add_install_script('myinstall.py', 'dir', conf, '--mode=copy')
+
+t = custom_target(
+  'ct',
+  command : [find_program('customtarget.py'), '@OUTDIR@'],
+  output : ['1.txt', '2.txt'],
+)
+
+meson.add_install_script('myinstall.py', 'customtarget', t, '--mode=copy')
+meson.add_install_script('myinstall.py', 'customtargetindex', t[0], '--mode=copy')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/56 install script/myinstall.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/56 install script/myinstall.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/56 install script/myinstall.py"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/56 install script/myinstall.py"	2021-01-06 10:39:48.000000000 +0000
@@ -1,12 +1,31 @@
 #!/usr/bin/env python3
 
+import argparse
 import os
-import sys
+import shutil
 
 prefix = os.environ['MESON_INSTALL_DESTDIR_PREFIX']
 
-dirname = os.path.join(prefix, sys.argv[1])
 
-os.makedirs(dirname)
-with open(os.path.join(dirname, sys.argv[2]), 'w') as f:
-    f.write('')
+def main() -> None:
+    parser = argparse.ArgumentParser()
+    parser.add_argument('dirname')
+    parser.add_argument('files', nargs='+')
+    parser.add_argument('--mode', action='store', default='create', choices=['create', 'copy'])
+    args = parser.parse_args()
+
+    dirname = os.path.join(prefix, args.dirname)
+    if not os.path.exists(dirname):
+        os.makedirs(dirname)
+
+    if args.mode == 'create':
+        for name in args.files:
+            with open(os.path.join(dirname, name), 'w') as f:
+                f.write('')
+    else:
+        for name in args.files:
+            shutil.copy(name, dirname)
+
+
+if __name__ == "__main__":
+    main()
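As a stand-alone illustration (not part of the patch) of the two modes the rewritten myinstall.py now supports: the destination prefix below is hypothetical, and MESON_INSTALL_DESTDIR_PREFIX is the environment variable the script itself reads.

    # sketch only: drive myinstall.py outside of a real 'meson install'
    import os, subprocess
    env = dict(os.environ, MESON_INSTALL_DESTDIR_PREFIX='/tmp/fake-destdir/usr')
    # create mode (default): make empty files named after the arguments
    subprocess.run(['./myinstall.py', 'dir', 'afile.dat'], env=env, check=True)
    # copy mode: copy existing source files into the target directory
    subprocess.run(['./myinstall.py', 'dir', 'conf.txt', '--mode=copy'], env=env, check=True)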
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/56 install script/src/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/56 install script/src/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/56 install script/src/meson.build"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/56 install script/src/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -1 +1,3 @@
 meson.add_install_script('myinstall.py', 'this/does', 'something-different.dat')
+
+afile = files('a file.txt')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/56 install script/src/myinstall.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/56 install script/src/myinstall.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/56 install script/src/myinstall.py"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/56 install script/src/myinstall.py"	2021-01-06 10:39:48.000000000 +0000
@@ -7,6 +7,8 @@
 
 dirname = os.path.join(prefix, sys.argv[1])
 
-os.makedirs(dirname)
+if not os.path.exists(dirname):
+    os.makedirs(dirname)
+
 with open(os.path.join(dirname, sys.argv[2] + '.in'), 'w') as f:
     f.write('')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/56 install script/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/56 install script/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/56 install script/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/56 install script/test.json"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "installed": [
+    {"type": "exe", "file": "usr/bin/prog"},
+    {"type": "pdb", "file": "usr/bin/prog"},
+    {"type": "file", "file": "usr/diiba/daaba/file.dat"},
+    {"type": "file", "file": "usr/this/should/also-work.dat"},
+    {"type": "file", "file": "usr/this/does/something-different.dat.in"},
+    {"type": "file", "file": "usr/dir/a file.txt"},
+    {"type": "file", "file": "usr/dir/conf.txt"},
+    {"type": "file", "file": "usr/customtarget/1.txt"},
+    {"type": "file", "file": "usr/customtarget/2.txt"},
+    {"type": "file", "file": "usr/customtargetindex/1.txt"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/62 install subdir/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/62 install subdir/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/62 install subdir/installed_files.txt"	2019-12-04 18:45:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/62 install subdir/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,12 +0,0 @@
-usr/share/dircheck/fifth.dat
-usr/share/dircheck/seventh.dat
-usr/share/dircheck/ninth.dat
-usr/share/eighth.dat
-usr/share/fourth.dat
-usr/share/sixth.dat
-usr/share/sub1/data1.dat
-usr/share/sub1/second.dat
-usr/share/sub1/third.dat
-usr/share/sub1/sub2/data2.dat
-usr/share/sub2/one.dat
-usr/share/sub2/dircheck/excluded-three.dat
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/62 install subdir/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/62 install subdir/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/62 install subdir/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/62 install subdir/test.json"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,16 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/share/dircheck/fifth.dat"},
+    {"type": "file", "file": "usr/share/dircheck/seventh.dat"},
+    {"type": "file", "file": "usr/share/dircheck/ninth.dat"},
+    {"type": "file", "file": "usr/share/eighth.dat"},
+    {"type": "file", "file": "usr/share/fourth.dat"},
+    {"type": "file", "file": "usr/share/sixth.dat"},
+    {"type": "file", "file": "usr/share/sub1/data1.dat"},
+    {"type": "file", "file": "usr/share/sub1/second.dat"},
+    {"type": "file", "file": "usr/share/sub1/third.dat"},
+    {"type": "file", "file": "usr/share/sub1/sub2/data2.dat"},
+    {"type": "file", "file": "usr/share/sub2/one.dat"},
+    {"type": "file", "file": "usr/share/sub2/dircheck/excluded-three.dat"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/63 foreach/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/63 foreach/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/63 foreach/installed_files.txt"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/63 foreach/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,6 +0,0 @@
-usr/bin/prog1?exe
-?msvc:usr/bin/prog1.pdb
-usr/bin/prog2?exe
-?msvc:usr/bin/prog2.pdb
-usr/bin/prog3?exe
-?msvc:usr/bin/prog3.pdb
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/63 foreach/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/63 foreach/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/63 foreach/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/63 foreach/test.json"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,10 @@
+{
+  "installed": [
+    {"type": "exe", "file": "usr/bin/prog1"},
+    {"type": "pdb", "file": "usr/bin/prog1"},
+    {"type": "exe", "file": "usr/bin/prog2"},
+    {"type": "pdb", "file": "usr/bin/prog2"},
+    {"type": "exe", "file": "usr/bin/prog3"},
+    {"type": "pdb", "file": "usr/bin/prog3"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/68 build always/version.h" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/68 build always/version.h"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/68 build always/version.h"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/68 build always/version.h"	2021-01-06 10:39:48.000000000 +0000
@@ -1,3 +1,3 @@
 #pragma once
 
-const char *version_string;
+extern const char *version_string;
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/69 vcstag/tagprog.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/69 vcstag/tagprog.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/69 vcstag/tagprog.c"	2019-12-04 18:45:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/69 vcstag/tagprog.c"	2021-01-06 10:39:48.000000000 +0000
@@ -1,6 +1,6 @@
 #include <stdio.h>
 
-const char *vcstag;
+extern const char *vcstag;
 
 int main(void) {
     printf("Version is %s\n", vcstag);
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/6 linkshared/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/6 linkshared/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/6 linkshared/installed_files.txt"	2018-10-31 09:31:57.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/6 linkshared/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,2 +0,0 @@
-usr/bin/prog?exe
-?msvc:usr/bin/prog.pdb
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/6 linkshared/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/6 linkshared/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/6 linkshared/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/6 linkshared/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+{
+  "installed": [
+    { "type": "exe", "file": "usr/bin/prog" },
+    { "type": "pdb", "file": "usr/bin/prog" }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/85 add language/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/85 add language/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/85 add language/meson.build"	2020-01-07 21:07:33.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/85 add language/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -2,7 +2,9 @@
 
 test('C', executable('cprog', 'prog.c'))
 
-assert(add_languages('cpp'), 'Add_languages returned false on success')
+assert(add_languages('cpp', native: false), 'Add_languages returned false on success')
 assert(not add_languages('klingon', required : false), 'Add_languages returned true on failure.')
 
 test('C++', executable('cppprog', 'prog.cc'))
+
+add_languages('c', native: false)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/8 install/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/8 install/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/8 install/installed_files.txt"	2018-10-31 09:31:57.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/8 install/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-usr/bin/prog?exe
-?msvc:usr/bin/prog.pdb
-usr/libtest/libstat.a
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/8 install/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/8 install/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/8 install/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/8 install/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,8 @@
+{
+  "installed": [
+    { "type": "exe",  "file": "usr/bin/prog"          },
+    { "type": "pdb",  "file": "usr/bin/prog"          },
+    { "type": "file", "file": "usr/libtest/libstat.a" }
+  ],
+  "do_not_set_opts": ["libdir"]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/90 default options/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/90 default options/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/90 default options/meson.build"	2020-01-07 21:07:37.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/90 default options/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -4,6 +4,7 @@
   'cpp_std=c++11',
   'cpp_eh=none',
   'warning_level=3',
+  'sub1:test_option=false',
   ])
 
 assert(get_option('buildtype') == 'debugoptimized', 'Build type default value wrong.')
@@ -28,3 +29,5 @@
 #   assert(not cc.compiles('int foobar;'), 'Default arg not used in test.')
 #   assert(cc.compiles('int foobar;', no_builtin_args : true), 'No_builtin did not disable builtins.')
 # endif
+
+subproject('sub1')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/90 default options/subprojects/sub1/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/90 default options/subprojects/sub1/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/90 default options/subprojects/sub1/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/90 default options/subprojects/sub1/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,3 @@
+project('sub1')
+
+assert(get_option('test_option') == false)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/90 default options/subprojects/sub1/meson_options.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/90 default options/subprojects/sub1/meson_options.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/90 default options/subprojects/sub1/meson_options.txt"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/90 default options/subprojects/sub1/meson_options.txt"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1 @@
+option('test_option', type : 'boolean', value : true, description : 'Test option. Superproject overrides default to "false"')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/93 selfbuilt custom/data.dat" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/93 selfbuilt custom/data.dat"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/93 selfbuilt custom/data.dat"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/93 selfbuilt custom/data.dat"	1970-01-01 00:00:00.000000000 +0000
@@ -1 +0,0 @@
-generated_function
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/93 selfbuilt custom/mainprog.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/93 selfbuilt custom/mainprog.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/93 selfbuilt custom/mainprog.cpp"	2019-12-04 18:45:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/93 selfbuilt custom/mainprog.cpp"	1970-01-01 00:00:00.000000000 +0000
@@ -1,5 +0,0 @@
-#include"data.h"
-
-int main(void) {
-    return generated_function() != 52;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/93 selfbuilt custom/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/93 selfbuilt custom/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/93 selfbuilt custom/meson.build"	2020-01-07 21:07:41.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/93 selfbuilt custom/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,16 +0,0 @@
-project('selfbuilt custom', 'cpp')
-
-# Build an exe and use it in a custom target
-# whose output is used to build a different exe.
-
-tool = executable('tool', 'tool.cpp', native : true)
-
-hfile = custom_target('datah',
-  output : 'data.h',
-  input : 'data.dat',
-  command : [tool, '@INPUT@', '@OUTPUT@'],
-)
-
-main = executable('mainprog', 'mainprog.cpp', hfile)
-
-test('maintest', main)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/93 selfbuilt custom/tool.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/93 selfbuilt custom/tool.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/93 selfbuilt custom/tool.cpp"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/93 selfbuilt custom/tool.cpp"	1970-01-01 00:00:00.000000000 +0000
@@ -1,34 +0,0 @@
-#include<iostream>
-#include<fstream>
-#include<string>
-
-using namespace std;
-
-const char prefix[] = "int ";
-const char suffix[] = " () {\n    return 52;}\n";
-
-int main(int argc, char **argv) {
-    if(argc != 3) {
-        cout << "You is fail.\n";
-        return 1;
-    }
-    ifstream is(argv[1], ifstream::binary);
-    if(!is) {
-        cout << "Opening input file failed.\n";
-        return 1;
-    }
-    string funcname;
-    is >> funcname;
-    ofstream os(argv[2], ofstream::binary);
-    if(!os) {
-        cout << "Opening output file failed.\n";
-        return 1;
-    }
-    os << prefix << funcname << suffix;
-    os.close();
-    if(!os.good()) {
-        cout << "Writing data out failed.\n";
-        return 1;
-    }
-    return 0;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/9 header install/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/9 header install/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/9 header install/installed_files.txt"	2016-11-27 18:42:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/9 header install/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-usr/include/rootdir.h
-usr/include/subdir/subdir.h
-usr/include/vanished.h
-usr/include/fileheader.h
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/9 header install/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/9 header install/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/common/9 header install/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/common/9 header install/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,8 @@
+{
+  "installed": [
+    { "type": "file", "file": "usr/include/rootdir.h"       },
+    { "type": "file", "file": "usr/include/subdir/subdir.h" },
+    { "type": "file", "file": "usr/include/vanished.h"      },
+    { "type": "file", "file": "usr/include/fileheader.h"    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/csharp/1 basic/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/csharp/1 basic/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/csharp/1 basic/installed_files.txt"	2018-12-09 14:27:16.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/csharp/1 basic/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,2 +0,0 @@
-usr/bin/prog.exe
-?msvc:usr/bin/prog.pdb
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/csharp/1 basic/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/csharp/1 basic/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/csharp/1 basic/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/csharp/1 basic/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/bin/prog.exe"},
+    {"type": "pdb", "file": "usr/bin/prog"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/csharp/2 library/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/csharp/2 library/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/csharp/2 library/installed_files.txt"	2018-12-09 14:27:16.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/csharp/2 library/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,5 +0,0 @@
-usr/bin/prog.exe
-?msvc:usr/bin/prog.pdb
-?msvc:usr/bin/helper.dll
-?msvc:usr/bin/helper.pdb
-?gcc:usr/lib/helper.dll
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/csharp/2 library/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/csharp/2 library/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/csharp/2 library/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/csharp/2 library/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,9 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/bin/prog.exe"},
+    {"type": "pdb", "file": "usr/bin/prog"},
+    {"type": "file", "platform": "msvc", "file": "usr/bin/helper.dll"},
+    {"type": "pdb", "file": "usr/bin/helper"},
+    {"type": "file", "platform": "gcc", "file": "usr/lib/helper.dll"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/csharp/4 external dep/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/csharp/4 external dep/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/csharp/4 external dep/installed_files.txt"	2018-10-31 09:31:57.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/csharp/4 external dep/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1 +0,0 @@
-usr/bin/prog.exe
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/csharp/4 external dep/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/csharp/4 external dep/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/csharp/4 external dep/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/csharp/4 external dep/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,5 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/bin/prog.exe"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cuda/13 cuda compiler setting/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cuda/13 cuda compiler setting/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cuda/13 cuda compiler setting/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cuda/13 cuda compiler setting/meson.build"	2020-09-10 16:39:24.000000000 +0000
@@ -0,0 +1,5 @@
+project('simple', 'cuda', version : '1.0.0')
+
+exe = executable('prog', 'prog.cu')
+test('cudatest', exe)
+
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cuda/13 cuda compiler setting/nativefile.ini" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cuda/13 cuda compiler setting/nativefile.ini"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cuda/13 cuda compiler setting/nativefile.ini"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cuda/13 cuda compiler setting/nativefile.ini"	2020-09-10 16:39:24.000000000 +0000
@@ -0,0 +1,5 @@
+[binaries]
+
+cuda = 'nvcc'
+
+
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cuda/13 cuda compiler setting/prog.cu" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cuda/13 cuda compiler setting/prog.cu"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cuda/13 cuda compiler setting/prog.cu"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cuda/13 cuda compiler setting/prog.cu"	2020-09-10 16:39:24.000000000 +0000
@@ -0,0 +1,30 @@
+#include <iostream>
+
+int main(void) {
+    int cuda_devices = 0;
+    std::cout << "CUDA version: " << CUDART_VERSION << "\n";
+    cudaGetDeviceCount(&cuda_devices);
+    if(cuda_devices == 0) {
+        std::cout << "No Cuda hardware found. Exiting.\n";
+        return 0;
+    }
+    std::cout << "This computer has " << cuda_devices << " Cuda device(s).\n";
+    cudaDeviceProp props;
+    cudaGetDeviceProperties(&props, 0);
+    std::cout << "Properties of device 0.\n\n";
+
+    std::cout << "  Name:            " << props.name << "\n";
+    std::cout << "  Global memory:   " << props.totalGlobalMem << "\n";
+    std::cout << "  Shared memory:   " << props.sharedMemPerBlock << "\n";
+    std::cout << "  Constant memory: " << props.totalConstMem << "\n";
+    std::cout << "  Block registers: " << props.regsPerBlock << "\n";
+
+    std::cout << "  Warp size:         " << props.warpSize << "\n";
+    std::cout << "  Threads per block: " << props.maxThreadsPerBlock << "\n";
+    std::cout << "  Max block dimensions: [ " << props.maxThreadsDim[0] << ", " << props.maxThreadsDim[1]  << ", " << props.maxThreadsDim[2] << " ]" << "\n";
+    std::cout << "  Max grid dimensions:  [ " << props.maxGridSize[0] << ", " << props.maxGridSize[1]  << ", " << props.maxGridSize[2] << " ]" << "\n";
+    std::cout << "\n";
+
+    return 0;
+}
+
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cuda/14 cuda has header symbol/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cuda/14 cuda has header symbol/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cuda/14 cuda has header symbol/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cuda/14 cuda has header symbol/meson.build"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,27 @@
+project('cuda has header symbol', 'cuda')
+
+cuda = meson.get_compiler('cuda')
+
+# C checks
+assert (cuda.has_header_symbol('stdio.h', 'int'), 'base types should always be available')
+assert (cuda.has_header_symbol('stdio.h', 'printf'), 'printf function not found')
+assert (cuda.has_header_symbol('stdio.h', 'FILE'), 'FILE structure not found')
+assert (cuda.has_header_symbol('limits.h', 'INT_MAX'), 'INT_MAX define not found')
+assert (not cuda.has_header_symbol('limits.h', 'guint64'), 'guint64 is not defined in limits.h')
+assert (not cuda.has_header_symbol('stdlib.h', 'FILE'), 'FILE structure is defined in stdio.h, not stdlib.h')
+assert (not cuda.has_header_symbol('stdlol.h', 'printf'), 'stdlol.h shouldn\'t exist')
+assert (not cuda.has_header_symbol('stdlol.h', 'int'), 'shouldn\'t be able to find "int" with invalid header')
+
+# C++ checks
+assert (cuda.has_header_symbol('iostream', 'std::iostream'), 'iostream not found in iostream.h')
+assert (cuda.has_header_symbol('vector', 'std::vector'), 'vector not found in vector.h')
+assert (not cuda.has_header_symbol('limits.h', 'std::iostream'), 'iostream should not be defined in limits.h')
+
+# CUDA checks
+assert (cuda.has_header_symbol('cuda.h', 'CUDA_VERSION'), 'CUDA_VERSION not found in cuda.h')
+assert (not cuda.has_header_symbol('cuda.h', 'cublasSaxpy'), 'cublasSaxpy is defined in cublas.h, not cuda.h')
+if cuda.version().version_compare('>=4.0')
+    assert (cuda.has_header_symbol('thrust/device_vector.h', 'thrust::device_vector'), 'thrust::device_vector not found')
+    assert (not cuda.has_header_symbol('thrust/fill.h', 'thrust::sort'), 'thrust::sort should not be defined in thrust/fill.h')
+endif
+
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cuda/3 cudamodule/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cuda/3 cudamodule/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/cuda/3 cudamodule/meson.build"	2019-02-07 09:08:59.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/cuda/3 cudamodule/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -3,8 +3,8 @@
 nvcc = meson.get_compiler('cuda')
 cuda = import('unstable-cuda')
 
-arch_flags     = cuda.nvcc_arch_flags(nvcc, 'Auto', detected: ['3.0'])
-arch_readable  = cuda.nvcc_arch_readable(nvcc, 'Auto', detected: ['3.0'])
+arch_flags     = cuda.nvcc_arch_flags(nvcc, 'Auto', detected: ['6.0'])
+arch_readable  = cuda.nvcc_arch_readable(nvcc, 'Auto', detected: ['6.0'])
 driver_version = cuda.min_driver_version(nvcc)
 
 message('NVCC version:   ' + nvcc.version())
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/1 simple/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/1 simple/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/1 simple/installed_files.txt"	2016-08-21 11:17:04.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/1 simple/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1 +0,0 @@
-usr/bin/dsimpleapp?exe
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/1 simple/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/1 simple/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/1 simple/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/1 simple/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+{
+  "installed": [
+    {"type": "exe", "file": "usr/bin/dsimpleapp"},
+    {"type": "pdb", "file": "usr/bin/dsimpleapp", "language": "d"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/2 static library/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/2 static library/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/2 static library/installed_files.txt"	2016-08-21 11:17:04.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/2 static library/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,2 +0,0 @@
-usr/bin/app_s?exe
-usr/lib/libstuff.a
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/2 static library/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/2 static library/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/2 static library/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/2 static library/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "installed": [
+    {"type": "exe", "file": "usr/bin/app_s"},
+    {"type": "pdb", "file": "usr/bin/app_s", "language": "d"},
+    {"type": "file", "file": "usr/lib/libstuff.a"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/3 shared library/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/3 shared library/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/3 shared library/installed_files.txt"	2018-10-31 09:31:57.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/3 shared library/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,5 +0,0 @@
-usr/bin/app_d?exe
-?msvc:usr/bin/stuff.dll
-?msvc:usr/lib/stuff.lib
-?gcc:usr/lib/libstuff.so
-usr/lib/pkgconfig/test.pc
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/3 shared library/lld-test.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/3 shared library/lld-test.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/3 shared library/lld-test.py"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/3 shared library/lld-test.py"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,20 @@
+#!/usr/bin/env python3
+
+import argparse
+import subprocess
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument('ldd')
+    parser.add_argument('bin')
+    args = parser.parse_args()
+
+    p = subprocess.run([args.ldd, args.bin], stdout=subprocess.PIPE)
+    assert p.returncode == 0
+    o = p.stdout.decode()
+    assert 'libstuff.so =>' in o, 'libstuff.so not in linker path.'
+    assert 'libstuff.so => not found' not in o, 'libstuff.so not found correctly'
+
+
+if __name__ == '__main__':
+    main()
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/3 shared library/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/3 shared library/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/3 shared library/meson.build"	2020-01-07 21:12:26.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/3 shared library/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -7,7 +7,7 @@
   endif
 endif
 
-ldyn = shared_library('stuff', 'libstuff.d', install : true)
+subdir('sub')
 ed = executable('app_d', 'app.d', link_with : ldyn, install : true)
 test('linktest_dyn', ed)
 
@@ -19,3 +19,8 @@
           description: 'A test of D attributes to pkgconfig.generate.',
           d_module_versions: ['Use_Static']
 )
+
+ldd = find_program('ldd', required : false)
+if ldd.found()
+  test('ldd-test.py', ed)
+endif
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/3 shared library/sub/libstuff.d" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/3 shared library/sub/libstuff.d"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/3 shared library/sub/libstuff.d"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/3 shared library/sub/libstuff.d"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,14 @@
+import std.stdio;
+import std.string : format;
+
+export int printLibraryString (string str)
+{
+    writeln ("Library says: %s".format (str));
+    return 4;
+}
+
+version (Windows)
+{
+    import core.sys.windows.dll;
+    mixin SimpleDllMain;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/3 shared library/sub/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/3 shared library/sub/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/3 shared library/sub/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/3 shared library/sub/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,2 @@
+ldyn = shared_library('stuff', 'libstuff.d', install : true)
+
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/3 shared library/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/3 shared library/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/3 shared library/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/3 shared library/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,11 @@
+{
+  "installed": [
+    {"type": "exe", "file": "usr/bin/app_d"},
+    {"type": "pdb", "file": "usr/bin/app_d", "language": "d"},
+    {"type": "shared_lib", "platform": "msvc", "file": "usr/bin/stuff"},
+    {"type": "pdb", "file": "usr/bin/stuff", "language": "d"},
+    {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/stuff"},
+    {"type": "file", "platform": "msvc", "file": "usr/lib/stuff.lib"},
+    {"type": "file", "file": "usr/lib/pkgconfig/test.pc"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/4 library versions/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/4 library versions/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/4 library versions/installed_files.txt"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/4 library versions/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,17 +0,0 @@
-?gcc:usr/lib/libsome.so
-?gcc:usr/lib/libsome.so.0
-?gcc:usr/lib/libsome.so.1.2.3
-?gcc:usr/lib/libnoversion.so
-?gcc:usr/lib/libonlyversion.so
-?gcc:usr/lib/libonlyversion.so.1
-?gcc:usr/lib/libonlyversion.so.1.4.5
-?gcc:usr/lib/libonlysoversion.so
-?gcc:usr/lib/libonlysoversion.so.5
-?msvc:usr/bin/noversion.dll
-?msvc:usr/bin/onlysoversion-5.dll
-?msvc:usr/bin/onlyversion-1.dll
-?msvc:usr/bin/some-0.dll
-?msvc:usr/lib/noversion.lib
-?msvc:usr/lib/onlysoversion.lib
-?msvc:usr/lib/onlyversion.lib
-?msvc:usr/lib/some.lib
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/4 library versions/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/4 library versions/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/4 library versions/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/4 library versions/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,25 @@
+{
+  "installed": [
+    {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/some"},
+    {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/some", "version": "0"},
+    {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/some", "version": "1.2.3"},
+    {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/noversion"},
+    {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/onlyversion"},
+    {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/onlyversion", "version": "1"},
+    {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/onlyversion", "version": "1.4.5"},
+    {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/onlysoversion"},
+    {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/onlysoversion", "version": "5"},
+    {"type": "shared_lib", "platform": "msvc", "file": "usr/bin/noversion"},
+    {"type": "pdb", "file": "usr/bin/noversion", "language": "d"},
+    {"type": "shared_lib", "platform": "msvc", "file": "usr/bin/onlysoversion", "version": "5"},
+    {"type": "pdb", "file": "usr/bin/onlysoversion", "version": "5", "language": "d"},
+    {"type": "shared_lib", "platform": "msvc", "file": "usr/bin/onlyversion", "version": "1"},
+    {"type": "pdb", "file": "usr/bin/onlyversion", "version": "1", "language": "d"},
+    {"type": "shared_lib", "platform": "msvc", "file": "usr/bin/some", "version": "0"},
+    {"type": "pdb", "file": "usr/bin/some", "version": "0", "language": "d"},
+    {"type": "implib", "file": "usr/lib/noversion"},
+    {"type": "implib", "file": "usr/lib/onlysoversion"},
+    {"type": "implib", "file": "usr/lib/onlyversion"},
+    {"type": "implib", "file": "usr/lib/some"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/5 mixed/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/5 mixed/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/5 mixed/installed_files.txt"	2019-12-04 18:45:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/5 mixed/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,7 +0,0 @@
-usr/bin/appdc_d?exe
-usr/bin/appdc_s?exe
-usr/lib/libstuff.a
-?gcc:usr/lib/libstuff.so
-?msvc:usr/bin/stuff.dll
-?msvc:usr/bin/stuff.pdb
-?msvc:usr/lib/stuff.lib
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/5 mixed/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/5 mixed/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/5 mixed/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/5 mixed/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,13 @@
+{
+  "installed": [
+    {"type": "exe", "file": "usr/bin/appdc_d"},
+    {"type": "pdb", "file": "usr/bin/appdc_d", "language": "d"},
+    {"type": "exe", "file": "usr/bin/appdc_s"},
+    {"type": "pdb", "file": "usr/bin/appdc_s", "language": "d"},
+    {"type": "file", "file": "usr/lib/libstuff.a"},
+    {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/stuff"},
+    {"type": "shared_lib", "platform": "msvc",  "file": "usr/bin/stuff"},
+    {"type": "pdb", "file": "usr/bin/stuff", "language": "c"},
+    {"type": "file", "platform": "msvc", "file": "usr/lib/stuff.lib"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/6 unittest/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/6 unittest/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/6 unittest/installed_files.txt"	2016-08-21 11:17:04.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/6 unittest/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1 +0,0 @@
-usr/bin/dapp?exe
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/6 unittest/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/6 unittest/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/6 unittest/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/6 unittest/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+{
+  "installed": [
+    {"type": "exe", "file": "usr/bin/dapp"},
+    {"type": "pdb", "file": "usr/bin/dapp", "language": "d"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/7 multilib/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/7 multilib/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/7 multilib/installed_files.txt"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/7 multilib/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,11 +0,0 @@
-usr/bin/app_d?exe
-?gcc:usr/lib/libsay1.so
-?gcc:usr/lib/libsay1.so.0
-?gcc:usr/lib/libsay1.so.1.2.3
-?gcc:usr/lib/libsay2.so
-?gcc:usr/lib/libsay2.so.1
-?gcc:usr/lib/libsay2.so.1.2.4
-?msvc:usr/bin/say1-0.dll
-?msvc:usr/bin/say2-1.dll
-?msvc:usr/lib/say1.lib
-?msvc:usr/lib/say2.lib
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/7 multilib/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/7 multilib/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/d/7 multilib/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/d/7 multilib/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,18 @@
+{
+  "installed": [
+    {"type": "exe", "file": "usr/bin/app_d"},
+    {"type": "pdb", "file": "usr/bin/app_d", "language": "d"},
+    {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/say1"},
+    {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/say1", "version": "0"},
+    {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/say1", "version": "1.2.3"},
+    {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/say2"},
+    {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/say2", "version": "1"},
+    {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/say2", "version": "1.2.4"},
+    {"type": "shared_lib", "platform": "msvc", "file": "usr/bin/say1", "version": "0"},
+    {"type": "pdb", "file": "usr/bin/say1", "version": "0", "language": "d"},
+    {"type": "shared_lib", "platform": "msvc", "file": "usr/bin/say2", "version": "1"},
+    {"type": "pdb", "file": "usr/bin/say2", "version": "1", "language": "d"},
+    {"type": "implib", "file": "usr/lib/say1"},
+    {"type": "implib", "file": "usr/lib/say2"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/100 fallback consistency/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/100 fallback consistency/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/100 fallback consistency/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/100 fallback consistency/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+project('proj', 'c')
+
+# The first call succeeds and caches the value of the 'sub' dependency. The second
+# call should return the cached value, but still verify that the fallback variable
+# is consistent.
+dependency('sub', fallback : ['sub', 'dep1'])
+dependency('sub', fallback : ['sub', 'dep2'])
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/100 fallback consistency/subprojects/sub/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/100 fallback consistency/subprojects/sub/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/100 fallback consistency/subprojects/sub/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/100 fallback consistency/subprojects/sub/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,5 @@
+project('proj', 'c')
+
+dep1 = declare_dependency()
+dep2 = declare_dependency()
+meson.override_dependency('sub', dep1)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/100 fallback consistency/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/100 fallback consistency/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/100 fallback consistency/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/100 fallback consistency/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/100 fallback consistency/meson.build:7:0: ERROR: Inconsistency: Subproject has overridden the dependency with another variable than 'dep2'"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/101 no native compiler/main.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/101 no native compiler/main.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/101 no native compiler/main.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/101 no native compiler/main.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,3 @@
+int main(void) {
+    return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/101 no native compiler/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/101 no native compiler/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/101 no native compiler/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/101 no native compiler/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,12 @@
+project('no native compiler')
+
+if not meson.is_cross_build()
+   error('MESON_SKIP_TEST test only applicable when cross building.')
+endif
+
+if add_languages('c', required: false, native: true)
+  error('MESON_SKIP_TEST test only applicable when native compiler not available.')
+endif
+
+add_languages('c')
+executable('main', 'main.c', native: true)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/101 no native compiler/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/101 no native compiler/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/101 no native compiler/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/101 no native compiler/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/101 no native compiler/meson.build:12:0: ERROR: No host machine compiler for \"main.c\""
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/102 subdir parse error/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/102 subdir parse error/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/102 subdir parse error/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/102 subdir parse error/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,2 @@
+project('subdir false plusassign', 'c')
+subdir('subdir')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/102 subdir parse error/subdir/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/102 subdir parse error/subdir/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/102 subdir parse error/subdir/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/102 subdir parse error/subdir/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1 @@
+3 += 4
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/102 subdir parse error/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/102 subdir parse error/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/102 subdir parse error/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/102 subdir parse error/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/102 subdir parse error/subdir/meson.build:1:0: ERROR: Plusassignment target must be an id."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/103 invalid option file/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/103 invalid option file/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/103 invalid option file/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/103 invalid option file/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1 @@
+project('invalid option file')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/103 invalid option file/meson_options.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/103 invalid option file/meson_options.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/103 invalid option file/meson_options.txt"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/103 invalid option file/meson_options.txt"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1 @@
+'
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/103 invalid option file/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/103 invalid option file/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/103 invalid option file/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/103 invalid option file/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/103 invalid option file/meson_options.txt:1:0: ERROR: lexer"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/104 no lang/main.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/104 no lang/main.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/104 no lang/main.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/104 no lang/main.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,3 @@
+int main(void) {
+    return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/104 no lang/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/104 no lang/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/104 no lang/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/104 no lang/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,2 @@
+project('target without lang')
+executable('main', 'main.c')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/104 no lang/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/104 no lang/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/104 no lang/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/104 no lang/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/104 no lang/meson.build:2:0: ERROR: No host machine compiler for \"main.c\""
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/105 no glib-compile-resources/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/105 no glib-compile-resources/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/105 no glib-compile-resources/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/105 no glib-compile-resources/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,8 @@
+project('no glib-compile-resources')
+
+if find_program('glib-compile-resources', required: false).found()
+  error('MESON_SKIP_TEST test only applicable when glib-compile-resources is missing.')
+endif
+
+gnome = import('gnome')
+res = gnome.compile_resources('resources', 'trivial.gresource.xml')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/105 no glib-compile-resources/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/105 no glib-compile-resources/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/105 no glib-compile-resources/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/105 no glib-compile-resources/test.json"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/105 no glib-compile-resources/meson.build:8:0: ERROR: Program 'glib-compile-resources' not found"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/105 no glib-compile-resources/trivial.gresource.xml" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/105 no glib-compile-resources/trivial.gresource.xml"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/105 no glib-compile-resources/trivial.gresource.xml"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/105 no glib-compile-resources/trivial.gresource.xml"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,3 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<gresources>
+</gresources>
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/106 fallback consistency/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/106 fallback consistency/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/106 fallback consistency/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/106 fallback consistency/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,3 @@
+project('fallback consistency')
+
+dependency('foo')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/106 fallback consistency/subprojects/foo/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/106 fallback consistency/subprojects/foo/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/106 fallback consistency/subprojects/foo/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/106 fallback consistency/subprojects/foo/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+project('sub')
+
+foo_dep = declare_dependency()
+meson.override_dependency('foo', foo_dep)
+
+bar_dep = declare_dependency()
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/106 fallback consistency/subprojects/foo.wrap" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/106 fallback consistency/subprojects/foo.wrap"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/106 fallback consistency/subprojects/foo.wrap"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/106 fallback consistency/subprojects/foo.wrap"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+[wrap-file]
+source_url = http://host.invalid/foo.tar.gz
+source_filename = foo.tar.gz
+
+[provide]
+foo = bar_dep
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/106 fallback consistency/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/106 fallback consistency/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/106 fallback consistency/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/106 fallback consistency/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/106 fallback consistency/meson.build:3:0: ERROR: Inconsistency: Subproject has overridden the dependency with another variable than 'bar_dep'"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/107 number in combo/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/107 number in combo/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/107 number in combo/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/107 number in combo/meson.build"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1 @@
+project('number in combo')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/107 number in combo/nativefile.ini" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/107 number in combo/nativefile.ini"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/107 number in combo/nativefile.ini"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/107 number in combo/nativefile.ini"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,2 @@
+[built-in options]
+optimization = 1
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/107 number in combo/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/107 number in combo/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/107 number in combo/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/107 number in combo/test.json"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,5 @@
+{
+  "stdout": [
+    { "line": "test cases/failing/107 number in combo/meson.build:1:0: ERROR: Value \"1\" (of type \"number\") for combo option \"Optimization level\" is not one of the choices. Possible choices are (as string): \"0\", \"g\", \"1\", \"2\", \"3\", \"s\"." }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/108 bool in combo/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/108 bool in combo/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/108 bool in combo/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/108 bool in combo/meson.build"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1 @@
+project('bool in combo')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/108 bool in combo/meson_options.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/108 bool in combo/meson_options.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/108 bool in combo/meson_options.txt"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/108 bool in combo/meson_options.txt"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,5 @@
+option(
+    'opt',
+    type : 'combo',
+    choices : ['true', 'false']
+)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/108 bool in combo/nativefile.ini" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/108 bool in combo/nativefile.ini"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/108 bool in combo/nativefile.ini"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/108 bool in combo/nativefile.ini"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,2 @@
+[project options]
+opt = true
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/108 bool in combo/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/108 bool in combo/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/108 bool in combo/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/108 bool in combo/test.json"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,5 @@
+{
+  "stdout": [
+    { "line": "test cases/failing/108 bool in combo/meson.build:1:0: ERROR: Value \"True\" (of type \"boolean\") for combo option \"opt\" is not one of the choices. Possible choices are (as string): \"true\", \"false\"." }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/109 compiler no lang/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/109 compiler no lang/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/109 compiler no lang/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/109 compiler no lang/meson.build"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,2 @@
+project('compiler without lang')
+meson.get_compiler('c')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/109 compiler no lang/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/109 compiler no lang/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/109 compiler no lang/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/109 compiler no lang/test.json"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/109 compiler no lang/meson.build:2:6: ERROR: Tried to access compiler for language \"c\", not specified for host machine."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/10 out of bounds/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/10 out of bounds/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/10 out of bounds/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/10 out of bounds/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/10 out of bounds/meson.build:4:0: ERROR: Index 0 out of bounds of array of size 0."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/110 no fallback/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/110 no fallback/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/110 no fallback/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/110 no fallback/meson.build"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,2 @@
+project('no fallback', 'c')
+foob_dep = dependency('foob', allow_fallback: false, required: true)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/110 no fallback/subprojects/foob/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/110 no fallback/subprojects/foob/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/110 no fallback/subprojects/foob/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/110 no fallback/subprojects/foob/meson.build"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,2 @@
+project('foob', 'c')
+meson.override_dependency('foob', declare_dependency())
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/110 no fallback/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/110 no fallback/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/110 no fallback/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/110 no fallback/test.json"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,8 @@
+{
+  "stdout": [
+    {
+      "match": "re",
+      "line": ".*/meson\\.build:2:0: ERROR: (Pkg-config binary for machine MachineChoice\\.HOST not found\\. Giving up\\.|Dependency \"foob\" not found, tried .*)"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/11 object arithmetic/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/11 object arithmetic/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/11 object arithmetic/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/11 object arithmetic/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,8 @@
+{
+  "stdout": [
+    {
+      "match": "re",
+      "line": "test cases/failing/11 object arithmetic/meson\\.build:3:0: ERROR: Invalid use of addition: .*"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/12 string arithmetic/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/12 string arithmetic/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/12 string arithmetic/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/12 string arithmetic/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,8 @@
+{
+  "stdout": [
+    {
+      "match": "re",
+      "line": "test cases/failing/12 string arithmetic/meson\\.build:3:0: ERROR: Invalid use of addition: .*"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/13 array arithmetic/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/13 array arithmetic/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/13 array arithmetic/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/13 array arithmetic/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/13 array arithmetic/meson.build:3:0: ERROR: Multiplication works only with integers."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/14 invalid option name/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/14 invalid option name/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/14 invalid option name/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/14 invalid option name/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/14 invalid option name/meson_options.txt:1:0: ERROR: Option names can only contain letters, numbers or dashes."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/15 kwarg before arg/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/15 kwarg before arg/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/15 kwarg before arg/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/15 kwarg before arg/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/15 kwarg before arg/meson.build:3:0: ERROR: All keyword arguments must be after positional arguments."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/16 extract from subproject/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/16 extract from subproject/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/16 extract from subproject/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/16 extract from subproject/test.json"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/16 extract from subproject/meson.build:6:0: ERROR: Tried to extract objects from a different subproject."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/17 same target/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/17 same target/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/17 same target/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/17 same target/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/17 same target/meson.build:4:0: ERROR: Tried to create target \"foo\", but a target of that name already exists."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/18 wrong plusassign/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/18 wrong plusassign/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/18 wrong plusassign/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/18 wrong plusassign/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/18 wrong plusassign/meson.build:3:0: ERROR: Plusassignment target must be an id."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/19 target clash/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/19 target clash/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/19 target clash/meson.build"	2017-04-15 14:27:38.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/19 target clash/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -8,8 +8,8 @@
 # output location is redirected.
 
 if host_machine.system() == 'windows' or host_machine.system() == 'cygwin'
-  error('This is expected.')
+  error('MESON_SKIP_TEST test only works on platforms where executables have no suffix.')
 endif
 
 executable('clash', 'clash.c')
-run_target('clash', 'echo', 'clash 1')
+run_target('clash', command: ['echo', 'clash 1'])
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/19 target clash/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/19 target clash/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/19 target clash/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/19 target clash/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "ERROR: Multiple producers for Ninja target \"clash\". Please rename your targets."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/1 project not first/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/1 project not first/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/1 project not first/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/1 project not first/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "ERROR: First statement must be a call to project"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/20 version/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/20 version/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/20 version/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/20 version/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,8 @@
+{
+  "stdout": [
+    {
+      "match": "re",
+      "line": "test cases/failing/20 version/meson\\.build:1:0: ERROR: Meson version is .* but project requires >100\\.0\\.0"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/21 subver/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/21 subver/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/21 subver/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/21 subver/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/21 subver/meson.build:3:0: ERROR: Subproject foo version is 1.0.0 but >1.0.0 required."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/22 assert/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/22 assert/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/22 assert/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/22 assert/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/22 assert/meson.build:3:0: ERROR: Assert failed: I am fail."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/23 rel testdir/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/23 rel testdir/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/23 rel testdir/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/23 rel testdir/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/23 rel testdir/meson.build:4:0: ERROR: Workdir keyword argument must be an absolute path."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/24 int conversion/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/24 int conversion/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/24 int conversion/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/24 int conversion/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/24 int conversion/meson.build:3:13: ERROR: String 'notanumber' cannot be converted to int"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/25 badlang/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/25 badlang/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/25 badlang/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/25 badlang/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/25 badlang/meson.build:3:0: ERROR: Tried to use unknown language \"nonexisting\"."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/26 output subdir/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/26 output subdir/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/26 output subdir/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/26 output subdir/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/26 output subdir/meson.build:3:0: ERROR: Output file name must not contain a subdirectory."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/27 noprog use/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/27 noprog use/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/27 noprog use/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/27 noprog use/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/27 noprog use/meson.build:5:0: ERROR: Tried to use not-found external program in \"command\""
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/28 no crossprop/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/28 no crossprop/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/28 no crossprop/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/28 no crossprop/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/28 no crossprop/meson.build:3:0: ERROR: Unknown cross property: nonexisting."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/29 nested ternary/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/29 nested ternary/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/29 nested ternary/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/29 nested ternary/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/29 nested ternary/meson.build:3:12: ERROR: Nested ternary operators are not allowed."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/2 missing file/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/2 missing file/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/2 missing file/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/2 missing file/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/2 missing file/meson.build:3:0: ERROR: File missing.c does not exist."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/30 invalid man extension/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/30 invalid man extension/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/30 invalid man extension/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/30 invalid man extension/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/30 invalid man extension/meson.build:2:0: ERROR: Man file must have a file extension of a number between 1 and 8"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/31 no man extension/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/31 no man extension/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/31 no man extension/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/31 no man extension/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/31 no man extension/meson.build:2:0: ERROR: Man file must have a file extension of a number between 1 and 8"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/32 exe static shared/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/32 exe static shared/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/32 exe static shared/meson.build"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/32 exe static shared/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -2,7 +2,7 @@
 
 host_system = host_machine.system()
 if host_system == 'windows' or host_system == 'darwin'
-  error('Test only fails on Linux and BSD')
+  error('MESON_SKIP_TEST test only fails on Linux and BSD')
 endif
 
 statlib = static_library('stat', 'stat.c', pic : false)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/32 exe static shared/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/32 exe static shared/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/32 exe static shared/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/32 exe static shared/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/32 exe static shared/meson.build:9:0: ERROR: Can't link non-PIC static library 'stat' into shared library 'shr2'. Use the 'pic' option to static_library to build with PIC."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/33 non-root subproject/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/33 non-root subproject/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/33 non-root subproject/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/33 non-root subproject/test.json"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/33 non-root subproject/some/meson.build:1:0: ERROR: Neither a subproject directory nor a someproj.wrap file was found."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/34 dependency not-required then required/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/34 dependency not-required then required/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/34 dependency not-required then required/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/34 dependency not-required then required/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,8 @@
+{
+  "stdout": [
+    {
+      "match": "re",
+      "line": ".*/meson\\.build:4:0: ERROR: (Pkg-config binary for machine MachineChoice\\.HOST not found\\. Giving up\\.|Dependency \"foo\\-bar\\-xyz\\-12\\.3\" not found, tried .*)"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/35 project argument after target/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/35 project argument after target/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/35 project argument after target/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/35 project argument after target/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/35 project argument after target/meson.build:7:0: ERROR: Tried to use 'add_project_arguments' after a build target has been declared."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/36 pkgconfig dependency impossible conditions/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/36 pkgconfig dependency impossible conditions/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/36 pkgconfig dependency impossible conditions/meson.build"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/36 pkgconfig dependency impossible conditions/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -1,3 +1,7 @@
 project('impossible-dep-test', 'c', version : '1.0')
 
+if not dependency('zlib', required: false).found()
+  error('MESON_SKIP_TEST test requires zlib')
+endif
+
 dependency('zlib', version : ['>=1.0', '<1.0'])
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/36 pkgconfig dependency impossible conditions/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/36 pkgconfig dependency impossible conditions/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/36 pkgconfig dependency impossible conditions/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/36 pkgconfig dependency impossible conditions/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/36 pkgconfig dependency impossible conditions/meson.build:7:0: ERROR: Dependency 'zlib' was already checked and was not found"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/37 has function external dependency/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/37 has function external dependency/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/37 has function external dependency/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/37 has function external dependency/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/37 has function external dependency/meson.build:8:3: ERROR: Dependencies must be external dependencies"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/38 libdir must be inside prefix/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/38 libdir must be inside prefix/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/38 libdir must be inside prefix/meson.build"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/38 libdir must be inside prefix/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -1,2 +1,6 @@
 project('libdir prefix', 'c',
   default_options : ['libdir=/opt/lib'])
+
+if host_machine.system() == 'windows'
+  error('MESON_SKIP_TEST: this test does not work on Windows since /foo is not absolute')
+endif
\ No newline at end of file
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/38 libdir must be inside prefix/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/38 libdir must be inside prefix/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/38 libdir must be inside prefix/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/38 libdir must be inside prefix/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,10 @@
+{
+  "do_not_set_opts": [
+    "libdir"
+  ],
+  "stdout": [
+    {
+      "line": "test cases/failing/38 libdir must be inside prefix/meson.build:1:0: ERROR: The value of the 'libdir' option is '/opt/lib' which must be a subdir of the prefix '/usr'."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/39 prefix absolute/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/39 prefix absolute/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/39 prefix absolute/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/39 prefix absolute/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,11 @@
+{
+  "do_not_set_opts": [
+    "prefix"
+  ],
+  "stdout": [
+    {
+      "comment": "literal 'some/path/notabs' appears in output, irrespective of os.path.sep, as that's the prefix",
+      "line": "test cases/failing/39 prefix absolute/meson.build:1:0: ERROR: prefix value 'some/path/notabs' must be an absolute path"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/3 missing subdir/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/3 missing subdir/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/3 missing subdir/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/3 missing subdir/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,9 @@
+{
+  "stdout": [
+    {
+        "comment": "'missing/meson.build' gets transformed with os.path.sep separators",
+        "match": "re",
+        "line": "test cases/failing/3 missing subdir/meson\\.build:3:0: ERROR: Non\\-existent build file 'missing[\\\\/]meson\\.build'"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/40 kwarg assign/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/40 kwarg assign/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/40 kwarg assign/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/40 kwarg assign/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/40 kwarg assign/meson.build:3:0: ERROR: Tried to assign values inside an argument list."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/41 custom target plainname many inputs/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/41 custom target plainname many inputs/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/41 custom target plainname many inputs/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/41 custom target plainname many inputs/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/41 custom target plainname many inputs/meson.build:5:0: ERROR: Output cannot contain @PLAINNAME@ or @BASENAME@ when there is more than one input (we can't know which to use)"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/42 custom target outputs not matching install_dirs/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/42 custom target outputs not matching install_dirs/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/42 custom target outputs not matching install_dirs/installed_files.txt"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/42 custom target outputs not matching install_dirs/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,6 +0,0 @@
-usr/include/diff.h
-usr/include/first.h
-usr/bin/diff.sh
-usr/bin/second.sh
-opt/same.h
-opt/same.sh
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/42 custom target outputs not matching install_dirs/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/42 custom target outputs not matching install_dirs/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/42 custom target outputs not matching install_dirs/meson.build"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/42 custom target outputs not matching install_dirs/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -3,7 +3,7 @@
 gen = find_program('generator.py')
 
 if meson.backend() != 'ninja'
-  error('Failing manually, test is only for the ninja backend')
+  error('MESON_SKIP_TEST test is only for the ninja backend')
 endif
 
 custom_target('too-few-install-dirs',
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/42 custom target outputs not matching install_dirs/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/42 custom target outputs not matching install_dirs/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/42 custom target outputs not matching install_dirs/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/42 custom target outputs not matching install_dirs/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,33 @@
+{
+  "installed": [
+    {
+      "type": "file",
+      "file": "usr/include/diff.h"
+    },
+    {
+      "type": "file",
+      "file": "usr/include/first.h"
+    },
+    {
+      "type": "file",
+      "file": "usr/bin/diff.sh"
+    },
+    {
+      "type": "file",
+      "file": "usr/bin/second.sh"
+    },
+    {
+      "type": "file",
+      "file": "opt/same.h"
+    },
+    {
+      "type": "file",
+      "file": "opt/same.sh"
+    }
+  ],
+  "stdout": [
+    {
+      "line": "ERROR: Target 'too-few-install-dirs' has 3 outputs: ['toofew.h', 'toofew.c', 'toofew.sh'], but only 2 \"install_dir\"s were found."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/43 project name colon/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/43 project name colon/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/43 project name colon/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/43 project name colon/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/43 project name colon/meson.build:1:0: ERROR: Project name 'name with :' must not contain ':'"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/44 abs subdir/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/44 abs subdir/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/44 abs subdir/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/44 abs subdir/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/44 abs subdir/meson.build:5:0: ERROR: Subdir argument must be a relative path."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/45 abspath to srcdir/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/45 abspath to srcdir/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/45 abspath to srcdir/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/45 abspath to srcdir/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/45 abspath to srcdir/meson.build:3:0: ERROR: Tried to form an absolute path to a source dir. You should not do that but use relative paths instead."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/46 pkgconfig variables reserved/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/46 pkgconfig variables reserved/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/46 pkgconfig variables reserved/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/46 pkgconfig variables reserved/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/46 pkgconfig variables reserved/meson.build:8:5: ERROR: Variable \"prefix\" is reserved"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/47 pkgconfig variables zero length/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/47 pkgconfig variables zero length/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/47 pkgconfig variables zero length/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/47 pkgconfig variables zero length/test.json"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/47 pkgconfig variables zero length/meson.build:8:5: ERROR: Empty variable name or value"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/48 pkgconfig variables zero length value/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/48 pkgconfig variables zero length value/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/48 pkgconfig variables zero length value/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/48 pkgconfig variables zero length value/test.json"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/48 pkgconfig variables zero length value/meson.build:8:5: ERROR: Empty variable name or value"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/49 pkgconfig variables not key value/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/49 pkgconfig variables not key value/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/49 pkgconfig variables not key value/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/49 pkgconfig variables not key value/test.json"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/49 pkgconfig variables not key value/meson.build:8:5: ERROR: Variable 'this_should_be_key_value' must have a value separated by equals sign."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/4 missing meson.build/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/4 missing meson.build/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/4 missing meson.build/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/4 missing meson.build/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,9 @@
+{
+  "stdout": [
+    {
+      "match": "re",
+      "comment": "'subdir/meson.build' gets transformed with os.path.sep separators",
+      "line": "test cases/failing/4 missing meson\\.build/meson\\.build:3:0: ERROR: Non\\-existent build file 'subdir[\\\\/]meson\\.build'"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/50 executable comparison/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/50 executable comparison/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/50 executable comparison/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/50 executable comparison/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/50 executable comparison/meson.build:6:0: ERROR: exe1 can only be compared for equality."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/51 inconsistent comparison/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/51 inconsistent comparison/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/51 inconsistent comparison/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/51 inconsistent comparison/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/51 inconsistent comparison/meson.build:5:0: ERROR: Values of different types (list, str) cannot be compared using <."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/52 slashname/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/52 slashname/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/52 slashname/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/52 slashname/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/52 slashname/meson.build:11:0: ERROR: Problem encountered: Re-enable me once slash in name is finally prohibited."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/53 reserved meson prefix/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/53 reserved meson prefix/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/53 reserved meson prefix/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/53 reserved meson prefix/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/53 reserved meson prefix/meson.build:3:0: ERROR: The \"meson-\" prefix is reserved and cannot be used for top-level subdir()."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/54 wrong shared crate type/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/54 wrong shared crate type/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/54 wrong shared crate type/meson.build"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/54 wrong shared crate type/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -1,3 +1,7 @@
-project('test', 'rust')
+project('test')
+
+if not add_languages('rust', required: false)
+  error('MESON_SKIP_TEST test requires rust compiler')
+endif
 
 shared_library('test', 'foo.rs', rust_crate_type : 'staticlib')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/54 wrong shared crate type/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/54 wrong shared crate type/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/54 wrong shared crate type/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/54 wrong shared crate type/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/54 wrong shared crate type/meson.build:7:0: ERROR: Crate type \"staticlib\" invalid for dynamic libraries; must be \"dylib\" or \"cdylib\""
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/55 wrong static crate type/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/55 wrong static crate type/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/55 wrong static crate type/meson.build"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/55 wrong static crate type/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -1,3 +1,7 @@
-project('test', 'rust')
+project('test')
+
+if not add_languages('rust', required: false)
+  error('MESON_SKIP_TEST test requires rust compiler')
+endif
 
 static_library('test', 'foo.rs', rust_crate_type : 'cdylib')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/55 wrong static crate type/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/55 wrong static crate type/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/55 wrong static crate type/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/55 wrong static crate type/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/55 wrong static crate type/meson.build:7:0: ERROR: Crate type \"cdylib\" invalid for static libraries; must be \"rlib\" or \"staticlib\""
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/56 or on new line/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/56 or on new line/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/56 or on new line/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/56 or on new line/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/56 or on new line/meson.build:4:8: ERROR: Invalid or clause."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/57 kwarg in module/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/57 kwarg in module/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/57 kwarg in module/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/57 kwarg in module/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/57 kwarg in module/meson.build:3:0: ERROR: Function does not take keyword arguments."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/58 link with executable/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/58 link with executable/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/58 link with executable/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/58 link with executable/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/58 link with executable/meson.build:4:0: ERROR: Link target 'prog' is not linkable."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/59 assign custom target index/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/59 assign custom target index/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/59 assign custom target index/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/59 assign custom target index/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/59 assign custom target index/meson.build:24:0: ERROR: Assignment target must be an id."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/5 misplaced option/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/5 misplaced option/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/5 misplaced option/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/5 misplaced option/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/5 misplaced option/meson.build:3:0: ERROR: Tried to call option() in build description file. All options must be in the option file."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/60 getoption prefix/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/60 getoption prefix/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/60 getoption prefix/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/60 getoption prefix/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/60 getoption prefix/meson.build:5:0: ERROR: Having a colon in option name is forbidden, projects are not allowed to directly access options of other subprojects."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/61 bad option argument/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/61 bad option argument/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/61 bad option argument/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/61 bad option argument/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/61 bad option argument/meson_options.txt:1:0: ERROR: Invalid kwargs for option \"name\": \"vaule\""
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/62 subproj filegrab/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/62 subproj filegrab/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/62 subproj filegrab/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/62 subproj filegrab/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/62 subproj filegrab/subprojects/a/meson.build:3:0: ERROR: Sandbox violation: Tried to grab file prog.c from a different subproject."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/63 grab subproj/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/63 grab subproj/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/63 grab subproj/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/63 grab subproj/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/63 grab subproj/meson.build:7:0: ERROR: Sandbox violation: Tried to grab file sub.c from a different subproject."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/64 grab sibling/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/64 grab sibling/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/64 grab sibling/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/64 grab sibling/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/64 grab sibling/subprojects/a/meson.build:3:0: ERROR: Sandbox violation: Tried to grab file sneaky.c from a different subproject."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/65 string as link target/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/65 string as link target/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/65 string as link target/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/65 string as link target/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/65 string as link target/meson.build:2:0: ERROR: '' is not a target."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/66 dependency not-found and required/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/66 dependency not-found and required/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/66 dependency not-found and required/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/66 dependency not-found and required/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/66 dependency not-found and required/meson.build:2:0: ERROR: Dependency is both required and not-found"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/67 subproj different versions/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/67 subproj different versions/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/67 subproj different versions/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/67 subproj different versions/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/67 subproj different versions/subprojects/b/meson.build:3:0: ERROR: Dependency 'c' was already checked and was not found"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/68 wrong boost module/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/68 wrong boost module/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/68 wrong boost module/meson.build"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/68 wrong boost module/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -1,5 +1,9 @@
 project('boosttest', 'cpp',
   default_options : ['cpp_std=c++11'])
 
+if not dependency('boost', required: false).found()
+  error('MESON_SKIP_TEST test requires boost')
+endif
+
 # abc doesn't exist
 linkdep = dependency('boost', modules : ['thread', 'system', 'test', 'abc'])
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/68 wrong boost module/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/68 wrong boost module/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/68 wrong boost module/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/68 wrong boost module/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/68 wrong boost module/meson.build:9:0: ERROR: Dependency \"boost\" not found"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/69 install_data rename bad size/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/69 install_data rename bad size/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/69 install_data rename bad size/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/69 install_data rename bad size/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/69 install_data rename bad size/meson.build:3:0: ERROR: Size of rename argument is different from number of sources"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/6 missing incdir/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/6 missing incdir/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/6 missing incdir/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/6 missing incdir/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/6 missing incdir/meson.build:3:0: ERROR: Include dir nosuchdir does not exist."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/70 skip only subdir/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/70 skip only subdir/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/70 skip only subdir/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/70 skip only subdir/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/70 skip only subdir/meson.build:8:0: ERROR: File main.cpp does not exist."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/71 dual override/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/71 dual override/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/71 dual override/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/71 dual override/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,5 @@
+project('yo dawg', 'c')
+
+p = find_program('overrides.py')
+meson.override_find_program('override', p)
+meson.override_find_program('override', p)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/71 dual override/overrides.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/71 dual override/overrides.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/71 dual override/overrides.py"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/71 dual override/overrides.py"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+#!/usr/bin/env python3
+
+print('Yo dawg, we put overrides in your overrides,')
+print('so now you can override when you override.')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/71 dual override/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/71 dual override/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/71 dual override/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/71 dual override/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/71 dual override/meson.build:5:6: ERROR: Tried to override executable \"override\" which has already been overridden."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/71 invalid escape char/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/71 invalid escape char/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/71 invalid escape char/meson.build"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/71 invalid escape char/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-# Make sure meson exits on invalid string
-# The string below contains an invalid unicode code point
-
-'my name is what \uxyzo who are you'
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/72 dual override/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/72 dual override/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/72 dual override/meson.build"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/72 dual override/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,5 +0,0 @@
-project('yo dawg', 'c')
-
-p = find_program('overrides.py')
-meson.override_find_program('override', p)
-meson.override_find_program('override', p)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/72 dual override/overrides.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/72 dual override/overrides.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/72 dual override/overrides.py"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/72 dual override/overrides.py"	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-#!/usr/bin/env python3
-
-print('Yo dawg, we put overrides in your overrides,')
-print('so now you can override when you override.')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/72 override used/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/72 override used/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/72 override used/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/72 override used/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,5 @@
+project('overridde an already found exe', 'c')
+
+old = find_program('something.py')
+replacement = find_program('other.py')
+meson.override_find_program('something.py', replacement)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/72 override used/other.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/72 override used/other.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/72 override used/other.py"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/72 override used/other.py"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,3 @@
+#!/usr/bin/env python3
+
+print('Doing something else.')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/72 override used/something.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/72 override used/something.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/72 override used/something.py"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/72 override used/something.py"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,3 @@
+#!/usr/bin/env python3
+
+print('Doing something.')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/72 override used/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/72 override used/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/72 override used/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/72 override used/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/72 override used/meson.build:5:6: ERROR: Tried to override finding of executable \"something.py\" which has already been found."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/73 override used/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/73 override used/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/73 override used/meson.build"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/73 override used/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,5 +0,0 @@
-project('overridde an already found exe', 'c')
-
-old = find_program('something.py')
-replacement = find_program('other.py')
-meson.override_find_program('something.py', replacement)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/73 override used/other.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/73 override used/other.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/73 override used/other.py"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/73 override used/other.py"	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-#!/usr/bin/env python3
-
-print('Doing something else.')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/73 override used/something.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/73 override used/something.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/73 override used/something.py"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/73 override used/something.py"	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-#!/usr/bin/env python3
-
-print('Doing something.')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/73 run_command unclean exit/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/73 run_command unclean exit/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/73 run_command unclean exit/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/73 run_command unclean exit/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+project('run_command unclean exit', 'c')
+
+rcprog = find_program('./returncode.py')
+run_command(rcprog, '1', check : true)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/73 run_command unclean exit/returncode.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/73 run_command unclean exit/returncode.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/73 run_command unclean exit/returncode.py"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/73 run_command unclean exit/returncode.py"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+#!/usr/bin/env python3
+
+import sys
+exit(int(sys.argv[1]))
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/73 run_command unclean exit/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/73 run_command unclean exit/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/73 run_command unclean exit/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/73 run_command unclean exit/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,8 @@
+{
+  "stdout": [
+    {
+      "match": "re",
+      "line": "test cases/failing/73 run_command unclean exit/meson\\.build:4:0: ERROR: Command \".*[\\\\/]test cases[\\\\/]failing[\\\\/]73 run_command unclean exit[\\\\/]\\.[\\\\/]returncode\\.py 1\" failed with status 1\\."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/74 int literal leading zero/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/74 int literal leading zero/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/74 int literal leading zero/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/74 int literal leading zero/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+
+# This should fail.
+# Decimal syntax is 123.
+# Octal syntax is 0o123.
+fail_0123 = 0123
+
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/74 int literal leading zero/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/74 int literal leading zero/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/74 int literal leading zero/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/74 int literal leading zero/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,8 @@
+{
+  "stdout": [
+    {
+      "comment": "this error message is not very informative",
+      "line": "test cases/failing/74 int literal leading zero/meson.build:5:13: ERROR: Expecting eof got number."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/74 run_command unclean exit/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/74 run_command unclean exit/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/74 run_command unclean exit/meson.build"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/74 run_command unclean exit/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-project('run_command unclean exit', 'c')
-
-rcprog = find_program('./returncode.py')
-run_command(rcprog, '1', check : true)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/74 run_command unclean exit/returncode.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/74 run_command unclean exit/returncode.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/74 run_command unclean exit/returncode.py"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/74 run_command unclean exit/returncode.py"	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-#!/usr/bin/env python3
-
-import sys
-exit(int(sys.argv[1]))
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/75 configuration immutable/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/75 configuration immutable/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/75 configuration immutable/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/75 configuration immutable/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,12 @@
+project('configuration_data is immutable')
+
+a = configuration_data()
+
+configure_file(
+    configuration : a,
+    input : 'input',
+    output : 'output',
+)
+
+still_immutable = a
+still_immutable.set('hello', 'world')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/75 configuration immutable/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/75 configuration immutable/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/75 configuration immutable/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/75 configuration immutable/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/75 configuration immutable/meson.build:12:16: ERROR: Can not set values on configuration object that has been used."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/75 int literal leading zero/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/75 int literal leading zero/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/75 int literal leading zero/meson.build"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/75 int literal leading zero/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,6 +0,0 @@
-
-# This should fail.
-# Decimal syntax is 123.
-# Octal syntax is 0o123.
-fail_0123 = 0123
-
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/76 configuration immutable/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/76 configuration immutable/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/76 configuration immutable/meson.build"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/76 configuration immutable/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,12 +0,0 @@
-project('configuration_data is immutable')
-
-a = configuration_data()
-
-configure_file(
-    configuration : a,
-    input : 'input',
-    output : 'output',
-)
-
-still_immutable = a
-still_immutable.set('hello', 'world')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/76 link with shared module on osx/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/76 link with shared module on osx/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/76 link with shared module on osx/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/76 link with shared module on osx/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,8 @@
+project('link with shared module', 'c')
+
+if host_machine.system() != 'darwin'
+  error('MESON_SKIP_TEST test only fails on OSX')
+endif
+
+m = shared_module('mymodule', 'module.c')
+e = executable('prog', 'prog.c', link_with : m)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/76 link with shared module on osx/module.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/76 link with shared module on osx/module.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/76 link with shared module on osx/module.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/76 link with shared module on osx/module.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,3 @@
+int func(void) {
+    return 1496;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/76 link with shared module on osx/prog.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/76 link with shared module on osx/prog.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/76 link with shared module on osx/prog.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/76 link with shared module on osx/prog.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+
+int main(int argc, char **argv) {
+    return func();
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/76 link with shared module on osx/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/76 link with shared module on osx/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/76 link with shared module on osx/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/76 link with shared module on osx/test.json"	2020-11-22 02:19:19.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/76 link with shared module on osx/meson.build:8:0: ERROR: target links against shared modules. This is not permitted on OSX"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/77 link with shared module on osx/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/77 link with shared module on osx/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/77 link with shared module on osx/meson.build"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/77 link with shared module on osx/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,8 +0,0 @@
-project('link with shared module', 'c')
-
-if host_machine.system() != 'darwin'
-  error('Test only fails on OSX')
-endif
-
-m = shared_module('mymodule', 'module.c')
-e = executable('prog', 'prog.c', link_with : m)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/77 link with shared module on osx/module.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/77 link with shared module on osx/module.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/77 link with shared module on osx/module.c"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/77 link with shared module on osx/module.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-int func(void) {
-    return 1496;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/77 link with shared module on osx/prog.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/77 link with shared module on osx/prog.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/77 link with shared module on osx/prog.c"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/77 link with shared module on osx/prog.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-
-int main(int argc, char **argv) {
-    return func();
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/77 non ascii in ascii encoded configure file/config9.h.in" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/77 non ascii in ascii encoded configure file/config9.h.in"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/77 non ascii in ascii encoded configure file/config9.h.in"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/77 non ascii in ascii encoded configure file/config9.h.in"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1 @@
+#define MESSAGE "@var@"
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/77 non ascii in ascii encoded configure file/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/77 non ascii in ascii encoded configure file/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/77 non ascii in ascii encoded configure file/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/77 non ascii in ascii encoded configure file/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,10 @@
+project('non acsii to ascii encoding', 'c')
+# Writing a non ASCII character with a ASCII encoding should fail
+conf9 = configuration_data()
+conf9.set('var', 'д')
+configure_file(
+  input : 'config9.h.in',
+  output : '@BASENAME@',
+  encoding : 'ascii',
+  configuration : conf9
+)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/77 non ascii in ascii encoded configure file/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/77 non ascii in ascii encoded configure file/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/77 non ascii in ascii encoded configure file/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/77 non ascii in ascii encoded configure file/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,8 @@
+{
+  "stdout": [
+    {
+      "match": "re",
+      "line": "test cases/failing/77 non ascii in ascii encoded configure file/meson\\.build:5:0: ERROR: Could not write output file .*[\\\\/]config9\\.h: 'ascii' codec can't encode character '\\\\u0434' in position 17: ordinal not in range\\(128\\)"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/78 non ascii in ascii encoded configure file/config9.h.in" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/78 non ascii in ascii encoded configure file/config9.h.in"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/78 non ascii in ascii encoded configure file/config9.h.in"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/78 non ascii in ascii encoded configure file/config9.h.in"	1970-01-01 00:00:00.000000000 +0000
@@ -1 +0,0 @@
-#define MESSAGE "@var@"
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/78 non ascii in ascii encoded configure file/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/78 non ascii in ascii encoded configure file/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/78 non ascii in ascii encoded configure file/meson.build"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/78 non ascii in ascii encoded configure file/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,10 +0,0 @@
-project('non acsii to ascii encoding', 'c')
-# Writing a non ASCII character with a ASCII encoding should fail
-conf9 = configuration_data()
-conf9.set('var', 'д')
-configure_file(
-  input : 'config9.h.in',
-  output : '@BASENAME@',
-  encoding : 'ascii',
-  configuration : conf9
-)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/78 subproj dependency not-found and required/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/78 subproj dependency not-found and required/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/78 subproj dependency not-found and required/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/78 subproj dependency not-found and required/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,2 @@
+project('dep-test')
+missing = dependency('', fallback: ['missing', 'missing_dep'], required: true)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/78 subproj dependency not-found and required/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/78 subproj dependency not-found and required/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/78 subproj dependency not-found and required/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/78 subproj dependency not-found and required/test.json"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/78 subproj dependency not-found and required/meson.build:2:0: ERROR: Neither a subproject directory nor a missing.wrap file was found."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/79 subproj dependency not-found and required/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/79 subproj dependency not-found and required/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/79 subproj dependency not-found and required/meson.build"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/79 subproj dependency not-found and required/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,2 +0,0 @@
-project('dep-test')
-missing = dependency('', fallback: ['missing', 'missing_dep'], required: true)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/79 unfound run/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/79 unfound run/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/79 unfound run/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/79 unfound run/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+project('unfound runtarget')
+
+exe = find_program('nonexisting_prog', required : false)
+run_target('invoke_fail', command : [exe])
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/79 unfound run/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/79 unfound run/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/79 unfound run/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/79 unfound run/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/79 unfound run/meson.build:4:0: ERROR: Tried to use non-existing executable 'nonexisting_prog'"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/7 go to subproject/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/7 go to subproject/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/7 go to subproject/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/7 go to subproject/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/7 go to subproject/meson.build:3:0: ERROR: Must not go into subprojects dir with subdir(), use subproject() instead."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/80 framework dependency with version/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/80 framework dependency with version/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/80 framework dependency with version/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/80 framework dependency with version/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,8 @@
+project('framework dependency with version', 'c')
+
+if host_machine.system() != 'darwin'
+  error('MESON_SKIP_TEST test only applicable on darwin')
+endif
+
+# do individual frameworks have a meaningful version to test?  And multiple frameworks might be listed...
+dep = dependency('appleframeworks', modules: 'foundation', version: '>0')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/80 framework dependency with version/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/80 framework dependency with version/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/80 framework dependency with version/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/80 framework dependency with version/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/80 framework dependency with version/meson.build:8:0: ERROR: Unknown version of dependency 'appleframeworks', but need ['>0']."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/80 unfound run/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/80 unfound run/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/80 unfound run/meson.build"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/80 unfound run/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-project('unfound runtarget')
-
-exe = find_program('nonexisting_prog', required : false)
-run_target('invoke_fail', command : [exe])
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/81 framework dependency with version/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/81 framework dependency with version/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/81 framework dependency with version/meson.build"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/81 framework dependency with version/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-project('framework dependency with version')
-# do individual frameworks have a meaningful version to test?  And multiple frameworks might be listed...
-# otherwise we're not on OSX and this will definitely fail
-dep = dependency('appleframeworks', modules: 'foundation', version: '>0')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/81 override exe config/foo.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/81 override exe config/foo.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/81 override exe config/foo.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/81 override exe config/foo.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,3 @@
+int main(void) {
+  return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/81 override exe config/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/81 override exe config/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/81 override exe config/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/81 override exe config/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+project('myexe', 'c')
+
+foo = executable('foo', 'foo.c')
+meson.override_find_program('bar', foo)
+bar = find_program('bar')
+run_command(bar)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/81 override exe config/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/81 override exe config/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/81 override exe config/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/81 override exe config/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/81 override exe config/meson.build:6:0: ERROR: Program 'bar' was overridden with the compiled executable 'foo' and therefore cannot be used during configuration"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/82 gl dependency with version/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/82 gl dependency with version/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/82 gl dependency with version/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/82 gl dependency with version/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,9 @@
+project('gl dependency with version', 'c')
+
+host_system = host_machine.system()
+if host_system != 'windows' and host_system != 'darwin'
+  error('MESON_SKIP_TEST: test only fails on Windows and OSX')
+endif
+
+# gl dependency found via system method doesn't have a meaningful version to check
+dep = dependency('gl', method: 'system', version: '>0')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/82 gl dependency with version/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/82 gl dependency with version/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/82 gl dependency with version/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/82 gl dependency with version/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/82 gl dependency with version/meson.build:9:0: ERROR: Unknown version of dependency 'gl', but need ['>0']."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/82 override exe config/foo.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/82 override exe config/foo.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/82 override exe config/foo.c"	2018-10-31 09:31:57.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/82 override exe config/foo.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-int main(void) {
-  return 0;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/82 override exe config/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/82 override exe config/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/82 override exe config/meson.build"	2018-10-31 09:31:57.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/82 override exe config/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,6 +0,0 @@
-project('myexe', 'c')
-
-foo = executable('foo', 'foo.c')
-meson.override_find_program('bar', foo)
-bar = find_program('bar')
-run_command(bar)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/83 gl dependency with version/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/83 gl dependency with version/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/83 gl dependency with version/meson.build"	2018-10-31 09:31:57.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/83 gl dependency with version/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,9 +0,0 @@
-project('gl dependency with version', 'c')
-
-host_system = host_machine.system()
-if host_system != 'windows' and host_system != 'darwin'
-  error('Test only fails on Windows and OSX')
-endif
-
-# gl dependency found via system method doesn't have a meaningful version to check
-dep = dependency('gl', method: 'system', version: '>0')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/83 threads dependency with version/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/83 threads dependency with version/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/83 threads dependency with version/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/83 threads dependency with version/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,3 @@
+project('threads dependency with version', 'c')
+# threads dependency doesn't have a meaningful version to check
+dep = dependency('threads', version: '>0')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/83 threads dependency with version/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/83 threads dependency with version/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/83 threads dependency with version/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/83 threads dependency with version/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/83 threads dependency with version/meson.build:3:0: ERROR: Unknown version of dependency 'threads', but need ['>0']."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/84 gtest dependency with version/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/84 gtest dependency with version/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/84 gtest dependency with version/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/84 gtest dependency with version/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,8 @@
+project('gtest dependency with version', ['c', 'cpp'])
+
+if not dependency('gtest', method: 'system', required: false).found()
+   error('MESON_SKIP_TEST test requires gtest')
+endif
+
+# discovering gtest version is not yet implemented
+dep = dependency('gtest', method: 'system', version: '>0')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/84 gtest dependency with version/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/84 gtest dependency with version/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/84 gtest dependency with version/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/84 gtest dependency with version/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/84 gtest dependency with version/meson.build:8:0: ERROR: Dependency 'gtest' was already checked and was not found"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/84 threads dependency with version/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/84 threads dependency with version/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/84 threads dependency with version/meson.build"	2018-10-31 09:31:57.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/84 threads dependency with version/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-project('threads dependency with version', 'c')
-# threads dependency doesn't have a meaningful version to check
-dep = dependency('threads', version: '>0')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/85 dub libray/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/85 dub libray/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/85 dub libray/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/85 dub libray/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,11 @@
+project('dub')
+
+if not add_languages('d', required: false)
+  error('MESON_SKIP_TEST test requires D compiler')
+endif
+
+if not find_program('dub', required: false).found()
+  error('MESON_SKIP_TEST test requires dub')
+endif
+
+dependency('dubtestproject', method: 'dub') # Not library (none)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/85 dub libray/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/85 dub libray/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/85 dub libray/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/85 dub libray/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/85 dub libray/meson.build:11:0: ERROR: Dependency \"dubtestproject\" not found"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/85 gtest dependency with version/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/85 gtest dependency with version/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/85 gtest dependency with version/meson.build"	2019-04-17 08:08:43.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/85 gtest dependency with version/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-project('gtest dependency with version', ['c', 'cpp'])
-# discovering gtest version is not yet implemented
-dep = dependency('gtest', method: 'system', version: '>0')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/86 dub executable/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/86 dub executable/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/86 dub executable/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/86 dub executable/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,11 @@
+project('dub')
+
+if not add_languages('d', required: false)
+  error('MESON_SKIP_TEST test requires D compiler')
+endif
+
+if not find_program('dub', required: false).found()
+  error('MESON_SKIP_TEST test requires dub')
+endif
+
+dependency('dubtestproject:test1', method: 'dub') # Not library (executable)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/86 dub executable/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/86 dub executable/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/86 dub executable/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/86 dub executable/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/86 dub executable/meson.build:11:0: ERROR: Dependency \"dubtestproject:test1\" not found"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/86 dub libray/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/86 dub libray/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/86 dub libray/meson.build"	2018-10-31 09:31:57.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/86 dub libray/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-project('dub', 'd', meson_version: '0.48.0')
-
-dependency('dubtestproject', method: 'dub') # Not library (none)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/87 dub compiler/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/87 dub compiler/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/87 dub compiler/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/87 dub compiler/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,17 @@
+project('dub')
+
+if not add_languages('d', required: false)
+  error('MESON_SKIP_TEST test requires D compiler')
+endif
+
+if meson.get_compiler('d').get_id() == 'dmd'
+  if host_machine.system() == 'windows' or host_machine.system() == 'cygwin'
+    error('MESON_SKIP_TEST Windows test environment lacks multiple D compilers.')
+  endif
+endif
+
+if not find_program('dub', required: false).found()
+  error('MESON_SKIP_TEST test requires dub')
+endif
+
+dependency('dubtestproject:test2', method: 'dub') # Compiler mismatch
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/87 dub compiler/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/87 dub compiler/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/87 dub compiler/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/87 dub compiler/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,19 @@
+{
+  "matrix": {
+    "options": {
+      "warning_level": [
+        {
+          "val": "1",
+          "skip_on_env": [
+            "SINGLE_DUB_COMPILER"
+          ]
+        }
+      ]
+    }
+  },
+  "stdout": [
+    {
+      "line": "test cases/failing/87 dub compiler/meson.build:17:0: ERROR: Dependency \"dubtestproject:test2\" not found"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/87 dub executable/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/87 dub executable/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/87 dub executable/meson.build"	2018-10-31 09:31:57.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/87 dub executable/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-project('dub', 'd', meson_version: '0.48.0')
-
-dependency('dubtestproject:test1', method: 'dub') # Not library (executable)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/88 dub compiler/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/88 dub compiler/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/88 dub compiler/meson.build"	2018-12-09 14:27:16.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/88 dub compiler/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,9 +0,0 @@
-project('dub', 'd', meson_version: '0.48.0')
-
-if meson.get_compiler('d').get_id() == 'dmd'
-  if host_machine.system() == 'windows' or host_machine.system() == 'cygwin'
-    error('MESON_SKIP_TEST Windows test environment lacks multiple D compilers.')
-  endif
-endif
-
-dependency('dubtestproject:test2', method: 'dub') # Compiler mismatch
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/88 subproj not-found dep/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/88 subproj not-found dep/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/88 subproj not-found dep/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/88 subproj not-found dep/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,2 @@
+project('dep-test')
+missing = dependency('', fallback: ['somesubproj', 'notfound_dep'], required: true)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/88 subproj not-found dep/subprojects/somesubproj/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/88 subproj not-found dep/subprojects/somesubproj/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/88 subproj not-found dep/subprojects/somesubproj/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/88 subproj not-found dep/subprojects/somesubproj/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,3 @@
+project('dep', 'c')
+
+notfound_dep = dependency('', required : false)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/88 subproj not-found dep/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/88 subproj not-found dep/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/88 subproj not-found dep/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/88 subproj not-found dep/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/88 subproj not-found dep/meson.build:2:0: ERROR: Could not find dependency notfound_dep in subproject somesubproj"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/89 invalid configure file/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/89 invalid configure file/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/89 invalid configure file/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/89 invalid configure file/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,9 @@
+project('invalid configura file')
+
+configure_file(
+    configuration : configuration_data(),
+    input : 'input',
+    output : 'output',
+    install_dir : '',
+    install : true,
+)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/89 invalid configure file/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/89 invalid configure file/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/89 invalid configure file/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/89 invalid configure file/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/89 invalid configure file/meson.build:3:0: ERROR: \"install_dir\" must be specified when \"install\" in a configure_file is true"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/89 subproj not-found dep/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/89 subproj not-found dep/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/89 subproj not-found dep/meson.build"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/89 subproj not-found dep/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,2 +0,0 @@
-project('dep-test')
-missing = dependency('', fallback: ['somesubproj', 'notfound_dep'], required: true)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/89 subproj not-found dep/subprojects/somesubproj/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/89 subproj not-found dep/subprojects/somesubproj/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/89 subproj not-found dep/subprojects/somesubproj/meson.build"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/89 subproj not-found dep/subprojects/somesubproj/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-project('dep', 'c')
-
-notfound_dep = dependency('', required : false)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/8 recursive/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/8 recursive/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/8 recursive/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/8 recursive/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/8 recursive/subprojects/b/meson.build:3:0: ERROR: Recursive include of subprojects: a => b => a."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/90 invalid configure file/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/90 invalid configure file/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/90 invalid configure file/meson.build"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/90 invalid configure file/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,9 +0,0 @@
-project('invalid configura file')
-
-configure_file(
-    configuration : configuration_data(),
-    input : 'input',
-    output : 'output',
-    install_dir : '',
-    install : true,
-)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/90 kwarg dupe/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/90 kwarg dupe/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/90 kwarg dupe/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/90 kwarg dupe/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+project('dupe kwarg', 'c')
+
+dupedict = {'install': true}
+
+executable('prog', 'prog.c', install: true,
+  kwargs: dupedict)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/90 kwarg dupe/prog.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/90 kwarg dupe/prog.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/90 kwarg dupe/prog.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/90 kwarg dupe/prog.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+#include
+
+int main(int argc, char **argv) {
+    printf("I don't get built. It makes me saaaaaad. :(\n");
+    return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/90 kwarg dupe/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/90 kwarg dupe/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/90 kwarg dupe/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/90 kwarg dupe/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/90 kwarg dupe/meson.build:5:0: ERROR: Entry \"install\" defined both as a keyword argument and in a \"kwarg\" entry."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/91 kwarg dupe/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/91 kwarg dupe/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/91 kwarg dupe/meson.build"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/91 kwarg dupe/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,6 +0,0 @@
-project('dupe kwarg', 'c')
-
-dupedict = {'install': true}
-
-executable('prog', 'prog.c', install: true,
-  kwargs: dupedict)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/91 kwarg dupe/prog.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/91 kwarg dupe/prog.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/91 kwarg dupe/prog.c"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/91 kwarg dupe/prog.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,6 +0,0 @@
-#include
-
-int main(int argc, char **argv) {
-    printf("I don't get built. It makes me saaaaaad. :(\n");
-    return 0;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/91 missing pch file/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/91 missing pch file/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/91 missing pch file/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/91 missing pch file/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,3 @@
+project('pch test', 'c')
+exe = executable('prog', 'prog.c',
+c_pch : ['pch/prog_pch.c', 'pch/prog.h'])
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/91 missing pch file/prog.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/91 missing pch file/prog.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/91 missing pch file/prog.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/91 missing pch file/prog.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,3 @@
+int main(int argc, char **argv) {
+    return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/91 missing pch file/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/91 missing pch file/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/91 missing pch file/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/91 missing pch file/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,8 @@
+{
+  "stdout": [
+    {
+      "comment": "literal 'pch/prog.h' from meson.build appears in output, irrespective of os.path.sep",
+      "line": "test cases/failing/91 missing pch file/meson.build:2:0: ERROR: File pch/prog.h does not exist."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/92 missing pch file/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/92 missing pch file/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/92 missing pch file/meson.build"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/92 missing pch file/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-project('pch test', 'c')
-exe = executable('prog', 'prog.c',
-c_pch : ['pch/prog_pch.c', 'pch/prog.h'])
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/92 missing pch file/prog.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/92 missing pch file/prog.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/92 missing pch file/prog.c"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/92 missing pch file/prog.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-int main(int argc, char **argv) {
-    return 0;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/92 pch source different folder/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/92 pch source different folder/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/92 pch source different folder/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/92 pch source different folder/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,5 @@
+project('pch', 'c')
+# It is not allowed to have the PCH implementation in a different
+# folder than the header.
+exe = executable('prog', 'prog.c',
+  c_pch : ['include/pch.h', 'src/pch.c'])
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/92 pch source different folder/prog.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/92 pch source different folder/prog.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/92 pch source different folder/prog.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/92 pch source different folder/prog.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1 @@
+int main(void) {}
\ No newline at end of file
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/92 pch source different folder/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/92 pch source different folder/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/92 pch source different folder/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/92 pch source different folder/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/92 pch source different folder/meson.build:4:0: ERROR: PCH files must be stored in the same folder."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/93 pch source different folder/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/93 pch source different folder/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/93 pch source different folder/meson.build"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/93 pch source different folder/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,5 +0,0 @@
-project('pch', 'c')
-# It is not allowed to have the PCH implementation in a different
-# folder than the header.
-exe = executable('prog', 'prog.c',
-  c_pch : ['include/pch.h', 'src/pch.c'])
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/93 pch source different folder/prog.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/93 pch source different folder/prog.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/93 pch source different folder/prog.c"	2019-12-04 18:45:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/93 pch source different folder/prog.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1 +0,0 @@
-int main(void) {}
\ No newline at end of file
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/93 vala without c/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/93 vala without c/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/93 vala without c/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/93 vala without c/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,2 @@
+project('vala without c')
+add_languages('vala')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/93 vala without c/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/93 vala without c/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/93 vala without c/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/93 vala without c/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/93 vala without c/meson.build:2:0: ERROR: Compiling Vala requires C. Add C to your project languages and rerun Meson."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/94 unknown config tool/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/94 unknown config tool/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/94 unknown config tool/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/94 unknown config tool/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,2 @@
+project('no-such-config-tool')
+dependency('no-such-config-tool', method:'config-tool')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/94 unknown config tool/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/94 unknown config tool/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/94 unknown config tool/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/94 unknown config tool/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/94 unknown config tool/meson.build:2:0: ERROR: Dependency \"no-such-config-tool\" not found"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/94 vala without c/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/94 vala without c/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/94 vala without c/meson.build"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/94 vala without c/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,2 +0,0 @@
-project('vala without c')
-add_languages('vala')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/95 custom target install data/Info.plist.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/95 custom target install data/Info.plist.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/95 custom target install data/Info.plist.cpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/95 custom target install data/Info.plist.cpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1 @@
+Some data which gets processed before installation
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/95 custom target install data/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/95 custom target install data/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/95 custom target install data/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/95 custom target install data/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,11 @@
+project('custom target install data')
+
+preproc = find_program('preproc.py')
+
+t = custom_target('Info.plist',
+    command: [preproc, '@INPUT@', '@OUTPUT@'],
+    input: 'Info.plist.cpp',
+    output: 'Info.plist',
+)
+
+install_data(t)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/95 custom target install data/preproc.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/95 custom target install data/preproc.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/95 custom target install data/preproc.py"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/95 custom target install data/preproc.py"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,13 @@
+#!/usr/bin/env python3
+
+import sys
+
+if len(sys.argv) != 3:
+    print(sys.argv[0], '', '')
+
+inf = sys.argv[1]
+outf = sys.argv[2]
+
+with open(outf, 'wb') as o:
+    with open(inf, 'rb') as i:
+        o.write(i.read())
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/95 custom target install data/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/95 custom target install data/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/95 custom target install data/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/95 custom target install data/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/95 custom target install data/meson.build:11:0: ERROR: Argument must be string or file."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/95 unknown config tool/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/95 unknown config tool/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/95 unknown config tool/meson.build"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/95 unknown config tool/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,2 +0,0 @@
-project('no-such-config-tool')
-dependency('no-such-config-tool', method:'config-tool')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/96 add dict non string key/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/96 add dict non string key/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/96 add dict non string key/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/96 add dict non string key/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,9 @@
+project('add dictionary entry using non-string key')
+
+dict = {}
+
+# An integer variable to be used as a key
+key = 1
+
+# Add new entry using integer variable as key should fail
+dict += {key : 'myValue'}
\ No newline at end of file
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/96 add dict non string key/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/96 add dict non string key/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/96 add dict non string key/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/96 add dict non string key/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/96 add dict non string key/meson.build:9:0: ERROR: Key must be a string"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/96 custom target install data/Info.plist.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/96 custom target install data/Info.plist.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/96 custom target install data/Info.plist.cpp"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/96 custom target install data/Info.plist.cpp"	1970-01-01 00:00:00.000000000 +0000
@@ -1 +0,0 @@
-Some data which gets processed before installation
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/96 custom target install data/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/96 custom target install data/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/96 custom target install data/meson.build"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/96 custom target install data/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,11 +0,0 @@
-project('custom target install data')
-
-preproc = find_program('preproc.py')
-
-t = custom_target('Info.plist',
-    command: [preproc, '@INPUT@', '@OUTPUT@'],
-    input: 'Info.plist.cpp',
-    output: 'Info.plist',
-)
-
-install_data(t)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/96 custom target install data/preproc.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/96 custom target install data/preproc.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/96 custom target install data/preproc.py"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/96 custom target install data/preproc.py"	1970-01-01 00:00:00.000000000 +0000
@@ -1,13 +0,0 @@
-#!/usr/bin/env python3
-
-import sys
-
-if len(sys.argv) != 3:
-    print(sys.argv[0], '', '')
-
-inf = sys.argv[1]
-outf = sys.argv[2]
-
-with open(outf, 'wb') as o:
-    with open(inf, 'rb') as i:
-        o.write(i.read())
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/97 add dict duplicate keys/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/97 add dict duplicate keys/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/97 add dict duplicate keys/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/97 add dict duplicate keys/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,9 @@
+project('add dictionary entries with duplicate keys')
+
+dict = {}
+
+# A variable to be used as a key
+key = 'myKey'
+
+# Add two entries with duplicate keys should fail
+dict += {key : 'myValue1', key : 'myValue2'}
\ No newline at end of file
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/97 add dict duplicate keys/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/97 add dict duplicate keys/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/97 add dict duplicate keys/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/97 add dict duplicate keys/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/97 add dict duplicate keys/meson.build:9:0: ERROR: Duplicate dictionary key: myKey"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/97 add dict non string key/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/97 add dict non string key/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/97 add dict non string key/meson.build"	2019-12-04 18:45:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/97 add dict non string key/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,9 +0,0 @@
-project('add dictionary entry using non-string key')
-
-dict = {}
-
-# An integer variable to be used as a key
-key = 1
-
-# Add new entry using integer variable as key should fail
-dict += {key : 'myValue'}
\ No newline at end of file
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/98 add dict duplicate keys/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/98 add dict duplicate keys/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/98 add dict duplicate keys/meson.build"	2019-12-04 18:45:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/98 add dict duplicate keys/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,9 +0,0 @@
-project('add dictionary entries with duplicate keys')
-
-dict = {}
-
-# A variable to be used as a key
-key = 'myKey'
-
-# Add two entries with duplicate keys should fail
-dict += {key : 'myValue1', key : 'myValue2'}
\ No newline at end of file
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/98 fallback consistency/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/98 fallback consistency/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/98 fallback consistency/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/98 fallback consistency/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+project('proj', 'c')
+
+# Subproject overrides 'sub' with another variable than dep2. This should fail.
+dependency('sub', fallback : ['sub', 'dep2'])
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/98 fallback consistency/subprojects/sub/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/98 fallback consistency/subprojects/sub/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/98 fallback consistency/subprojects/sub/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/98 fallback consistency/subprojects/sub/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,5 @@
+project('proj', 'c')
+
+dep1 = declare_dependency()
+dep2 = declare_dependency()
+meson.override_dependency('sub', dep1)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/98 fallback consistency/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/98 fallback consistency/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/98 fallback consistency/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/98 fallback consistency/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/98 fallback consistency/meson.build:4:0: ERROR: Inconsistency: Subproject has overridden the dependency with another variable than 'dep2'"
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/99 no native prop/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/99 no native prop/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/99 no native prop/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/99 no native prop/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,3 @@
+project('missing property')
+
+message(meson.get_external_property('nonexisting'))
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/99 no native prop/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/99 no native prop/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/99 no native prop/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/99 no native prop/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/99 no native prop/meson.build:3:0: ERROR: Unknown native property: nonexisting."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/9 missing extra file/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/9 missing extra file/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing/9 missing extra file/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing/9 missing extra file/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/failing/9 missing extra file/meson.build:3:0: ERROR: File missing.txt does not exist."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing build/5 failed pickled/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing build/5 failed pickled/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing build/5 failed pickled/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing build/5 failed pickled/meson.build"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,7 @@
+project('failed pickled command')
+
+custom_target('failure',
+  command: ['false', '\n'],
+  output: 'output.txt',
+  build_by_default: true,
+)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing test/5 tap tests/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing test/5 tap tests/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/failing test/5 tap tests/meson.build"	2020-01-07 21:10:21.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/failing test/5 tap tests/meson.build"	2020-09-10 16:39:24.000000000 +0000
@@ -4,3 +4,4 @@
 test('nonzero return code', tester, args : [], protocol: 'tap')
 test('missing test', tester, args : ['1..1'], protocol: 'tap')
 test('incorrect skip', tester, args : ['1..1 # skip\nok 1'], protocol: 'tap')
+test('partially skipped', tester, args : ['not ok 1\nok 2 # skip'], protocol: 'tap')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/fortran/15 include/include_hierarchy.f90" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/fortran/15 include/include_hierarchy.f90"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/fortran/15 include/include_hierarchy.f90"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/fortran/15 include/include_hierarchy.f90"	2020-09-17 22:00:44.000000000 +0000
@@ -1,3 +1,4 @@
+program test_include_hier
 
 implicit none
 
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/fortran/15 include/include_syntax.f90" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/fortran/15 include/include_syntax.f90"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/fortran/15 include/include_syntax.f90"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/fortran/15 include/include_syntax.f90"	2020-09-17 22:00:44.000000000 +0000
@@ -1,3 +1,5 @@
+program test_include_syntax
+
 implicit none
 
 integer :: x, y
@@ -20,4 +22,4 @@
 
 print *, 'OK: Fortran include tests: x=',x
 
-end program
\ No newline at end of file
+end program
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/fortran/15 include/include_tests.f90" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/fortran/15 include/include_tests.f90"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/fortran/15 include/include_tests.f90"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/fortran/15 include/include_tests.f90"	1970-01-01 00:00:00.000000000 +0000
@@ -1,24 +0,0 @@
-implicit none
-
-integer :: x, y
-
-x = 1
-y = 0
-
-! include "timestwo.f90"
-
-! double quote and inline comment check
-include "timestwo.f90"  ! inline comment check
-if (x/=2) error stop 'failed on first include'
-
-! leading space and single quote check
-  include 'timestwo.f90'
-if (x/=4) error stop 'failed on second include'
-
-! Most Fortran compilers can't handle the non-standard #include,
-! including (ha!) Flang, Gfortran, Ifort and PGI.
-! #include "timestwo.f90"
-
-print *, 'OK: Fortran include tests: x=',x
-
-end program
\ No newline at end of file
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/fortran/15 include/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/fortran/15 include/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/fortran/15 include/meson.build"	2020-01-07 21:13:07.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/fortran/15 include/meson.build"	2020-09-17 22:00:44.000000000 +0000
@@ -1,8 +1,19 @@
 project('Inclusive', 'fortran',
   meson_version: '>= 0.51.1')
 
+cm = import('cmake')
+
 hier_exe = executable('include_hierarchy', 'include_hierarchy.f90')
 test('Fortran include file hierarchy', hier_exe)
 
 syntax_exe = executable('include_syntax', 'include_syntax.f90')
-test('Fortran include file syntax', syntax_exe)
\ No newline at end of file
+test('Fortran include file syntax', syntax_exe)
+
+# older CI runs into problems when combining too-old Ninja and CMake with Fortran
+ninja_version = run_command('ninja', '--version').stdout().strip()
+cmake_version = run_command('cmake', '--version').stdout().split()[2]
+if ninja_version.version_compare('>=1.10.0') and cmake_version.version_compare('>=3.17.0')
+  cm.subproject('cmake_inc')
+else
+  message('SKIP: CMake Fortran subproject with include. Ninja >= 1.10 and CMake >= 3.17 needed. You have Ninja ' + ninja_version + ' and CMake ' + cmake_version)
+endif
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/fortran/15 include/subprojects/cmake_inc/CMakeLists.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/fortran/15 include/subprojects/cmake_inc/CMakeLists.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/fortran/15 include/subprojects/cmake_inc/CMakeLists.txt"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/fortran/15 include/subprojects/cmake_inc/CMakeLists.txt"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,4 @@
+cmake_minimum_required(VERSION 3.17)
+project(cmake_inc LANGUAGES Fortran)
+
+add_executable(main main.f90)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/fortran/15 include/subprojects/cmake_inc/main.f90" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/fortran/15 include/subprojects/cmake_inc/main.f90"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/fortran/15 include/subprojects/cmake_inc/main.f90"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/fortran/15 include/subprojects/cmake_inc/main.f90"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,9 @@
+program test_subproject_inc
+
+implicit none
+
+include 'thousand.f90'
+
+if (thousand /= 1000) error stop 'did not include properly'
+
+end program
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/fortran/15 include/subprojects/cmake_inc/thousand.f90" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/fortran/15 include/subprojects/cmake_inc/thousand.f90"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/fortran/15 include/subprojects/cmake_inc/thousand.f90"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/fortran/15 include/subprojects/cmake_inc/thousand.f90"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1 @@
+integer, parameter :: thousand = 1000
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/fortran/18 first_arg/main.f90" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/fortran/18 first_arg/main.f90"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/fortran/18 first_arg/main.f90"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/fortran/18 first_arg/main.f90"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,2 @@
+i = 3
+end program
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/fortran/18 first_arg/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/fortran/18 first_arg/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/fortran/18 first_arg/meson.build"	2020-01-07 21:13:11.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/fortran/18 first_arg/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -24,3 +24,23 @@
 assert(l1.length() == 1, 'First supported returned wrong result.')
 assert(l1.get(0) == is_arg, 'First supported returned wrong argument.')
 assert(l2.length() == 0, 'First supported did not return empty array.')
+
+# --- test with an actual program, here for implicit none
+
+in0 = fc.first_supported_argument('-fimplicit-none', '-Mdclchk', '/warn:declarations', '-warn').get(0, '')
+impnone = {
+'intel-cl': '/warn:declarations',
+'intel': '-warn',
+'gcc': '-fimplicit-none',
+'pgi': '-Mdclchk',
+}
+
+arg = impnone.get(fc.get_id(), '')
+if arg != ''
+  assert(in0 == arg, 'implicit none argument ' + arg + ' not matching ' + in0)
+endif
+
+in1 = fc.get_supported_arguments('-fimplicit-none', '/warn:declarations', '/warn:errors', '-Mdclchk')
+if in1.length() > 0
+  assert(not fc.compiles(files('main.f90'), args: in1, name:'will fail implicit none'), 'implicit none should have failed')
+endif
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/fortran/7 generated/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/fortran/7 generated/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/fortran/7 generated/meson.build"	2020-01-07 21:12:58.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/fortran/7 generated/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -1,11 +1,18 @@
 # Tests whether fortran source files created during configuration are properly
 # scanned for dependency information
 
-project('generated', 'fortran')
+project('generated', 'fortran',
+  default_options : ['default_library=static'])
 
 conf_data = configuration_data()
 conf_data.set('ONE', 1)
 conf_data.set('TWO', 2)
+conf_data.set('THREE', 3)
+
+configure_file(input : 'mod3.fpp', output : 'mod3.f90', configuration : conf_data)
+# Manually build absolute path to source file to test
+# https://github.com/mesonbuild/meson/issues/7265
+three = library('mod3', meson.current_build_dir() / 'mod3.f90')
 
 templates_basenames = ['mod2', 'mod1']
 generated_sources = []
@@ -18,5 +25,5 @@
 endforeach
 
 sources = ['prog.f90'] + generated_sources
-exe = executable('generated', sources)
+exe = executable('generated', sources, link_with: three)
 test('generated', exe)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/fortran/7 generated/mod1.fpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/fortran/7 generated/mod1.fpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/fortran/7 generated/mod1.fpp"	2017-05-17 21:09:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/fortran/7 generated/mod1.fpp"	2020-08-15 16:27:05.000000000 +0000
@@ -1,6 +1,6 @@
 module mod1
-  implicit none
+implicit none
 
-  integer, parameter :: modval1 = @ONE@
+integer, parameter :: modval1 = @ONE@
 
 end module mod1
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/fortran/7 generated/mod2.fpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/fortran/7 generated/mod2.fpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/fortran/7 generated/mod2.fpp"	2017-05-17 21:09:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/fortran/7 generated/mod2.fpp"	2020-08-15 16:27:05.000000000 +0000
@@ -1,7 +1,7 @@
 module mod2
-  use mod1
-  implicit none
+use mod1, only : modval1
+implicit none
 
-  integer, parameter :: modval2 = @TWO@
+integer, parameter :: modval2 = @TWO@
 
 end module mod2
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/fortran/7 generated/mod3.fpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/fortran/7 generated/mod3.fpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/fortran/7 generated/mod3.fpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/fortran/7 generated/mod3.fpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+module mod3
+implicit none
+
+integer, parameter :: modval3 = @THREE@
+
+end module mod3
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/fortran/7 generated/prog.f90" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/fortran/7 generated/prog.f90"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/fortran/7 generated/prog.f90"	2019-02-28 20:43:18.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/fortran/7 generated/prog.f90"	2020-08-15 16:27:05.000000000 +0000
@@ -1,7 +1,8 @@
-program prog
-use mod2
+program generated
+use mod2, only : modval1, modval2
+use mod3, only : modval3
 implicit none
 
-if (modval1 + modval2 /= 3) stop 1
+if (modval1 + modval2 + modval3 /= 6) error stop
 
-end program prog
+end program generated
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/10 gtk-doc/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/10 gtk-doc/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/10 gtk-doc/installed_files.txt"	2018-12-09 14:27:16.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/10 gtk-doc/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,57 +0,0 @@
-usr/include/foo-version.h
-usr/share/gtk-doc/html/foobar/BAR.html
-usr/share/gtk-doc/html/foobar/foobar.devhelp2
-usr/share/gtk-doc/html/foobar/foobar.html
-usr/share/gtk-doc/html/foobar/foobar-foo.html
-usr/share/gtk-doc/html/foobar/foobar-foo-version.html
-usr/share/gtk-doc/html/foobar/home.png
-usr/share/gtk-doc/html/foobar/index.html
-usr/share/gtk-doc/html/foobar/left.png
-usr/share/gtk-doc/html/foobar/left-insensitive.png
-usr/share/gtk-doc/html/foobar/right.png
-usr/share/gtk-doc/html/foobar/right-insensitive.png
-usr/share/gtk-doc/html/foobar/style.css
-usr/share/gtk-doc/html/foobar/up.png
-usr/share/gtk-doc/html/foobar/up-insensitive.png
-usr/share/gtk-doc/html/foobar2/BAR.html
-usr/share/gtk-doc/html/foobar2/foobar2.devhelp2
-usr/share/gtk-doc/html/foobar2/foobar.html
-usr/share/gtk-doc/html/foobar2/foobar2-foo.html
-usr/share/gtk-doc/html/foobar2/foobar2-foo-version.html
-usr/share/gtk-doc/html/foobar2/home.png
-usr/share/gtk-doc/html/foobar2/index.html
-usr/share/gtk-doc/html/foobar2/left.png
-usr/share/gtk-doc/html/foobar2/left-insensitive.png
-usr/share/gtk-doc/html/foobar2/right.png
-usr/share/gtk-doc/html/foobar2/right-insensitive.png
-usr/share/gtk-doc/html/foobar2/style.css
-usr/share/gtk-doc/html/foobar2/up.png
-usr/share/gtk-doc/html/foobar2/up-insensitive.png
-usr/share/gtk-doc/html/foobar-3.0/BAR.html
-usr/share/gtk-doc/html/foobar-3.0/foobar-3.0.devhelp2
-usr/share/gtk-doc/html/foobar-3.0/foobar.html
-usr/share/gtk-doc/html/foobar-3.0/foobar-foo.html
-usr/share/gtk-doc/html/foobar-3.0/foobar-foo-version.html
-usr/share/gtk-doc/html/foobar-3.0/home.png
-usr/share/gtk-doc/html/foobar-3.0/index.html
-usr/share/gtk-doc/html/foobar-3.0/left.png
-usr/share/gtk-doc/html/foobar-3.0/left-insensitive.png
-usr/share/gtk-doc/html/foobar-3.0/right.png
-usr/share/gtk-doc/html/foobar-3.0/right-insensitive.png
-usr/share/gtk-doc/html/foobar-3.0/style.css
-usr/share/gtk-doc/html/foobar-3.0/up.png
-usr/share/gtk-doc/html/foobar-3.0/up-insensitive.png
-usr/share/gtk-doc/html/foobar3/BAR.html
-usr/share/gtk-doc/html/foobar3/foobar2-3.0.devhelp2
-usr/share/gtk-doc/html/foobar3/foobar.html
-usr/share/gtk-doc/html/foobar3/foobar2-foo.html
-usr/share/gtk-doc/html/foobar3/foobar2-foo-version.html
-usr/share/gtk-doc/html/foobar3/home.png
-usr/share/gtk-doc/html/foobar3/index.html
-usr/share/gtk-doc/html/foobar3/left.png
-usr/share/gtk-doc/html/foobar3/left-insensitive.png
-usr/share/gtk-doc/html/foobar3/right.png
-usr/share/gtk-doc/html/foobar3/right-insensitive.png
-usr/share/gtk-doc/html/foobar3/style.css
-usr/share/gtk-doc/html/foobar3/up.png
-usr/share/gtk-doc/html/foobar3/up-insensitive.png
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/10 gtk-doc/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/10 gtk-doc/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/10 gtk-doc/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/10 gtk-doc/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,61 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/include/foo-version.h"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar/BAR.html"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar/foobar.devhelp2"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar/foobar.html"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar/foobar-foo.html"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar/foobar-foo-version.html"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar/home.png"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar/index.html"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar/left.png"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar/left-insensitive.png"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar/right.png"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar/right-insensitive.png"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar/style.css"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar/up.png"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar/up-insensitive.png"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar2/BAR.html"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar2/foobar2.devhelp2"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar2/foobar.html"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar2/foobar2-foo.html"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar2/foobar2-foo-version.html"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar2/home.png"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar2/index.html"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar2/left.png"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar2/left-insensitive.png"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar2/right.png"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar2/right-insensitive.png"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar2/style.css"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar2/up.png"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar2/up-insensitive.png"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar-3.0/BAR.html"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar-3.0/foobar-3.0.devhelp2"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar-3.0/foobar.html"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar-3.0/foobar-foo.html"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar-3.0/foobar-foo-version.html"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar-3.0/home.png"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar-3.0/index.html"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar-3.0/left.png"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar-3.0/left-insensitive.png"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar-3.0/right.png"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar-3.0/right-insensitive.png"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar-3.0/style.css"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar-3.0/up.png"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar-3.0/up-insensitive.png"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar3/BAR.html"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar3/foobar2-3.0.devhelp2"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar3/foobar.html"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar3/foobar2-foo.html"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar3/foobar2-foo-version.html"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar3/home.png"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar3/index.html"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar3/left.png"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar3/left-insensitive.png"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar3/right.png"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar3/right-insensitive.png"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar3/style.css"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar3/up.png"},
+    {"type": "file", "file": "usr/share/gtk-doc/html/foobar3/up-insensitive.png"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/11 gir subproject/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/11 gir subproject/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/11 gir subproject/installed_files.txt"	2018-10-31 09:31:57.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/11 gir subproject/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,8 +0,0 @@
-usr/lib/girepository-1.0/Meson-1.0.typelib
-usr/lib/girepository-1.0/MesonSub-1.0.typelib
-usr/share/gir-1.0/Meson-1.0.gir
-usr/share/gir-1.0/MesonSub-1.0.gir
-usr/lib/?libgirsubproject.so
-?cygwin:usr/lib/libgirlib.dll.a
-usr/lib/?libgirlib.so
-?cygwin:usr/lib/libgirsubproject.dll.a
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/11 gir subproject/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/11 gir subproject/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/11 gir subproject/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/11 gir subproject/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,12 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/lib/girepository-1.0/Meson-1.0.typelib"},
+    {"type": "file", "file": "usr/lib/girepository-1.0/MesonSub-1.0.typelib"},
+    {"type": "file", "file": "usr/share/gir-1.0/Meson-1.0.gir"},
+    {"type": "file", "file": "usr/share/gir-1.0/MesonSub-1.0.gir"},
+    {"type": "expr", "file": "usr/lib/?libgirsubproject.so"},
+    {"type": "file", "platform": "cygwin", "file": "usr/lib/libgirlib.dll.a"},
+    {"type": "expr", "file": "usr/lib/?libgirlib.so"},
+    {"type": "file", "platform": "cygwin", "file": "usr/lib/libgirsubproject.dll.a"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/12 multiple gir/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/12 multiple gir/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/12 multiple gir/installed_files.txt"	2018-10-31 09:31:57.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/12 multiple gir/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,8 +0,0 @@
-usr/lib/girepository-1.0/Meson-1.0.typelib
-usr/lib/girepository-1.0/MesonSub-1.0.typelib
-usr/lib/?libgirlib.so
-?cygwin:usr/lib/libgirlib.dll.a
-usr/lib/?libgirsubproject.so
-?cygwin:usr/lib/libgirsubproject.dll.a
-usr/share/gir-1.0/Meson-1.0.gir
-usr/share/gir-1.0/MesonSub-1.0.gir
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/12 multiple gir/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/12 multiple gir/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/12 multiple gir/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/12 multiple gir/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,12 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/lib/girepository-1.0/Meson-1.0.typelib"},
+    {"type": "file", "file": "usr/lib/girepository-1.0/MesonSub-1.0.typelib"},
+    {"type": "expr", "file": "usr/lib/?libgirlib.so"},
+    {"type": "file", "platform": "cygwin", "file": "usr/lib/libgirlib.dll.a"},
+    {"type": "expr", "file": "usr/lib/?libgirsubproject.so"},
+    {"type": "file", "platform": "cygwin", "file": "usr/lib/libgirsubproject.dll.a"},
+    {"type": "file", "file": "usr/share/gir-1.0/Meson-1.0.gir"},
+    {"type": "file", "file": "usr/share/gir-1.0/MesonSub-1.0.gir"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/13 yelp/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/13 yelp/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/13 yelp/installed_files.txt"	2018-08-25 08:05:43.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/13 yelp/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,18 +0,0 @@
-usr/share/help/C/meson/index.page
-usr/share/help/C/meson/media/test.txt
-usr/share/help/es/meson/index.page
-usr/share/help/es/meson/media/test.txt
-usr/share/help/de/meson/index.page
-usr/share/help/de/meson/media/test.txt
-usr/share/help/C/meson-symlink/index.page
-usr/share/help/C/meson-symlink/media/test.txt
-usr/share/help/es/meson-symlink/media/test.txt
-usr/share/help/es/meson-symlink/index.page
-usr/share/help/de/meson-symlink/index.page
-usr/share/help/de/meson-symlink/media/test.txt
-usr/share/help/C/meson-linguas/index.page
-usr/share/help/C/meson-linguas/media/test.txt
-usr/share/help/es/meson-linguas/media/test.txt
-usr/share/help/es/meson-linguas/index.page
-usr/share/help/de/meson-linguas/index.page
-usr/share/help/de/meson-linguas/media/test.txt
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/13 yelp/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/13 yelp/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/13 yelp/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/13 yelp/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,22 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/share/help/C/meson/index.page"},
+    {"type": "file", "file": "usr/share/help/C/meson/media/test.txt"},
+    {"type": "file", "file": "usr/share/help/es/meson/index.page"},
+    {"type": "file", "file": "usr/share/help/es/meson/media/test.txt"},
+    {"type": "file", "file": "usr/share/help/de/meson/index.page"},
+    {"type": "file", "file": "usr/share/help/de/meson/media/test.txt"},
+    {"type": "file", "file": "usr/share/help/C/meson-symlink/index.page"},
+    {"type": "file", "file": "usr/share/help/C/meson-symlink/media/test.txt"},
+    {"type": "file", "file": "usr/share/help/es/meson-symlink/media/test.txt"},
+    {"type": "file", "file": "usr/share/help/es/meson-symlink/index.page"},
+    {"type": "file", "file": "usr/share/help/de/meson-symlink/index.page"},
+    {"type": "file", "file": "usr/share/help/de/meson-symlink/media/test.txt"},
+    {"type": "file", "file": "usr/share/help/C/meson-linguas/index.page"},
+    {"type": "file", "file": "usr/share/help/C/meson-linguas/media/test.txt"},
+    {"type": "file", "file": "usr/share/help/es/meson-linguas/media/test.txt"},
+    {"type": "file", "file": "usr/share/help/es/meson-linguas/index.page"},
+    {"type": "file", "file": "usr/share/help/de/meson-linguas/index.page"},
+    {"type": "file", "file": "usr/share/help/de/meson-linguas/media/test.txt"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/14 doxygen/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/14 doxygen/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/14 doxygen/installed_files.txt"	2017-05-10 21:18:30.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/14 doxygen/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,83 +0,0 @@
-usr/share/doc/spede/html/annotated.html
-usr/share/doc/spede/html/bc_s.png
-usr/share/doc/spede/html/bdwn.png
-usr/share/doc/spede/html/classComedy_1_1Comedian.html
-usr/share/doc/spede/html/classComedy_1_1Comedian.png
-usr/share/doc/spede/html/classComedy_1_1Comedian-members.html
-usr/share/doc/spede/html/classComedy_1_1Spede.html
-usr/share/doc/spede/html/classComedy_1_1Spede.png
-usr/share/doc/spede/html/classComedy_1_1Spede-members.html
-usr/share/doc/spede/html/classes.html
-usr/share/doc/spede/html/closed.png
-usr/share/doc/spede/html/comedian_8h_source.html
-usr/share/doc/spede/html/dir_7bdce917e28dfbd493cadd1d2e5c7d80.html
-usr/share/doc/spede/html/dir_44a4667d36a4476878de085754f6d2b9.html
-usr/share/doc/spede/html/dir_68b523c5b3a2dcea45d5ce70397fb722.html
-usr/share/doc/spede/html/dir_a7e6472d2301212032fd74682f8217f3.html
-usr/share/doc/spede/html/dir_ee191f21c02d247cc959e80c1a3acadf.html
-usr/share/doc/spede/html/doc.png
-usr/share/doc/spede/html/doxygen.css
-usr/share/doc/spede/html/doxygen.png
-usr/share/doc/spede/html/dynsections.js
-usr/share/doc/spede/html/files.html
-usr/share/doc/spede/html/folderclosed.png
-usr/share/doc/spede/html/folderopen.png
-usr/share/doc/spede/html/functions.html
-usr/share/doc/spede/html/functions_func.html
-usr/share/doc/spede/html/hierarchy.html
-usr/share/doc/spede/html/index.html
-usr/share/doc/spede/html/jquery.js
-usr/share/doc/spede/html/menu.js
-usr/share/doc/spede/html/menudata.js
-usr/share/doc/spede/html/namespaceComedy.html
-usr/share/doc/spede/html/namespacemembers.html
-usr/share/doc/spede/html/namespacemembers_func.html
-usr/share/doc/spede/html/namespaces.html
-usr/share/doc/spede/html/nav_f.png
-usr/share/doc/spede/html/nav_g.png
-usr/share/doc/spede/html/nav_h.png
-usr/share/doc/spede/html/open.png
-usr/share/doc/spede/html/search/all_0.html
-usr/share/doc/spede/html/search/all_0.js
-usr/share/doc/spede/html/search/all_1.html
-usr/share/doc/spede/html/search/all_1.js
-usr/share/doc/spede/html/search/all_2.html
-usr/share/doc/spede/html/search/all_2.js
-usr/share/doc/spede/html/search/all_3.html
-usr/share/doc/spede/html/search/all_3.js
-usr/share/doc/spede/html/search/classes_0.html
-usr/share/doc/spede/html/search/classes_0.js
-usr/share/doc/spede/html/search/classes_1.html
-usr/share/doc/spede/html/search/classes_1.js
-usr/share/doc/spede/html/search/close.png
-usr/share/doc/spede/html/search/files_0.html
-usr/share/doc/spede/html/search/files_0.js
-usr/share/doc/spede/html/search/functions_0.html
-usr/share/doc/spede/html/search/functions_0.js
-usr/share/doc/spede/html/search/functions_1.html
-usr/share/doc/spede/html/search/functions_1.js
-usr/share/doc/spede/html/search/functions_2.html
-usr/share/doc/spede/html/search/functions_2.js
-usr/share/doc/spede/html/search/mag_sel.png
-usr/share/doc/spede/html/search/namespaces_0.html
-usr/share/doc/spede/html/search/namespaces_0.js
-usr/share/doc/spede/html/search/nomatches.html
-usr/share/doc/spede/html/search/pages_0.html
-usr/share/doc/spede/html/search/pages_0.js
-usr/share/doc/spede/html/search/search.css
-usr/share/doc/spede/html/search/search.js
-usr/share/doc/spede/html/search/searchdata.js
-usr/share/doc/spede/html/search/search_l.png
-usr/share/doc/spede/html/search/search_m.png
-usr/share/doc/spede/html/search/search_r.png
-usr/share/doc/spede/html/spede_8cpp.html
-usr/share/doc/spede/html/spede_8h.html
-usr/share/doc/spede/html/spede_8h_source.html
-usr/share/doc/spede/html/splitbar.png
-usr/share/doc/spede/html/sync_off.png
-usr/share/doc/spede/html/sync_on.png
-usr/share/doc/spede/html/tabs.css
-usr/share/doc/spede/html/tab_a.png
-usr/share/doc/spede/html/tab_b.png
-usr/share/doc/spede/html/tab_h.png
-usr/share/doc/spede/html/tab_s.png
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/14 doxygen/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/14 doxygen/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/14 doxygen/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/14 doxygen/test.json"	2020-09-10 16:39:24.000000000 +0000
@@ -0,0 +1,5 @@
+{
+  "installed": [
+    {"type": "dir", "file": "usr/share/doc/spede/html"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/15 llvm/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/15 llvm/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/15 llvm/meson.build"	2020-01-07 21:15:03.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/15 llvm/meson.build"	2020-10-26 11:18:42.000000000 +0000
@@ -1,28 +1,23 @@
 project('llvmtest', ['c', 'cpp'], default_options : ['c_std=c99'])
 
-d = dependency('llvm', required : false, method : 'config-tool')
+method = get_option('method')
+static = get_option('link-static')
+d = dependency('llvm', required : false, method : method, static : static)
 if not d.found()
-  d = dependency('llvm', required : false, static : true)
-  if not d.found()
-    error('MESON_SKIP_TEST llvm not found.')
-  else
-    static = true
-  endif
-else
-  static = false
+  error('MESON_SKIP_TEST llvm not found.')
 endif
 
-d = dependency('llvm', modules : 'not-found', required : false, static : static)
+d = dependency('llvm', modules : 'not-found', required : false, static : static, method : method)
 assert(d.found() == false, 'not-found llvm module found')
 
-d = dependency('llvm', version : '<0.1', required : false, static : static)
+d = dependency('llvm', version : '<0.1', required : false, static : static, method : method)
 assert(d.found() == false, 'ancient llvm module found')
 
-d = dependency('llvm', optional_modules : 'not-found', required : false, static : static)
+d = dependency('llvm', optional_modules : 'not-found', required : false, static : static, method : method)
 assert(d.found() == true, 'optional module stopped llvm from being found.')
 
 # Check we can apply a version constraint
-d = dependency('llvm', version : ['< 500', '>=@0@'.format(d.version())], required: false, static : static)
+d = dependency('llvm', version : ['< 500', '>=@0@'.format(d.version())], required: false, static : static, method : method)
 assert(d.found() == true, 'Cannot set version constraints')
 
 dep_tinfo = dependency('tinfo', required : false)
@@ -31,29 +26,23 @@
   dep_tinfo = cpp.find_library('tinfo', required: false)
 endif
 
-foreach method : ['config-tool', 'cmake']
-  foreach static : [true, false]
-    message('Trying method @0@ for @1@ link'.format(method, static ? 'static' : 'dynamic'))
-    llvm_dep = dependency(
-      'llvm',
-      modules : ['bitwriter', 'asmprinter', 'executionengine', 'target',
-                'mcjit', 'nativecodegen', 'amdgpu'],
-      required : false,
-      static : static,
-      method : method,
-    )
-    if llvm_dep.found()
-      name = static ? 'static' : 'dynamic'
-      executable(
-        'sum-@0@-@1@'.format(name, method),
-        'sum.c',
-        dependencies : [
-          llvm_dep, dep_tinfo,
-          # zlib will be statically linked on windows
-          dependency('zlib', required : host_machine.system() != 'windows'),
-          meson.get_compiler('c').find_library('dl', required : false),
-        ]
-      )
-    endif
-  endforeach
-endforeach
+llvm_dep = dependency(
+  'llvm',
+  modules : ['bitwriter', 'asmprinter', 'executionengine', 'target',
+             'mcjit', 'nativecodegen', 'amdgpu'],
+  required : false,
+  static : static,
+  method : method,
+)
+if llvm_dep.found()
+  executable(
+    'sum',
+    'sum.c',
+    dependencies : [
+      llvm_dep, dep_tinfo,
+      # zlib will be statically linked on windows
+      dependency('zlib', required : host_machine.system() != 'windows'),
+      meson.get_compiler('c').find_library('dl', required : false),
+    ]
+  )
+endif
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/15 llvm/meson_options.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/15 llvm/meson_options.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/15 llvm/meson_options.txt"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/15 llvm/meson_options.txt"	2020-10-26 11:18:42.000000000 +0000
@@ -0,0 +1,10 @@
+option(
+  'method',
+  type : 'combo',
+  choices : ['config-tool', 'cmake']
+)
+option(
+  'link-static',
+  type : 'boolean',
+  value : false,
+)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/15 llvm/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/15 llvm/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/15 llvm/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/15 llvm/test.json"	2020-10-26 11:18:42.000000000 +0000
@@ -0,0 +1,17 @@
+{
+  "matrix": {
+    "options": {
+      "method": [
+        { "val": "config-tool" },
+        { "val": "cmake" }
+      ],
+      "link-static": [
+        { "val": true },
+        { "val": false }
+      ]
+    },
+    "exclude": [
+      { "method": "cmake", "link-static": false }
+    ]
+  }
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/16 sdl2/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/16 sdl2/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/16 sdl2/meson.build"	2020-01-07 21:14:46.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/16 sdl2/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -1,6 +1,8 @@
 project('sdl2 test', 'c')
 
-sdl2_dep = dependency('sdl2', version : '>=2.0.0', required: false)
+method = get_option('method')
+
+sdl2_dep = dependency('sdl2', version : '>=2.0.0', required : false, method : method)
 
 if not sdl2_dep.found()
   error('MESON_SKIP_TEST sdl2 not found.')
@@ -9,19 +11,3 @@
 e = executable('sdl2prog', 'sdl2prog.c', dependencies : sdl2_dep)
 
 test('sdl2test', e)
-
-if sdl2_dep.type_name() == 'extraframeworks'
-  # The SDL OSX framework does not ship with detection executables
-  # so skip the remaining tests.
-  subdir_done()
-endif
-
-# Ensure that we can find it with sdl2-config too, using the legacy method name
-configdep = dependency('sdl2', method : 'sdlconfig')
-
-# And the modern method name
-configdep = dependency('sdl2', method : 'config-tool')
-
-# Check we can apply a version constraint
-dependency('sdl2', version: '>=@0@'.format(sdl2_dep.version()), method: 'pkg-config')
-dependency('sdl2', version: '>=@0@'.format(sdl2_dep.version()), method: 'config-tool')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/16 sdl2/meson_options.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/16 sdl2/meson_options.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/16 sdl2/meson_options.txt"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/16 sdl2/meson_options.txt"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+option(
+    'method',
+    type : 'combo',
+    choices : ['auto', 'pkg-config', 'config-tool', 'sdlconfig', 'extraframework'],
+    value : 'auto',
+)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/16 sdl2/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/16 sdl2/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/16 sdl2/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/16 sdl2/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,13 @@
+{
+  "matrix": {
+    "options": {
+      "method": [
+        { "val": "auto" },
+        { "val": "pkg-config" },
+        { "val": "config-tool" },
+        { "val": "sdlconfig" },
+        { "val": "extraframework" }
+      ]
+    }
+  }
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/17 mpi/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/17 mpi/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/17 mpi/meson.build"	2020-01-07 21:14:56.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/17 mpi/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -1,7 +1,9 @@
 project('mpi', 'c', 'cpp', default_options: ['b_asneeded=false'])
 
+method = get_option('method')
+
 cc = meson.get_compiler('c')
-mpic = dependency('mpi', language : 'c', required : false)
+mpic = dependency('mpi', language : 'c', required : false, method : method)
 if not mpic.found()
   error('MESON_SKIP_TEST: MPI not found, skipping.')
 endif
@@ -14,7 +16,7 @@
 
 # C++ MPI not supported by MS-MPI
 cpp = meson.get_compiler('cpp')
-mpicpp = dependency('mpi', language : 'cpp', required: false)
+mpicpp = dependency('mpi', language : 'cpp', required: false, method : method)
 if not cpp.links('''
 #include <mpi.h>
 #include <stdio.h>
@@ -31,7 +33,7 @@
 
 if add_languages('fortran', required : false)
   fc = meson.get_compiler('fortran')
-  mpif = dependency('mpi', language : 'fortran', required: false)
+  mpif = dependency('mpi', language : 'fortran', required: false, method : method)
   if not fc.links('use mpi; end', dependencies: mpif, name: 'Fortran MPI')
     mpif = disabler()
   endif
@@ -46,5 +48,5 @@
 
 # Check we can apply a version constraint
 if mpic.version() != 'unknown'
-  dependency('mpi', version: '>=@0@'.format(mpic.version()))
+  dependency('mpi', version: '>=@0@'.format(mpic.version()), method : method)
 endif
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/17 mpi/meson_options.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/17 mpi/meson_options.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/17 mpi/meson_options.txt"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/17 mpi/meson_options.txt"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+option(
+    'method',
+    type : 'combo',
+    choices : ['auto', 'pkg-config', 'config-tool', 'system'],
+    value : 'auto',
+)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/17 mpi/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/17 mpi/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/17 mpi/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/17 mpi/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,15 @@
+{
+  "matrix": {
+    "options": {
+      "method": [
+        { "val": "auto" },
+        { "val": "pkg-config" },
+        { "val": "config-tool" },
+        {
+          "val": "system",
+          "compilers": { "c" :"msvc", "cpp": "msvc" }
+        }
+      ]
+    }
+  }
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/1 boost/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/1 boost/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/1 boost/meson.build"	2020-01-23 21:41:11.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/1 boost/meson.build"	2020-10-18 21:29:13.000000000 +0000
@@ -1,69 +1,48 @@
 # this test requires the following on Ubuntu: libboost-{system,python,log,thread,test}-dev
 project('boosttest', 'cpp',
-  default_options : ['cpp_std=c++11'])
+  default_options : ['cpp_std=c++14'])
 
-add_project_arguments(['-DBOOST_LOG_DYN_LINK'],
-  language : 'cpp'
-)
+s = get_option('static')
 
-dep = dependency('boost', required: false)
+dep = dependency('boost', static: s, required: false)
 if not dep.found()
   error('MESON_SKIP_TEST boost not found.')
 endif
 
-compiler = meson.get_compiler('cpp')
-if compiler.has_argument('-permissive')
-  # boost 1.64, the version we test against, doesn't work with -permissive
-  add_project_arguments('-permissive', language: 'cpp')
-endif
-
 # We want to have multiple separate configurations of Boost
 # within one project. They need to be independent of each other.
 # Use one without a library dependency and one with it.
 
-linkdep = dependency('boost', modules : ['thread', 'system', 'test'])
-staticdep = dependency('boost', modules : ['thread', 'system'], static : true)
-testdep = dependency('boost', modules : ['unit_test_framework'])
-nomoddep = dependency('boost')
-extralibdep = dependency('boost', modules : ['thread', 'system', 'log_setup', 'log'])
+linkdep     = dependency('boost', static: s, modules : ['thread', 'system', 'date_time'])
+testdep     = dependency('boost', static: s, modules : ['unit_test_framework'])
+nomoddep    = dependency('boost', static: s)
+extralibdep = dependency('boost', static: s, modules : ['thread', 'system', 'date_time', 'log_setup', 'log', 'filesystem', 'regex'])
+notfound    = dependency('boost', static: s, modules : ['this_should_not_exist_on_any_systen'], required: false)
+
+assert(not notfound.found())
 
 pymod = import('python')
-python2 = pymod.find_installation('python2', required: host_machine.system() == 'linux', disabler: true)
+python2 = pymod.find_installation('python2', required: false                           , disabler: true)
 python3 = pymod.find_installation('python3', required: host_machine.system() == 'linux', disabler: true)
-python2dep = python2.dependency(required: host_machine.system() == 'linux', embed: true, disabler: true)
+python2dep = python2.dependency(required: false                           , embed: true, disabler: true)
 python3dep = python3.dependency(required: host_machine.system() == 'linux', embed: true, disabler: true)
 
 # compile python 2/3 modules only if we found a corresponding python version
-if(python2dep.found() and host_machine.system() == 'linux')
-  if(dep.version().version_compare('>=1.67'))
-    # if we have a new version of boost, we need to construct the module name based
-    # on the installed version of python (and hope that they match the version boost
-    # was compiled against)
-    py2version_string = ''.join(python2dep.version().split('.'))
-    bpython2dep = dependency('boost', modules : ['python' + py2version_string], required: false, disabler: true)
-  else
-    # if we have an older version of boost, we need to use the old module names
-    bpython2dep = dependency('boost', modules : ['python'], required: false, disabler: true)
-  endif
+if(python2dep.found() and host_machine.system() == 'linux' and not s)
+  bpython2dep = dependency('boost', static: s, modules : ['python'], required: false, disabler: true)
 else
   python2dep = disabler()
   bpython2dep = disabler()
 endif
 
-if(python3dep.found() and host_machine.system() == 'linux')
-  if(dep.version().version_compare('>=1.67'))
-    py3version_string = ''.join(python3dep.version().split('.'))
-    bpython3dep = dependency('boost', modules : ['python' + py3version_string], required: false, disabler: true)
-  else
-    bpython3dep = dependency('boost', modules : ['python3'], required: false, disabler: true)
-  endif
+if(python3dep.found() and host_machine.system() == 'linux' and not s)
+  bpython3dep = dependency('boost', static: s, modules : ['python3'])
 else
   python3dep = disabler()
   bpython3dep = disabler()
 endif
 
 linkexe = executable('linkedexe', 'linkexe.cc', dependencies : linkdep)
-staticexe = executable('staticlinkedexe', 'linkexe.cc', dependencies : staticdep)
 unitexe = executable('utf', 'unit_test.cpp', dependencies: testdep)
 nomodexe = executable('nomod', 'nomod.cpp', dependencies : nomoddep)
 extralibexe = executable('extralibexe', 'extralib.cpp', dependencies : extralibdep)
@@ -72,11 +51,14 @@
 python2module = shared_library('python2_module', ['python_module.cpp'], dependencies: [python2dep, bpython2dep], name_prefix: '', cpp_args: ['-DMOD_NAME=python2_module'])
 python3module = shared_library('python3_module', ['python_module.cpp'], dependencies: [python3dep, bpython3dep], name_prefix: '', cpp_args: ['-DMOD_NAME=python3_module'])
 
-test('Boost linktest', linkexe)
-test('Boost statictest', staticexe)
-test('Boost UTF test', unitexe)
+test('Boost linktest', linkexe, timeout: 60)
+test('Boost UTF test', unitexe, timeout: 60)
 test('Boost nomod', nomodexe)
-test('Boost extralib test', extralibexe)
+if host_machine.system() != 'darwin' or s
+  # Segfaults on macOS with dynamic linking since Boost 1.73
+  # https://github.com/mesonbuild/meson/issues/7535
+  test('Boost extralib test', extralibexe)
+endif
 
 # explicitly use the correct python interpreter so that we don't have to provide two different python scripts that have different shebang lines
 python2interpreter = find_program(python2.path(), required: false, disabler: true)
@@ -87,4 +69,4 @@
 subdir('partial_dep')
 
 # check we can apply a version constraint
-dependency('boost', version: '>=@0@'.format(dep.version()))
+dependency('boost', static: s, version: '>=@0@'.format(dep.version()))
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/1 boost/meson_options.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/1 boost/meson_options.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/1 boost/meson_options.txt"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/1 boost/meson_options.txt"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1 @@
+option('static', type: 'boolean', value: false)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/1 boost/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/1 boost/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/1 boost/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/1 boost/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,21 @@
+{
+  "matrix": {
+    "options": {
+      "static": [
+        { "val": "true", "skip_on_env": [ "SKIP_STATIC_BOOST" ] },
+        { "val": "false" }
+      ],
+      "b_vscrt": [
+        { "val": null },
+        { "val": "md",  "compilers": { "cpp": [ "msvc" ] } },
+        { "val": "mdd", "compilers": { "cpp": [ "msvc" ] } },
+        { "val": "mt",  "compilers": { "cpp": [ "msvc" ] } },
+        { "val": "mtd", "compilers": { "cpp": [ "msvc" ] } }
+      ]
+    },
+    "exclude": [
+      { "static": "false", "b_vscrt": "mt"  },
+      { "static": "false", "b_vscrt": "mtd" }
+    ]
+  }
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/1 boost/unit_test.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/1 boost/unit_test.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/1 boost/unit_test.cpp"	2016-01-23 18:52:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/1 boost/unit_test.cpp"	2020-08-15 16:27:05.000000000 +0000
@@ -1,4 +1,3 @@
-#define BOOST_TEST_DYN_LINK
 #define BOOST_TEST_MODULE "MesonTest"
 #define BOOST_TEST_MAIN
 #include 
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/20 cups/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/20 cups/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/20 cups/meson.build"	2020-01-07 21:14:58.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/20 cups/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -12,9 +12,10 @@
 
 # ensure we can find the cups dependency via the legacy and modern config-tool
 # options
-dep = dependency('cups', version : '>=1.4', method : 'cups-config')
-dep = dependency('cups', version : '>=1.4', method : 'config-tool')
+depCC = dependency('cups', version : '>=1.4', method : 'cups-config')
+depCT = dependency('cups', version : '>=1.4', method : 'config-tool')
+depCM = dependency('cups', version : '>=1.4', method : 'cmake')
 
 # check we can apply a version constraint
-dependency('cups', version: '>=@0@'.format(dep.version()), method: 'pkg-config', required: false)
-dependency('cups', version: '>=@0@'.format(dep.version()), method: 'config-tool')
+dependency('cups', version: '>=@0@'.format(depCT.version()), method: 'pkg-config', required: false)
+dependency('cups', version: '>=@0@'.format(depCT.version()), method: 'config-tool')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/21 libwmf/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/21 libwmf/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/21 libwmf/meson.build"	2020-01-07 21:14:56.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/21 libwmf/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -1,7 +1,7 @@
 project('libwmf test', 'c')
 
 wm = find_program('libwmf-config', required : false)
-if not wm.found()
+if not wm.found() or meson.is_cross_build()
   error('MESON_SKIP_TEST: libwmf-config not installed')
 endif
 
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/23 hotdoc/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/23 hotdoc/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/23 hotdoc/installed_files.txt"	2019-07-09 16:34:14.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/23 hotdoc/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,314 +0,0 @@
-usr/share/doc/foobar/html/foo.html
-usr/share/doc/foobar/html/c-index.html
-usr/share/doc/foobar/html/index.html
-usr/share/doc/foobar/html/dumped.trie
-usr/share/doc/foobar/html/assets/theme.json
-usr/share/doc/foobar/html/assets/css/prism-tomorrow.css
-usr/share/doc/foobar/html/assets/css/bootstrap-toc.min.css
-usr/share/doc/foobar/html/assets/css/frontend.css
-usr/share/doc/foobar/html/assets/css/dumped.trie
-usr/share/doc/foobar/html/assets/css/jquery.mCustomScrollbar.min.css
-usr/share/doc/foobar/html/assets/css/custom_bootstrap.css
-usr/share/doc/foobar/html/assets/templates/navbar_links.html
-usr/share/doc/foobar/html/assets/templates/scripts.html
-usr/share/doc/foobar/html/assets/templates/stylesheets.html
-usr/share/doc/foobar/html/assets/templates/multi_return_value.html
-usr/share/doc/foobar/html/assets/templates/parameters.html
-usr/share/doc/foobar/html/assets/templates/base_page.html
-usr/share/doc/foobar/html/assets/templates/footer.html
-usr/share/doc/foobar/html/assets/templates/extra_head.html
-usr/share/doc/foobar/html/assets/templates/parameter_detail.html
-usr/share/doc/foobar/html/assets/templates/navbar_center.html
-usr/share/doc/foobar/html/assets/templates/enum_member.html
-usr/share/doc/foobar/html/assets/templates/member_list.html
-usr/share/doc/foobar/html/assets/templates/return_item.html
-usr/share/doc/foobar/html/assets/templates/subpages.html
-usr/share/doc/foobar/html/assets/templates/dumped.trie
-usr/share/doc/foobar/html/assets/templates/page_content.html
-usr/share/doc/foobar/html/assets/templates/navbar.html
-usr/share/doc/foobar/html/assets/templates/site_navigation.html
-usr/share/doc/foobar/html/assets/templates/field_detail.html
-usr/share/doc/foobar/html/assets/templates/brand-logo.html
-usr/share/doc/foobar/html/assets/js/prism_autoloader_path_override.js
-usr/share/doc/foobar/html/assets/js/jquery.js
-usr/share/doc/foobar/html/assets/js/scrollspy.js
-usr/share/doc/foobar/html/assets/js/isotope.pkgd.min.js
-usr/share/doc/foobar/html/assets/js/utils.js
-usr/share/doc/foobar/html/assets/js/typeahead.jquery.min.js
-usr/share/doc/foobar/html/assets/js/language_switching.js
-usr/share/doc/foobar/html/assets/js/tag_filtering.js
-usr/share/doc/foobar/html/assets/js/prism-autoloader.js
-usr/share/doc/foobar/html/assets/js/navbar_offset_scroller.js
-usr/share/doc/foobar/html/assets/js/lines_around_headings.js
-usr/share/doc/foobar/html/assets/js/trie_index.js
-usr/share/doc/foobar/html/assets/js/search.js
-usr/share/doc/foobar/html/assets/js/trie.js
-usr/share/doc/foobar/html/assets/js/bootstrap.js
-usr/share/doc/foobar/html/assets/js/navigation.js
-usr/share/doc/foobar/html/assets/js/bootstrap-toc.min.js
-usr/share/doc/foobar/html/assets/js/anchor.min.js
-usr/share/doc/foobar/html/assets/js/prism-core.js
-usr/share/doc/foobar/html/assets/js/sitemap.js
-usr/share/doc/foobar/html/assets/js/dumped.trie
-usr/share/doc/foobar/html/assets/js/mustache.min.js
-usr/share/doc/foobar/html/assets/js/compare-versions.js
-usr/share/doc/foobar/html/assets/js/jquery.touchSwipe.min.js
-usr/share/doc/foobar/html/assets/js/jquery.mCustomScrollbar.concat.min.js
-usr/share/doc/foobar/html/assets/js/search/members
-usr/share/doc/foobar/html/assets/js/search/Hello
-usr/share/doc/foobar/html/assets/js/search/hello
-usr/share/doc/foobar/html/assets/js/search/type
-usr/share/doc/foobar/html/assets/js/search/FooIndecision
-usr/share/doc/foobar/html/assets/js/search/fooindecision
-usr/share/doc/foobar/html/assets/js/search/Members
-usr/share/doc/foobar/html/assets/js/search/dumped.trie
-usr/share/doc/foobar/html/assets/js/search/indecision
-usr/share/doc/foobar/html/assets/js/search/hotdoc_fragments/index.html-hello-world.fragment
-usr/share/doc/foobar/html/assets/js/search/hotdoc_fragments/dumped.trie
-usr/share/doc/foobar/html/assets/js/search/hotdoc_fragments/foo.html-FooIndecision.fragment
-usr/share/doc/foobar/html/assets/js/search/Subpages
-usr/share/doc/foobar/html/assets/js/search/foo
-usr/share/doc/foobar/html/assets/js/search/API
-usr/share/doc/foobar/html/assets/js/search/Reference
-usr/share/doc/foobar/html/assets/js/search/api
-usr/share/doc/foobar/html/assets/js/search/reference
-usr/share/doc/foobar/html/assets/js/search/subpages
-usr/share/doc/foobar/html/assets/js/search/hotdoc_fragments/index.html-subpages.fragment
-usr/share/doc/foobar/html/assets/js/search/hotdoc_fragments/c-index.html-subpages.fragment
-usr/share/doc/foobar/html/assets/prism_components/prism-inform7.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-pascal.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-bro.js
-usr/share/doc/foobar/html/assets/prism_components/prism-nim.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-gherkin.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-stylus.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-ocaml.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-powershell.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-smalltalk.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-verilog.js
-usr/share/doc/foobar/html/assets/prism_components/prism-puppet.js
-usr/share/doc/foobar/html/assets/prism_components/prism-aspnet.js
-usr/share/doc/foobar/html/assets/prism_components/prism-parigp.js
-usr/share/doc/foobar/html/assets/prism_components/prism-objectivec.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-processing.js
-usr/share/doc/foobar/html/assets/prism_components/prism-objectivec.js
-usr/share/doc/foobar/html/assets/prism_components/prism-jsx.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-nginx.js
-usr/share/doc/foobar/html/assets/prism_components/prism-powershell.js
-usr/share/doc/foobar/html/assets/prism_components/prism-php.js
-usr/share/doc/foobar/html/assets/prism_components/prism-smarty.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-roboconf.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-batch.js
-usr/share/doc/foobar/html/assets/prism_components/prism-vhdl.js
-usr/share/doc/foobar/html/assets/prism_components/prism-protobuf.js
-usr/share/doc/foobar/html/assets/prism_components/prism-textile.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-crystal.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-scss.js
-usr/share/doc/foobar/html/assets/prism_components/prism-bro.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-smarty.js
-usr/share/doc/foobar/html/assets/prism_components/prism-bison.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-tcl.js
-usr/share/doc/foobar/html/assets/prism_components/prism-pure.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-makefile.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-applescript.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-css-extras.js
-usr/share/doc/foobar/html/assets/prism_components/prism-stylus.js
-usr/share/doc/foobar/html/assets/prism_components/prism-q.js
-usr/share/doc/foobar/html/assets/prism_components/prism-dart.js
-usr/share/doc/foobar/html/assets/prism_components/prism-oz.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-haskell.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-clike.js
-usr/share/doc/foobar/html/assets/prism_components/prism-kotlin.js
-usr/share/doc/foobar/html/assets/prism_components/prism-http.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-bash.js
-usr/share/doc/foobar/html/assets/prism_components/prism-apl.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-docker.js
-usr/share/doc/foobar/html/assets/prism_components/prism-sass.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-basic.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-nasm.js
-usr/share/doc/foobar/html/assets/prism_components/prism-kotlin.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-abap.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-perl.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-rust.js
-usr/share/doc/foobar/html/assets/prism_components/prism-c.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-scala.js
-usr/share/doc/foobar/html/assets/prism_components/prism-glsl.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-lua.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-coffeescript.js
-usr/share/doc/foobar/html/assets/prism_components/prism-jade.js
-usr/share/doc/foobar/html/assets/prism_components/prism-keyman.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-crystal.js
-usr/share/doc/foobar/html/assets/prism_components/prism-rest.js
-usr/share/doc/foobar/html/assets/prism_components/prism-json.js
-usr/share/doc/foobar/html/assets/prism_components/prism-roboconf.js
-usr/share/doc/foobar/html/assets/prism_components/prism-twig.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-dart.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-vim.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-handlebars.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-cpp.js
-usr/share/doc/foobar/html/assets/prism_components/prism-fsharp.js
-usr/share/doc/foobar/html/assets/prism_components/prism-sas.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-brainfuck.js
-usr/share/doc/foobar/html/assets/prism_components/prism-haxe.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-julia.js
-usr/share/doc/foobar/html/assets/prism_components/prism-jade.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-python.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-nim.js
-usr/share/doc/foobar/html/assets/prism_components/prism-typescript.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-csharp.js
-usr/share/doc/foobar/html/assets/prism_components/prism-brainfuck.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-asciidoc.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-groovy.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-applescript.js
-usr/share/doc/foobar/html/assets/prism_components/prism-elixir.js
-usr/share/doc/foobar/html/assets/prism_components/prism-diff.js
-usr/share/doc/foobar/html/assets/prism_components/prism-scheme.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-parser.js
-usr/share/doc/foobar/html/assets/prism_components/prism-qore.js
-usr/share/doc/foobar/html/assets/prism_components/prism-yaml.js
-usr/share/doc/foobar/html/assets/prism_components/prism-j.js
-usr/share/doc/foobar/html/assets/prism_components/prism-mel.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-css-extras.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-erlang.js
-usr/share/doc/foobar/html/assets/prism_components/prism-icon.js
-usr/share/doc/foobar/html/assets/prism_components/prism-actionscript.js
-usr/share/doc/foobar/html/assets/prism_components/prism-cpp.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-makefile.js
-usr/share/doc/foobar/html/assets/prism_components/prism-q.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-nsis.js
-usr/share/doc/foobar/html/assets/prism_components/prism-mizar.js
-usr/share/doc/foobar/html/assets/prism_components/prism-wiki.js
-usr/share/doc/foobar/html/assets/prism_components/prism-csharp.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-julia.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-coffeescript.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-sql.js
-usr/share/doc/foobar/html/assets/prism_components/prism-php-extras.js
-usr/share/doc/foobar/html/assets/prism_components/prism-basic.js
-usr/share/doc/foobar/html/assets/prism_components/prism-swift.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-haxe.js
-usr/share/doc/foobar/html/assets/prism_components/prism-apacheconf.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-javascript.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-markup.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-keyman.js
-usr/share/doc/foobar/html/assets/prism_components/prism-sql.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-php-extras.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-scheme.js
-usr/share/doc/foobar/html/assets/prism_components/prism-python.js
-usr/share/doc/foobar/html/assets/prism_components/prism-autoit.js
-usr/share/doc/foobar/html/assets/prism_components/prism-gherkin.js
-usr/share/doc/foobar/html/assets/prism_components/prism-java.js
-usr/share/doc/foobar/html/assets/prism_components/prism-parigp.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-autohotkey.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-ruby.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-nginx.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-core.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-fortran.js
-usr/share/doc/foobar/html/assets/prism_components/prism-nasm.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-ini.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-protobuf.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-jsx.js
-usr/share/doc/foobar/html/assets/prism_components/prism-markdown.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-nix.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-nsis.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-oz.js
-usr/share/doc/foobar/html/assets/prism_components/prism-less.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-abap.js
-usr/share/doc/foobar/html/assets/prism_components/prism-puppet.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-nix.js
-usr/share/doc/foobar/html/assets/prism_components/prism-pascal.js
-usr/share/doc/foobar/html/assets/prism_components/prism-latex.js
-usr/share/doc/foobar/html/assets/prism_components/prism-verilog.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-aspnet.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-go.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-glsl.js
-usr/share/doc/foobar/html/assets/prism_components/prism-inform7.js
-usr/share/doc/foobar/html/assets/prism_components/prism-yaml.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-matlab.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-lua.js
-usr/share/doc/foobar/html/assets/prism_components/prism-mizar.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-c.js
-usr/share/doc/foobar/html/assets/prism_components/prism-fsharp.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-haml.js
-usr/share/doc/foobar/html/assets/prism_components/prism-rust.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-icon.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-fortran.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-qore.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-batch.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-eiffel.js
-usr/share/doc/foobar/html/assets/prism_components/prism-vim.js
-usr/share/doc/foobar/html/assets/prism_components/prism-j.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-eiffel.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-elixir.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-erlang.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-matlab.js
-usr/share/doc/foobar/html/assets/prism_components/prism-tcl.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-ruby.js
-usr/share/doc/foobar/html/assets/prism_components/prism-d.js
-usr/share/doc/foobar/html/assets/prism_components/prism-swift.js
-usr/share/doc/foobar/html/assets/prism_components/prism-wiki.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-lolcode.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-latex.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-prolog.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-php.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-scss.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-vhdl.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-lolcode.js
-usr/share/doc/foobar/html/assets/prism_components/prism-prolog.js
-usr/share/doc/foobar/html/assets/prism_components/prism-apacheconf.js
-usr/share/doc/foobar/html/assets/prism_components/prism-core.js
-usr/share/doc/foobar/html/assets/prism_components/prism-diff.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-json.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-ini.js
-usr/share/doc/foobar/html/assets/prism_components/dumped.trie
-usr/share/doc/foobar/html/assets/prism_components/prism-r.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-markup.js
-usr/share/doc/foobar/html/assets/prism_components/prism-apl.js
-usr/share/doc/foobar/html/assets/prism_components/prism-markdown.js
-usr/share/doc/foobar/html/assets/prism_components/prism-asciidoc.js
-usr/share/doc/foobar/html/assets/prism_components/prism-ocaml.js
-usr/share/doc/foobar/html/assets/prism_components/prism-javascript.js
-usr/share/doc/foobar/html/assets/prism_components/prism-autohotkey.js
-usr/share/doc/foobar/html/assets/prism_components/prism-less.js
-usr/share/doc/foobar/html/assets/prism_components/prism-pure.js
-usr/share/doc/foobar/html/assets/prism_components/prism-groovy.js
-usr/share/doc/foobar/html/assets/prism_components/prism-bison.js
-usr/share/doc/foobar/html/assets/prism_components/prism-sass.js
-usr/share/doc/foobar/html/assets/prism_components/prism-css.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-haml.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-handlebars.js
-usr/share/doc/foobar/html/assets/prism_components/prism-textile.js
-usr/share/doc/foobar/html/assets/prism_components/prism-parser.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-docker.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-monkey.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-http.js
-usr/share/doc/foobar/html/assets/prism_components/prism-git.js
-usr/share/doc/foobar/html/assets/prism_components/prism-sas.js
-usr/share/doc/foobar/html/assets/prism_components/prism-go.js
-usr/share/doc/foobar/html/assets/prism_components/prism-mel.js
-usr/share/doc/foobar/html/assets/prism_components/prism-rest.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-clike.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-d.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-haskell.js
-usr/share/doc/foobar/html/assets/prism_components/prism-git.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-java.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-rip.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-perl.js
-usr/share/doc/foobar/html/assets/prism_components/prism-typescript.js
-usr/share/doc/foobar/html/assets/prism_components/prism-actionscript.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-autoit.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-rip.js
-usr/share/doc/foobar/html/assets/prism_components/prism-twig.js
-usr/share/doc/foobar/html/assets/prism_components/prism-monkey.js
-usr/share/doc/foobar/html/assets/prism_components/prism-processing.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-scala.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-smalltalk.js
-usr/share/doc/foobar/html/assets/prism_components/prism-bash.min.js
-usr/share/doc/foobar/html/assets/prism_components/prism-r.js
-usr/share/doc/foobar/html/assets/prism_components/prism-css.js
-usr/share/doc/foobar/html/assets/fonts/glyphicons-halflings-regular.woff
-usr/share/doc/foobar/html/assets/fonts/glyphicons-halflings-regular.woff2
-usr/share/doc/foobar/html/assets/fonts/glyphicons-halflings-regular.svg
-usr/share/doc/foobar/html/assets/fonts/glyphicons-halflings-regular.ttf
-usr/share/doc/foobar/html/assets/fonts/glyphicons-halflings-regular.eot
-usr/share/doc/foobar/html/assets/fonts/dumped.trie
-usr/share/doc/foobar/html/assets/images/home.svg
-usr/share/doc/foobar/html/assets/images/dumped.trie
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/23 hotdoc/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/23 hotdoc/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/23 hotdoc/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/23 hotdoc/test.json"	2020-09-10 16:39:24.000000000 +0000
@@ -0,0 +1,8 @@
+{
+  "installed": [
+    {"type": "dir", "file": "usr/share/doc/foobar/html"}
+  ],
+  "tools": {
+    "hotdoc": ">=0.1.0"
+  }
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/25 hdf5/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/25 hdf5/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/25 hdf5/meson.build"	2019-12-29 22:47:27.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/25 hdf5/meson.build"	2020-10-18 21:29:13.000000000 +0000
@@ -2,8 +2,10 @@
 
 # NOTE: all HDF5 languages must have HDF5 C library working.
 
+method = get_option('method')
+
 # --- C tests
-h5c = dependency('hdf5', language : 'c', required : false)
+h5c = dependency('hdf5', language : 'c', required : false, method : method)
 if not h5c.found()
   error('MESON_SKIP_TEST: HDF5 C library not found.')
 endif
@@ -12,16 +14,31 @@
 
 # --- C++ tests
 if add_languages('cpp', required: false)
-  h5cpp = dependency('hdf5', language : 'cpp', required : false, disabler: true)
+  h5cpp = dependency('hdf5', language : 'cpp', required : false, disabler: true, method : method)
   execpp = executable('execpp', 'main.cpp', dependencies : h5cpp)
   test('HDF5 C++', execpp, timeout: 30)
 endif
 
-# --- Fortran tests
-if add_languages('fortran', required: false)
-  h5f = dependency('hdf5', language : 'fortran', required : false, disabler: true)
-  exef = executable('exef', 'main.f90', dependencies : h5f)
-  test('HDF5 Fortran', exef, timeout: 30)
+test_fortran = add_languages('fortran', required: false)
+
+if test_fortran
+  cpp = meson.get_compiler('cpp')
+  fc = meson.get_compiler('fortran')
+
+  if host_machine.system() == 'darwin' and cpp.get_id() == 'clang' and fc.get_id() == 'gcc'
+    # Search paths don't work correctly here and -lgfortran doesn't work
+    test_fortran = false
+  elif host_machine.system() == 'windows' and cpp.get_id() != 'gcc' and fc.get_id() == 'gcc'
+    # mixing gfortran with non-gcc compilers doesn't work on Windows
+    test_fortran = false
+  endif
+
+  # --- Fortran tests
+  if test_fortran
+    h5f = dependency('hdf5', language : 'fortran', required : false, disabler: true, method : method)
+    exef = executable('exef', 'main.f90', dependencies : h5f)
+    test('HDF5 Fortran', exef, timeout: 30)
+  endif
 endif
 
 # Check we can apply a version constraint
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/25 hdf5/meson_options.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/25 hdf5/meson_options.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/25 hdf5/meson_options.txt"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/25 hdf5/meson_options.txt"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,6 @@
+option(
+    'method',
+    type : 'combo',
+    choices : ['pkg-config', 'config-tool'],
+    value : 'pkg-config'
+)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/25 hdf5/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/25 hdf5/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/25 hdf5/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/25 hdf5/test.json"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,10 @@
+{
+  "matrix": {
+    "options": {
+      "method": [
+        { "val": "pkg-config" },
+        { "val": "config-tool" }
+      ]
+    }
+  }
+}
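Note: the three hdf5 hunks above follow one pattern: a combo option declared in meson_options.txt, forwarded to every dependency() lookup in meson.build, and enumerated in the test.json matrix so the test case is configured once per detection method. A minimal sketch of the same pattern, with illustrative names not taken from this diff:

    # meson_options.txt (sketch)
    option('method', type : 'combo',
           choices : ['pkg-config', 'config-tool'], value : 'pkg-config')

    # meson.build (sketch)
    method = get_option('method')
    dep = dependency('hdf5', language : 'c', required : false, method : method)

Each entry in the test.json "matrix" then corresponds to one configuration of the test case with -Dmethod=<value> on the meson command line.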
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/2 gtest/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/2 gtest/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/2 gtest/meson.build"	2020-01-07 21:13:32.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/2 gtest/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -8,7 +8,7 @@
 gtest_nomain = dependency('gtest', main : false, method : 'system')
 
 e = executable('testprog', 'test.cc', dependencies : gtest)
-test('gtest test', e)
+test('gtest test', e, protocol : 'gtest')
 
 e = executable('testprog_nomain', 'test_nomain.cc', dependencies : gtest_nomain)
-test('gtest nomain test', e)
+test('gtest nomain test', e, protocol : 'gtest')
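Note: the protocol keyword added here tells Meson's test harness to parse GoogleTest's structured output rather than judge the test by its exit status alone. A hedged sketch of how a project might declare such a test (target and file names are illustrative):

    gtest_dep = dependency('gtest', main : true)
    exe = executable('unit_tests', 'test.cc', dependencies : gtest_dep)
    test('unit tests', exe, protocol : 'gtest')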
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/31 curses/main.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/31 curses/main.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/31 curses/main.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/31 curses/main.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+#include "curses.h"
+
+int main(void) {
+initscr();
+endwin();
+return 0;
+}
\ No newline at end of file
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/31 curses/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/31 curses/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/31 curses/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/31 curses/meson.build"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,13 @@
+project('curses', 'c')
+
+curses = dependency('curses', required: false, method : get_option('method'), version : '>= 0')
+if not curses.found()
+  error('MESON_SKIP_TEST: Curses library not found')
+endif
+
+exec = executable('basic', 'main.c', dependencies: curses)
+# the test is not run because graphics tests generally fail on CI
+
+# this should fail
+not_found = dependency('curses', required: false, method : get_option('method'), version : '> 1000000')
+assert(not_found.found() == false)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/31 curses/meson_options.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/31 curses/meson_options.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/31 curses/meson_options.txt"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/31 curses/meson_options.txt"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,6 @@
+option(
+    'method',
+    type : 'combo',
+    choices : ['pkg-config', 'config-tool', 'system'],
+    value : 'pkg-config',
+)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/31 curses/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/31 curses/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/31 curses/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/31 curses/test.json"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1,11 @@
+{
+  "matrix": {
+    "options": {
+      "method": [
+        { "val": "pkg-config" },
+        { "val": "config-tool" },
+        { "val": "system" }
+      ]
+    }
+  }
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/32 boost root/boost/include/boost/version.hpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/32 boost root/boost/include/boost/version.hpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/32 boost root/boost/include/boost/version.hpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/32 boost root/boost/include/boost/version.hpp"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,3 @@
+#define BOOST_VERSION 100
+
+#error This is not a real version of boost
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/32 boost root/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/32 boost root/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/32 boost root/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/32 boost root/meson.build"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,6 @@
+project('boosttest', 'cpp')
+
+dep = dependency('boost', modules : 'regex', required: false)
+
+assert(dep.found(), 'expected to find a fake version of boost')
+assert(dep.version() == '0.1.0', 'expected to find version 0.1.0')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/32 boost root/nativefile.ini.in" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/32 boost root/nativefile.ini.in"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/32 boost root/nativefile.ini.in"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/32 boost root/nativefile.ini.in"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,2 @@
+[properties]
+boost_root = '@MESON_TEST_ROOT@/boost'
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/33 boost split root/boost/extra-dir/include/boost/version.hpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/33 boost split root/boost/extra-dir/include/boost/version.hpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/33 boost split root/boost/extra-dir/include/boost/version.hpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/33 boost split root/boost/extra-dir/include/boost/version.hpp"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,3 @@
+#define BOOST_VERSION 200
+
+#error This is not a real version of boost
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/33 boost split root/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/33 boost split root/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/33 boost split root/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/33 boost split root/meson.build"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,6 @@
+project('boosttest', 'cpp')
+
+dep = dependency('boost', modules : 'regex', required: false)
+
+assert(dep.found(), 'expected to find a fake version of boost')
+assert(dep.version() == '0.2.0', 'expected to find version 0.2.0')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/33 boost split root/nativefile.ini.in" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/33 boost split root/nativefile.ini.in"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/33 boost split root/nativefile.ini.in"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/33 boost split root/nativefile.ini.in"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,3 @@
+[properties]
+boost_includedir = '@MESON_TEST_ROOT@/boost/extra-dir/include'
+boost_librarydir = '@MESON_TEST_ROOT@/boost/lib'
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/4 qt/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/4 qt/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/4 qt/meson.build"	2020-01-23 12:51:19.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/4 qt/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -42,6 +42,7 @@
   qtdep = dependency(qt, modules : qt_modules, main : true, private_headers: true, required : required, method : get_option('method'))
   if qtdep.found()
     qtmodule = import(qt)
+    assert(qtmodule.has_tools())
 
     # The following has two resource files because having two in one target
     # requires you to do it properly or you get linker symbol clashes.
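Note: has_tools() on the qt module reports whether the helper programs the module relies on (moc, uic, rcc and friends) were found, so a build can skip Qt-specific targets instead of failing later. A minimal sketch, assuming a Qt 5 project (names are illustrative, not taken from the diff):

    qt5_mod = import('qt5')
    qt5_dep = dependency('qt5', modules : ['Core'], required : false)
    if qt5_dep.found() and qt5_mod.has_tools()
      message('Qt 5 and its code generators are available')
    endif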
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/6 gettext/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/6 gettext/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/6 gettext/installed_files.txt"	2019-05-02 18:59:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/6 gettext/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,10 +0,0 @@
-usr/bin/intlprog?exe
-usr/share/locale/de/LC_MESSAGES/intltest.mo
-usr/share/locale/fi/LC_MESSAGES/intltest.mo
-usr/share/locale/ru/LC_MESSAGES/intltest.mo
-usr/share/applications/something.desktop
-usr/share/applications/test.desktop
-usr/share/applications/test.plugin
-usr/share/applications/test2.desktop
-usr/share/applications/test3.desktop
-usr/share/applications/test4.desktop
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/6 gettext/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/6 gettext/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/6 gettext/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/6 gettext/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "installed": [
+    {"type": "exe", "file": "usr/bin/intlprog"},
+    {"type": "file", "file": "usr/share/locale/de/LC_MESSAGES/intltest.mo"},
+    {"type": "file", "file": "usr/share/locale/fi/LC_MESSAGES/intltest.mo"},
+    {"type": "file", "file": "usr/share/locale/ru/LC_MESSAGES/intltest.mo"},
+    {"type": "file", "file": "usr/share/applications/something.desktop"},
+    {"type": "file", "file": "usr/share/applications/test.desktop"},
+    {"type": "file", "file": "usr/share/applications/test.plugin"},
+    {"type": "file", "file": "usr/share/applications/test2.desktop"},
+    {"type": "file", "file": "usr/share/applications/test3.desktop"},
+    {"type": "file", "file": "usr/share/applications/test4.desktop"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/7 gnome/gir/dep1/dep3/dep3.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/7 gnome/gir/dep1/dep3/dep3.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/7 gnome/gir/dep1/dep3/dep3.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/7 gnome/gir/dep1/dep3/dep3.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,124 @@
+#include "dep3.h"
+
+struct _MesonDep3
+{
+  GObject parent_instance;
+
+  gchar *msg;
+};
+
+G_DEFINE_TYPE (MesonDep3, meson_dep3, G_TYPE_OBJECT)
+
+enum {
+  PROP_0,
+  PROP_MSG,
+  LAST_PROP
+};
+
+static GParamSpec *gParamSpecs [LAST_PROP];
+
+/**
+ * meson_dep3_new:
+ * @msg: The message to set.
+ *
+ * Allocates a new #MesonDep3.
+ *
+ * Returns: (transfer full): a #MesonDep3.
+ */
+MesonDep3 *
+meson_dep3_new (const gchar *msg)
+{
+  g_return_val_if_fail (msg != NULL, NULL);
+
+  return g_object_new (MESON_TYPE_DEP3,
+                       "message", msg,
+                       NULL);
+}
+
+static void
+meson_dep3_finalize (GObject *object)
+{
+  MesonDep3 *self = (MesonDep3 *)object;
+
+  g_clear_pointer (&self->msg, g_free);
+
+  G_OBJECT_CLASS (meson_dep3_parent_class)->finalize (object);
+}
+
+static void
+meson_dep3_get_property (GObject    *object,
+                           guint       prop_id,
+                           GValue     *value,
+                           GParamSpec *pspec)
+{
+  MesonDep3 *self = MESON_DEP3 (object);
+
+  switch (prop_id)
+    {
+    case PROP_MSG:
+      g_value_set_string (value, self->msg);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+    }
+}
+
+static void
+meson_dep3_set_property (GObject      *object,
+                           guint         prop_id,
+                           const GValue *value,
+                           GParamSpec   *pspec)
+{
+  MesonDep3 *self = MESON_DEP3 (object);
+
+  switch (prop_id)
+    {
+    case PROP_MSG:
+      self->msg = g_value_dup_string (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+    }
+}
+
+static void
+meson_dep3_class_init (MesonDep3Class *klass)
+{
+  GObjectClass *object_class = G_OBJECT_CLASS (klass);
+
+  object_class->finalize = meson_dep3_finalize;
+  object_class->get_property = meson_dep3_get_property;
+  object_class->set_property = meson_dep3_set_property;
+
+  gParamSpecs [PROP_MSG] =
+    g_param_spec_string ("message",
+                         "Message",
+                         "The message to print.",
+                         NULL,
+                         (G_PARAM_READWRITE |
+                          G_PARAM_CONSTRUCT_ONLY |
+                          G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_properties (object_class, LAST_PROP, gParamSpecs);
+}
+
+static void
+meson_dep3_init (MesonDep3 *self)
+{
+}
+
+/**
+ * meson_dep3_return_message:
+ * @self: a #MesonDep3.
+ *
+ * Returns the message.
+ *
+ * Returns: (transfer none): a const gchar*
+ */
+const gchar*
+meson_dep3_return_message (MesonDep3 *self)
+{
+  g_return_val_if_fail (MESON_IS_DEP3 (self), NULL);
+
+  return (const gchar*) self->msg;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/7 gnome/gir/dep1/dep3/dep3.h" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/7 gnome/gir/dep1/dep3/dep3.h"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/7 gnome/gir/dep1/dep3/dep3.h"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/7 gnome/gir/dep1/dep3/dep3.h"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,21 @@
+#ifndef MESON_DEP3_H
+#define MESON_DEP3_H
+
+#if !defined (MESON_TEST)
+#error "MESON_TEST not defined."
+#endif
+
+#include <glib-object.h>
+
+G_BEGIN_DECLS
+
+#define MESON_TYPE_DEP3 (meson_dep3_get_type())
+
+G_DECLARE_FINAL_TYPE (MesonDep3, meson_dep3, MESON, DEP3, GObject)
+
+MesonDep3   *meson_dep3_new            (const gchar *msg);
+const gchar *meson_dep3_return_message (MesonDep3 *self);
+
+G_END_DECLS
+
+#endif /* MESON_DEP3_H */
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/7 gnome/gir/dep1/dep3/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/7 gnome/gir/dep1/dep3/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/7 gnome/gir/dep1/dep3/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/7 gnome/gir/dep1/dep3/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,22 @@
+dep3sources = ['dep3.c', 'dep3.h']
+
+dep3lib = shared_library(
+  'dep3lib',
+  sources : dep3sources,
+  dependencies : gobj,
+  install : true
+)
+
+dep3gir = gnome.generate_gir(
+  dep3lib,
+  sources : dep3sources,
+  nsversion : '1.0',
+  namespace : 'MesonDep3',
+  symbol_prefix : 'meson',
+  identifier_prefix : 'Meson',
+  includes : ['GObject-2.0'],
+  install : true
+)
+
+dep3_dep = declare_dependency(link_with : dep3lib,
+  sources : [dep3gir])
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/7 gnome/gir/dep1/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/7 gnome/gir/dep1/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/7 gnome/gir/dep1/meson.build"	2018-08-25 08:05:43.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/7 gnome/gir/dep1/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -1,4 +1,5 @@
 subdir('dep2')
+subdir('dep3')
 
 dep1sources = ['dep1.c', 'dep1.h']
 
@@ -20,11 +21,11 @@
   symbol_prefix : 'meson',
   identifier_prefix : 'Meson',
   header: 'dep1.h',
-  includes : ['GObject-2.0', 'MesonDep2-1.0'],
+  includes : ['GObject-2.0', 'MesonDep2-1.0', dep3gir[0]],
   dependencies : [dep2_dep],
   install : true
 )
 
 dep1_dep = declare_dependency(link_with : dep1lib,
-  dependencies : [dep2_dep],
+  dependencies : [dep2_dep, dep3_dep],
   sources : [dep1gir])
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/7 gnome/gir/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/7 gnome/gir/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/7 gnome/gir/meson.build"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/7 gnome/gir/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -45,7 +45,7 @@
 )
 
 test('gobject introspection/c', girexe)
-gir_paths = ':'.join([girlib.outdir(), dep1lib.outdir(), dep2lib.outdir()])
+gir_paths = ':'.join([girlib.outdir(), dep1lib.outdir(), dep2lib.outdir(), dep3lib.outdir()])
 envdata = environment()
 envdata.append('GI_TYPELIB_PATH', gir_paths, separator : ':')
 envdata.append('LD_LIBRARY_PATH', gir_paths)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/7 gnome/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/7 gnome/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/7 gnome/installed_files.txt"	2018-10-31 09:31:57.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/7 gnome/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,24 +0,0 @@
-usr/include/enums.h
-usr/include/enums2.h
-usr/include/enums3.h
-usr/include/enums5.h
-usr/include/marshaller.h
-usr/lib/?libgir_lib.so
-?cygwin:usr/lib/libgir_lib.dll.a
-usr/lib/?libgir_lib2.so
-?cygwin:usr/lib/libgir_lib2.dll.a
-usr/lib/?libdep1lib.so
-?cygwin:usr/lib/libdep1lib.dll.a
-usr/lib/?libdep2lib.so
-?cygwin:usr/lib/libdep2lib.dll.a
-usr/lib/girepository-1.0/Meson-1.0.typelib
-usr/lib/girepository-1.0/MesonDep1-1.0.typelib
-usr/lib/girepository-1.0/MesonDep2-1.0.typelib
-usr/share/gir-1.0/Meson-1.0.gir
-usr/share/gir-1.0/MesonDep1-1.0.gir
-usr/share/gir-1.0/MesonDep2-1.0.gir
-usr/share/glib-2.0/schemas/com.github.meson.gschema.xml
-usr/share/simple-resources.gresource
-usr/include/enums6.h
-usr/include/simple-resources.h
-usr/include/generated-gdbus.h
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/7 gnome/mkenums/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/7 gnome/mkenums/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/7 gnome/mkenums/meson.build"	2018-08-25 08:05:43.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/7 gnome/mkenums/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -126,6 +126,14 @@
                               install_header : true,
                               decorator : 'MESON_EXPORT',
                               header_prefix : '#include "meson-decls.h"')
+
+conf = configuration_data()
+conf.set('ENUM_FILE', 'enums5.h')
+main = configure_file(
+  input : 'main.c',
+  output : 'main5.c',
+  configuration : conf)
+
 enumexe5 = executable('enumprog5', main, enums5, dependencies : gobj)
 
 # Generate template then use as input to mkenums
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/7 gnome/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/7 gnome/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/7 gnome/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/7 gnome/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,32 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/include/enums.h"},
+    {"type": "file", "file": "usr/include/enums2.h"},
+    {"type": "file", "file": "usr/include/enums3.h"},
+    {"type": "file", "file": "usr/include/enums5.h"},
+    {"type": "file", "file": "usr/include/marshaller.h"},
+    {"type": "expr", "file": "usr/lib/?libgir_lib.so"},
+    {"type": "file", "platform": "cygwin", "file": "usr/lib/libgir_lib.dll.a"},
+    {"type": "expr", "file": "usr/lib/?libgir_lib2.so"},
+    {"type": "file", "platform": "cygwin", "file": "usr/lib/libgir_lib2.dll.a"},
+    {"type": "expr", "file": "usr/lib/?libdep1lib.so"},
+    {"type": "file", "platform": "cygwin", "file": "usr/lib/libdep1lib.dll.a"},
+    {"type": "expr", "file": "usr/lib/?libdep2lib.so"},
+    {"type": "file", "platform": "cygwin", "file": "usr/lib/libdep2lib.dll.a"},
+    {"type": "expr", "file": "usr/lib/?libdep3lib.so"},
+    {"type": "file", "platform": "cygwin", "file": "usr/lib/libdep3lib.dll.a"},
+    {"type": "file", "file": "usr/lib/girepository-1.0/Meson-1.0.typelib"},
+    {"type": "file", "file": "usr/lib/girepository-1.0/MesonDep1-1.0.typelib"},
+    {"type": "file", "file": "usr/lib/girepository-1.0/MesonDep2-1.0.typelib"},
+    {"type": "file", "file": "usr/lib/girepository-1.0/MesonDep3-1.0.typelib"},
+    {"type": "file", "file": "usr/share/gir-1.0/Meson-1.0.gir"},
+    {"type": "file", "file": "usr/share/gir-1.0/MesonDep1-1.0.gir"},
+    {"type": "file", "file": "usr/share/gir-1.0/MesonDep2-1.0.gir"},
+    {"type": "file", "file": "usr/share/gir-1.0/MesonDep3-1.0.gir"},
+    {"type": "file", "file": "usr/share/glib-2.0/schemas/com.github.meson.gschema.xml"},
+    {"type": "file", "file": "usr/share/simple-resources.gresource"},
+    {"type": "file", "file": "usr/include/enums6.h"},
+    {"type": "file", "file": "usr/include/simple-resources.h"},
+    {"type": "file", "file": "usr/include/generated-gdbus.h"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/8 flex/lexer.l" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/8 flex/lexer.l"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/8 flex/lexer.l"	2016-01-23 18:52:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/8 flex/lexer.l"	2021-01-09 10:14:21.000000000 +0000
@@ -1,6 +1,9 @@
 %{
 #include <stdio.h>
 #include "parser.tab.h"
+
+extern int yylex(void);
+extern int yyerror(); 
 %}
 
 %% 
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/8 flex/parser.y" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/8 flex/parser.y"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/frameworks/8 flex/parser.y"	2016-01-23 18:52:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/frameworks/8 flex/parser.y"	2021-01-09 10:14:21.000000000 +0000
@@ -1,3 +1,8 @@
+%{
+extern int yylex(void);
+extern int yyerror();
+%}
+
 %token BOOLEAN
 
 %%
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/java/1 basic/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/java/1 basic/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/java/1 basic/installed_files.txt"	2017-02-06 21:49:56.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/java/1 basic/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1 +0,0 @@
-usr/bin/myprog.jar
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/java/1 basic/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/java/1 basic/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/java/1 basic/meson.build"	2020-01-07 21:11:01.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/java/1 basic/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -5,3 +5,7 @@
   install : true,
   install_dir : get_option('bindir'))
 test('mytest', javaprog)
+
+jc = meson.get_compiler('java')
+message(jc.get_id())
+message(jc.get_linker_id())
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/java/1 basic/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/java/1 basic/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/java/1 basic/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/java/1 basic/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,5 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/bin/myprog.jar"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/java/3 args/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/java/3 args/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/java/3 args/meson.build"	2020-01-07 21:11:01.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/java/3 args/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -1,9 +1,9 @@
 project('simplejava', 'java')
 
-add_project_arguments('-target', '1.6', language : 'java')
+add_project_arguments('-target', '1.7', language : 'java')
 
 javaprog = jar('myprog', 'com/mesonbuild/Simple.java',
   main_class : 'com.mesonbuild.Simple',
-  java_args : ['-source', '1.6'])
+  java_args : ['-source', '1.7'])
 test('mytest', javaprog)
 
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/kconfig/1 basic/.config" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/kconfig/1 basic/.config"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/kconfig/1 basic/.config"	2019-04-17 08:08:43.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/kconfig/1 basic/.config"	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-CONFIG_VAL1=y
-# CONFIG_VAL2 is not set
-CONFIG_VAL_VAL=4
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/kconfig/1 basic/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/kconfig/1 basic/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/kconfig/1 basic/meson.build"	2020-01-07 21:10:22.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/kconfig/1 basic/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,16 +0,0 @@
-project('kconfig basic test')
-
-k = import('unstable-kconfig')
-conf = k.load('.config')
-
-if not conf.has_key('CONFIG_VAL1')
-  error('Expected CONFIG_VAL1 to be set, but it wasn\'t')
-endif
-
-if conf.has_key('CONFIG_VAL2')
-  error('Expected CONFIG_VAL2 not be set, but it was')
-endif
-
-if conf.get('CONFIG_VAL_VAL').to_int() != 4
-  error('Expected CONFIG_VAL_VAL to be 4')
-endif
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/kconfig/2 subdir/.config" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/kconfig/2 subdir/.config"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/kconfig/2 subdir/.config"	2019-04-17 08:08:43.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/kconfig/2 subdir/.config"	1970-01-01 00:00:00.000000000 +0000
@@ -1,2 +0,0 @@
-CONFIG_IS_SET=y
-# CONFIG_NOT_IS_SET is not set
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/kconfig/2 subdir/dir/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/kconfig/2 subdir/dir/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/kconfig/2 subdir/dir/meson.build"	2019-04-17 08:08:43.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/kconfig/2 subdir/dir/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,13 +0,0 @@
-
-k = import('unstable-kconfig')
-
-conf = k.load(meson.source_root() / '.config')
-
-if not conf.has_key('CONFIG_IS_SET')
-  error('Expected CONFIG_IS_SET to be set, but it wasn\'t')
-endif
-
-if conf.has_key('CONFIG_NOT_IS_SET')
-  error('Expected CONFIG_NOT_IS_SET not be set, but it was')
-endif
-
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/kconfig/2 subdir/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/kconfig/2 subdir/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/kconfig/2 subdir/meson.build"	2020-01-07 21:10:22.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/kconfig/2 subdir/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-project('kconfig subdir test')
-
-# Test into sub directory
-subdir('dir')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/kconfig/3 load_config files/dir/config" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/kconfig/3 load_config files/dir/config"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/kconfig/3 load_config files/dir/config"	2019-04-17 08:08:43.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/kconfig/3 load_config files/dir/config"	1970-01-01 00:00:00.000000000 +0000
@@ -1,2 +0,0 @@
-CONFIG_IS_SET=y
-# CONFIG_NOT_IS_SET is not set
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/kconfig/3 load_config files/dir/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/kconfig/3 load_config files/dir/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/kconfig/3 load_config files/dir/meson.build"	2019-04-17 08:08:43.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/kconfig/3 load_config files/dir/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,13 +0,0 @@
-
-k = import('unstable-kconfig')
-
-conf = k.load(files('config'))
-
-if not conf.has_key('CONFIG_IS_SET')
-  error('Expected CONFIG_IS_SET to be set, but it wasn\'t')
-endif
-
-if conf.has_key('CONFIG_NOT_IS_SET')
-  error('Expected CONFIG_NOT_IS_SET not be set, but it was')
-endif
-
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/kconfig/3 load_config files/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/kconfig/3 load_config files/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/kconfig/3 load_config files/meson.build"	2020-01-07 21:10:22.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/kconfig/3 load_config files/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-project('kconfig subdir test')
-
-# Test into sub directory
-subdir('dir')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/kconfig/4 load_config builddir/config" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/kconfig/4 load_config builddir/config"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/kconfig/4 load_config builddir/config"	2019-04-17 08:08:43.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/kconfig/4 load_config builddir/config"	1970-01-01 00:00:00.000000000 +0000
@@ -1,2 +0,0 @@
-CONFIG_IS_SET=y
-# CONFIG_NOT_IS_SET is not set
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/kconfig/4 load_config builddir/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/kconfig/4 load_config builddir/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/kconfig/4 load_config builddir/meson.build"	2020-01-07 21:10:22.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/kconfig/4 load_config builddir/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,14 +0,0 @@
-project('kconfig builddir test')
-
-k = import('unstable-kconfig')
-
-out_conf = configure_file(input: 'config', output: 'out-config', copy: true)
-conf = k.load(out_conf)
-
-if not conf.has_key('CONFIG_IS_SET')
-  error('Expected CONFIG_IS_SET to be set, but it wasn\'t')
-endif
-
-if conf.has_key('CONFIG_NOT_IS_SET')
-  error('Expected CONFIG_NOT_IS_SET not be set, but it was')
-endif
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/keyval/1 basic/.config" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/keyval/1 basic/.config"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/keyval/1 basic/.config"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/keyval/1 basic/.config"	2020-06-29 17:00:07.000000000 +0000
@@ -0,0 +1,3 @@
+CONFIG_VAL1=y
+# CONFIG_VAL2 is not set
+CONFIG_VAL_VAL=4
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/keyval/1 basic/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/keyval/1 basic/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/keyval/1 basic/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/keyval/1 basic/meson.build"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,18 @@
+project('keyval basic test')
+
+k = import('keyval')
+conf = k.load('.config')
+
+if not conf.has_key('CONFIG_VAL1')
+  error('Expected CONFIG_VAL1 to be set, but it wasn\'t')
+endif
+
+if conf.has_key('CONFIG_VAL2')
+  error('Expected CONFIG_VAL2 not be set, but it was')
+endif
+
+if conf.get('CONFIG_VAL_VAL').to_int() != 4
+  error('Expected CONFIG_VAL_VAL to be 4')
+endif
+
+k = import('unstable-keyval')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/keyval/1 basic/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/keyval/1 basic/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/keyval/1 basic/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/keyval/1 basic/test.json"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "WARNING: Module unstable-keyval is now stable, please use the keyval module instead."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/keyval/2 subdir/.config" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/keyval/2 subdir/.config"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/keyval/2 subdir/.config"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/keyval/2 subdir/.config"	2020-06-29 17:00:07.000000000 +0000
@@ -0,0 +1,2 @@
+CONFIG_IS_SET=y
+# CONFIG_NOT_IS_SET is not set
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/keyval/2 subdir/dir/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/keyval/2 subdir/dir/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/keyval/2 subdir/dir/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/keyval/2 subdir/dir/meson.build"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,13 @@
+
+k = import('keyval')
+
+conf = k.load(meson.source_root() / '.config')
+
+if not conf.has_key('CONFIG_IS_SET')
+  error('Expected CONFIG_IS_SET to be set, but it wasn\'t')
+endif
+
+if conf.has_key('CONFIG_NOT_IS_SET')
+  error('Expected CONFIG_NOT_IS_SET not be set, but it was')
+endif
+
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/keyval/2 subdir/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/keyval/2 subdir/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/keyval/2 subdir/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/keyval/2 subdir/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+project('keyval subdir test')
+
+# Test into sub directory
+subdir('dir')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/keyval/3 load_config files/dir/config" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/keyval/3 load_config files/dir/config"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/keyval/3 load_config files/dir/config"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/keyval/3 load_config files/dir/config"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,2 @@
+CONFIG_IS_SET=y
+# CONFIG_NOT_IS_SET is not set
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/keyval/3 load_config files/dir/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/keyval/3 load_config files/dir/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/keyval/3 load_config files/dir/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/keyval/3 load_config files/dir/meson.build"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,13 @@
+
+k = import('keyval')
+
+conf = k.load(files('config'))
+
+if not conf.has_key('CONFIG_IS_SET')
+  error('Expected CONFIG_IS_SET to be set, but it wasn\'t')
+endif
+
+if conf.has_key('CONFIG_NOT_IS_SET')
+  error('Expected CONFIG_NOT_IS_SET not to be set, but it was')
+endif
+
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/keyval/3 load_config files/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/keyval/3 load_config files/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/keyval/3 load_config files/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/keyval/3 load_config files/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+project('keyval subdir test')
+
+# Test in a subdirectory
+subdir('dir')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/keyval/4 load_config builddir/config" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/keyval/4 load_config builddir/config"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/keyval/4 load_config builddir/config"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/keyval/4 load_config builddir/config"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,2 @@
+CONFIG_IS_SET=y
+# CONFIG_NOT_IS_SET is not set
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/keyval/4 load_config builddir/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/keyval/4 load_config builddir/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/keyval/4 load_config builddir/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/keyval/4 load_config builddir/meson.build"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,14 @@
+project('keyval builddir test')
+
+k = import('keyval')
+
+out_conf = configure_file(input: 'config', output: 'out-config', copy: true)
+conf = k.load(out_conf)
+
+if not conf.has_key('CONFIG_IS_SET')
+  error('Expected CONFIG_IS_SET to be set, but it wasn\'t')
+endif
+
+if conf.has_key('CONFIG_NOT_IS_SET')
+  error('Expected CONFIG_NOT_IS_SET not to be set, but it was')
+endif
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/13 cmake dependency/cmake/FindSomethingLikeZLIB.cmake" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/13 cmake dependency/cmake/FindSomethingLikeZLIB.cmake"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/13 cmake dependency/cmake/FindSomethingLikeZLIB.cmake"	2020-01-23 21:41:11.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/13 cmake dependency/cmake/FindSomethingLikeZLIB.cmake"	2020-08-15 16:27:05.000000000 +0000
@@ -4,6 +4,9 @@
 include(CheckCXXSourceRuns)
 include(CheckCSourceRuns)
 
+# Do something stupid (see https://github.com/mesonbuild/meson/issues/7501)
+set("")
+
 check_cxx_source_runs(
 "
 #include <zlib.h>
@@ -38,6 +41,10 @@
   message(FATAL_ERROR "Running C source code failed")
 endif()
 
+if(NOT SomethingLikeZLIB_FIND_COMPONENTS STREQUAL "required_comp")
+  message(FATAL_ERROR "Component 'required_comp' was not specified")
+endif()
+
 find_dependency(Threads)
 
 if(ZLIB_FOUND OR ZLIB_Found)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/13 cmake dependency/cmake_fake1/cmMesonTestF1Config.cmake" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/13 cmake dependency/cmake_fake1/cmMesonTestF1Config.cmake"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/13 cmake dependency/cmake_fake1/cmMesonTestF1Config.cmake"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/13 cmake dependency/cmake_fake1/cmMesonTestF1Config.cmake"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,9 @@
+find_package(ZLIB)
+
+if(ZLIB_FOUND OR ZLIB_Found)
+  set(cmMesonTestF1_FOUND        ON)
+  set(cmMesonTestF1_LIBRARIES    ${ZLIB_LIBRARY})
+  set(cmMesonTestF1_INCLUDE_DIRS ${ZLIB_INCLUDE_DIR})
+else()
+  set(cmMesonTestF1_FOUND       OFF)
+endif()
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/13 cmake dependency/cmake_fake2/cmMesonTestF2Config.cmake" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/13 cmake dependency/cmake_fake2/cmMesonTestF2Config.cmake"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/13 cmake dependency/cmake_fake2/cmMesonTestF2Config.cmake"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/13 cmake dependency/cmake_fake2/cmMesonTestF2Config.cmake"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,9 @@
+find_package(ZLIB)
+
+if(ZLIB_FOUND OR ZLIB_Found)
+  set(cmMesonTestF2_FOUND        ON)
+  set(cmMesonTestF2_LIBRARIES    ${ZLIB_LIBRARY})
+  set(cmMesonTestF2_INCLUDE_DIRS ${ZLIB_INCLUDE_DIR})
+else()
+  set(cmMesonTestF2_FOUND       OFF)
+endif()
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/13 cmake dependency/cmVers.sh" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/13 cmake dependency/cmVers.sh"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/13 cmake dependency/cmVers.sh"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/13 cmake dependency/cmVers.sh"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+VERS=$(cmake --version | grep "cmake version")
+VERS=${VERS//cmake version/}
+
+echo -n $VERS
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/13 cmake dependency/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/13 cmake dependency/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/13 cmake dependency/meson.build"	2020-01-23 21:41:11.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/13 cmake dependency/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -6,6 +6,9 @@
   error('MESON_SKIP_TEST cmake binary not available.')
 endif
 
+# CMake version
+cm_vers = run_command(find_program('./cmVers.sh')).stdout().strip()
+
 # Zlib is probably on all dev machines.
 
 dep = dependency('ZLIB', version : '>=1.2', method : 'cmake')
@@ -41,21 +44,25 @@
 # Try to find cmMesonTestDep in a custom prefix
 # setup_env.json is used by run_project_tests.py:_run_test to point to ./cmake_pref_env/
 depPrefEnv = dependency('cmMesonTestDep', required : true, method : 'cmake')
+depPrefEnv1 = dependency('cmMesonTestF1', required : true, method : 'cmake')
+depPrefEnv2 = dependency('cmMesonTestF2', required : true, method : 'cmake')
 
 # Try to find a dependency with a custom CMake module
 
-depm1 = dependency('SomethingLikeZLIB', required : true, method : 'cmake', cmake_module_path : 'cmake')
-depm2 = dependency('SomethingLikeZLIB', required : true, method : 'cmake', cmake_module_path : ['cmake'])
-depm3 = dependency('SomethingLikeZLIB', required : true, cmake_module_path : 'cmake')
-
-# Test some edge cases with spaces, etc.
-
-testDep1 = dependency('ImportedTarget', required : true, method : 'cmake', cmake_module_path : 'cmake', modules: 'mesonTestLibDefs')
-testDep2 = dependency('ImportedTarget', required : true, method : 'cmake', cmake_module_path : 'cmake', modules : ['MesonTest::TestLibDefs'])
-testFlagSet1 = executable('testFlagSet1', ['testFlagSet.c'], dependencies: [testDep1])
-testFlagSet2 = executable('testFlagSet2', ['testFlagSet.c'], dependencies: [testDep2])
-test('testFlagSetTest1', testFlagSet1)
-test('testFlagSetTest2', testFlagSet2)
+depm1 = dependency('SomethingLikeZLIB', required : true, components : 'required_comp',   method : 'cmake', cmake_module_path : 'cmake')
+depm2 = dependency('SomethingLikeZLIB', required : true, components : 'required_comp',   method : 'cmake', cmake_module_path : ['cmake'])
+depm3 = dependency('SomethingLikeZLIB', required : true, components : ['required_comp'], cmake_module_path : 'cmake')
+
+# Test some edge cases with spaces, etc. (but only for CMake >= 3.15)
+
+if cm_vers.version_compare('>=3.15')
+  testDep1 = dependency('ImportedTarget', required : true, method : 'cmake', cmake_module_path : 'cmake', modules: 'mesonTestLibDefs')
+  testDep2 = dependency('ImportedTarget', required : true, method : 'cmake', cmake_module_path : 'cmake', modules : ['MesonTest::TestLibDefs'])
+  testFlagSet1 = executable('testFlagSet1', ['testFlagSet.c'], dependencies: [testDep1])
+  testFlagSet2 = executable('testFlagSet2', ['testFlagSet.c'], dependencies: [testDep2])
+  test('testFlagSetTest1', testFlagSet1)
+  test('testFlagSetTest2', testFlagSet2)
+endif
 
 # Try to compile a test that takes a dep and an include_directories
 
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/13 cmake dependency/setup_env.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/13 cmake dependency/setup_env.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/13 cmake dependency/setup_env.json"	2019-08-25 19:17:02.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/13 cmake dependency/setup_env.json"	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-{
-  "CMAKE_PREFIX_PATH": "@ROOT@/cmake_pref_env"
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/13 cmake dependency/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/13 cmake dependency/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/13 cmake dependency/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/13 cmake dependency/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,5 @@
+{
+  "env": {
+    "CMAKE_PREFIX_PATH": "@ROOT@/cmake_fake1;@ROOT@/cmake_fake2:@ROOT@/cmake_pref_env"
+  }
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/14 static dynamic linkage/verify_static.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/14 static dynamic linkage/verify_static.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/14 static dynamic linkage/verify_static.py"	2020-01-23 12:51:19.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/14 static dynamic linkage/verify_static.py"	2020-08-15 16:27:05.000000000 +0000
@@ -13,7 +13,7 @@
 def handle_cygwin(path):
     """Handle the Cygwin case."""
     output = subprocess.check_output(['nm', path]).decode('utf-8')
-    if 'I __imp_zlibVersion' in output:
+    if (('I __imp_zlibVersion' in output) or ('D __imp_zlibVersion' in output)):
         return 1
     return 0
 
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/3 linker script/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/3 linker script/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/3 linker script/meson.build"	2020-01-07 21:10:28.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/3 linker script/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -1,5 +1,11 @@
 project('linker script', 'c')
 
+# Solaris 11.4 ld supports --version-script only when you also specify
+# -z gnu-version-script-compat
+if meson.get_compiler('c').get_linker_id() == 'ld.solaris'
+  add_project_link_arguments('-Wl,-z,gnu-version-script-compat', language: 'C')
+endif
+
 # Static map file
 mapfile = 'bob.map'
 vflag = '-Wl,--version-script,@0@/@1@'.format(meson.current_source_dir(), mapfile)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/5 dependency versions/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/5 dependency versions/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/5 dependency versions/meson.build"	2020-01-07 21:10:34.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/5 dependency versions/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -31,39 +31,39 @@
 
 # Search for an external dependency that won't be found, but must later be
 # found via fallbacks
-somelibnotfound = dependency('somelib', required : false)
+somelibnotfound = dependency('somelib1', required : false)
 assert(somelibnotfound.found() == false, 'somelibnotfound was found?')
 # Find internal dependency without version
-somelibver = dependency('somelib',
+somelibver = dependency('somelib1',
   fallback : ['somelibnover', 'some_dep'])
 assert(somelibver.type_name() == 'internal', 'somelibver should be of type "internal", not ' + somelibver.type_name())
 # Find an internal dependency again with the same name and a specific version
-somelib = dependency('somelib',
+somelib = dependency('somelib2',
   version : '== 0.1',
   fallback : ['somelib', 'some_dep'])
 # Find an internal dependency again even if required = false
-somelib_reqfalse = dependency('somelib',
+somelib_reqfalse = dependency('somelib3',
   required: false,
   fallback : ['somelib', 'some_dep'])
 assert(somelib_reqfalse.found(), 'somelib should have been found')
 # Find an internal dependency again with the same name and incompatible version
-somelibver = dependency('somelib',
+somelibver = dependency('somelib4',
   version : '>= 0.3',
   fallback : ['somelibver', 'some_dep'])
 # Find an internal dependency again with impossible multi-version
-somelibver = dependency('somelib',
+somelibver = dependency('somelib5',
   version : ['>= 0.3', '<0.3'],
   required : false,
   fallback : ['somelibver', 'some_dep'])
 assert(not somelibver.found(), 'Dependency should not be found')
 # Find somelib again, but with a fallback that will fail because subproject does not exist
-somelibfail = dependency('somelib',
+somelibfail = dependency('somelib6',
   version : '>= 0.2',
   required : false,
   fallback : ['somelibfail', 'some_dep'])
 assert(somelibfail.found() == false, 'somelibfail found via wrong fallback')
 # Find somelib again, but with a fallback that will fail because dependency does not exist
-somefail_dep = dependency('somelib',
+somefail_dep = dependency('somelib7',
   version : '>= 0.2',
   required : false,
   fallback : ['somelib', 'somefail_dep'])
@@ -71,14 +71,21 @@
 
 # Fallback should only be used if the primary was not found
 fallbackzlib_dep = dependency('zlib',
-  fallback : ['somelib', 'fakezlib_dep'])
+  fallback : ['fakezlib', 'fakezlib_dep'])
 assert(fallbackzlib_dep.type_name() == 'pkgconfig', 'fallbackzlib_dep should be of type "pkgconfig", not ' + fallbackzlib_dep.type_name())
 # Check that the above dependency was pkgconfig because the fallback wasn't
 # checked, not because the fallback didn't work
 fakezlib_dep = dependency('fakezlib',
-  fallback : ['somelib', 'fakezlib_dep'])
+  fallback : ['fakezlib', 'fakezlib_dep'])
 assert(fakezlib_dep.type_name() == 'internal', 'fakezlib_dep should be of type "internal", not ' + fakezlib_dep.type_name())
 
+# Verify that once we got a system dependency, we won't fallback if a newer
+# version is requested.
+d = dependency('zlib', version: '>= 999',
+  fallback : ['donotexist', 'fakezlib_dep'],
+  required: false)
+assert(not d.found(), 'version should not match and it should not fallback')
+
 # Check that you can find a dependency by not specifying a version after not
 # finding it by specifying a version. We add `static: true` here so that the
 # previously cached zlib dependencies don't get checked.
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/5 dependency versions/subprojects/fakezlib/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/5 dependency versions/subprojects/fakezlib/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/5 dependency versions/subprojects/fakezlib/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/5 dependency versions/subprojects/fakezlib/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,3 @@
+project('some', 'c', version : '0.1')
+
+fakezlib_dep = declare_dependency()
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/7 library versions/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/7 library versions/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/7 library versions/installed_files.txt"	2017-04-15 14:27:38.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/7 library versions/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,10 +0,0 @@
-usr/lib/libsome.so
-usr/lib/libsome.so.0
-usr/lib/libsome.so.1.2.3
-usr/lib/libnoversion.so
-usr/lib/libonlyversion.so
-usr/lib/libonlyversion.so.1
-usr/lib/libonlyversion.so.1.4.5
-usr/lib/libonlysoversion.so
-usr/lib/libonlysoversion.so.5
-usr/lib/libmodule.so
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/7 library versions/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/7 library versions/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/7 library versions/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/7 library versions/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/lib/libsome.so"},
+    {"type": "file", "file": "usr/lib/libsome.so.0"},
+    {"type": "file", "file": "usr/lib/libsome.so.1.2.3"},
+    {"type": "file", "file": "usr/lib/libnoversion.so"},
+    {"type": "file", "file": "usr/lib/libonlyversion.so"},
+    {"type": "file", "file": "usr/lib/libonlyversion.so.1"},
+    {"type": "file", "file": "usr/lib/libonlyversion.so.1.4.5"},
+    {"type": "file", "file": "usr/lib/libonlysoversion.so"},
+    {"type": "file", "file": "usr/lib/libonlysoversion.so.5"},
+    {"type": "file", "file": "usr/lib/libmodule.so"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/8 subproject library install/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/8 subproject library install/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/8 subproject library install/installed_files.txt"	2016-07-15 10:52:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/8 subproject library install/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-usr/lib/libsublib.so
-usr/lib/libsublib.so.5
-usr/lib/libsublib.so.2.1.0
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/8 subproject library install/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/8 subproject library install/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/linuxlike/8 subproject library install/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/linuxlike/8 subproject library install/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/lib/libsublib.so"},
+    {"type": "file", "file": "usr/lib/libsublib.so.5"},
+    {"type": "file", "file": "usr/lib/libsublib.so.2.1.0"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/162 external program shebang parsing/input.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/162 external program shebang parsing/input.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/162 external program shebang parsing/input.txt"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/162 external program shebang parsing/input.txt"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1 @@
+some stuff here
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/162 external program shebang parsing/main.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/162 external program shebang parsing/main.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/162 external program shebang parsing/main.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/162 external program shebang parsing/main.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,72 @@
+#include <stdio.h>
+#include <fcntl.h>
+#include <errno.h>
+#include <string.h>
+#include <stdlib.h>
+#include <sys/types.h>
+
+#ifdef _WIN32
+ #include <io.h>
+ #include <windows.h>
+#else
+ #include <unistd.h>
+#endif
+
+/* Who cares about stack sizes in test programs anyway */
+#define LINE_LENGTH 4096
+
+static int
+intrp_copyfile (char * src, char * dest)
+{
+#ifdef _WIN32
+  if (!CopyFile (src, dest, FALSE))
+    return 1;
+  return 0;
+#else
+  return execlp ("cp", "cp", src, dest, NULL);
+#endif
+}
+
+static void
+parser_get_line (FILE * f, char line[LINE_LENGTH])
+{
+  if (!fgets (line, LINE_LENGTH, f))
+    fprintf (stderr, "%s\n", strerror (errno));
+}
+
+int
+main (int argc, char * argv[])
+{
+  FILE *f = NULL;
+  char line[LINE_LENGTH];
+
+  if (argc != 4) {
+    fprintf (stderr, "Invalid number of arguments: %i\n", argc);
+    goto err;
+  }
+
+  if ((f = fopen (argv[1], "r")) == NULL) {
+    fprintf (stderr, "%s\n", strerror (errno));
+    goto err;
+  }
+
+  parser_get_line (f, line);
+
+  if (!line || line[0] != '#' || line[1] != '!') {
+    fprintf (stderr, "Invalid script\n");
+    goto err;
+  }
+
+  parser_get_line (f, line);
+
+  if (!line || strncmp (line, "copy", 4) != 0) {
+    fprintf (stderr, "Syntax error: %s\n", line);
+    goto err;
+  }
+
+  return intrp_copyfile (argv[2], argv[3]);
+
+err:
+  fclose (f);
+  return 1;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/162 external program shebang parsing/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/162 external program shebang parsing/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/162 external program shebang parsing/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/162 external program shebang parsing/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,21 @@
+project('shebang parsing', 'c')
+
+interpreter = executable('aninterp', 'main.c', native : true)
+
+cdata = configuration_data()
+cdata.set('INTRP', interpreter.full_path())
+
+f = configure_file(input : 'script.int.in',
+                   output : 'script.int',
+                   configuration : cdata)
+
+# Test that parsing a shebang with spaces works properly. See `man execve`,
+# specifically the section on "Interpreter scripts" and the one under "NOTES".
+script = find_program(f)
+
+custom_target('interpthis',
+  input : 'input.txt',
+  output : 'output.txt',
+  depends : interpreter,
+  command : [script, '@INPUT@', '@OUTPUT@'],
+  build_by_default : true)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/162 external program shebang parsing/script.int.in" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/162 external program shebang parsing/script.int.in"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/162 external program shebang parsing/script.int.in"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/162 external program shebang parsing/script.int.in"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,2 @@
+#!/usr/bin/env @INTRP@
+copy
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/1 trivial/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/1 trivial/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/1 trivial/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/1 trivial/meson.build"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,9 @@
+project('trivial native test', 'c')
+
+sources = 'trivial.c'
+cc = meson.get_compiler('c', native: true)
+
+if meson.is_cross_build()
+  native_exe = executable('native-trivialprog', sources : sources, native : true)
+  test('native exe in cross build', native_exe)
+endif
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/1 trivial/trivial.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/1 trivial/trivial.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/1 trivial/trivial.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/1 trivial/trivial.c"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,6 @@
+#include<stdio.h>
+
+int main(void) {
+    printf("Trivial test is working.\n");
+    return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/201 override with exe/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/201 override with exe/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/201 override with exe/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/201 override with exe/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,21 @@
+project('myexe', 'c')
+sub = subproject('sub')
+
+prog = find_program('foobar', version : '>= 2.0', required : false)
+assert(not prog.found())
+
+prog = find_program('foobar', version : '>= 1.0')
+custom1 = custom_target('custom1',
+                        build_by_default : true,
+                        input : [],
+                        output : 'main1.c',
+                        command : [prog, '@OUTPUT@'])
+gen = generator(prog,
+                output : '@BASENAME@.c',
+                arguments : ['@OUTPUT@'])
+custom2 = gen.process('main2.input')
+
+message(prog.full_path())
+
+executable('e1', custom1)
+executable('e2', custom2)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/201 override with exe/subprojects/sub/foobar.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/201 override with exe/subprojects/sub/foobar.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/201 override with exe/subprojects/sub/foobar.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/201 override with exe/subprojects/sub/foobar.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,13 @@
+#include <assert.h>
+#include <stdio.h>
+
+int main(int argc, char* argv[]) {
+  assert(argc == 2);
+  FILE *f = fopen(argv[1], "w");
+  const char msg[] = "int main(void) {return 0;}\n";
+  size_t w = fwrite(msg, 1, sizeof(msg) - 1, f);
+  assert(w == sizeof(msg) - 1);
+  int r = fclose(f);
+  assert(r == 0);
+  return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/201 override with exe/subprojects/sub/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/201 override with exe/subprojects/sub/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/201 override with exe/subprojects/sub/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/201 override with exe/subprojects/sub/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,3 @@
+project('sub', 'c', version : '1.0')
+foobar = executable('foobar', 'foobar.c',  native : true)
+meson.override_find_program('foobar', foobar)
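As a brief illustration of what the main project above relies on (a sketch only, not part of the upstream patch): the override registered by the subproject is what later find_program() calls resolve to, so the subproject has to be configured first.

project('override sketch', 'c')
sub = subproject('sub')          # sub runs meson.override_find_program('foobar', foobar)
prog = find_program('foobar')    # resolves to the natively built 'foobar', reported as version 1.0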
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/21 global arg/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/21 global arg/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/21 global arg/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/21 global arg/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,14 @@
+project('global arg test', 'cpp', 'c')
+
+add_global_arguments('-DMYTHING', language : 'c', native : true)
+add_global_arguments('-DMYCPPTHING', language : 'cpp', native : true)
+add_global_arguments('-DGLOBAL_BUILD', language : 'c', native : true)
+
+build_c_args = ['-DARG_BUILD']
+c_args = ['-DARG_HOST']
+
+add_global_arguments('-DMYCANDCPPTHING', language: ['c', 'cpp'], native: true)
+
+exe1 = executable('prog1', 'prog.c', c_args : build_c_args, native : true)
+
+test('prog1', exe1)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/21 global arg/prog.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/21 global arg/prog.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/21 global arg/prog.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/21 global arg/prog.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,43 @@
+#ifndef MYTHING
+  #error "Global argument not set"
+#endif
+
+#ifdef MYCPPTHING
+  #error "Wrong global argument set"
+#endif
+
+#ifndef MYCANDCPPTHING
+  #error "Global argument not set"
+#endif
+
+#if !defined(GLOBAL_HOST) && !defined(GLOBAL_BUILD)
+  #error "Neither global_host nor global_build is set."
+#endif
+
+#if defined(GLOBAL_HOST) && defined(GLOBAL_BUILD)
+  #error "Both global build and global host set."
+#endif
+
+#ifdef GLOBAL_BUILD
+  #ifndef ARG_BUILD
+    #error "Global is build but arg_build is not set."
+  #endif
+
+  #ifdef ARG_HOST
+    #error "Global is build but arg host is set."
+  #endif
+#endif
+
+#ifdef GLOBAL_HOST
+  #ifndef ARG_HOST
+    #error "Global is host but arg_host is not set."
+  #endif
+
+  #ifdef ARG_BUILD
+    #error "Global is host but arg_build is set."
+  #endif
+#endif
+
+int main(void) {
+    return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/21 global arg/prog.cc" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/21 global arg/prog.cc"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/21 global arg/prog.cc"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/21 global arg/prog.cc"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,15 @@
+#ifdef MYTHING
+#error "Wrong global argument set"
+#endif
+
+#ifndef MYCPPTHING
+#error "Global argument not set"
+#endif
+
+#ifndef MYCANDCPPTHING
+#error "Global argument not set"
+#endif
+
+int main(void) {
+    return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/27 pipeline/depends/copyrunner.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/27 pipeline/depends/copyrunner.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/27 pipeline/depends/copyrunner.py"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/27 pipeline/depends/copyrunner.py"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,7 @@
+#!/usr/bin/env python3
+
+import sys, subprocess
+
+prog, infile, outfile = sys.argv[1:]
+
+subprocess.check_call([prog, infile, outfile])
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/27 pipeline/depends/filecopier.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/27 pipeline/depends/filecopier.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/27 pipeline/depends/filecopier.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/27 pipeline/depends/filecopier.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,23 @@
+#include<stdio.h>
+#include<assert.h>
+
+#define BUFSIZE 1024
+
+int main(int argc, char **argv) {
+    char buffer[BUFSIZE];
+    size_t num_read;
+    size_t num_written;
+    FILE *fin = fopen(argv[1], "rb");
+    FILE *fout;
+    assert(argc>0);
+    assert(fin);
+    num_read = fread(buffer, 1, BUFSIZE, fin);
+    assert(num_read > 0);
+    fclose(fin);
+    fout = fopen(argv[2], "wb");
+    assert(fout);
+    num_written = fwrite(buffer, 1, num_read, fout);
+    assert(num_written == num_read);
+    fclose(fout);
+    return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/27 pipeline/depends/libsrc.c.in" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/27 pipeline/depends/libsrc.c.in"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/27 pipeline/depends/libsrc.c.in"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/27 pipeline/depends/libsrc.c.in"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,3 @@
+int func(void) {
+    return 42;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/27 pipeline/depends/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/27 pipeline/depends/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/27 pipeline/depends/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/27 pipeline/depends/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,11 @@
+runner = find_program('copyrunner.py')
+
+copier = executable('copier', 'filecopier.c', native: true)
+
+cg = generator(runner,
+    output: ['@BASENAME@.c'],
+    arguments: [copier.full_path(), '@INPUT@', '@OUTPUT@'],
+    depends: copier)
+
+test('generatordep',
+    executable('gd', 'prog.c', cg.process('libsrc.c.in')))
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/27 pipeline/depends/prog.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/27 pipeline/depends/prog.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/27 pipeline/depends/prog.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/27 pipeline/depends/prog.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,5 @@
+int func(void);
+
+int main(void) {
+    return func() != 42;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/27 pipeline/input_src.dat" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/27 pipeline/input_src.dat"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/27 pipeline/input_src.dat"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/27 pipeline/input_src.dat"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1 @@
+int func(void) { return 0; }
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/27 pipeline/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/27 pipeline/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/27 pipeline/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/27 pipeline/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,23 @@
+project('pipeline test', 'c')
+
+# We need to run this executable locally so build it with
+# the host compiler.
+e1 = executable('srcgen', 'srcgen.c', native : true)
+
+# Generate a source file that needs to be included in the build.
+gen = generator(e1, \
+  depfile : '@BASENAME@.d',
+  output  : '@BASENAME@.c', # Line continuation inside arguments should work without needing a "\".
+  arguments : ['@INPUT@', '@OUTPUT@', '@DEPFILE@'])
+
+generated = gen.process(['input_src.dat'])
+
+e2 = executable('prog', 'prog.c', generated)
+
+test('pipelined', e2)
+
+# This is in a subdirectory to make sure
+# we write proper subdir paths to output.
+subdir('src')
+
+subdir('depends')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/27 pipeline/prog.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/27 pipeline/prog.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/27 pipeline/prog.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/27 pipeline/prog.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,5 @@
+int func(void);
+
+int main(void) {
+    return func();
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/27 pipeline/src/input_src.dat" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/27 pipeline/src/input_src.dat"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/27 pipeline/src/input_src.dat"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/27 pipeline/src/input_src.dat"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1 @@
+#include<stdio.h>
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/27 pipeline/src/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/27 pipeline/src/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/27 pipeline/src/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/27 pipeline/src/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,12 @@
+e1 = executable('srcgen', 'srcgen.c', native : true)
+
+# Generate a header file that needs to be included.
+gen = generator(e1,
+ output  : '@BASENAME@.h',
+ arguments : ['@INPUT@', '@OUTPUT@'])
+
+generated = gen.process('input_src.dat')
+
+e2 = executable('prog', 'prog.c', generated)
+
+test('pipelined', e2)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/27 pipeline/src/prog.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/27 pipeline/src/prog.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/27 pipeline/src/prog.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/27 pipeline/src/prog.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,9 @@
+#include"input_src.h"
+
+int main(void) {
+    void *foo = printf;
+    if(foo) {
+        return 0;
+    }
+    return 1;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/27 pipeline/src/srcgen.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/27 pipeline/src/srcgen.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/27 pipeline/src/srcgen.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/27 pipeline/src/srcgen.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,40 @@
+#include<stdio.h>
+#include<assert.h>
+
+#define ARRSIZE 80
+
+int main(int argc, char **argv) {
+    char arr[ARRSIZE];
+    char *ifilename;
+    char *ofilename;
+    FILE *ifile;
+    FILE *ofile;
+    size_t bytes;
+
+    if(argc != 3) {
+        fprintf(stderr, "%s <input file> <output file>\n", argv[0]);
+        return 1;
+    }
+    ifilename = argv[1];
+    ofilename = argv[2];
+    printf("%s\n", ifilename);
+    ifile = fopen(ifilename, "r");
+    if(!ifile) {
+        fprintf(stderr, "Could not open source file %s.\n", ifilename);
+        return 1;
+    }
+    ofile = fopen(ofilename, "w");
+    if(!ofile) {
+        fprintf(stderr, "Could not open target file %s\n", ofilename);
+        fclose(ifile);
+        return 1;
+    }
+    bytes = fread(arr, 1, ARRSIZE, ifile);
+    assert(bytes < 80);
+    assert(bytes > 0);
+    fwrite(arr, 1, bytes, ofile);
+
+    fclose(ifile);
+    fclose(ofile);
+    return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/27 pipeline/srcgen.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/27 pipeline/srcgen.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/27 pipeline/srcgen.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/27 pipeline/srcgen.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,69 @@
+#include<stdio.h>
+#include<assert.h>
+#include<string.h>
+
+#define ARRSIZE 80
+
+int main(int argc, char **argv) {
+    char arr[ARRSIZE];
+    char *ofilename;
+    char *ifilename;
+    char *dfilename;
+    FILE *ifile;
+    FILE *ofile;
+    FILE *depfile;
+    size_t bytes;
+    int i;
+
+    if(argc != 4) {
+        fprintf(stderr, "%s <input file> <output file> <dependency file>\n", argv[0]);
+        return 1;
+    }
+    ifilename = argv[1];
+    ofilename = argv[2];
+    dfilename = argv[3];
+    ifile = fopen(argv[1], "r");
+    if(!ifile) {
+        fprintf(stderr, "Could not open source file %s.\n", argv[1]);
+        return 1;
+    }
+    ofile = fopen(ofilename, "w");
+    if(!ofile) {
+        fprintf(stderr, "Could not open target file %s\n", ofilename);
+        fclose(ifile);
+        return 1;
+    }
+    bytes = fread(arr, 1, ARRSIZE, ifile);
+    assert(bytes < 80);
+    assert(bytes > 0);
+    fwrite(arr, 1, bytes, ofile);
+
+    depfile = fopen(dfilename, "w");
+    if(!depfile) {
+        fprintf(stderr, "Could not open depfile %s\n", ofilename);
+        fclose(ifile);
+        fclose(ofile);
+        return 1;
+    }
+    for(i=0; i
+ok_code = '''#include<stdio.h>
+int main(void) {
+  printf("%s\n", "stdout");
+  fprintf(stderr, "%s\n", "stderr");
+  return 0;
+}
+'''
+
+error_code = '''int main(void) {
+  return 1;
+}
+'''
+
+no_compile_code = '''int main(void) {
+'''
+
+INPUTS = [
+  ['String', ok_code, error_code, no_compile_code],
+  ['File', files('ok.c'), files('error.c'), files('no_compile.c')],
+]
+
+foreach cc : compilers
+  foreach input : INPUTS
+    type = input[0]
+    ok = cc.run(input[1], name : type + ' should succeed')
+    err = cc.run(input[2], name : type + ' should fail')
+    noc = cc.run(input[3], name : type + ' does not compile')
+
+    if noc.compiled()
+      error(type + ' compilation fail test failed.')
+    else
+      message(type + ' fail detected properly.')
+    endif
+
+    if ok.compiled()
+      message(type + ' compilation worked.')
+    else
+      error(type + ' compilation did not work.')
+    endif
+
+    if ok.returncode() == 0
+      message(type + ' return code ok.')
+    else
+      error(type + ' return code fail')
+    endif
+
+    if err.returncode() == 1
+      message(type + ' bad return code ok.')
+    else
+      error(type + ' bad return code fail.')
+    endif
+
+    if ok.stdout().strip() == 'stdout'
+      message(type + ' stdout ok.')
+    else
+      message(type + ' bad stdout.')
+    endif
+
+    if ok.stderr().strip() == 'stderr'
+      message(type + ' stderr ok.')
+    else
+      message(type + ' bad stderr.')
+    endif
+  endforeach
+endforeach
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/36 tryrun/no_compile.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/36 tryrun/no_compile.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/36 tryrun/no_compile.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/36 tryrun/no_compile.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1 @@
+int main(void) {
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/36 tryrun/ok.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/36 tryrun/ok.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/36 tryrun/ok.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/36 tryrun/ok.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,7 @@
+#include<stdio.h>
+
+int main(void) {
+  printf("%s\n", "stdout");
+  fprintf(stderr, "%s\n", "stderr");
+  return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/56 install script/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/56 install script/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/56 install script/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/56 install script/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,12 @@
+project('custom install script', 'c')
+
+# this is just to ensure that the install directory exists before exe is run
+install_data('file.txt', install_dir: '.')
+
+subdir('src')
+
+meson.add_install_script(exe, 'generated.txt')
+wrap = find_program('wrap.py')
+# Yes, these are getting silly
+meson.add_install_script(wrap, exe, 'wrapped.txt')
+meson.add_install_script(wrap, wrap, exe, 'wrapped2.txt')
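For orientation, a hedged sketch of the mechanism this fixture depends on (illustrative only, not part of the upstream patch): at meson install time each registered script runs with MESON_INSTALL_DESTDIR_PREFIX in its environment, and the helper executable (exe.c below) joins that prefix with its argument to decide where to write.

project('install script sketch', 'c')
helper = executable('exe', 'exe.c', install : false, native : true)
# helper reads MESON_INSTALL_DESTDIR_PREFIX, so this ends up creating
# ${DESTDIR}${prefix}/generated.txt during installation.
meson.add_install_script(helper, 'generated.txt')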
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/56 install script/src/exe.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/56 install script/src/exe.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/56 install script/src/exe.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/56 install script/src/exe.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,27 @@
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+int main(int argc, char * argv[]) {
+    if (argc != 2) {
+        fprintf(stderr, "Takes exactly 2 arguments\n");
+        return 1;
+    }
+
+    char * dirname = getenv("MESON_INSTALL_DESTDIR_PREFIX");
+    char * fullname = malloc(strlen(dirname) + 1 + strlen(argv[1]) + 1);
+    strcpy(fullname, dirname);
+    strcat(fullname, "/");
+    strcat(fullname, argv[1]);
+
+    FILE * fp = fopen(fullname, "w");
+    if (!fp)
+        return 1;
+
+    fputs("Some text\n", fp);
+    fclose(fp);
+
+    free(fullname);
+
+    return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/56 install script/src/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/56 install script/src/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/56 install script/src/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/56 install script/src/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1 @@
+exe = executable('exe', 'exe.c', install : false, native : true)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/56 install script/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/56 install script/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/56 install script/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/56 install script/test.json"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,8 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/file.txt"},
+    {"type": "file", "file": "usr/generated.txt"},
+    {"type": "file", "file": "usr/wrapped.txt"},
+    {"type": "file", "file": "usr/wrapped2.txt"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/56 install script/wrap.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/56 install script/wrap.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/56 install script/wrap.py"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/56 install script/wrap.py"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,6 @@
+#!/usr/bin/env python3
+
+import subprocess
+import sys
+
+subprocess.run(sys.argv[1:])
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/85 add language/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/85 add language/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/85 add language/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/85 add language/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,3 @@
+project('add language', 'c')
+assert(add_languages('cpp', native: true), 'Add_languages returned false on success')
+test('C++', executable('cppprog', 'prog.cc', native: true))
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/85 add language/prog.cc" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/85 add language/prog.cc"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/85 add language/prog.cc"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/85 add language/prog.cc"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,6 @@
+#include<iostream>
+
+int main(int, char**) {
+    std::cout << "I am C++.\n";
+    return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/93 selfbuilt custom/checkarg.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/93 selfbuilt custom/checkarg.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/93 selfbuilt custom/checkarg.cpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/93 selfbuilt custom/checkarg.cpp"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,6 @@
+#include <cassert>
+
+int main(int argc, char *[]) {
+    assert(argc == 2);
+    return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/93 selfbuilt custom/data.dat" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/93 selfbuilt custom/data.dat"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/93 selfbuilt custom/data.dat"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/93 selfbuilt custom/data.dat"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1 @@
+generated_function
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/93 selfbuilt custom/mainprog.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/93 selfbuilt custom/mainprog.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/93 selfbuilt custom/mainprog.cpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/93 selfbuilt custom/mainprog.cpp"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,5 @@
+#include"data.h"
+
+int main(void) {
+    return generated_function() != 52;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/93 selfbuilt custom/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/93 selfbuilt custom/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/93 selfbuilt custom/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/93 selfbuilt custom/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,39 @@
+project('selfbuilt custom', 'cpp')
+
+# Build an exe and use it in a custom target
+# whose output is used to build a different exe.
+
+tool = executable('tool', 'tool.cpp', native : true)
+
+hfile = custom_target('datah',
+  output : 'data.h',
+  input : 'data.dat',
+  command : [tool, '@INPUT@', '@OUTPUT@'],
+)
+
+main = executable('mainprog', 'mainprog.cpp', hfile)
+
+test('maintest', main)
+
+lib = library('libtool', 'tool.cpp')
+
+checkarg = executable('checkarg', 'checkarg.cpp', native : true)
+
+ctlib = custom_target('ctlib',
+  output : 'ctlib.out',
+  capture : true,
+  command : [checkarg, lib],
+  build_by_default : true,
+)
+
+if meson.is_cross_build() and meson.can_run_host_binaries()
+  checkarg_host = executable('checkarg_host', 'checkarg.cpp')
+
+  ctlib_host = custom_target(
+    'ctlib_host',
+    output : 'ctlib.host.out',
+    capture : true,
+    command : [checkarg_host, lib],
+    build_by_default : true,
+  )
+endif
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/93 selfbuilt custom/tool.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/93 selfbuilt custom/tool.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/native/93 selfbuilt custom/tool.cpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/native/93 selfbuilt custom/tool.cpp"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,34 @@
+#include<iostream>
+#include<fstream>
+#include<string>
+
+using namespace std;
+
+const char prefix[] = "int ";
+const char suffix[] = " () {\n    return 52;}\n";
+
+int main(int argc, char **argv) {
+    if(argc != 3) {
+        cout << "You is fail.\n";
+        return 1;
+    }
+    ifstream is(argv[1], ifstream::binary);
+    if(!is) {
+        cout << "Opening input file failed.\n";
+        return 1;
+    }
+    string funcname;
+    is >> funcname;
+    ofstream os(argv[2], ofstream::binary);
+    if(!os) {
+        cout << "Opening output file failed.\n";
+        return 1;
+    }
+    os << prefix << funcname << suffix;
+    os.close();
+    if(!os.good()) {
+        cout << "Writing data out failed.\n";
+        return 1;
+    }
+    return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/osx/2 library versions/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/osx/2 library versions/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/osx/2 library versions/installed_files.txt"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/osx/2 library versions/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,8 +0,0 @@
-usr/lib/libsome.dylib
-usr/lib/libsome.7.dylib
-usr/lib/libnoversion.dylib
-usr/lib/libonlyversion.dylib
-usr/lib/libonlyversion.1.dylib
-usr/lib/libonlysoversion.dylib
-usr/lib/libonlysoversion.5.dylib
-usr/lib/libmodule.dylib
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/osx/2 library versions/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/osx/2 library versions/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/osx/2 library versions/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/osx/2 library versions/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,12 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/lib/libsome.dylib"},
+    {"type": "file", "file": "usr/lib/libsome.7.dylib"},
+    {"type": "file", "file": "usr/lib/libnoversion.dylib"},
+    {"type": "file", "file": "usr/lib/libonlyversion.dylib"},
+    {"type": "file", "file": "usr/lib/libonlyversion.1.dylib"},
+    {"type": "file", "file": "usr/lib/libonlysoversion.dylib"},
+    {"type": "file", "file": "usr/lib/libonlysoversion.5.dylib"},
+    {"type": "file", "file": "usr/lib/libmodule.dylib"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/osx/4 framework/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/osx/4 framework/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/osx/4 framework/installed_files.txt"	2018-10-31 09:31:57.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/osx/4 framework/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,2 +0,0 @@
-usr/bin/prog
-usr/lib/libstat.a
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/osx/4 framework/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/osx/4 framework/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/osx/4 framework/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/osx/4 framework/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/bin/prog"},
+    {"type": "file", "file": "usr/lib/libstat.a"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/osx/5 extra frameworks/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/osx/5 extra frameworks/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/osx/5 extra frameworks/installed_files.txt"	2019-02-07 09:08:59.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/osx/5 extra frameworks/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,2 +0,0 @@
-usr/bin/prog
-usr/lib/libstat.a
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/osx/5 extra frameworks/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/osx/5 extra frameworks/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/osx/5 extra frameworks/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/osx/5 extra frameworks/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/bin/prog"},
+    {"type": "file", "file": "usr/lib/libstat.a"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/python/1 basic/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/python/1 basic/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/python/1 basic/meson.build"	2020-01-07 21:13:19.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/python/1 basic/meson.build"	2020-09-17 22:00:44.000000000 +0000
@@ -1,4 +1,4 @@
-project('python sample', 'c')
+project('python sample')
 
 py_mod = import('python')
 py = py_mod.find_installation('python3')
@@ -12,6 +12,7 @@
 if not py_purelib.endswith('site-packages')
   error('Python3 purelib path seems invalid? ' + py_purelib)
 endif
+message('Python purelib path:', py_purelib)
 
 # could be 'lib64' or 'Lib' on some systems
 py_platlib = py.get_path('platlib')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/python/1 basic/prog.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/python/1 basic/prog.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/python/1 basic/prog.py"	2019-02-07 09:08:55.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/python/1 basic/prog.py"	2020-09-17 22:00:44.000000000 +0000
@@ -1,9 +1,8 @@
 #!/usr/bin/env python3
 
 from gluon import gluonator
-import sys
 
 print('Running mainprog from root dir.')
 
 if gluonator.gluoninate() != 42:
-    sys.exit(1)
+    raise ValueError("!= 42")
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/python/1 basic/subdir/subprog.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/python/1 basic/subdir/subprog.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/python/1 basic/subdir/subprog.py"	2019-02-07 09:08:55.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/python/1 basic/subdir/subprog.py"	2020-09-17 22:00:44.000000000 +0000
@@ -4,9 +4,8 @@
 # point to source root.
 
 from gluon import gluonator
-import sys
 
 print('Running mainprog from subdir.')
 
 if gluonator.gluoninate() != 42:
-    sys.exit(1)
+    raise ValueError("!= 42")
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/python/2 extmodule/blaster.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/python/2 extmodule/blaster.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/python/2 extmodule/blaster.py"	2019-02-07 09:08:55.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/python/2 extmodule/blaster.py"	2020-09-17 22:00:44.000000000 +0000
@@ -1,14 +1,11 @@
 #!/usr/bin/env python3
 
 import tachyon
-import sys
 
 result = tachyon.phaserize('shoot')
 
 if not isinstance(result, int):
-    print('Returned result not an integer.')
-    sys.exit(1)
+    raise SystemExit('Returned result not an integer.')
 
 if result != 1:
-    print('Returned result {} is not 1.'.format(result))
-    sys.exit(1)
+    raise SystemExit('Returned result {} is not 1.'.format(result))
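Note on the change above (a sketch, not part of the diff): raising SystemExit with a non-integer argument prints that argument to stderr and exits with status 1, which is why it can stand in for the older print(...) + sys.exit(1) pattern used before. A minimal standalone illustration, with a hypothetical result value:

    # Minimal sketch: SystemExit with a string argument prints it to stderr
    # and sets the exit status to 1, matching print(...) + sys.exit(1).
    result = 0  # hypothetical value standing in for tachyon.phaserize('shoot')
    if result != 1:
        raise SystemExit('Returned result {} is not 1.'.format(result))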
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/python/2 extmodule/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/python/2 extmodule/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/python/2 extmodule/meson.build"	2020-01-07 21:13:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/python/2 extmodule/meson.build"	2020-09-17 22:00:44.000000000 +0000
@@ -3,26 +3,33 @@
 # Because Windows Python ships only with optimized libs,
 # we must build this project the same way.
 
+if meson.backend() != 'ninja'
+  error('MESON_SKIP_TEST: Ninja backend required')
+endif
+
+
 py_mod = import('python')
 py = py_mod.find_installation()
-py_dep = py.dependency()
+py_dep = py.dependency(required: false)
 
-if py_dep.found()
-  subdir('ext')
+if not py_dep.found()
+  error('MESON_SKIP_TEST: Python libraries not found.')
+endif
 
-  test('extmod',
-    py,
-    args : files('blaster.py'),
-    env : ['PYTHONPATH=' + pypathdir])
+subdir('ext')
 
-  # Check we can apply a version constraint
-  dependency('python3', version: '>=@0@'.format(py_dep.version()))
+test('extmod',
+  py,
+  args : files('blaster.py'),
+  env : ['PYTHONPATH=' + pypathdir])
 
-else
-  error('MESON_SKIP_TEST: Python3 libraries not found, skipping test.')
-endif
 
 py3_pkg_dep = dependency('python3', method: 'pkg-config', required : false)
 if py3_pkg_dep.found()
   python_lib_dir = py3_pkg_dep.get_pkgconfig_variable('libdir')
+
+  # Check we can apply a version constraint
+  dependency('python3', version: '>=@0@'.format(py_dep.version()))
+else
+  message('Skipped python3 pkg-config test')
 endif
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/python/3 cython/cytest.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/python/3 cython/cytest.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/python/3 cython/cytest.py"	2019-02-07 09:08:55.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/python/3 cython/cytest.py"	2020-09-17 22:00:44.000000000 +0000
@@ -1,23 +1,19 @@
 #!/usr/bin/env python3
 
 from storer import Storer
-import sys
 
 s = Storer()
 
 if s.get_value() != 0:
-    print('Initial value incorrect.')
-    sys.exit(1)
+    raise SystemExit('Initial value incorrect.')
 
 s.set_value(42)
 
 if s.get_value() != 42:
-    print('Setting value failed.')
-    sys.exit(1)
+    raise SystemExit('Setting value failed.')
 
 try:
     s.set_value('not a number')
-    print('Using wrong argument type did not fail.')
-    sys.exit(1)
+    raise SystemExit('Using wrong argument type did not fail.')
 except TypeError:
     pass
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/python/3 cython/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/python/3 cython/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/python/3 cython/meson.build"	2020-01-07 21:13:21.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/python/3 cython/meson.build"	2020-09-17 22:00:44.000000000 +0000
@@ -1,20 +1,26 @@
 project('cython', 'c',
   default_options : ['warning_level=3'])
 
-cython = find_program('cython3', required : false)
-py3_dep = dependency('python3', required : false)
+if meson.backend() != 'ninja'
+  error('MESON_SKIP_TEST: Ninja backend required')
+endif
 
-if cython.found() and py3_dep.found()
-  py_mod = import('python')
-  py3 = py_mod.find_installation()
-  py3_dep = py3.dependency()
-  subdir('libdir')
+cython = find_program('cython', required : false)
+if not cython.found()
+  error('MESON_SKIP_TEST: Cython3 not found.')
+endif
 
-  test('cython tester',
-    py3,
-    args : files('cytest.py'),
-    env : ['PYTHONPATH=' + pydir]
-  )
-else
-  error('MESON_SKIP_TEST: Cython3 or Python3 libraries not found, skipping test.')
+py_mod = import('python')
+py3 = py_mod.find_installation()
+py3_dep = py3.dependency(required: false)
+if not py3_dep.found()
+  error('MESON_SKIP_TEST: Python library not found.')
 endif
+
+subdir('libdir')
+
+test('cython tester',
+  py3,
+  args : files('cytest.py'),
+  env : ['PYTHONPATH=' + pydir]
+)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/python/4 custom target depends extmodule/blaster.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/python/4 custom target depends extmodule/blaster.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/python/4 custom target depends extmodule/blaster.py"	2019-02-07 09:08:55.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/python/4 custom target depends extmodule/blaster.py"	2020-09-17 22:00:44.000000000 +0000
@@ -24,9 +24,7 @@
         f.write('success')
 
 if not isinstance(result, int):
-    print('Returned result not an integer.')
-    sys.exit(1)
+    raise SystemExit('Returned result not an integer.')
 
 if result != 1:
-    print('Returned result {} is not 1.'.format(result))
-    sys.exit(1)
+    raise SystemExit('Returned result {} is not 1.'.format(result))
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/python/4 custom target depends extmodule/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/python/4 custom target depends extmodule/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/python/4 custom target depends extmodule/meson.build"	2020-01-07 21:13:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/python/4 custom target depends extmodule/meson.build"	2020-09-17 22:00:44.000000000 +0000
@@ -3,11 +3,19 @@
 # Because Windows Python ships only with optimized libs,
 # we must build this project the same way.
 
+if meson.backend() != 'ninja'
+  error('MESON_SKIP_TEST: Ninja backend required')
+endif
+
 py_mod = import('python')
 py3 = py_mod.find_installation()
 py3_dep = py3.dependency(required : false)
 cc = meson.get_compiler('c')
 
+if not py3_dep.found()
+  error('MESON_SKIP_TEST: Python3 libraries not found, skipping test.')
+endif
+
 # Copy to the builddir so that blaster.py can find the built tachyon module
 # FIXME: We should automatically detect this case and append the correct paths
 # to PYTHONLIBDIR
@@ -20,21 +28,18 @@
 with open(sys.argv[1], 'rb') as f:
   assert(f.read() == b'success')
 '''
-if py3_dep.found()
-  message('Detected Python version: ' + py3_dep.version())
-  if py3_dep.version().version_compare('>=3.8') and cc.get_id() == 'msvc' and cc.version().version_compare('<=19.00.24215.1')
-    error('MESON_SKIP_TEST: Python modules do not work with Python 3.8 and VS2015 or earlier.')
-  endif
-  subdir('ext')
-
-  out_txt = custom_target('tachyon flux',
-    input : blaster_py,
-    output : 'out.txt',
-    command : [py3, '@INPUT@', '-o', '@OUTPUT@'],
-    depends : pylib,
-    build_by_default: true)
 
-  test('flux', py3, args : ['-c', check_exists, out_txt])
-else
-  error('MESON_SKIP_TEST: Python3 libraries not found, skipping test.')
+message('Detected Python version: ' + py3_dep.version())
+if py3_dep.version().version_compare('>=3.8') and cc.get_id() == 'msvc' and cc.version().version_compare('<=19.00.24215.1')
+  error('MESON_SKIP_TEST: Python modules do not work with Python 3.8 and VS2015 or earlier.')
 endif
+subdir('ext')
+
+out_txt = custom_target('tachyon flux',
+  input : blaster_py,
+  output : 'out.txt',
+  command : [py3, '@INPUT@', '-o', '@OUTPUT@'],
+  depends : pylib,
+  build_by_default: true)
+
+test('flux', py3, args : ['-c', check_exists, out_txt])
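Note on the comment in the hunk above ("Copy to the builddir so that blaster.py can find the built tachyon module"): the test script has to end up next to (or on the import path of) the freshly built extension. A rough standalone sketch of the same idea in plain Python, with hypothetical file and directory names, not part of the diff:

    # Sketch only: place the test script alongside the built extension module
    # (or extend sys.path) so a plain 'import tachyon' can succeed.
    import os
    import shutil

    script = 'blaster.py'   # hypothetical source-tree test script
    builddir = 'ext'        # hypothetical build dir containing the tachyon module
    if os.path.exists(script) and os.path.isdir(builddir):
        shutil.copy(script, builddir)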
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/python/5 modules kwarg/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/python/5 modules kwarg/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/python/5 modules kwarg/meson.build"	2020-01-07 21:13:22.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/python/5 modules kwarg/meson.build"	2020-09-17 22:00:44.000000000 +0000
@@ -1,7 +1,7 @@
 project('python kwarg')
 
 py = import('python')
-prog_python = py.find_installation('python3', modules : ['setuptools'])
+prog_python = py.find_installation('python3', modules : ['distutils'])
 assert(prog_python.found() == true, 'python not found when should be')
 prog_python = py.find_installation('python3', modules : ['thisbetternotexistmod'], required : false)
 assert(prog_python.found() == false, 'python not found but reported as found')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/python3/3 cython/libdir/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/python3/3 cython/libdir/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/python3/3 cython/libdir/meson.build"	2017-01-12 20:52:44.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/python3/3 cython/libdir/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -1,6 +1,7 @@
 pyx_c = custom_target('storer_pyx',
   output : 'storer_pyx.c',
   input : 'storer.pyx',
+  depend_files : 'cstorer.pxd',
   command : [cython, '@INPUT@', '-o', '@OUTPUT@'],
 )
 
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/1 basic/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/1 basic/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/1 basic/installed_files.txt"	2019-06-27 16:43:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/1 basic/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-usr/bin/program?exe
-?msvc:usr/bin/program.pdb
-usr/bin/program2?exe
-?msvc:usr/bin/program2.pdb
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/1 basic/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/1 basic/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/1 basic/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/1 basic/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,8 @@
+{
+  "installed": [
+    {"type": "exe", "file": "usr/bin/program"},
+    {"type": "pdb", "file": "usr/bin/program"},
+    {"type": "exe", "file": "usr/bin/program2"},
+    {"type": "pdb", "file": "usr/bin/program2"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/2 sharedlib/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/2 sharedlib/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/2 sharedlib/installed_files.txt"	2019-06-27 16:43:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/2 sharedlib/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,6 +0,0 @@
-usr/bin/prog?exe
-?msvc:usr/bin/prog.pdb
-?gcc:usr/lib/libstuff.so
-?msvc:usr/bin/stuff.dll
-?msvc:usr/bin/stuff.pdb
-?msvc:usr/lib/stuff.dll.lib
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/2 sharedlib/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/2 sharedlib/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/2 sharedlib/meson.build"	2020-01-07 21:12:11.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/2 sharedlib/meson.build"	2021-01-09 10:14:21.000000000 +0000
@@ -1,5 +1,9 @@
 project('rust shared library', 'rust')
 
+if host_machine.system() == 'darwin'
+  error('MESON_SKIP_TEST: doesnt work right on macos, please fix!')
+endif
+
 l = shared_library('stuff', 'stuff.rs', install : true)
 e = executable('prog', 'prog.rs', link_with : l, install : true)
 
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/2 sharedlib/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/2 sharedlib/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/2 sharedlib/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/2 sharedlib/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,10 @@
+{
+  "installed": [
+    {"type": "exe", "file": "usr/bin/prog"},
+    {"type": "pdb", "file": "usr/bin/prog"},
+    {"type": "file", "platform": "gcc", "file": "usr/lib/libstuff.so"},
+    {"type": "file", "platform": "msvc", "file": "usr/bin/stuff.dll"},
+    {"type": "pdb", "file": "usr/bin/stuff"},
+    {"type": "file", "platform": "msvc", "file": "usr/lib/stuff.dll.lib"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/3 staticlib/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/3 staticlib/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/3 staticlib/installed_files.txt"	2019-06-27 16:43:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/3 staticlib/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-usr/bin/prog?exe
-?msvc:usr/bin/prog.pdb
-usr/lib/libstuff.rlib
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/3 staticlib/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/3 staticlib/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/3 staticlib/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/3 staticlib/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "installed": [
+    {"type": "exe", "file": "usr/bin/prog"},
+    {"type": "pdb", "file": "usr/bin/prog"},
+    {"type": "file", "file": "usr/lib/libstuff.rlib"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/4 polyglot/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/4 polyglot/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/4 polyglot/installed_files.txt"	2019-06-27 16:43:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/4 polyglot/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,7 +0,0 @@
-usr/bin/prog?exe
-?msvc:usr/bin/prog.pdb
-?gcc:usr/lib/libstuff.so
-?msvc:usr/bin/stuff.dll
-?msvc:usr/lib/stuff.dll.lib
-?msvc:usr/bin/stuff.pdb
-
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/4 polyglot/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/4 polyglot/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/4 polyglot/meson.build"	2020-01-07 21:12:12.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/4 polyglot/meson.build"	2021-01-09 10:14:21.000000000 +0000
@@ -1,5 +1,9 @@
 project('rust and c polyglot executable', 'c', 'rust')
 
+if host_machine.system() == 'darwin'
+  error('MESON_SKIP_TEST: doesnt work right on macos, please fix!')
+endif
+
 l = library('stuff', 'stuff.rs', rust_crate_type: 'cdylib', install : true)
 e = executable('prog', 'prog.c', link_with : l, install : true)
 test('polyglottest', e)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/4 polyglot/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/4 polyglot/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/4 polyglot/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/4 polyglot/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,10 @@
+{
+  "installed": [
+    {"type": "exe", "file": "usr/bin/prog"},
+    {"type": "pdb", "file": "usr/bin/prog"},
+    {"type": "file", "platform": "gcc", "file": "usr/lib/libstuff.so"},
+    {"type": "file", "platform": "msvc", "file": "usr/bin/stuff.dll"},
+    {"type": "file", "platform": "msvc", "file": "usr/lib/stuff.dll.lib"},
+    {"type": "pdb", "file": "usr/bin/stuff"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/5 polyglot static/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/5 polyglot static/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/5 polyglot static/installed_files.txt"	2019-06-27 16:43:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/5 polyglot static/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-usr/bin/prog?exe
-?msvc:usr/bin/prog.pdb
-usr/lib/libstuff.a
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/5 polyglot static/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/5 polyglot static/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/5 polyglot static/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/5 polyglot static/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "installed": [
+    {"type": "exe", "file": "usr/bin/prog"},
+    {"type": "pdb", "file": "usr/bin/prog"},
+    {"type": "file", "file": "usr/lib/libstuff.a"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/6 named staticlib/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/6 named staticlib/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/6 named staticlib/installed_files.txt"	2019-06-27 16:43:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/6 named staticlib/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-usr/bin/prog?exe
-?msvc:usr/bin/prog.pdb
-usr/lib/libnamed_stuff.rlib
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/6 named staticlib/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/6 named staticlib/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/6 named staticlib/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/6 named staticlib/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "installed": [
+    {"type": "exe", "file": "usr/bin/prog"},
+    {"type": "pdb", "file": "usr/bin/prog"},
+    {"type": "file", "file": "usr/lib/libnamed_stuff.rlib"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/7 private crate collision/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/7 private crate collision/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/7 private crate collision/installed_files.txt"	2019-06-27 16:43:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/7 private crate collision/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-usr/bin/prog?exe
-?msvc:usr/bin/prog.pdb
-usr/lib/librand.rlib
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/7 private crate collision/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/7 private crate collision/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/rust/7 private crate collision/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/rust/7 private crate collision/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "installed": [
+    {"type": "exe", "file": "usr/bin/prog"},
+    {"type": "pdb", "file": "usr/bin/prog"},
+    {"type": "file", "file": "usr/lib/librand.rlib"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/12 promote/subprojects/s2/subprojects/athing.wrap" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/12 promote/subprojects/s2/subprojects/athing.wrap"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/12 promote/subprojects/s2/subprojects/athing.wrap"	2018-10-31 09:31:20.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/12 promote/subprojects/s2/subprojects/athing.wrap"	2020-08-15 16:27:05.000000000 +0000
@@ -1,2 +1 @@
-The contents of this wrap file are never evaluated so they
-can be anything.
+[wrap-file]
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/35 dist script/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/35 dist script/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/35 dist script/meson.build"	2018-12-09 14:27:16.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/35 dist script/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -5,3 +5,4 @@
 test('compare', exe)
 
 meson.add_dist_script('replacer.py', '"incorrect"', '"correct"')
+meson.add_dist_script(find_program('replacer.py'), '"incorrect"', '"correct"')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/36 exe_wrapper behaviour/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/36 exe_wrapper behaviour/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/36 exe_wrapper behaviour/meson.build"	2018-10-31 09:31:57.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/36 exe_wrapper behaviour/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -1,7 +1,7 @@
 project('exe wrapper behaviour', 'c')
 
 assert(meson.is_cross_build(), 'not setup as cross build')
-assert(meson.has_exe_wrapper(), 'exe wrapper not defined?')
+assert(meson.has_exe_wrapper(), 'exe wrapper not defined?')  # intentionally not changed to can_run_host_binaries,
 
 exe = executable('prog', 'prog.c')
 
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/39 python extmodule/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/39 python extmodule/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/39 python extmodule/meson.build"	2018-10-31 09:31:57.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/39 python extmodule/meson.build"	2020-10-18 21:29:13.000000000 +0000
@@ -6,7 +6,7 @@
 py = py_mod.find_installation(get_option('python'), required : false)
 
 if py.found()
-  py_dep = py.dependency()
+  py_dep = py.dependency(required : false)
 
   if py_dep.found()
     subdir('ext')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/40 external, internal library rpath/built library/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/40 external, internal library rpath/built library/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/40 external, internal library rpath/built library/meson.build"	2018-10-31 09:31:57.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/40 external, internal library rpath/built library/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -18,4 +18,9 @@
 if host_machine.system() == 'darwin'
   e = executable('prog', 'prog.c', link_with: l, install: true)
   test('testprog', e)
+elif host_machine.system() == 'linux'
+  e = executable('prog', 'prog.c', link_with: l, install: true,
+        install_rpath: '$ORIGIN/..' / get_option('libdir'),
+      )
+  test('testprog', e)
 endif
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/40 external, internal library rpath/external library/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/40 external, internal library rpath/external library/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/40 external, internal library rpath/external library/meson.build"	2018-10-31 09:31:57.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/40 external, internal library rpath/external library/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -4,16 +4,16 @@
 l = shared_library('faa_pkg', 'faa.c', install: true)
 
 if host_machine.system() == 'darwin'
-  frameworks = ['-framework', 'CoreFoundation', '-framework', 'CoreMedia']
+  ldflags = ['-framework', 'CoreFoundation', '-framework', 'CoreMedia']
   allow_undef_args = ['-Wl,-undefined,dynamic_lookup']
 else
-  frameworks = []
+  ldflags = ['-Wl,-rpath,${libdir}']
   allow_undef_args = []
 endif
 
 pkg = import('pkgconfig')
 pkg.generate(name: 'faa_pkg',
-             libraries: [l] + frameworks,
+             libraries: [l] + ldflags,
              description: 'FAA, a pkg-config test library')
 
 # cygwin DLLs can't have undefined symbols
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/54 clang-format/dummydir.h/dummy.dat" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/54 clang-format/dummydir.h/dummy.dat"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/54 clang-format/dummydir.h/dummy.dat"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/54 clang-format/dummydir.h/dummy.dat"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1 @@
+Placeholder to track enclosing directory in git. Not to be analyzed.
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/57 introspection/cp.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/57 introspection/cp.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/57 introspection/cp.py"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/57 introspection/cp.py"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,5 @@
+#! /usr/bin/env python3
+
+import sys
+from shutil import copyfile
+copyfile(*sys.argv[1:])
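Note: cp.py above is a tiny portable stand-in for cp, used by the custom_target in the introspection meson.build below. A quick way to exercise it by hand (a sketch only, assuming cp.py sits in the current directory):

    # Sketch only: run cp.py on a scratch file and check the copy round-trips.
    import filecmp
    import subprocess
    import sys
    import tempfile

    with tempfile.NamedTemporaryFile(delete=False) as src:
        src.write(b'introspection test data')
    dst = src.name + '.copy'
    subprocess.check_call([sys.executable, 'cp.py', src.name, dst])
    assert filecmp.cmp(src.name, dst, shallow=False)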
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/57 introspection/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/57 introspection/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/57 introspection/meson.build"	2019-12-29 22:47:27.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/57 introspection/meson.build"	2020-09-17 22:00:44.000000000 +0000
@@ -8,11 +8,13 @@
 b2 = get_option('test_opt2')
 test_bool = b1 or b2
 test_bool = b1 and b2
+test_bool = not test_bool
 
 set_variable('list_test_plusassign', [])
 list_test_plusassign += ['bugs everywhere']
+dict_test = {list_test_plusassign[0]: 'even more bugs'}
 
-if false
+if not true
   vers_str = '<=99.9.9'
   dependency('somethingthatdoesnotexist', required: true, version: '>=1.2.3')
   dependency('look_i_have_a_fallback', version: ['>=1.0.0', vers_str], fallback: ['oh_no', 'the_subproject_does_not_exist'])
@@ -25,7 +27,10 @@
 var2 = 2.to_string()
 var3 = 'test3'
 
-t1 = executable('test' + var1, ['t1.cpp'], link_with: [sharedlib], install: true, build_by_default: get_option('test_opt2'))
+cus = custom_target('custom target test', output: 'file2', input: 'cp.py',
+                    command: [find_program('cp.py'), '@INPUT@', '@OUTPUT@'])
+
+t1 = executable('test' + var1, ['t1.cpp'], link_with: [sharedlib], install: not false, build_by_default: get_option('test_opt2'))
 t2 = executable('test@0@'.format('@0@'.format(var2)), sources: ['t2.cpp'], link_with: [staticlib])
 t3 = executable(var3, 't3.cpp', link_with: [sharedlib, staticlib], dependencies: [dep1])
 
@@ -43,5 +48,18 @@
 message(osmesa_lib_name) # Infinite recursion gets triggered here when the parameter osmesa_lib_name is resolved
 
 test('test case 1', t1)
-test('test case 2', t2)
-benchmark('benchmark 1', t3)
+test('test case 2', t2, depends: t3)
+benchmark('benchmark 1', t3, args: cus)
+
+### Stuff to test the AST JSON printer
+foreach x : ['a', 'b', 'c']
+  if x == 'a'
+    message('a')
+  elif x == 'b'
+    message('a')
+  else
+    continue
+  endif
+  break
+  continue
+endforeach
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/57 introspection/sharedlib/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/57 introspection/sharedlib/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/57 introspection/sharedlib/meson.build"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/57 introspection/sharedlib/meson.build"	2020-10-18 21:29:13.000000000 +0000
@@ -1,2 +1,2 @@
 SRC_shared = ['shared.cpp']
-sharedlib = shared_library('sharedTestLib', SRC_shared)
+sharedlib = shared_library('sharedTestLib', SRC_shared, extra_files: ['shared.hpp'])
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/57 introspection/staticlib/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/57 introspection/staticlib/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/57 introspection/staticlib/meson.build"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/57 introspection/staticlib/meson.build"	2020-10-18 21:29:13.000000000 +0000
@@ -1,2 +1,3 @@
 SRC_static = ['static.c']
-staticlib = static_library('staticTestLib', SRC_static)
+extra_static = files(['static.h'])
+staticlib = static_library('staticTestLib', SRC_static, extra_files: extra_static)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/57 introspection/staticlib/static.h" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/57 introspection/staticlib/static.h"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/57 introspection/staticlib/static.h"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/57 introspection/staticlib/static.h"	2020-10-18 21:29:13.000000000 +0000
@@ -1,3 +1,11 @@
 #pragma once
 
-int add_numbers(int a, int b);
\ No newline at end of file
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+int add_numbers(int a, int b);
+
+#ifdef __cplusplus
+}
+#endif
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/59 introspect buildoptions/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/59 introspect buildoptions/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/59 introspect buildoptions/meson.build"	2019-09-16 21:20:45.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/59 introspect buildoptions/meson.build"	2021-01-09 10:14:21.000000000 +0000
@@ -1,4 +1,4 @@
-project('introspect buildargs', ['c'], default_options: ['c_std=c11', 'cpp_std=c++14', 'buildtype=release'])
+project('introspect buildargs', ['c'], default_options: ['c_std=c99', 'cpp_std=c++14', 'buildtype=release'])
 
 subA = subproject('projectA')
 
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/61 identity cross/build_wrapper.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/61 identity cross/build_wrapper.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/61 identity cross/build_wrapper.py"	2020-01-23 12:51:19.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/61 identity cross/build_wrapper.py"	2020-08-15 16:27:05.000000000 +0000
@@ -1,5 +1,11 @@
 #!/usr/bin/env python3
 
-import subprocess, sys
+import subprocess, sys, platform
 
-subprocess.call(["cc", "-DEXTERNAL_BUILD"] + sys.argv[1:])
+# Meson does not yet support Studio cc on Solaris, only gcc or clang
+if platform.system() == 'SunOS':
+    cc = 'gcc'
+else:
+    cc = 'cc'
+
+subprocess.call([cc, "-DEXTERNAL_BUILD"] + sys.argv[1:])
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/61 identity cross/host_wrapper.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/61 identity cross/host_wrapper.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/61 identity cross/host_wrapper.py"	2020-01-23 12:51:19.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/61 identity cross/host_wrapper.py"	2020-08-15 16:27:05.000000000 +0000
@@ -1,5 +1,11 @@
 #!/usr/bin/env python3
 
-import subprocess, sys
+import subprocess, sys, platform
 
-subprocess.call(["cc", "-DEXTERNAL_HOST"] + sys.argv[1:])
+# Meson does not yet support Studio cc on Solaris, only gcc or clang
+if platform.system() == 'SunOS':
+    cc = 'gcc'
+else:
+    cc = 'cc'
+
+subprocess.call([cc, "-DEXTERNAL_HOST"] + sys.argv[1:])
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/67 static archive stripping/app/appA.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/67 static archive stripping/app/appA.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/67 static archive stripping/app/appA.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/67 static archive stripping/app/appA.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+#include <stdio.h>
+#include <libA.h>
+
+int main(void) { printf("The answer is: %d\n", libA_func()); }
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/67 static archive stripping/app/appB.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/67 static archive stripping/app/appB.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/67 static archive stripping/app/appB.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/67 static archive stripping/app/appB.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+#include <stdio.h>
+#include <libB.h>
+
+int main(void) { printf("The answer is: %d\n", libB_func()); }
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/67 static archive stripping/app/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/67 static archive stripping/app/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/67 static archive stripping/app/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/67 static archive stripping/app/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+project('app', ['c'])
+
+a = dependency('test-a')
+b = dependency('test-b')
+
+executable('appA', files('appA.c'), dependencies : a)
+executable('appB', files('appB.c'), dependencies : b)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/67 static archive stripping/lib/libA.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/67 static archive stripping/lib/libA.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/67 static archive stripping/lib/libA.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/67 static archive stripping/lib/libA.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,5 @@
+#include <libA.h>
+
+static int libA_func_impl(void) { return 0; }
+
+int libA_func(void) { return libA_func_impl(); }
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/67 static archive stripping/lib/libA.h" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/67 static archive stripping/lib/libA.h"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/67 static archive stripping/lib/libA.h"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/67 static archive stripping/lib/libA.h"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1 @@
+int libA_func(void);
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/67 static archive stripping/lib/libB.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/67 static archive stripping/lib/libB.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/67 static archive stripping/lib/libB.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/67 static archive stripping/lib/libB.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,5 @@
+#include <libB.h>
+
+static int libB_func_impl(void) { return 0; }
+
+int libB_func(void) { return libB_func_impl(); }
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/67 static archive stripping/lib/libB.h" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/67 static archive stripping/lib/libB.h"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/67 static archive stripping/lib/libB.h"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/67 static archive stripping/lib/libB.h"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1 @@
+int libB_func(void);
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/67 static archive stripping/lib/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/67 static archive stripping/lib/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/67 static archive stripping/lib/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/67 static archive stripping/lib/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,23 @@
+project('lib', ['c'])
+
+pkg = import('pkgconfig')
+
+a = library('test-a', files('libA.c'), install: true)
+install_headers(files('libA.h'), subdir: 'libA')
+pkg.generate(
+    a,
+    version: '0.0',
+    description: 'test library libA',
+    filebase: 'test-a',
+    name: 'test library libA',
+    subdirs: 'libA')
+
+b = static_library('test-b', files('libB.c'), install: true)
+install_headers(files('libB.h'), subdir: 'libB')
+pkg.generate(
+    b,
+    version: '0.0',
+    description: 'test library libB',
+    filebase: 'test-b',
+    name: 'test library libB',
+    subdirs: 'libB')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static archive stripping/app/appA.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static archive stripping/app/appA.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static archive stripping/app/appA.c"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static archive stripping/app/appA.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-#include <stdio.h>
-#include <libA.h>
-
-int main(void) { printf("The answer is: %d\n", libA_func()); }
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static archive stripping/app/appB.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static archive stripping/app/appB.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static archive stripping/app/appB.c"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static archive stripping/app/appB.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-#include <stdio.h>
-#include <libB.h>
-
-int main(void) { printf("The answer is: %d\n", libB_func()); }
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static archive stripping/app/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static archive stripping/app/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static archive stripping/app/meson.build"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static archive stripping/app/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,7 +0,0 @@
-project('app', ['c'])
-
-a = dependency('test-a')
-b = dependency('test-b')
-
-executable('appA', files('appA.c'), dependencies : a)
-executable('appB', files('appB.c'), dependencies : b)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static archive stripping/lib/libA.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static archive stripping/lib/libA.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static archive stripping/lib/libA.c"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static archive stripping/lib/libA.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,5 +0,0 @@
-#include <libA.h>
-
-static int libA_func_impl(void) { return 0; }
-
-int libA_func(void) { return libA_func_impl(); }
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static archive stripping/lib/libA.h" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static archive stripping/lib/libA.h"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static archive stripping/lib/libA.h"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static archive stripping/lib/libA.h"	1970-01-01 00:00:00.000000000 +0000
@@ -1 +0,0 @@
-int libA_func(void);
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static archive stripping/lib/libB.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static archive stripping/lib/libB.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static archive stripping/lib/libB.c"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static archive stripping/lib/libB.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,5 +0,0 @@
-#include <libB.h>
-
-static int libB_func_impl(void) { return 0; }
-
-int libB_func(void) { return libB_func_impl(); }
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static archive stripping/lib/libB.h" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static archive stripping/lib/libB.h"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static archive stripping/lib/libB.h"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static archive stripping/lib/libB.h"	1970-01-01 00:00:00.000000000 +0000
@@ -1 +0,0 @@
-int libB_func(void);
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static archive stripping/lib/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static archive stripping/lib/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static archive stripping/lib/meson.build"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static archive stripping/lib/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,23 +0,0 @@
-project('lib', ['c'])
-
-pkg = import('pkgconfig')
-
-a = library('test-a', files('libA.c'), install: true)
-install_headers(files('libA.h'), subdir: 'libA')
-pkg.generate(
-    a,
-    version: '0.0',
-    description: 'test library libA',
-    filebase: 'test-a',
-    name: 'test library libA',
-    subdirs: 'libA')
-
-b = static_library('test-b', files('libB.c'), install: true)
-install_headers(files('libB.h'), subdir: 'libB')
-pkg.generate(
-    b,
-    version: '0.0',
-    description: 'test library libB',
-    filebase: 'test-b',
-    name: 'test library libB',
-    subdirs: 'libB')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func10.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func10.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func10.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func10.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+int func10()
+{
+  return 1;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func11.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func11.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func11.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func11.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+int func10();
+
+int func11()
+{
+  return func10() + 1;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func12.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func12.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func12.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func12.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+int func10();
+int func11();
+
+int func12()
+{
+  return func10() + func11();
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func14.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func14.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func14.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func14.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+int func14()
+{
+  return 1;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func15.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func15.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func15.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func15.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+int func14();
+
+int func15()
+{
+  return func14() + 1;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func16.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func16.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func16.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func16.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+int func15();
+
+int func16()
+{
+  return func15() + 1;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func17.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func17.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func17.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func17.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+int func17()
+{
+  return 1;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func18.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func18.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func18.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func18.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+int func17();
+
+int func18()
+{
+  return func17() + 1;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func19.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func19.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func19.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func19.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+int func17();
+int func18();
+
+int func19()
+{
+  return func17() + func18();
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func1.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func1.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func1.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func1.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,9 @@
+int func1()
+{
+  return 1;
+}
+
+int func1b()
+{
+  return 1;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func2.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func2.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func2.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func2.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+int func1();
+
+int func2()
+{
+  return func1() + 1;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func3.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func3.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func3.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func3.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+int func3()
+{
+  return 1;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func4.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func4.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func4.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func4.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+int func3();
+
+int func4()
+{
+  return func3() + 1;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func5.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func5.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func5.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func5.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+int func5()
+{
+  return 1;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func6.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func6.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func6.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func6.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+int func5();
+
+int func6()
+{
+  return func5() + 1;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func7.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func7.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func7.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func7.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+int func7()
+{
+  return 1;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func8.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func8.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func8.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func8.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+int func7();
+
+int func8()
+{
+  return func7() + 1;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func9.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func9.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/func9.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/func9.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+int func8();
+
+int func9()
+{
+  return func8() + 1;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/lib/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/lib/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,80 @@
+project('test static link libs', 'c')
+
+pkg = import('pkgconfig')
+
+# libfunc2 should contain both func1() and func2() symbols
+libfunc1 = static_library('func1', 'func1.c',
+  install : false)
+libfunc2 = static_library('func2', 'func2.c',
+  link_whole : libfunc1,
+  install : true)
+
+# Same as above, but with link_with instead of link_whole,
+# libfunc4 should contain both func3() and func4() symbols
+libfunc3 = static_library('func3', 'func3.c',
+  install : false)
+libfunc4 = static_library('func4', 'func4.c',
+  link_with : libfunc3,
+  install : true)
+
+# Same as above, but also generate a pkg-config file. Use both_libraries() to
+# make sure a complete .pc file gets generated. libfunc5 should not be mentioned
+# in the .pc file because it is not installed.
+libfunc5 = static_library('func5', 'func5.c',
+  install : false)
+libfunc6 = both_libraries('func6', 'func6.c',
+  link_with : libfunc5,
+  install : true)
+pkg.generate(libfunc6)
+
+# libfunc9 should contain both func8() and func9() but not func7() because that
+# one gets installed. Also test that link_with and link_whole work the same way
+# because libfunc8 is not installed.
+libfunc7 = static_library('func7', 'func7.c',
+  install : true)
+libfunc8 = static_library('func8', 'func8.c',
+  link_with : libfunc7,
+  install : false)
+libfunc9_linkwith = static_library('func9_linkwith', 'func9.c',
+  link_with : libfunc8,
+  install : true)
+libfunc9_linkwhole = static_library('func9_linkwhole', 'func9.c',
+  link_whole : libfunc8,
+  install : true)
+
+# Pattern found in mesa:
+# - libfunc11 uses func10()
+# - libfunc12 uses both func10() and func11()
+# When a shared library uses link_whole on libfunc12, we ensure we don't include
+# func10.c.o twice, which would fail to link.
+libfunc10 = static_library('func10', 'func10.c',
+  install : false)
+libfunc11 = static_library('func11', 'func11.c',
+  link_with : libfunc10,
+  install : false)
+libfunc12 = static_library('func12', 'func12.c',
+  link_with : [libfunc10, libfunc11],
+  install : false)
+libfunc13 = shared_library('func13', link_whole : libfunc12)
+
+# libfunc16 should contain func14(), func15() and func16()
+libfunc14 = static_library('func14', 'func14.c',
+  install : false)
+libfunc15 = static_library('func15', 'func15.c',
+  link_with : libfunc14,
+  install : false)
+libfunc16 = static_library('func16', 'func16.c',
+  link_with : libfunc15,
+  install : true)
+
+# Verify that func17.c.o gets included only once in libfunc19, otherwise
+# func19-shared would fail with a duplicated symbol.
+libfunc17 = static_library('func17', 'func17.c',
+  install : false)
+libfunc18 = static_library('func18', 'func18.c',
+  link_with : libfunc17,
+  install : false)
+libfunc19 = static_library('func19', 'func19.c',
+  link_whole : [libfunc17, libfunc18],
+  install : false)
+shared_library('func19-shared', link_whole : [libfunc19])
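The semantics exercised here: link_whole always folds every object file of the inner static library into the outer archive, while link_with normally only records a link-time dependency. As the comments above note, Meson treats the two the same when the inner library is not installed, so the installed archive stays self-contained. A minimal sketch, using hypothetical target names rather than the ones in this test:

    inner = static_library('inner', 'inner.c', install : false)
    # link_whole: the resulting archive always contains inner.c.o
    outer_whole = static_library('outer_whole', 'outer.c', link_whole : inner, install : true)
    # link_with: inner.c.o is still pulled in here, because inner is not installed
    outer_with = static_library('outer_with', 'outer.c', link_with : inner, install : true)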
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,32 @@
+project('test static link', 'c')
+
+cc = meson.get_compiler('c')
+
+# Verify that installed libfunc2.a is usable
+func2_dep = cc.find_library('func2')
+test('test1', executable('test1', 'test1.c', dependencies : func2_dep))
+
+# Verify that installed libfunc4.a is usable
+func4_dep = cc.find_library('func4')
+test('test2', executable('test2', 'test2.c', dependencies : func4_dep))
+
+# Verify that installed pkg-config file is usable for both shared and static link
+func6_static_dep = dependency('func6', static : true)
+test('test3-static', executable('test3-static', 'test3.c',
+    dependencies : func6_static_dep))
+func6_shared_dep = dependency('func6', static : false)
+test('test3-shared', executable('test3-shared', 'test3.c',
+    dependencies : func6_shared_dep))
+
+# Verify that installed libfunc9.a contains func8() and func9() but not func7()
+func7_dep = cc.find_library('func7')
+func9_linkwhole_dep = cc.find_library('func9_linkwhole')
+test('test4-linkwhole', executable('test4-linkwhole', 'test4.c',
+  dependencies : [func7_dep, func9_linkwhole_dep]))
+func9_linkwith_dep = cc.find_library('func9_linkwith')
+test('test4-linkwith', executable('test4-linkwith', 'test4.c',
+  dependencies : [func7_dep, func9_linkwith_dep]))
+
+# Verify that installed libfunc16.a is usable
+libfunc16_dep = cc.find_library('func16')
+test('test5', executable('test5', 'test5.c', dependencies: libfunc16_dep))
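The lookups above call cc.find_library() and dependency() directly because the test environment guarantees the artifacts exist. Outside a test, the two are often combined, preferring the pkg-config file and falling back to a bare library search; a sketch of that pattern, reusing the func6 name only for illustration:

    func6_dep = dependency('func6', static : true, required : false)
    if not func6_dep.found()
      func6_dep = cc.find_library('func6')
    endif
    executable('test3-fallback', 'test3.c', dependencies : func6_dep)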
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/test1.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/test1.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/test1.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/test1.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+int func1b();
+int func2();
+
+int main(int argc, char *argv[])
+{
+  return func2() + func1b() == 3 ? 0 : 1;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/test2.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/test2.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/test2.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/test2.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+int func4();
+
+int main(int argc, char *argv[])
+{
+  return func4() == 2 ? 0 : 1;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/test3.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/test3.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/test3.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/test3.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+int func6();
+
+int main(int argc, char *argv[])
+{
+  return func6() == 2 ? 0 : 1;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/test4.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/test4.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/test4.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/test4.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+int func9();
+
+int main(int argc, char *argv[])
+{
+  return func9() == 3 ? 0 : 1;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/test5.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/test5.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/68 static link/test5.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/68 static link/test5.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+int func16();
+
+int main(int argc, char *argv[])
+{
+  return func16() == 3 ? 0 : 1;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func10.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func10.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func10.c"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func10.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-int func10()
-{
-  return 1;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func11.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func11.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func11.c"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func11.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,6 +0,0 @@
-int func10();
-
-int func11()
-{
-  return func10() + 1;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func12.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func12.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func12.c"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func12.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,7 +0,0 @@
-int func10();
-int func11();
-
-int func12()
-{
-  return func10() + func11();
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func14.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func14.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func14.c"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func14.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-int func14()
-{
-  return 1;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func15.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func15.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func15.c"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func15.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,6 +0,0 @@
-int func14();
-
-int func15()
-{
-  return func14() + 1;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func16.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func16.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func16.c"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func16.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,6 +0,0 @@
-int func15();
-
-int func16()
-{
-  return func15() + 1;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func17.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func17.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func17.c"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func17.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-int func17()
-{
-  return 1;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func18.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func18.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func18.c"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func18.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,6 +0,0 @@
-int func17();
-
-int func18()
-{
-  return func17() + 1;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func19.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func19.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func19.c"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func19.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,7 +0,0 @@
-int func17();
-int func18();
-
-int func19()
-{
-  return func17() + func18();
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func1.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func1.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func1.c"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func1.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,9 +0,0 @@
-int func1()
-{
-  return 1;
-}
-
-int func1b()
-{
-  return 1;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func2.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func2.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func2.c"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func2.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,6 +0,0 @@
-int func1();
-
-int func2()
-{
-  return func1() + 1;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func3.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func3.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func3.c"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func3.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-int func3()
-{
-  return 1;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func4.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func4.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func4.c"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func4.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,6 +0,0 @@
-int func3();
-
-int func4()
-{
-  return func3() + 1;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func5.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func5.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func5.c"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func5.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-int func5()
-{
-  return 1;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func6.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func6.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func6.c"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func6.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,6 +0,0 @@
-int func5();
-
-int func6()
-{
-  return func5() + 1;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func7.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func7.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func7.c"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func7.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-int func7()
-{
-  return 1;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func8.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func8.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func8.c"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func8.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,6 +0,0 @@
-int func7();
-
-int func8()
-{
-  return func7() + 1;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func9.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func9.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/func9.c"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/func9.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,6 +0,0 @@
-int func8();
-
-int func9()
-{
-  return func8() + 1;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/lib/meson.build"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/lib/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,80 +0,0 @@
-project('test static link libs', 'c')
-
-pkg = import('pkgconfig')
-
-# libfunc2 should contain both func1() and func2() symbols
-libfunc1 = static_library('func1', 'func1.c',
-  install : false)
-libfunc2 = static_library('func2', 'func2.c',
-  link_whole : libfunc1,
-  install : true)
-
-# Same as above, but with link_with instead of link_whole,
-# libfunc4 should contain both func3() and func4() symbols
-libfunc3 = static_library('func3', 'func3.c',
-  install : false)
-libfunc4 = static_library('func4', 'func4.c',
-  link_with : libfunc3,
-  install : true)
-
-# Same as above, but also generate a pkg-config file. Use both_libraries() to
-# make sure a complete .pc file gets generated. libfunc5 should not be mentioned
-# in the .pc file because it is not installed.
-libfunc5 = static_library('func5', 'func5.c',
-  install : false)
-libfunc6 = both_libraries('func6', 'func6.c',
-  link_with : libfunc5,
-  install : true)
-pkg.generate(libfunc6)
-
-# libfunc9 should contain both func8() and func9() but not func7() because that
-# one gets installed. Also test that link_with and link_whole work the same way
-# because libfunc8 is not installed.
-libfunc7 = static_library('func7', 'func7.c',
-  install : true)
-libfunc8 = static_library('func8', 'func8.c',
-  link_with : libfunc7,
-  install : false)
-libfunc9_linkwith = static_library('func9_linkwith', 'func9.c',
-  link_with : libfunc8,
-  install : true)
-libfunc9_linkwhole = static_library('func9_linkwhole', 'func9.c',
-  link_whole : libfunc8,
-  install : true)
-
-# Pattern found in mesa:
-# - libfunc11 uses func10()
-# - libfunc12 uses both func10() and func11()
-# When a shared library uses link_whole on libfunc12, we ensure we don't include
-# func10.c.o twice, which would fail to link.
-libfunc10 = static_library('func10', 'func10.c',
-  install : false)
-libfunc11 = static_library('func11', 'func11.c',
-  link_with : libfunc10,
-  install : false)
-libfunc12 = static_library('func12', 'func12.c',
-  link_with : [libfunc10, libfunc11],
-  install : false)
-libfunc13 = shared_library('func13', link_whole : libfunc12)
-
-# libfunc16 should contain func14(), func15() and func16()
-libfunc14 = static_library('func14', 'func14.c',
-  install : false)
-libfunc15 = static_library('func15', 'func15.c',
-  link_with : libfunc14,
-  install : false)
-libfunc16 = static_library('func16', 'func16.c',
-  link_with : libfunc15,
-  install : true)
-
-# Verify that func17.c.o gets included only once in libfunc19, otherwise
-# func19-shared would fail with a duplicated symbol.
-libfunc17 = static_library('func17', 'func17.c',
-  install : false)
-libfunc18 = static_library('func18', 'func18.c',
-  link_with : libfunc17,
-  install : false)
-libfunc19 = static_library('func19', 'func19.c',
-  link_whole : [libfunc17, libfunc18],
-  install : false)
-shared_library('func19-shared', link_whole : [libfunc19])
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/meson.build"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,32 +0,0 @@
-project('test static link', 'c')
-
-cc = meson.get_compiler('c')
-
-# Verify that installed libfunc2.a is usable
-func2_dep = cc.find_library('func2')
-test('test1', executable('test1', 'test1.c', dependencies : func2_dep))
-
-# Verify that installed libfunc4.a is usable
-func4_dep = cc.find_library('func4')
-test('test2', executable('test2', 'test2.c', dependencies : func4_dep))
-
-# Verify that installed pkg-config file is usable for both shared and static link
-func6_static_dep = dependency('func6', static : true)
-test('test3-static', executable('test3-static', 'test3.c',
-    dependencies : func6_static_dep))
-func6_shared_dep = dependency('func6', static : false)
-test('test3-shared', executable('test3-shared', 'test3.c',
-    dependencies : func6_shared_dep))
-
-# Verify that installed libfunc9.a contains func8() and func9() but not func7()
-func7_dep = cc.find_library('func7')
-func9_linkwhole_dep = cc.find_library('func9_linkwhole')
-test('test4-linkwhole', executable('test4-linkwhole', 'test4.c',
-  dependencies : [func7_dep, func9_linkwhole_dep]))
-func9_linkwith_dep = cc.find_library('func9_linkwith')
-test('test4-linkwith', executable('test4-linkwith', 'test4.c',
-  dependencies : [func7_dep, func9_linkwith_dep]))
-
-# Verify that installed libfunc16.a is usable
-libfunc16_dep = cc.find_library('func16')
-test('test5', executable('test5', 'test5.c', dependencies: libfunc16_dep))
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/test1.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/test1.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/test1.c"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/test1.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,7 +0,0 @@
-int func1b();
-int func2();
-
-int main(int argc, char *argv[])
-{
-  return func2() + func1b() == 3 ? 0 : 1;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/test2.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/test2.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/test2.c"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/test2.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,6 +0,0 @@
-int func4();
-
-int main(int argc, char *argv[])
-{
-  return func4() == 2 ? 0 : 1;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/test3.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/test3.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/test3.c"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/test3.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,6 +0,0 @@
-int func6();
-
-int main(int argc, char *argv[])
-{
-  return func6() == 2 ? 0 : 1;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/test4.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/test4.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/test4.c"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/test4.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,6 +0,0 @@
-int func9();
-
-int main(int argc, char *argv[])
-{
-  return func9() == 3 ? 0 : 1;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/test5.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/test5.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/69 static link/test5.c"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/69 static link/test5.c"	1970-01-01 00:00:00.000000000 +0000
@@ -1,6 +0,0 @@
-int func16();
-
-int main(int argc, char *argv[])
-{
-  return func16() == 3 ? 0 : 1;
-}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/70 clang-tidy/dummydir.h/dummy.dat" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/70 clang-tidy/dummydir.h/dummy.dat"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/70 clang-tidy/dummydir.h/dummy.dat"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/70 clang-tidy/dummydir.h/dummy.dat"	2020-10-18 21:29:13.000000000 +0000
@@ -0,0 +1 @@
+Placeholder to track enclosing directory in git. Not to be analyzed.
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/72 cross test passed/exewrapper.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/72 cross test passed/exewrapper.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/72 cross test passed/exewrapper.py"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/72 cross test passed/exewrapper.py"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,24 @@
+#!/usr/bin/env python3
+# Test that the MESON_EXE_WRAPPER environment variable is set
+
+import argparse
+import os
+import sys
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument('binary')  # unused, but needed for test behavior
+    parser.add_argument('--expected', action='store_true')
+    args = parser.parse_args()
+
+    defined = 'MESON_EXE_WRAPPER' in os.environ
+
+    if args.expected != defined:
+        print(os.environ, file=sys.stderr)
+        return 1
+    return 0
+
+
+if __name__ == "__main__":
+    sys.exit(main())
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/72 cross test passed/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/72 cross test passed/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/72 cross test passed/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/72 cross test passed/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,19 @@
+project(
+  'cross test passed',
+  'c',
+  version : '>= 0.51'
+)
+
+e = executable('exec', 'src/main.c')
+
+py = import('python').find_installation()
+
+test('root', e)
+test('main', py, args : [meson.current_source_dir() / 'script.py', e])
+
+wrapper_args = []
+if get_option('expect')
+  wrapper_args += '--expected'
+endif
+
+test('exe_wrapper in env', py, args : [meson.current_source_dir() / 'exewrapper.py', e, wrapper_args])
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/72 cross test passed/meson_options.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/72 cross test passed/meson_options.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/72 cross test passed/meson_options.txt"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/72 cross test passed/meson_options.txt"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,5 @@
+option(
+    'expect',
+    type : 'boolean',
+    value : false,
+)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/72 cross test passed/script.py" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/72 cross test passed/script.py"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/72 cross test passed/script.py"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/72 cross test passed/script.py"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+#!/usr/bin/env python3
+
+import subprocess
+import sys
+
+if __name__ == "__main__":
+    sys.exit(subprocess.run(sys.argv[1:]).returncode)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/72 cross test passed/src/main.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/72 cross test passed/src/main.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/72 cross test passed/src/main.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/72 cross test passed/src/main.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+int main(int argc, char const *argv[])
+{
+    return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/73 summary/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/73 summary/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/73 summary/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/73 summary/meson.build"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,16 @@
+project('My Project', version : '1.0')
+
+subproject('sub')
+subproject('sub2', required : false)
+
+summary({'Some boolean': false,
+         'Another boolean': true,
+         'Some string': 'Hello World',
+         'A list': ['string', 1, true],
+         'empty list': [],
+        }, section: 'Configuration')
+summary('A number', 1, section: 'Configuration')
+summary('yes', true, bool_yn : true, section: 'Configuration')
+summary('no', false, bool_yn : true, section: 'Configuration')
+summary('coma list', ['a', 'b', 'c'], list_sep: ', ', section: 'Configuration')
+summary('long coma list', ['alpha', 'alphacolor', 'apetag', 'audiofx', 'audioparsers', 'auparse', 'autodetect', 'avi'], list_sep: ', ', section: 'Plugins')
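The literal values above only exercise summary()'s formatting. In ordinary projects the call is usually fed real configuration results; a small sketch along those lines, with a hypothetical dependency name:

    glib_dep = dependency('glib-2.0', required : false)
    summary('glib-2.0', glib_dep.found(), bool_yn : true, section : 'Dependencies')
    summary('Install prefix', get_option('prefix'), section : 'Directories')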
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/73 summary/subprojects/sub/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/73 summary/subprojects/sub/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/73 summary/subprojects/sub/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/73 summary/subprojects/sub/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+project('Some Subproject', version : '2.0')
+
+summary('string', 'bar')
+summary({'integer': 1, 'boolean': true})
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/73 summary/subprojects/sub2/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/73 summary/subprojects/sub2/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/73 summary/subprojects/sub2/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/73 summary/subprojects/sub2/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,5 @@
+project('sub2')
+
+error('This subproject failed')
+
+summary('Section', 'Should not be seen')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/73 wrap file url/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/73 wrap file url/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/73 wrap file url/meson.build"	2020-01-23 12:51:19.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/73 wrap file url/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-project('test wrap with file url')
-
-exe = subproject('foo').get_variable('foo_exe')
-test('test1', exe)
Binary files /tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/73 wrap file url/subprojects/foo-patch.tar.xz and /tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/73 wrap file url/subprojects/foo-patch.tar.xz differ
Binary files /tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/73 wrap file url/subprojects/foo.tar.xz and /tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/73 wrap file url/subprojects/foo.tar.xz differ
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/74 summary/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/74 summary/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/74 summary/meson.build"	2020-02-25 18:00:47.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/74 summary/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,14 +0,0 @@
-project('My Project', version : '1.0')
-
-subproject('sub')
-subproject('sub2', required : false)
-
-summary({'Some boolean': false,
-         'Another boolean': true,
-         'Some string': 'Hello World',
-         'A list': ['string', 1, true],
-         'empty list': [],
-        }, section: 'Configuration')
-summary('A number', 1, section: 'Configuration')
-summary('yes', true, bool_yn : true, section: 'Configuration')
-summary('no', false, bool_yn : true, section: 'Configuration')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/74 summary/subprojects/sub/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/74 summary/subprojects/sub/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/74 summary/subprojects/sub/meson.build"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/74 summary/subprojects/sub/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-project('Some Subproject', version : '2.0')
-
-summary('string', 'bar')
-summary({'integer': 1, 'boolean': true})
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/74 summary/subprojects/sub2/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/74 summary/subprojects/sub2/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/74 summary/subprojects/sub2/meson.build"	2020-01-23 22:29:05.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/74 summary/subprojects/sub2/meson.build"	1970-01-01 00:00:00.000000000 +0000
@@ -1,5 +0,0 @@
-project('sub2')
-
-error('This subproject failed')
-
-summary('Section', 'Should not be seen')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/74 wrap file url/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/74 wrap file url/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/74 wrap file url/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/74 wrap file url/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,4 @@
+project('test wrap with file url')
+
+exe = subproject('foo').get_variable('foo_exe')
+test('test1', exe)
Binary files /tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/74 wrap file url/subprojects/foo-patch.tar.xz and /tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/74 wrap file url/subprojects/foo-patch.tar.xz differ
Binary files /tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/74 wrap file url/subprojects/foo.tar.xz and /tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/74 wrap file url/subprojects/foo.tar.xz differ
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/75 dep files/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/75 dep files/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/75 dep files/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/75 dep files/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,16 @@
+project('test', 'c')
+
+python = import('python').find_installation()
+
+lib = library('foo', 'foo.c')
+
+# The library does not yet exist but we can already use its path during
+# configuration. This should not trigger a reconfigure when the library is
+# rebuilt.
+configure_file(
+  output: 'out.txt',
+  capture: true,
+  command: [python, '-c', 'import sys; print(sys.argv[1])', lib.full_path()],
+)
+
+message('Project configured')
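The configure_file() above intentionally embeds lib.full_path() as a plain string, so later rebuilds of the library must not force a reconfigure. If the generated file were instead meant to track the built library, a build-time rule would be the usual choice; a sketch under that assumption, with a hypothetical target name:

    custom_target('out-tracked',
      output : 'out-tracked.txt',
      input : lib,
      capture : true,
      command : [python, '-c', 'import sys; print(sys.argv[1])', '@INPUT@'])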
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/77 pkgconfig prefixes/client/client.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/77 pkgconfig prefixes/client/client.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/77 pkgconfig prefixes/client/client.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/77 pkgconfig prefixes/client/client.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,8 @@
+#include <val2.h>
+#include <stdio.h>
+
+int main(int argc, char **argv)
+{
+  printf("%d\n", val2());
+  return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/77 pkgconfig prefixes/client/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/77 pkgconfig prefixes/client/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/77 pkgconfig prefixes/client/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/77 pkgconfig prefixes/client/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,3 @@
+project('client', 'c')
+val2_dep = dependency('val2')
+executable('client', 'client.c', dependencies : [val2_dep], install: true)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/77 pkgconfig prefixes/val1/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/77 pkgconfig prefixes/val1/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/77 pkgconfig prefixes/val1/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/77 pkgconfig prefixes/val1/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,5 @@
+project('val1', 'c')
+val1 = shared_library('val1', 'val1.c', install: true)
+install_headers('val1.h')
+pkgconfig = import('pkgconfig')
+pkgconfig.generate(val1, libraries : ['-Wl,-rpath,${libdir}'])
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/77 pkgconfig prefixes/val1/val1.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/77 pkgconfig prefixes/val1/val1.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/77 pkgconfig prefixes/val1/val1.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/77 pkgconfig prefixes/val1/val1.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,3 @@
+#include "val1.h"
+
+int val1(void) { return 1; }
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/77 pkgconfig prefixes/val1/val1.h" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/77 pkgconfig prefixes/val1/val1.h"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/77 pkgconfig prefixes/val1/val1.h"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/77 pkgconfig prefixes/val1/val1.h"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1 @@
+int val1(void);
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/77 pkgconfig prefixes/val2/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/77 pkgconfig prefixes/val2/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/77 pkgconfig prefixes/val2/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/77 pkgconfig prefixes/val2/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,8 @@
+project('val2', 'c')
+val1_dep = dependency('val1')
+val2 = shared_library('val2', 'val2.c',
+  dependencies : [val1_dep],
+  install: true)
+install_headers('val2.h')
+pkgconfig = import('pkgconfig')
+pkgconfig.generate(val2, libraries : ['-Wl,-rpath,${libdir}'])
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/77 pkgconfig prefixes/val2/val2.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/77 pkgconfig prefixes/val2/val2.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/77 pkgconfig prefixes/val2/val2.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/77 pkgconfig prefixes/val2/val2.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,4 @@
+#include "val1.h"
+#include "val2.h"
+
+int val2(void) { return val1() + 2; }
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/77 pkgconfig prefixes/val2/val2.h" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/77 pkgconfig prefixes/val2/val2.h"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/77 pkgconfig prefixes/val2/val2.h"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/77 pkgconfig prefixes/val2/val2.h"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1 @@
+int val2(void);
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/78 subdir libdir/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/78 subdir libdir/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/78 subdir libdir/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/78 subdir libdir/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,2 @@
+project('toplevel', 'c')
+subproject('flub')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/78 subdir libdir/subprojects/flub/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/78 subdir libdir/subprojects/flub/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/78 subdir libdir/subprojects/flub/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/78 subdir libdir/subprojects/flub/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1 @@
+project('subflub', 'c')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/79 as link whole/bar.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/79 as link whole/bar.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/79 as link whole/bar.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/79 as link whole/bar.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,6 @@
+int bar(void);
+
+int bar(void)
+{
+    return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/79 as link whole/foo.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/79 as link whole/foo.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/79 as link whole/foo.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/79 as link whole/foo.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,6 @@
+int foo(void);
+
+int foo(void)
+{
+    return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/79 as link whole/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/79 as link whole/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/79 as link whole/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/79 as link whole/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,11 @@
+project('as-link-whole', 'c')
+
+foo = static_library('foo', 'foo.c', install: true)
+dep = declare_dependency(link_with: foo)
+bar1 = library('bar1', 'bar.c', dependencies: dep)
+bar2 = library('bar2', 'bar.c', dependencies: dep.as_link_whole())
+
+# bar1.pc should have -lfoo, but bar2.pc should not
+pkg = import('pkgconfig')
+pkg.generate(bar1)
+pkg.generate(bar2)
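dep.as_link_whole() returns a copy of the dependency in which the libraries listed in link_with are promoted to link_whole, so bar2 absorbs foo's objects and its generated .pc file no longer has to pull in -lfoo. Roughly the same effect could be had by linking the static library whole directly on the target; a hypothetical sketch:

    bar3 = library('bar3', 'bar.c', link_whole : foo)
    pkg.generate(bar3)   # like bar2.pc, no -lfoo needed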
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/79 nostdlib/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/79 nostdlib/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/79 nostdlib/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/79 nostdlib/meson.build"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,14 @@
+project('own libc', 'c')
+
+# Not related to this test, but there was no better place to put this check.
+assert(meson.get_cross_property('nonexisting', 'defaultvalue') == 'defaultvalue',
+  'Cross prop getting is broken.')
+
+# A simple project that uses its own libc.
+
+# Note that we don't need to specify anything here; the flags needed to use
+# our own stdlib come from the cross file.
+
+exe = executable('selfcontained', 'prog.c')
+
+test('standalone test', exe)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/79 nostdlib/prog.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/79 nostdlib/prog.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/79 nostdlib/prog.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/79 nostdlib/prog.c"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,7 @@
+
+#include<stdio.h>
+
+int main(void) {
+  const char *message = "Hello without stdlib.\n";
+  return simple_print(message, simple_strlen(message));
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/79 nostdlib/subprojects/mylibc/libc.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/79 nostdlib/subprojects/mylibc/libc.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/79 nostdlib/subprojects/mylibc/libc.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/79 nostdlib/subprojects/mylibc/libc.c"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,35 @@
+/* Do not use this as the basis of your own libc.
+ * The code is probably suboptimal or wonky, as I
+ * had no prior experience with this and instead
+ * just fiddled with the code until it worked.
+ */
+
+#include<stdio.h>
+
+#define STDOUT 1
+#define SYS_WRITE 4
+
+int simple_print(const char *msg, const long bufsize) {
+  int count;
+  long total_written = 0;
+  while(total_written < bufsize) {
+    asm(
+        "int $0x80\n\t"
+        : "=a"(count)
+        : "0"(SYS_WRITE), "b"(STDOUT), "c"(msg+total_written), "d"(bufsize-total_written)
+        :);
+    if(count == 0) {
+      return 1;
+    }
+    total_written += count;
+  }
+  return 0;
+}
+
+int simple_strlen(const char *str) {
+  int len = 0;
+  while(str[len] != '\0') {
+    len++;
+  }
+  return len;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/79 nostdlib/subprojects/mylibc/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/79 nostdlib/subprojects/mylibc/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/79 nostdlib/subprojects/mylibc/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/79 nostdlib/subprojects/mylibc/meson.build"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,13 @@
+project('own libc', 'c')
+
+# A very simple libc implementation
+
+# Do not specify -nostdlib & co. They come from cross specifications.
+
+libc = static_library('c', 'libc.c', 'stubstart.s')
+
+mylibc_dep = declare_dependency(link_with : libc,
+  include_directories : include_directories('.')
+)
+
+meson.override_dependency('c_stdlib', mylibc_dep)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/79 nostdlib/subprojects/mylibc/stdio.h" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/79 nostdlib/subprojects/mylibc/stdio.h"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/79 nostdlib/subprojects/mylibc/stdio.h"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/79 nostdlib/subprojects/mylibc/stdio.h"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,5 @@
+#pragma once
+
+int simple_print(const char *msg, const long bufsize);
+
+int simple_strlen(const char *str);
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/79 nostdlib/subprojects/mylibc/stubstart.s" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/79 nostdlib/subprojects/mylibc/stubstart.s"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/79 nostdlib/subprojects/mylibc/stubstart.s"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/79 nostdlib/subprojects/mylibc/stubstart.s"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,8 @@
+.globl _start
+
+_start:
+
+    call main
+    movl %eax, %ebx
+    movl $1, %eax
+    int $0x80
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/79 user options for subproject/75 user options for subproject/.gitignore" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/79 user options for subproject/75 user options for subproject/.gitignore"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/79 user options for subproject/75 user options for subproject/.gitignore"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/79 user options for subproject/75 user options for subproject/.gitignore"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1 @@
+subprojects/*
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/79 user options for subproject/75 user options for subproject/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/79 user options for subproject/75 user options for subproject/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/79 user options for subproject/75 user options for subproject/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/79 user options for subproject/75 user options for subproject/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,3 @@
+project('user option for subproject')
+
+p = subproject('sub')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/79 user options for subproject/.gitignore" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/79 user options for subproject/.gitignore"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/79 user options for subproject/.gitignore"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/79 user options for subproject/.gitignore"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1 @@
+/subprojects
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/80 global-rpath/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/80 global-rpath/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/80 global-rpath/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/80 global-rpath/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,3 @@
+project('global-rpath', 'cpp')
+yonder_dep = dependency('yonder')
+executable('rpathified', 'rpathified.cpp', dependencies: [yonder_dep], install: true)
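The build file above adds no rpath of its own; in this scenario the rpath is expected to come from outside the project (a globally supplied linker flag), which is what the test exercises. When an rpath is meant to be part of the build definition itself, the target keywords are the usual route; a sketch with a hypothetical install location:

    executable('rpathified-alt', 'rpathified.cpp',
      dependencies : [yonder_dep],
      install : true,
      install_rpath : '/opt/yonder/lib')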
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/80 global-rpath/rpathified.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/80 global-rpath/rpathified.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/80 global-rpath/rpathified.cpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/80 global-rpath/rpathified.cpp"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,6 @@
+#include <yonder.h>
+#include <string.h>
+int main(int argc, char **argv)
+{
+    return strcmp(yonder(), "AB54 6BR");
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/80 global-rpath/yonder/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/80 global-rpath/yonder/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/80 global-rpath/yonder/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/80 global-rpath/yonder/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,5 @@
+project('yonder', 'cpp')
+yonder = shared_library('yonder', 'yonder.cpp', install: true)
+install_headers('yonder.h')
+pkgconfig = import('pkgconfig')
+pkgconfig.generate(yonder)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/80 global-rpath/yonder/yonder.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/80 global-rpath/yonder/yonder.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/80 global-rpath/yonder/yonder.cpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/80 global-rpath/yonder/yonder.cpp"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,3 @@
+#include "yonder.h"
+
+char *yonder(void) { return "AB54 6BR"; }
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/80 global-rpath/yonder/yonder.h" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/80 global-rpath/yonder/yonder.h"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/80 global-rpath/yonder/yonder.h"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/80 global-rpath/yonder/yonder.h"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1 @@
+char *yonder(void);
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/81 wrap-git/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/81 wrap-git/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/81 wrap-git/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/81 wrap-git/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,4 @@
+project('test-wrap-git')
+
+exe = subproject('wrap_git').get_variable('exe')
+test('test1', exe)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/81 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/81 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/81 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/81 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,3 @@
+project('foo', 'c')
+
+exe = executable('app', 'main.c')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/81 wrap-git/subprojects/wrap_git_upstream/main.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/81 wrap-git/subprojects/wrap_git_upstream/main.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/81 wrap-git/subprojects/wrap_git_upstream/main.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/81 wrap-git/subprojects/wrap_git_upstream/main.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,4 @@
+int main(void)
+{
+  return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/82 meson version compare/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/82 meson version compare/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/82 meson version compare/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/82 meson version compare/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,17 @@
+project('version compare', meson_version: '>= 0.1')
+
+if meson.version().version_compare('>= 9999')
+  error('This should not be executed')
+elif meson.version().version_compare('>= 0.55') and false
+  error('This should not be executed')
+elif not meson.version().version_compare('>= 0.55')
+  error('This should not be executed')
+elif meson.version().version_compare('>= 0.55')
+  # This should not produce a warning even when using a function not available
+  # in meson 0.1.
+  foo_dep = declare_dependency()
+  meson.override_dependency('foo', foo_dep)
+endif
+
+# This will error out if the elif clause above was not entered
+assert(foo_dep.found(), 'meson.version_compare did not work')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/83 cross only introspect/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/83 cross only introspect/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/83 cross only introspect/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/83 cross only introspect/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,2 @@
+project('cross only introspect')
+add_languages('c', native: false)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/84 change option choices/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/84 change option choices/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/84 change option choices/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/84 change option choices/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1 @@
+project('change option choices')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/84 change option choices/meson_options.1.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/84 change option choices/meson_options.1.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/84 change option choices/meson_options.1.txt"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/84 change option choices/meson_options.1.txt"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,13 @@
+option(
+    'combo',
+    type : 'combo',
+    choices : ['a', 'b', 'c'],
+    value : 'a',
+)
+
+option(
+    'array',
+    type : 'array',
+    choices : ['a', 'b', 'c'],
+    value : ['a'],
+)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/84 change option choices/meson_options.2.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/84 change option choices/meson_options.2.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/84 change option choices/meson_options.2.txt"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/84 change option choices/meson_options.2.txt"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,13 @@
+option(
+    'combo',
+    type : 'combo',
+    choices : ['b', 'c', 'd'],
+    value : 'b',
+)
+
+option(
+    'array',
+    type : 'array',
+    choices : ['b', 'c', 'd'],
+    value : ['b'],
+)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/85 nested subproject regenerate depends/main.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/85 nested subproject regenerate depends/main.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/85 nested subproject regenerate depends/main.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/85 nested subproject regenerate depends/main.c"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,3 @@
+int main(void) {
+    return 0;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/85 nested subproject regenerate depends/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/85 nested subproject regenerate depends/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/85 nested subproject regenerate depends/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/85 nested subproject regenerate depends/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,6 @@
+project('nested subproject regenerate depends', 'c')
+
+s = subproject('sub1')
+
+# This is needed to make msbuild noop check work correctly
+executable('exe', 'main.c')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/85 nested subproject regenerate depends/subprojects/sub1/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/85 nested subproject regenerate depends/subprojects/sub1/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/85 nested subproject regenerate depends/subprojects/sub1/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/85 nested subproject regenerate depends/subprojects/sub1/meson.build"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,4 @@
+project('sub1')
+
+cmake = import('cmake')
+cmake.subproject('sub2')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/85 nested subproject regenerate depends/subprojects/sub2/CMakeLists.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/85 nested subproject regenerate depends/subprojects/sub2/CMakeLists.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/unit/85 nested subproject regenerate depends/subprojects/sub2/CMakeLists.txt"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/unit/85 nested subproject regenerate depends/subprojects/sub2/CMakeLists.txt"	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1 @@
+project(sub2)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/vala/11 generated vapi/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/vala/11 generated vapi/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/vala/11 generated vapi/installed_files.txt"	2018-10-31 09:31:57.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/vala/11 generated vapi/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,9 +0,0 @@
-usr/bin/vapigen-test?exe
-usr/lib/?libfoo.so
-?cygwin:usr/lib/libfoo.dll.a
-usr/lib/?libbar.so
-?cygwin:usr/lib/libbar.dll.a
-usr/share/vala/vapi/foo-1.0.vapi
-usr/share/vala/vapi/foo-1.0.deps
-usr/share/vala/vapi/bar-1.0.vapi
-usr/share/vala/vapi/bar-1.0.deps
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/vala/11 generated vapi/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/vala/11 generated vapi/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/vala/11 generated vapi/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/vala/11 generated vapi/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,13 @@
+{
+  "installed": [
+    {"type": "exe", "file": "usr/bin/vapigen-test"},
+    {"type": "expr", "file": "usr/lib/?libfoo.so"},
+    {"type": "file", "platform": "cygwin", "file": "usr/lib/libfoo.dll.a"},
+    {"type": "expr", "file": "usr/lib/?libbar.so"},
+    {"type": "file", "platform": "cygwin", "file": "usr/lib/libbar.dll.a"},
+    {"type": "file", "file": "usr/share/vala/vapi/foo-1.0.vapi"},
+    {"type": "file", "file": "usr/share/vala/vapi/foo-1.0.deps"},
+    {"type": "file", "file": "usr/share/vala/vapi/bar-1.0.vapi"},
+    {"type": "file", "file": "usr/share/vala/vapi/bar-1.0.deps"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/vala/1 basic/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/vala/1 basic/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/vala/1 basic/meson.build"	2020-01-07 21:11:31.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/vala/1 basic/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -1,4 +1,5 @@
-project('valatest', 'vala', 'c')
+# Languages are case insensitive; check here that a capital C works too.
+project('valatest', 'vala', 'C')
 
 valadeps = [dependency('glib-2.0'), dependency('gobject-2.0')]
 
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/vala/6 static library/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/vala/6 static library/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/vala/6 static library/installed_files.txt"	2016-11-13 21:03:31.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/vala/6 static library/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1 +0,0 @@
-usr/lib/libextractedlib.a
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/vala/6 static library/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/vala/6 static library/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/vala/6 static library/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/vala/6 static library/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,5 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/lib/libextractedlib.a"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/vala/7 shared library/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/vala/7 shared library/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/vala/7 shared library/installed_files.txt"	2018-10-31 09:31:57.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/vala/7 shared library/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,10 +0,0 @@
-usr/lib/?libinstalled_vala_lib.so
-?cygwin:usr/lib/libinstalled_vala_lib.dll.a
-usr/lib/?libinstalled_vala_all.so
-?cygwin:usr/lib/libinstalled_vala_all.dll.a
-usr/include/installed_vala_all.h
-usr/include/valah/installed_vala_all_nolib.h
-usr/include/installed_vala_onlyh.h
-usr/share/vala/vapi/installed_vala_all.vapi
-usr/share/vala-1.0/vapi/installed_vala_all_nolib.vapi
-usr/share/vala/vapi/installed_vala_onlyvapi.vapi
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/vala/7 shared library/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/vala/7 shared library/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/vala/7 shared library/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/vala/7 shared library/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "installed": [
+    {"type": "expr", "file": "usr/lib/?libinstalled_vala_lib.so"},
+    {"type": "file", "platform": "cygwin", "file": "usr/lib/libinstalled_vala_lib.dll.a"},
+    {"type": "expr", "file": "usr/lib/?libinstalled_vala_all.so"},
+    {"type": "file", "platform": "cygwin", "file": "usr/lib/libinstalled_vala_all.dll.a"},
+    {"type": "file", "file": "usr/include/installed_vala_all.h"},
+    {"type": "file", "file": "usr/include/valah/installed_vala_all_nolib.h"},
+    {"type": "file", "file": "usr/include/installed_vala_onlyh.h"},
+    {"type": "file", "file": "usr/share/vala/vapi/installed_vala_all.vapi"},
+    {"type": "file", "file": "usr/share/vala-1.0/vapi/installed_vala_all_nolib.vapi"},
+    {"type": "file", "file": "usr/share/vala/vapi/installed_vala_onlyvapi.vapi"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/vala/8 generated sources/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/vala/8 generated sources/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/vala/8 generated sources/installed_files.txt"	2018-10-31 09:31:57.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/vala/8 generated sources/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-usr/bin/generatedtestparent?exe
-usr/bin/generatedtest?exe
-usr/bin/onlygentest?exe
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/vala/8 generated sources/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/vala/8 generated sources/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/vala/8 generated sources/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/vala/8 generated sources/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "installed": [
+    {"type": "exe", "file": "usr/bin/generatedtestparent"},
+    {"type": "exe", "file": "usr/bin/generatedtest"},
+    {"type": "exe", "file": "usr/bin/onlygentest"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/vala/9 gir/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/vala/9 gir/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/vala/9 gir/installed_files.txt"	2018-10-31 09:31:57.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/vala/9 gir/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-?gcc:usr/lib/?libfoo.so
-?cygwin:usr/lib/libfoo.dll.a
-usr/share/gir-1.0/Foo-1.0.gir
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/vala/9 gir/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/vala/9 gir/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/vala/9 gir/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/vala/9 gir/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "installed": [
+    {"type": "expr", "platform": "gcc", "file": "usr/lib/?libfoo.so"},
+    {"type": "file", "platform": "cygwin", "file": "usr/lib/libfoo.dll.a"},
+    {"type": "file", "file": "usr/share/gir-1.0/Foo-1.0.gir"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/warning/1 version for string div/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/warning/1 version for string div/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/warning/1 version for string div/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/warning/1 version for string div/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,8 @@
+{
+  "stdout": [
+    {
+      "comment": "literal '/' appears in output, irrespective of os.path.sep, as that's the operator",
+      "line": "WARNING: Project targeting '>=0.48.0' but tried to use feature introduced in '0.49.0': / with string arguments."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/warning/2 languages missing native/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/warning/2 languages missing native/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/warning/2 languages missing native/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/warning/2 languages missing native/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,3 @@
+project('languages missing native',
+        meson_version : '>= 0.54')
+add_languages('c')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/warning/2 languages missing native/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/warning/2 languages missing native/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/warning/2 languages missing native/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/warning/2 languages missing native/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "stdout": [
+    {
+      "line": "test cases/warning/2 languages missing native/meson.build:3: WARNING: add_languages is missing native:, assuming languages are wanted for both host and build."
+    }
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/wasm/1 basic/hello.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/wasm/1 basic/hello.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/wasm/1 basic/hello.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/wasm/1 basic/hello.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+int main() {
+  printf("Hello World\n");
+  return 0;
+}
+
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/wasm/1 basic/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/wasm/1 basic/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/wasm/1 basic/meson.build"	2019-08-28 17:15:39.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/wasm/1 basic/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -1,3 +1,4 @@
-project('emcctest', 'cpp')
+project('emcctest', 'c', 'cpp')
 
+executable('hello-c', 'hello.c')
 executable('hello', 'hello.cpp')
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/wasm/2 threads/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/wasm/2 threads/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/wasm/2 threads/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/wasm/2 threads/meson.build"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,10 @@
+project(
+  'threads',
+  'c', 'cpp',
+  default_options : ['cpp_std=c++11'],
+)
+
+dep_threads = dependency('threads')
+
+executable('threads-c', 'threads.c', dependencies : dep_threads)
+executable('threads-c++', 'threads.cpp', dependencies : dep_threads)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/wasm/2 threads/threads.c" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/wasm/2 threads/threads.c"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/wasm/2 threads/threads.c"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/wasm/2 threads/threads.c"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,21 @@
+#include <pthread.h>
+#include <stdio.h>
+#include <unistd.h>
+
+void inthread(void * args) {
+    sleep(1);
+    printf("In thread\n");
+}
+
+int main() {
+#ifdef __EMSCRIPTEN_PTHREADS__
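+    /* Only exercise pthreads when Emscripten is built with thread support; the #else branch fails the compile. */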
+    pthread_t thread_id;
+    printf("Before Thread\n"); 
+    pthread_create(&thread_id, NULL, (void *)*inthread, NULL); 
+    pthread_join(thread_id, NULL); 
+    printf("After Thread\n");
+    return 0;
+#else
+# error "threads not enabled\n"
+#endif
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/wasm/2 threads/threads.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/wasm/2 threads/threads.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/wasm/2 threads/threads.cpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/wasm/2 threads/threads.cpp"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,13 @@
+#include <iostream>
+#include <thread>
+#include <unistd.h>
+
+int main(void) {
+    std::cout << "Before thread" << std::endl;
+    std::thread t([]() {
+        sleep(1);
+        std::cout << "In a thread." << std::endl;
+    });
+    t.join();
+    std::cout << "After thread" << std::endl;
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/windows/11 exe implib/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/windows/11 exe implib/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/windows/11 exe implib/installed_files.txt"	2018-10-31 09:31:57.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/windows/11 exe implib/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,8 +0,0 @@
-usr/bin/prog.exe
-?msvc:usr/bin/prog.pdb
-usr/bin/prog2.exe
-?msvc:usr/bin/prog2.pdb
-?gcc:usr/lib/libprog.exe.a
-?gcc:usr/lib/libburble.a
-?msvc:usr/lib/prog.exe.lib
-?msvc:usr/lib/burble.lib
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/windows/11 exe implib/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/windows/11 exe implib/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/windows/11 exe implib/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/windows/11 exe implib/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,12 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/bin/prog.exe"},
+    {"type": "pdb", "file": "usr/bin/prog"},
+    {"type": "file", "file": "usr/bin/prog2.exe"},
+    {"type": "pdb", "file": "usr/bin/prog2"},
+    {"type": "file", "platform": "gcc", "file": "usr/lib/libprog.exe.a"},
+    {"type": "file", "platform": "gcc", "file": "usr/lib/libburble.a"},
+    {"type": "file", "platform": "msvc", "file": "usr/lib/prog.exe.lib"},
+    {"type": "file", "platform": "msvc", "file": "usr/lib/burble.lib"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/windows/16 gui app/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/windows/16 gui app/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/windows/16 gui app/meson.build"	2020-01-23 12:51:19.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/windows/16 gui app/meson.build"	2020-10-18 21:29:13.000000000 +0000
@@ -6,16 +6,21 @@
 #
 
 console_lib = static_library('main', 'console_prog.c')
-executable('console', 'dummy.c', link_with: console_lib, gui_app: false)
+executable('console', 'dummy.c', link_with: console_lib, win_subsystem: 'console')
+executable('console2', 'dummy.c', link_with: console_lib, gui_app: false)
 
 #
 # also verify that the correct subsystem is set by executable(gui_app:)
 #
 
-gui_prog = executable('gui_prog', 'gui_prog.c', gui_app: true)
-console_prog = executable('console_prog', 'console_prog.c', gui_app: false)
+gui_prog = executable('gui_prog', 'gui_prog.c', win_subsystem: 'windows,6.0')
+gui_prog2 = executable('gui_prog2', 'gui_prog.c', gui_app: true)
+console_prog = executable('console_prog', 'console_prog.c', win_subsystem: 'console')
+console_prog2 = executable('console_prog2', 'console_prog.c', gui_app: false)
 
 tester = find_program('gui_app_tester.py')
 
 test('is_gui', tester, args: [gui_prog, '2'])
+test('is_gui2', tester, args: [gui_prog2, '2'])
 test('not_gui', tester, args: [console_prog, '3'])
+test('not_gui2', tester, args: [console_prog2, '3'])
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/windows/17 msvc ndebug/main.cpp" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/windows/17 msvc ndebug/main.cpp"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/windows/17 msvc ndebug/main.cpp"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/windows/17 msvc ndebug/main.cpp"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,9 @@
+int main() {
+#ifdef NDEBUG
+    // NDEBUG is defined
+    return 0;
+#else
+    // NDEBUG is not defined
+    return 1;
+#endif
+}
\ No newline at end of file
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/windows/17 msvc ndebug/meson.build" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/windows/17 msvc ndebug/meson.build"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/windows/17 msvc ndebug/meson.build"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/windows/17 msvc ndebug/meson.build"	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,7 @@
+project('msvc_ndebug', 'cpp',
+    default_options : [ 'b_ndebug=true' ]
+)
+
+exe = executable('exe', 'main.cpp')
+
+test('ndebug', exe)
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/windows/1 basic/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/windows/1 basic/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/windows/1 basic/installed_files.txt"	2018-10-31 09:31:57.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/windows/1 basic/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,2 +0,0 @@
-usr/bin/prog.exe
-?msvc:usr/bin/prog.pdb
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/windows/1 basic/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/windows/1 basic/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/windows/1 basic/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/windows/1 basic/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,6 @@
+{
+  "installed": [
+    {"type": "file", "file": "usr/bin/prog.exe"},
+    {"type": "pdb", "file": "usr/bin/prog"}
+  ]
+}
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/windows/7 dll versioning/installed_files.txt" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/windows/7 dll versioning/installed_files.txt"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/windows/7 dll versioning/installed_files.txt"	2019-12-04 18:45:50.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/windows/7 dll versioning/installed_files.txt"	1970-01-01 00:00:00.000000000 +0000
@@ -1,30 +0,0 @@
-?msvc:usr/bin/some-0.dll
-?msvc:usr/bin/some-0.pdb
-?msvc:usr/lib/some.lib
-?msvc:usr/bin/noversion.dll
-?msvc:usr/bin/noversion.pdb
-?msvc:usr/lib/noversion.lib
-?msvc:usr/bin/onlyversion-1.dll
-?msvc:usr/bin/onlyversion-1.pdb
-?msvc:usr/lib/onlyversion.lib
-?msvc:usr/bin/onlysoversion-5.dll
-?msvc:usr/bin/onlysoversion-5.pdb
-?msvc:usr/lib/onlysoversion.lib
-?msvc:usr/libexec/customdir.dll
-?msvc:usr/libexec/customdir.lib
-?msvc:usr/libexec/customdir.pdb
-?msvc:usr/lib/modules/module.dll
-?msvc:usr/lib/modules/module.lib
-?msvc:usr/lib/modules/module.pdb
-?gcc:usr/bin/?libsome-0.dll
-?gcc:usr/lib/libsome.dll.a
-?gcc:usr/bin/?libnoversion.dll
-?gcc:usr/lib/libnoversion.dll.a
-?gcc:usr/bin/?libonlyversion-1.dll
-?gcc:usr/lib/libonlyversion.dll.a
-?gcc:usr/bin/?libonlysoversion-5.dll
-?gcc:usr/lib/libonlysoversion.dll.a
-?gcc:usr/libexec/?libcustomdir.dll
-?gcc:usr/libexec/libcustomdir.dll.a
-?gcc:usr/lib/modules/?libmodule.dll
-?gcc:usr/lib/modules/libmodule.dll.a
diff -Nru "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/windows/7 dll versioning/test.json" "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/windows/7 dll versioning/test.json"
--- "/tmp/tmpunk8bsyk/nCTUapf_u6/meson-0.53.2/test cases/windows/7 dll versioning/test.json"	1970-01-01 00:00:00.000000000 +0000
+++ "/tmp/tmpunk8bsyk/BhcjZcOLhS/meson-0.57.0+really0.56.2/test cases/windows/7 dll versioning/test.json"	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,34 @@
+{
+  "installed": [
+    {"type": "file", "platform": "msvc", "file": "usr/bin/some-0.dll"},
+    {"type": "pdb", "file": "usr/bin/some-0"},
+    {"type": "file", "platform": "msvc", "file": "usr/lib/some.lib"},
+    {"type": "file", "platform": "msvc", "file": "usr/bin/noversion.dll"},
+    {"type": "pdb", "file": "usr/bin/noversion"},
+    {"type": "file", "platform": "msvc", "file": "usr/lib/noversion.lib"},
+    {"type": "file", "platform": "msvc", "file": "usr/bin/onlyversion-1.dll"},
+    {"type": "pdb", "file": "usr/bin/onlyversion-1"},
+    {"type": "file", "platform": "msvc", "file": "usr/lib/onlyversion.lib"},
+    {"type": "file", "platform": "msvc", "file": "usr/bin/onlysoversion-5.dll"},
+    {"type": "pdb", "file": "usr/bin/onlysoversion-5"},
+    {"type": "file", "platform": "msvc", "file": "usr/lib/onlysoversion.lib"},
+    {"type": "file", "platform": "msvc", "file": "usr/libexec/customdir.dll"},
+    {"type": "file", "platform": "msvc", "file": "usr/libexec/customdir.lib"},
+    {"type": "pdb", "file": "usr/libexec/customdir"},
+    {"type": "file", "platform": "msvc", "file": "usr/lib/modules/module.dll"},
+    {"type": "file", "platform": "msvc", "file": "usr/lib/modules/module.lib"},
+    {"type": "pdb", "file": "usr/lib/modules/module"},
+    {"type": "expr", "platform": "gcc", "file": "usr/bin/?libsome-0.dll"},
+    {"type": "file", "platform": "gcc", "file": "usr/lib/libsome.dll.a"},
+    {"type": "expr", "platform": "gcc", "file": "usr/bin/?libnoversion.dll"},
+    {"type": "file", "platform": "gcc", "file": "usr/lib/libnoversion.dll.a"},
+    {"type": "expr", "platform": "gcc", "file": "usr/bin/?libonlyversion-1.dll"},
+    {"type": "file", "platform": "gcc", "file": "usr/lib/libonlyversion.dll.a"},
+    {"type": "expr", "platform": "gcc", "file": "usr/bin/?libonlysoversion-5.dll"},
+    {"type": "file", "platform": "gcc", "file": "usr/lib/libonlysoversion.dll.a"},
+    {"type": "expr", "platform": "gcc", "file": "usr/libexec/?libcustomdir.dll"},
+    {"type": "file", "platform": "gcc", "file": "usr/libexec/libcustomdir.dll.a"},
+    {"type": "expr", "platform": "gcc", "file": "usr/lib/modules/?libmodule.dll"},
+    {"type": "file", "platform": "gcc", "file": "usr/lib/modules/libmodule.dll.a"}
+  ]
+}
diff -Nru meson-0.53.2/tools/boost_names.py meson-0.57.0+really0.56.2/tools/boost_names.py
--- meson-0.53.2/tools/boost_names.py	2019-12-04 18:45:50.000000000 +0000
+++ meson-0.57.0+really0.56.2/tools/boost_names.py	2020-10-18 21:29:13.000000000 +0000
@@ -24,164 +24,277 @@
 """
 
 import sys
-import os
-import collections
-import pprint
 import json
 import re
+import textwrap
+import functools
+import typing as T
+from pathlib import Path
+
+lib_dir = Path('libs')
+jamroot = Path('Jamroot')
+
+not_modules = ['config', 'disjoint_sets', 'headers']
+
+export_modules = False
+
+
+@functools.total_ordering
+class BoostLibrary():
+    def __init__(self, name: str, shared: T.List[str], static: T.List[str], single: T.List[str], multi: T.List[str]):
+        self.name = name
+        self.shared = sorted(set(shared))
+        self.static = sorted(set(static))
+        self.single = sorted(set(single))
+        self.multi = sorted(set(multi))
+
+    def __lt__(self, other: object) -> bool:
+        if isinstance(other, BoostLibrary):
+            return self.name < other.name
+        return NotImplemented
+
+    def __eq__(self, other: object) -> bool:
+        if isinstance(other, BoostLibrary):
+            return self.name == other.name
+        elif isinstance(other, str):
+            return self.name == other
+        return NotImplemented
+
+    def __hash__(self) -> int:
+        return hash(self.name)
+
+@functools.total_ordering
+class BoostModule():
+    def __init__(self, name: str, key: str, desc: str, libs: T.List[BoostLibrary]):
+        self.name = name
+        self.key = key
+        self.desc = desc
+        self.libs = libs
+
+    def __lt__(self, other: object) -> bool:
+        if isinstance(other, BoostModule):
+            return self.key < other.key
+        return NotImplemented
+
+
+def get_boost_version() -> T.Optional[str]:
+    raw = jamroot.read_text()
+    m = re.search(r'BOOST_VERSION\s*:\s*([0-9\.]+)\s*;', raw)
+    if m:
+        return m.group(1)
+    return None
+
+
+def get_libraries(jamfile: Path) -> T.List[BoostLibrary]:
+    # Extract libraries from the boost Jamfiles. This includes:
+    #  - library name
+    #  - compiler flags
+
+    libs: T.List[BoostLibrary] = []
+    raw = jamfile.read_text()
+    raw = re.sub(r'#.*\n', '\n', raw)  # Remove comments
+    raw = re.sub(r'\s+', ' ', raw)     # Force single space
+    raw = re.sub(r'}', ';', raw)       # Cheat code blocks by converting } to ;
+
+    cmds = raw.split(';')              # Commands always terminate with a ; (I hope)
+    cmds = [x.strip() for x in cmds]   # Some cleanup
+
+    project_usage_requirements: T.List[str] = []
+
+    # "Parse" the relevant sections
+    for i in cmds:
+        parts = i.split(' ')
+        parts = [x for x in parts if x not in ['']]
+        if not parts:
+            continue
 
-Module = collections.namedtuple('Module', ['dirname', 'name', 'libnames'])
-Module.__repr__ = lambda self: str((self.dirname, self.name, self.libnames))  # type: ignore
+        # Parse project
+        if parts[0] in ['project']:
+            attributes: T.Dict[str, T.List[str]] = {}
+            curr: T.Optional[str] = None
+
+            for j in parts:
+                if j == ':':
+                    curr = None
+                elif curr is None:
+                    curr = j
+                else:
+                    if curr not in attributes:
+                        attributes[curr] = []
+                    attributes[curr] += [j]
+
+            if 'usage-requirements' in attributes:
+                project_usage_requirements = attributes['usage-requirements']
+
+        # Parse libraries
+        elif parts[0] in ['lib', 'boost-lib']:
+            assert len(parts) >= 2
+
+            # Get and check the library name
+            lname = parts[1]
+            if parts[0] == 'boost-lib':
+                lname = f'boost_{lname}'
+            if not lname.startswith('boost_'):
+                continue
+
+            # Count `:` to only select the 'usage-requirements'
+            # See https://boostorg.github.io/build/manual/master/index.html#bbv2.main-target-rule-syntax
+            colon_counter = 0
+            usage_requirements: T.List[str] = []
+            for j in parts:
+                if j == ':':
+                    colon_counter += 1
+                elif colon_counter >= 4:
+                    usage_requirements += [j]
+
+            # Get shared / static defines
+            shared: T.List[str] = []
+            static: T.List[str] = []
+            single: T.List[str] = []
+            multi: T.List[str] = []
+            for j in usage_requirements + project_usage_requirements:
+                m1 = re.match(r'shared:(.*)', j)
+                m2 = re.match(r'static:(.*)', j)
+                m3 = re.match(r'single:(.*)', j)
+                m4 = re.match(r'multi:(.*)', j)
+
+                if m1:
+                    shared += [f'-D{m1.group(1)}']
+                if m2:
+                    static += [f'-D{m2.group(1)}']
+                if m3:
+                    single += [f'-D{m3.group(1)}']
+                if m4:
+                    multi += [f'-D{m4.group(1)}']
 
-LIBS = 'libs'
+            libs += [BoostLibrary(lname, shared, static, single, multi)]
 
-manual_map = {
-    'callable_traits': 'Call Traits',
-    'crc': 'CRC',
-    'dll': 'DLL',
-    'gil': 'GIL',
-    'graph_parallel': 'GraphParallel',
-    'icl': 'ICL',
-    'io': 'IO State Savers',
-    'msm': 'Meta State Machine',
-    'mpi': 'MPI',
-    'mpl': 'MPL',
-    'multi_array': 'Multi-Array',
-    'multi_index': 'Multi-Index',
-    'numeric': 'Numeric Conversion',
-    'ptr_container': 'Pointer Container',
-    'poly_collection': 'PolyCollection',
-    'qvm': 'QVM',
-    'throw_exception': 'ThrowException',
-    'tti': 'TTI',
-    'vmd': 'VMD',
-}
-
-extra = [
-    Module('utility', 'Compressed Pair', []),
-    Module('core', 'Enable If', []),
-    Module('functional', 'Functional/Factory', []),
-    Module('functional', 'Functional/Forward', []),
-    Module('functional', 'Functional/Hash', []),
-    Module('functional', 'Functional/Overloaded Function', []),
-    Module('utility', 'Identity Type', []),
-    Module('utility', 'In Place Factory, Typed In Place Factory', []),
-    Module('numeric', 'Interval', []),
-    Module('math', 'Math Common Factor', []),
-    Module('math', 'Math Octonion', []),
-    Module('math', 'Math Quaternion', []),
-    Module('math', 'Math/Special Functions', []),
-    Module('math', 'Math/Statistical Distributions', []),
-    Module('bind', 'Member Function', []),
-    Module('algorithm', 'Min-Max', []),
-    Module('numeric', 'Odeint', []),
-    Module('utility', 'Operators', []),
-    Module('core', 'Ref', []),
-    Module('utility', 'Result Of', []),
-    Module('algorithm', 'String Algo', []),
-    Module('core', 'Swap', []),
-    Module('', 'Tribool', []),
-    Module('numeric', 'uBLAS', []),
-    Module('utility', 'Value Initialized', []),
-]
-
-# Cannot find the following modules in the documentation of boost
-not_modules = ['beast', 'logic', 'mp11', 'winapi']
-
-def eprint(message):
-    print(message, file=sys.stderr)
-
-def get_library_names(jamfile):
-    libs = []
-    with open(jamfile) as jamfh:
-        jam = jamfh.read()
-        res = re.finditer(r'^lib[\s]+([A-Za-z0-9_]+)([^;]*);', jam, re.MULTILINE | re.DOTALL)
-        for matches in res:
-            if ':' in matches.group(2):
-                libs.append(matches.group(1))
-        res = re.finditer(r'^boost-lib[\s]+([A-Za-z0-9_]+)([^;]*);', jam, re.MULTILINE | re.DOTALL)
-        for matches in res:
-            if ':' in matches.group(2):
-                libs.append('boost_{}'.format(matches.group(1)))
     return libs
 
-def exists(modules, module):
-    return len([x for x in modules if x.dirname == module.dirname]) != 0
 
-def get_modules(init=extra):
-    modules = init
-    for directory in os.listdir(LIBS):
-        if not os.path.isdir(os.path.join(LIBS, directory)):
-            continue
-        if directory in not_modules:
-            continue
-        jamfile = os.path.join(LIBS, directory, 'build', 'Jamfile.v2')
-        if os.path.isfile(jamfile):
-            libs = get_library_names(jamfile)
-        else:
-            libs = []
-        if directory in manual_map.keys():
-            modname = manual_map[directory]
-        else:
-            modname = directory.replace('_', ' ').title()
-        modules.append(Module(directory, modname, libs))
+def process_lib_dir(ldir: Path) -> T.List[BoostModule]:
+    meta_file = ldir / 'meta' / 'libraries.json'
+    bjam_file = ldir / 'build' / 'Jamfile.v2'
+    if not meta_file.exists():
+        print(f'WARNING: Meta file {meta_file} does not exist')
+        return []
+
+    # Extract libs
+    libs: T.List[BoostLibrary] = []
+    if bjam_file.exists():
+        libs = get_libraries(bjam_file)
+
+    # Extract metadata
+    data = json.loads(meta_file.read_text())
+    if not isinstance(data, list):
+        data = [data]
+
+    modules: T.List[BoostModule] = []
+    for i in data:
+        modules += [BoostModule(i['name'], i['key'], i['description'], libs)]
+
     return modules
 
-def get_modules_2():
-    modules = []
-    # The python module uses an older build system format and is not easily parseable.
-    # We add the python module libraries manually.
-    modules.append(Module('python', 'Python', ['boost_python', 'boost_python3', 'boost_numpy', 'boost_numpy3']))
-    for (root, _, files) in os.walk(LIBS):
-        for f in files:
-            if f == "libraries.json":
-                projectdir = os.path.dirname(root)
-
-                jamfile = os.path.join(projectdir, 'build', 'Jamfile.v2')
-                if os.path.isfile(jamfile):
-                    libs = get_library_names(jamfile)
-                else:
-                    libs = []
 
-                # Get metadata for module
-                jsonfile = os.path.join(root, f)
-                with open(jsonfile) as jsonfh:
-                    boost_modules = json.loads(jsonfh.read())
-                    if(isinstance(boost_modules, dict)):
-                        boost_modules = [boost_modules]
-                    for boost_module in boost_modules:
-                        modules.append(Module(boost_module['key'], boost_module['name'], libs))
-
-    # Some subprojects do not have meta directory with json file. Find those
-    jsonless_modules = [x for x in get_modules([]) if not exists(modules, x)]
-    for module in jsonless_modules:
-        eprint("WARNING: {} does not have meta/libraries.json. Will guess pretty name '{}'".format(module.dirname, module.name))
-    modules.extend(jsonless_modules)
+def get_modules() -> T.List[BoostModule]:
+    modules: T.List[BoostModule] = []
+    for i in lib_dir.iterdir():
+        if not i.is_dir() or i.name in not_modules:
+            continue
+
+        # numeric has sub libs
+        subdirs = i / 'sublibs'
+        metadir = i / 'meta'
+        if subdirs.exists() and not metadir.exists():
+            for j in i.iterdir():
+                if not j.is_dir():
+                    continue
+                modules += process_lib_dir(j)
+        else:
+            modules += process_lib_dir(i)
 
     return modules
 
-def main(args):
-    if not os.path.isdir(LIBS):
-        eprint("ERROR: script must be run in boost source directory")
-
-    # It will pick jsonless algorithm if 1 is given as argument
-    impl = 0
-    if len(args) > 1:
-        if args[1] == '1':
-            impl = 1
-
-    if impl == 1:
-        modules = get_modules()
-    else:
-        modules = get_modules_2()
-
-    sorted_modules = sorted(modules, key=lambda module: module.name.lower())
-    sorted_modules = [x[2] for x in sorted_modules if x[2]]
-    sorted_modules = sum(sorted_modules, [])
-    sorted_modules = [x for x in sorted_modules if x.startswith('boost')]
 
-    pp = pprint.PrettyPrinter()
-    pp.pprint(sorted_modules)
+def main() -> int:
+    if not lib_dir.is_dir() or not jamroot.exists():
+        print("ERROR: script must be run in boost source directory")
+        return 1
+
+    vers = get_boost_version()
+    modules = get_modules()
+    modules = sorted(modules)
+    libraries = [x for y in modules for x in y.libs]
+    libraries = sorted(set(libraries))
+
+    print(textwrap.dedent(f'''\
+        ####      ---- BEGIN GENERATED ----      ####
+        #                                           #
+        # Generated with tools/boost_names.py:
+        #  - boost version:   {vers}
+        #  - modules found:   {len(modules)}
+        #  - libraries found: {len(libraries)}
+        #
+
+        class BoostLibrary():
+            def __init__(self, name: str, shared: T.List[str], static: T.List[str], single: T.List[str], multi: T.List[str]):
+                self.name = name
+                self.shared = shared
+                self.static = static
+                self.single = single
+                self.multi = multi
+
+        class BoostModule():
+            def __init__(self, name: str, key: str, desc: str, libs: T.List[str]):
+                self.name = name
+                self.key = key
+                self.desc = desc
+                self.libs = libs
+
+
+        # dict of all known libraries with additional compile options
+        boost_libraries = {{\
+    '''))
+
+    for i in libraries:
+        print(textwrap.indent(textwrap.dedent(f"""\
+            '{i.name}': BoostLibrary(
+                name='{i.name}',
+                shared={i.shared},
+                static={i.static},
+                single={i.single},
+                multi={i.multi},
+            ),\
+        """), '    '))
+
+    if export_modules:
+        print(textwrap.dedent(f'''\
+            }}
+
+
+            # dict of all modules with metadata
+            boost_modules = {{\
+        '''))
+
+        for mod in modules:
+            desc_escaped = re.sub(r"'", "\\'", mod.desc)
+            print(textwrap.indent(textwrap.dedent(f"""\
+                '{mod.key}': BoostModule(
+                    name='{mod.name}',
+                    key='{mod.key}',
+                    desc='{desc_escaped}',
+                    libs={[x.name for x in mod.libs]},
+                ),\
+            """), '    '))
+
+    print(textwrap.dedent(f'''\
+        }}
+
+        #                                           #
+        ####       ---- END GENERATED ----       ####\
+    '''))
+
+    return 0
 
 if __name__ == '__main__':
-    main(sys.argv)
+    sys.exit(main())
diff -Nru meson-0.53.2/tools/build_website.py meson-0.57.0+really0.56.2/tools/build_website.py
--- meson-0.53.2/tools/build_website.py	1970-01-01 00:00:00.000000000 +0000
+++ meson-0.57.0+really0.56.2/tools/build_website.py	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,51 @@
+#!/usr/bin/env python3
+
+import os, sys, subprocess, shutil
+
+assert(os.getcwd() == '/home/jpakkane')
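+# NOTE: maintainer-specific script; the assert above pins it to jpakkane's home directory.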
+
+from glob import glob
+
+def purge(fname: str) -> None:
+    if not os.path.exists(fname):
+        return
+    if os.path.isdir(fname):
+        shutil.rmtree(fname)
+    else:
+        os.unlink(fname)
+
+def update() -> None:
+    webdir = 'mesonweb'
+    repodir = 'mesonwebbuild'
+    docdir = os.path.join(repodir, 'docs')
+    builddir = os.path.join(docdir, 'builddir')
+    htmldir = os.path.join(builddir, 'Meson documentation-doc/html')
+#    subprocess.check_call(['git', 'pull'], cwd=webdir)
+    subprocess.check_call(['git', 'fetch', '-a'], cwd=repodir)
+    subprocess.check_call(['git', 'reset', '--hard', 'origin/master'],
+                          cwd=repodir)
+    if os.path.isdir(htmldir):
+        shutil.rmtree(htmldir)
+    if os.path.isdir(builddir):
+        shutil.rmtree(builddir)
+    env = os.environ.copy()
+    env['PATH'] = env['PATH'] + ':/home/jpakkane/.local/bin'
+    subprocess.check_call(['../meson.py', '.', 'builddir'], cwd=docdir, env=env)
+    subprocess.check_call(['ninja'], cwd=builddir)
+    old_files = glob(os.path.join(webdir, '*'))
+    for f in old_files:
+        base = f[len(webdir)+1:]
+        if base == 'CNAME' or base == 'favicon.png':
+            continue
+        subprocess.check_call(['git', 'rm', '-rf', base], cwd=webdir)
+    assert(os.path.isdir(webdir))
+    new_entries = glob(os.path.join(htmldir, '*'))
+    for e in new_entries:
+        shutil.move(e, webdir)
+    subprocess.check_call('git add *', shell=True, cwd=webdir)
+    subprocess.check_call(['git', 'commit', '-a', '-m', 'Bleep. Bloop. I am a bot.'],
+                          cwd=webdir)
+    subprocess.check_call(['git', 'push'], cwd=webdir)
+    shutil.rmtree(builddir)
+
+if __name__ == '__main__':
+    update()
diff -Nru meson-0.53.2/tools/cmake2meson.py meson-0.57.0+really0.56.2/tools/cmake2meson.py
--- meson-0.53.2/tools/cmake2meson.py	2020-01-23 12:51:19.000000000 +0000
+++ meson-0.57.0+really0.56.2/tools/cmake2meson.py	2020-09-17 22:00:44.000000000 +0000
@@ -34,7 +34,7 @@
         self.args = args
 
 class Lexer:
-    def __init__(self):
+    def __init__(self) -> None:
         self.token_specification = [
             # Need to be sorted longest to shortest.
             ('ignore', re.compile(r'[ \t]')),
@@ -87,11 +87,11 @@
                 raise ValueError('Lexer got confused line %d column %d' % (lineno, col))
 
 class Parser:
-    def __init__(self, code: str):
+    def __init__(self, code: str) -> None:
         self.stream = Lexer().lex(code)
         self.getsym()
 
-    def getsym(self):
+    def getsym(self) -> None:
         try:
             self.current = next(self.stream)
         except StopIteration:
@@ -118,8 +118,8 @@
         self.expect('rparen')
         return Statement(cur.value, args)
 
-    def arguments(self) -> list:
-        args = []
+    def arguments(self) -> T.List[T.Union[Token, T.Any]]:
+        args = []  # type: T.List[T.Union[Token, T.Any]]
         if self.accept('lparen'):
             args.append(self.arguments())
             self.expect('rparen')
@@ -139,6 +139,17 @@
         while not self.accept('eof'):
             yield(self.statement())
 
+def token_or_group(arg: T.Union[Token, T.List[Token]]) -> str:
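+    # Render a token or a nested token group back to source text; used below for complex if/elseif argument lists.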
+    if isinstance(arg, Token):
+        return ' ' + arg.value
+    elif isinstance(arg, list):
+        line = ' ('
+        for a in arg:
+            line += ' ' + token_or_group(a)
+        line += ' )'
+        return line
+    raise RuntimeError('Conversion error in token_or_group')
+
 class Converter:
     ignored_funcs = {'cmake_minimum_required': True,
                      'enable_testing': True,
@@ -173,7 +184,7 @@
             return res[0]
         return ''
 
-    def write_entry(self, outfile: T.TextIO, t: Statement):
+    def write_entry(self, outfile: T.TextIO, t: Statement) -> None:
         if t.name in Converter.ignored_funcs:
             return
         preincrement = 0
@@ -237,17 +248,16 @@
             except AttributeError:  # complex if statements
                 line = t.name
                 for arg in t.args:
-                    if isinstance(arg, Token):
-                        line += ' ' + arg.value
-                    elif isinstance(arg, list):
-                        line += ' ('
-                        for a in arg:
-                            line += ' ' + a.value
-                        line += ' )'
+                    line += token_or_group(arg)
         elif t.name == 'elseif':
             preincrement = -1
             postincrement = 1
-            line = 'elif %s' % self.convert_args(t.args, False)
+            try:
+                line = 'elif %s' % self.convert_args(t.args, False)
+            except AttributeError:  # complex if statements
+                line = t.name
+                for arg in t.args:
+                    line += token_or_group(arg)
         elif t.name == 'else':
             preincrement = -1
             postincrement = 1
@@ -265,7 +275,7 @@
             outfile.write('\n')
         self.indent_level += postincrement
 
-    def convert(self, subdir: Path = None):
+    def convert(self, subdir: Path = None) -> None:
         if not subdir:
             subdir = self.cmake_root
         cfile = Path(subdir).expanduser() / 'CMakeLists.txt'
@@ -288,7 +298,7 @@
         if subdir == self.cmake_root and len(self.options) > 0:
             self.write_options()
 
-    def write_options(self):
+    def write_options(self) -> None:
         filename = self.cmake_root / 'meson_options.txt'
         with filename.open('w') as optfile:
             for o in self.options:
diff -Nru meson-0.53.2/tools/copy_files.py meson-0.57.0+really0.56.2/tools/copy_files.py
--- meson-0.53.2/tools/copy_files.py	1970-01-01 00:00:00.000000000 +0000
+++ meson-0.57.0+really0.56.2/tools/copy_files.py	2020-08-15 16:27:05.000000000 +0000
@@ -0,0 +1,55 @@
+#!/usr/bin/env python3
+
+
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''
+Copy files
+'''
+
+import argparse
+import shutil
+import typing as T
+from pathlib import Path
+
+PathLike = T.Union[Path,str]
+
+def copy_files(files: T.List[str], input_dir: PathLike, output_dir: PathLike) -> None:
+    if not input_dir:
+        raise ValueError(f'Input directory value is not set')
+    if not output_dir:
+        raise ValueError(f'Output directory value is not set')
+    
+    input_dir = Path(input_dir).resolve()
+    output_dir = Path(output_dir).resolve()
+    output_dir.mkdir(parents=True, exist_ok=True)
+    
+    for f in files:
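+        # Directories are copied recursively; regular files are copied with metadata (copy2).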
+        if (input_dir/f).is_dir():
+            shutil.copytree(input_dir/f, output_dir/f)
+        else:
+            shutil.copy2(input_dir/f, output_dir/f)
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(description='Copy files')
+    parser.add_argument('files', metavar='FILE', nargs='*')
+    parser.add_argument('-C', dest='input_dir', required=True)
+    parser.add_argument('--output-dir', required=True)
+    
+    args = parser.parse_args()
+
+    copy_files(files=args.files,
+               input_dir=args.input_dir,
+               output_dir=args.output_dir)
diff -Nru meson-0.53.2/tools/dircondenser.py meson-0.57.0+really0.56.2/tools/dircondenser.py
--- meson-0.53.2/tools/dircondenser.py	2020-01-23 12:51:19.000000000 +0000
+++ meson-0.57.0+really0.56.2/tools/dircondenser.py	2020-09-17 22:00:44.000000000 +0000
@@ -53,7 +53,7 @@
     entries.sort()
     return entries
 
-def replace_source(sourcefile: str, replacements: T.List[T.Tuple[str, str]]):
+def replace_source(sourcefile: str, replacements: T.List[T.Tuple[str, str]]) -> None:
     with open(sourcefile, 'r') as f:
         contents = f.read()
     for old_name, new_name in replacements:
@@ -61,7 +61,7 @@
     with open(sourcefile, 'w') as f:
         f.write(contents)
 
-def condense(dirname: str):
+def condense(dirname: str) -> None:
     curdir = os.getcwd()
     os.chdir(dirname)
     entries = get_entries()
@@ -74,6 +74,10 @@
             #print('git mv "%s" "%s"' % (old_name, new_name))
             subprocess.check_call(['git', 'mv', old_name, new_name])
             replacements.append((old_name, new_name))
+            # update any appearances of old_name in expected stdout in test.json
+            json = os.path.join(new_name, 'test.json')
+            if os.path.isfile(json):
+                replace_source(json, [(old_name, new_name)])
     os.chdir(curdir)
     replace_source('run_unittests.py', replacements)
     replace_source('run_project_tests.py', replacements)
diff -Nru meson-0.53.2/tools/gen_data.py meson-0.57.0+really0.56.2/tools/gen_data.py
--- meson-0.53.2/tools/gen_data.py	1970-01-01 00:00:00.000000000 +0000
+++ meson-0.57.0+really0.56.2/tools/gen_data.py	2021-01-06 10:39:48.000000000 +0000
@@ -0,0 +1,140 @@
+#!/usr/bin/env python3
+
+# Copyright 2020 Daniel Mensinger
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import hashlib
+import textwrap
+import re
+from pathlib import Path
+from datetime import datetime
+import typing as T
+
+class DataFile:
+    file_counter = 0
+
+    def __init__(self, path: Path, root: Path):
+        self.path = path
+        self.id = self.path.relative_to(root)
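+        # Derive an identifier-safe Python variable name for this file's embedded contents.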
+        self.data_str = f'file_{DataFile.file_counter}_data_' + re.sub('[^a-zA-Z0-9]', '_', self.path.name)
+        DataFile.file_counter += 1
+
+        b = self.path.read_bytes()
+        self.data = b.decode()
+        self.sha256sum = hashlib.sha256(b).hexdigest()
+
+    def __repr__(self) -> str:
+        return f'<{type(self).__name__}: [{self.sha256sum}] {self.id}>'
+
+def main() -> int:
+    root_dir = Path(__file__).resolve().parents[1]
+    mesonbuild_dir = root_dir / 'mesonbuild'
+    out_file = mesonbuild_dir / 'mesondata.py'
+
+    data_dirs = mesonbuild_dir.glob('**/data')
+
+    data_files: T.List[DataFile] = []
+
+    for d in data_dirs:
+        for p in d.iterdir():
+            data_files += [DataFile(p, mesonbuild_dir)]
+
+    print(f'Found {len(data_files)} data files')
+
+    # Generate the data script
+    data = ''
+
+    data += textwrap.dedent(f'''\
+        # Copyright {datetime.today().year} The Meson development team
+
+        # Licensed under the Apache License, Version 2.0 (the "License");
+        # you may not use this file except in compliance with the License.
+        # You may obtain a copy of the License at
+
+        #     http://www.apache.org/licenses/LICENSE-2.0
+
+        # Unless required by applicable law or agreed to in writing, software
+        # distributed under the License is distributed on an "AS IS" BASIS,
+        # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+        # See the License for the specific language governing permissions and
+        # limitations under the License.
+
+
+        ####
+        ####  WARNING: This is an automatically generated file! Do not edit!
+        ####           Generated by {Path(__file__).resolve().relative_to(root_dir)}
+        ####
+
+
+        # TODO: Remember to remove this also from tools/gen_data.py
+        from ._pathlib import Path
+        import typing as T
+
+        if T.TYPE_CHECKING:
+            from .environment import Environment
+
+        ######################
+        # BEGIN Data section #
+        ######################
+
+    ''')
+
+    for i in data_files:
+        data += f"{i.data_str} = '''\\\n{i.data}'''\n\n"
+
+    data += textwrap.dedent(f'''
+        ####################
+        # END Data section #
+        ####################
+
+        class DataFile:
+            def __init__(self, path: Path, sha256sum: str, data: str) -> None:
+                self.path = path
+                self.sha256sum = sha256sum
+                self.data = data
+
+            def write_once(self, path: Path) -> None:
+                if not path.exists():
+                    path.write_text(self.data)
+
+            def write_to_private(self, env: 'Environment') -> Path:
+                out_file = Path(env.scratch_dir) / 'data' / self.path.name
+                out_file.parent.mkdir(exist_ok=True)
+                self.write_once(out_file)
+                return out_file
+
+
+        mesondata = {{
+    ''')
+
+    for i in data_files:
+        data += textwrap.indent(textwrap.dedent(f"""\
+            '{i.id}': DataFile(
+                Path('{i.id}'),
+                '{i.sha256sum}',
+                {i.data_str},
+            ),
+        """), '    ')
+
+    data += textwrap.dedent('''\
+        }
+    ''')
+
+    print(f'Updating {out_file}')
+    out_file.write_text(data)
+    return 0
+
+if __name__ == '__main__':
+    sys.exit(main())
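
To make the generated module concrete: gen_data.py writes a mesondata dict into mesonbuild/mesondata.py whose keys are file paths relative to mesonbuild/ and whose values are instances of the DataFile class emitted above. A rough consumption sketch, assuming a Meson Environment object that provides scratch_dir (the dictionary key is illustrative; real keys are whatever files live under mesonbuild/**/data when the generator runs):

    # Sketch only: look up an embedded data file and materialise it in the
    # build's private scratch directory via the generated DataFile helper.
    from mesonbuild.mesondata import mesondata

    def extract_data_file(env):  # env: a mesonbuild.environment.Environment
        data_file = mesondata['dependencies/data/CMakeLists.txt']  # illustrative key
        return data_file.write_to_private(env)  # Path of the written copy
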
diff -Nru meson-0.53.2/tools/regenerate_docs.py meson-0.57.0+really0.56.2/tools/regenerate_docs.py
--- meson-0.53.2/tools/regenerate_docs.py	1970-01-01 00:00:00.000000000 +0000
+++ meson-0.57.0+really0.56.2/tools/regenerate_docs.py	2020-09-17 22:00:44.000000000 +0000
@@ -0,0 +1,150 @@
+#!/usr/bin/env python3
+
+
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''
+Regenerate markdown docs by using `meson.py` from the root dir
+'''
+
+import argparse
+import jinja2
+import os
+import re
+import subprocess
+import sys
+import textwrap
+import typing as T
+from pathlib import Path
+
+PathLike = T.Union[Path,str]
+
+def _get_meson_output(root_dir: Path, args: T.List) -> str:
+    env = os.environ.copy()
+    env['COLUMNS'] = '80'
+    return subprocess.run([str(sys.executable), str(root_dir/'meson.py')] + args, check=True, capture_output=True, text=True, env=env).stdout.strip()
+
+def get_commands_data(root_dir: Path) -> T.Dict[str, T.Any]:
+    usage_start_pattern = re.compile(r'^usage: ', re.MULTILINE)
+    positional_start_pattern = re.compile(r'^positional arguments:[\t ]*[\r\n]+', re.MULTILINE)
+    options_start_pattern = re.compile(r'^optional arguments:[\t ]*[\r\n]+', re.MULTILINE)
+    commands_start_pattern = re.compile(r'^[A-Za-z ]*[Cc]ommands:[\t ]*[\r\n]+', re.MULTILINE)
+
+    def get_next_start(iterators: T.Sequence[T.Any], end: T.Optional[int]) -> int:
+        return next((i.start() for i in iterators if i), end)
+
+    def normalize_text(text: str) -> str:
+        # clean up formatting
+        out = text
+        out = re.sub(r'\r\n', r'\n', out, flags=re.MULTILINE) # normalize Windows CRLF to a Unix EOL
+        out = re.sub(r'^ +$', '', out, flags=re.MULTILINE) # remove trailing whitespace
+        out = re.sub(r'(?:^\n+|\n+$)', '', out) # remove trailing empty lines
+        return out
+
+    def parse_cmd(cmd: str) -> T.Dict[str, str]:
+        cmd_len = len(cmd)
+        usage = usage_start_pattern.search(cmd)
+        positionals = positional_start_pattern.search(cmd)
+        options = options_start_pattern.search(cmd)
+        commands = commands_start_pattern.search(cmd)
+
+        arguments_start = get_next_start([positionals, options, commands], None)
+        assert arguments_start
+
+        # replace `usage:` with `$` and dedent
+        dedent_size = (usage.end() - usage.start()) - len('$ ')
+        usage_text = textwrap.dedent(f'{dedent_size * " "}$ {normalize_text(cmd[usage.end():arguments_start])}')
+
+        return {
+            'usage': usage_text,
+            'arguments': normalize_text(cmd[arguments_start:cmd_len]),
+        }
+
+    def clean_dir_arguments(text: str) -> str:
+        # Remove platform specific defaults
+        args = [
+            'prefix',
+            'bindir',
+            'datadir',
+            'includedir',
+            'infodir',
+            'libdir',
+            'libexecdir',
+            'localedir',
+            'localstatedir',
+            'mandir',
+            'sbindir',
+            'sharedstatedir',
+            'sysconfdir'
+        ]
+        out = text
+        for a in args:
+            out = re.sub(r'(--' + a + r' .+?)\s+\(default:.+?\)(\.)?', r'\1\2', out, flags=re.MULTILINE|re.DOTALL)
+        return out
+
+    output = _get_meson_output(root_dir, ['--help'])
+    commands = set(c.strip() for c in re.findall(r'usage:(?:.+)?{((?:[a-z]+,*)+?)}', output, re.MULTILINE|re.DOTALL)[0].split(','))
+    commands.remove('help')
+
+    cmd_data = dict()
+
+    for cmd in commands:
+        cmd_output = _get_meson_output(root_dir, [cmd, '--help'])
+        cmd_data[cmd] = parse_cmd(cmd_output)
+        if cmd in ['setup', 'configure']:
+            cmd_data[cmd]['arguments'] = clean_dir_arguments(cmd_data[cmd]['arguments'])
+
+    return cmd_data
+
+def regenerate_commands(root_dir: Path, output_dir: Path) -> None:
+    with open(root_dir/'docs'/'markdown_dynamic'/'Commands.md') as f:
+        template = f.read()
+
+    cmd_data = get_commands_data(root_dir)
+
+    t = jinja2.Template(template, undefined=jinja2.StrictUndefined, keep_trailing_newline=True)
+    content = t.render(cmd_help=cmd_data)
+
+    output_file = output_dir/'Commands.md'
+    with open(output_file, 'w') as f:
+        f.write(content)
+
+    print(f'`{output_file}` was regenerated')
+
+def regenerate_docs(output_dir: PathLike,
+                    dummy_output_file: T.Optional[PathLike]) -> None:
+    if not output_dir:
+        raise ValueError('Output directory value is not set')
+
+    output_dir = Path(output_dir).resolve()
+    output_dir.mkdir(parents=True, exist_ok=True)
+
+    root_dir = Path(__file__).resolve().parent.parent
+
+    regenerate_commands(root_dir, output_dir)
+
+    if dummy_output_file:
+        with open(output_dir/dummy_output_file, 'w') as f:
+            f.write('dummy file for custom_target output')
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(description='Generate meson docs')
+    parser.add_argument('--output-dir', required=True)
+    parser.add_argument('--dummy-output-file', type=str)
+
+    args = parser.parse_args()
+
+    regenerate_docs(output_dir=args.output_dir,
+                    dummy_output_file=args.dummy_output_file)
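
Like copy_files.py, this regeneration script is intended to be run as a standalone tool; the --dummy-output-file option exists so that a build-system custom target (as the comment above suggests) has a declared output. A hypothetical invocation, with an invented output directory and dummy file name:

    # Sketch only -- the paths are made up:
    #   python3 tools/regenerate_docs.py --output-dir builddir/docs --dummy-output-file .dummy
    #
    # Programmatic equivalent using the function defined above:
    from regenerate_docs import regenerate_docs  # assumes tools/ is on sys.path

    regenerate_docs(output_dir='builddir/docs',
                    dummy_output_file='.dummy')
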