Compare commits

...

12 Commits

Author SHA1 Message Date
Jussi Pakkanen 79d530e325 Generators can have extra target dependencies. Closes #4131. 5 years ago
Jussi Pakkanen 60e1676651 Combine the two different pipeline test dirs into one. 5 years ago
Jussi Pakkanen da1f663e8d Reserve build_ prefix in option names. 5 years ago
Jussi Pakkanen ef024583df
Merge pull request #5276 from dcbaker/pkg-config-path-invalidate-cache 5 years ago
Jon Turney fb35e6faac Remove compiler data from build object 5 years ago
Jon Turney 3ff758f22d Switch from build.compiler to environment.coredata.compiler in backends 5 years ago
Jon Turney 6d6f6ad5fa Remove a pointless conditional 5 years ago
Jon Turney 58870fda16 Remove compilers from ModuleState object 5 years ago
Dylan Baker 146e97e974 Use dependency cache 5 years ago
Dylan Baker 5df6823bd8 coredata: Introduce a class to act as the dependency cache 5 years ago
Dylan Baker d941ca1b19 mconf: Don't change the type of coredata.deps 5 years ago
Dylan Baker 0b66a106e3 coredata: fix bugs reading PKG_CONFIG_PATH 5 years ago
  1. 3
      docs/markdown/Reference-manual.md
  2. 16
      docs/markdown/snippets/gendeps.md
  3. 24
      mesonbuild/backend/ninjabackend.py
  4. 14
      mesonbuild/build.py
  5. 127
      mesonbuild/coredata.py
  6. 4
      mesonbuild/dependencies/base.py
  7. 6
      mesonbuild/environment.py
  8. 37
      mesonbuild/interpreter.py
  9. 3
      mesonbuild/mconf.py
  10. 2
      mesonbuild/mintro.py
  11. 5
      mesonbuild/modules/cmake.py
  12. 2
      mesonbuild/modules/rpm.py
  13. 2
      mesonbuild/modules/windows.py
  14. 22
      mesonbuild/munstable_coredata.py
  15. 2
      mesonbuild/optinterpreter.py
  16. 7
      test cases/common/27 pipeline/depends/copyrunner.py
  17. 22
      test cases/common/27 pipeline/depends/filecopier.c
  18. 3
      test cases/common/27 pipeline/depends/libsrc.c.in
  19. 11
      test cases/common/27 pipeline/depends/meson.build
  20. 5
      test cases/common/27 pipeline/depends/prog.c
  21. 6
      test cases/common/27 pipeline/meson.build
  22. 0
      test cases/common/27 pipeline/src/input_src.dat
  23. 0
      test cases/common/27 pipeline/src/meson.build
  24. 0
      test cases/common/27 pipeline/src/prog.c
  25. 0
      test cases/common/27 pipeline/src/srcgen.c
  26. 5
      test cases/common/28 pipeline/meson.build

@ -714,6 +714,9 @@ following:
- `arguments` a list of template strings that will be the command line
arguments passed to the executable
- `depends` is an array of build targets that must be built before this
generator can be run. This is used if you have a generator that calls
a second executable that is built in this project. Available since 0.51.0
- `depfile` is a template string pointing to a dependency file that a
generator can write listing all the additional files this target
depends on, for example a C compiler would list all the header files

@ -0,0 +1,16 @@
## Generators have a new `depends` keyword argument
Generators can now specify extra dependencies with the `depends`
keyword argument. It matches the behaviour of the same argument in
other functions and specifies that the given targets must be built
before the generator can be run. This is useful when a generator
needs to indirectly invoke a second program that is built as part of
the same project.
```meson
exe = executable(...)
cg = generator(program_runner,
output: ['@BASENAME@.c'],
arguments: ['--use-tool=' + exe.full_path(), '@INPUT@', '@OUTPUT@'],
depends: exe)
```

@ -219,7 +219,7 @@ class NinjaBackend(backends.Backend):
def detect_vs_dep_prefix(self, tempfilename):
'''VS writes its dependency in a locale dependent format.
Detect the search prefix to use.'''
for compiler in self.build.compilers.values():
for compiler in self.environment.coredata.compilers.values():
# Have to detect the dependency format
# IFort on windows is MSVC like, but doesn't have /showincludes
@ -314,9 +314,9 @@ int dummy;
# http://clang.llvm.org/docs/JSONCompilationDatabase.html
def generate_compdb(self):
pch_compilers = ['%s_PCH' % i for i in self.build.compilers]
native_compilers = ['%s_COMPILER' % i for i in self.build.compilers]
cross_compilers = ['%s_CROSS_COMPILER' % i for i in self.build.cross_compilers]
pch_compilers = ['%s_PCH' % i for i in self.environment.coredata.compilers]
native_compilers = ['%s_COMPILER' % i for i in self.environment.coredata.compilers]
cross_compilers = ['%s_CROSS_COMPILER' % i for i in self.environment.coredata.cross_compilers]
ninja_compdb = [self.ninja_command, '-t', 'compdb'] + pch_compilers + native_compilers + cross_compilers
builddir = self.environment.get_build_dir()
try:
@ -1492,7 +1492,7 @@ int dummy;
def generate_static_link_rules(self, is_cross):
num_pools = self.environment.coredata.backend_options['backend_max_links'].value
if 'java' in self.build.compilers:
if 'java' in self.environment.coredata.compilers:
if not is_cross:
self.generate_java_link()
if is_cross:
@ -1532,11 +1532,8 @@ int dummy;
def generate_dynamic_link_rules(self):
num_pools = self.environment.coredata.backend_options['backend_max_links'].value
ctypes = [(self.build.compilers, False)]
if self.environment.is_cross_build():
ctypes.append((self.build.cross_compilers, True))
else:
ctypes.append((self.build.cross_compilers, True))
ctypes = [(self.environment.coredata.compilers, False),
(self.environment.coredata.cross_compilers, True)]
for (complist, is_cross) in ctypes:
for langname, compiler in complist.items():
if langname == 'java' \
@ -1718,13 +1715,13 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
depfile=depfile))
def generate_compile_rules(self):
for langname, compiler in self.build.compilers.items():
for langname, compiler in self.environment.coredata.compilers.items():
if compiler.get_id() == 'clang':
self.generate_llvm_ir_compile_rule(compiler, False)
self.generate_compile_rule_for(langname, compiler, False)
self.generate_pch_rule_for(langname, compiler, False)
if self.environment.is_cross_build():
cclist = self.build.cross_compilers
cclist = self.environment.coredata.cross_compilers
for langname, compiler in cclist.items():
if compiler.get_id() == 'clang':
self.generate_llvm_ir_compile_rule(compiler, True)
@ -1803,6 +1800,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
cmd = cmdlist
elem = NinjaBuildElement(self.all_outputs, outfiles, rulename, infilename)
elem.add_dep([self.get_target_filename(x) for x in generator.depends])
if generator.depfile is not None:
elem.add_item('DEPFILE', depfile)
if len(extra_dependencies) > 0:
@ -1822,7 +1820,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
Find all module and submodule made available in a Fortran code file.
"""
compiler = None
for lang, c in self.build.compilers.items():
for lang, c in self.environment.coredata.compilers.items():
if lang == 'fortran':
compiler = c
break

@ -114,9 +114,6 @@ class Build:
self.environment = environment
self.projects = {}
self.targets = OrderedDict()
# Coredata holds the state. This is just here for convenience.
self.compilers = environment.coredata.compilers
self.cross_compilers = environment.coredata.cross_compilers
self.global_args = {}
self.projects_args = {}
self.global_link_args = {}
@ -149,10 +146,6 @@ class Build:
def copy(self):
other = Build(self.environment)
for k, v in self.__dict__.items():
if k in ['compilers', 'cross_compilers']:
# These alias coredata's fields of the same name, and must not
# become copies.
continue
if isinstance(v, (list, dict, set, OrderedDict)):
other.__dict__[k] = v.copy()
else:
@ -1280,6 +1273,7 @@ class Generator:
self.exe = exe
self.depfile = None
self.capture = False
self.depends = []
self.process_kwargs(kwargs)
def __repr__(self):
@ -1328,6 +1322,12 @@ class Generator:
if not isinstance(capture, bool):
raise InvalidArguments('Capture must be boolean.')
self.capture = capture
if 'depends' in kwargs:
depends = listify(kwargs['depends'], unholder=True)
for d in depends:
if not isinstance(d, BuildTarget):
raise InvalidArguments('Depends entries must be build targets.')
self.depends.append(d)
def get_base_outnames(self, inname):
plainname = os.path.basename(inname)

@ -27,6 +27,11 @@ import ast
import argparse
import configparser
from typing import Optional, Any, TypeVar, Generic, Type, List, Union
import typing
import enum
if typing.TYPE_CHECKING:
from . import dependencies
version = '0.50.999'
backendlist = ['ninja', 'vs', 'vs2010', 'vs2015', 'vs2017', 'vs2019', 'xcode']
@ -221,6 +226,112 @@ def load_configs(filenames: List[str]) -> configparser.ConfigParser:
return config
if typing.TYPE_CHECKING:
CacheKeyType = typing.Tuple[typing.Tuple[typing.Any, ...], ...]
SubCacheKeyType = typing.Tuple[typing.Any, ...]
class DependencyCacheType(enum.Enum):

    """Discriminator for how a dependency lookup is keyed in the cache."""

    OTHER = 0
    PKG_CONFIG = 1

    @classmethod
    def from_type(cls, dep: 'dependencies.Dependency') -> 'DependencyCacheType':
        """Return the cache type matching the given Dependency instance."""
        # Imported locally to avoid a circular import at module load time.
        from . import dependencies

        # As more types gain search overrides they'll need to be added here
        result = cls.OTHER
        if isinstance(dep, dependencies.PkgConfigDependency):
            result = cls.PKG_CONFIG
        return result
class DependencySubCache:

    """Per-identifier bucket inside DependencyCache.

    Stores the dependencies found for a single primary key, further keyed
    by a type-specific subkey (see DependencyCache.__calculate_subkey).
    """

    def __init__(self, type_: DependencyCacheType):
        # Which cache types have stored into this bucket; iterated by
        # DependencyCache.get()/items() to recompute candidate subkeys.
        self.types = [type_]
        self.__cache = {}  # type: typing.Dict[SubCacheKeyType, dependencies.Dependency]

    def __getitem__(self, key: 'SubCacheKeyType') -> 'dependencies.Dependency':
        return self.__cache[key]

    def __setitem__(self, key: 'SubCacheKeyType', value: 'dependencies.Dependency') -> None:
        return self.__cache[key] if False else None  # placeholder never executed
class DependencyCache:

    """Class that stores a cache of dependencies.

    This class is meant to encapsulate the fact that we need multiple keys to
    successfully lookup by providing a simple get/put interface.
    """

    def __init__(self, builtins: typing.Dict[str, UserOption[typing.Any]], cross: bool):
        self.__cache = OrderedDict()  # type: typing.MutableMapping[CacheKeyType, DependencySubCache]
        self.__builtins = builtins
        self.__is_cross = cross

    def __calculate_subkey(self, type_: DependencyCacheType) -> typing.Tuple[typing.Any, ...]:
        # pkg-config results depend on the search path in effect, so that
        # path is folded into the subkey; other types carry no extra state.
        if type_ is DependencyCacheType.PKG_CONFIG:
            if self.__is_cross:
                return tuple(self.__builtins['cross_pkg_config_path'].value)
            return tuple(self.__builtins['pkg_config_path'].value)
        assert type_ is DependencyCacheType.OTHER, 'Someone forgot to update subkey calculations for a new type'
        return tuple()

    def __iter__(self) -> typing.Iterator['CacheKeyType']:
        return self.keys()

    def put(self, key: 'CacheKeyType', dep: 'dependencies.Dependency') -> None:
        """Store *dep* under *key*, subkeyed by its cache type."""
        t = DependencyCacheType.from_type(dep)
        if key not in self.__cache:
            self.__cache[key] = DependencySubCache(t)
        elif t not in self.__cache[key].types:
            # Bug fix: register the type on an existing bucket. Without
            # this, an entry stored under a type that was not the bucket's
            # original one is never visible to get()/items(), which only
            # iterate the registered .types to recompute subkeys.
            self.__cache[key].types.append(t)
        subkey = self.__calculate_subkey(t)
        self.__cache[key][subkey] = dep

    def get(self, key: 'CacheKeyType') -> typing.Optional['dependencies.Dependency']:
        """Get a value from the cache.

        If there is no cache entry then None will be returned.
        """
        try:
            val = self.__cache[key]
        except KeyError:
            return None

        for t in val.types:
            subkey = self.__calculate_subkey(t)
            try:
                return val[subkey]
            except KeyError:
                pass
        return None

    def values(self) -> typing.Iterator['dependencies.Dependency']:
        for c in self.__cache.values():
            yield from c.values()

    def keys(self) -> typing.Iterator['CacheKeyType']:
        return iter(self.__cache.keys())

    def items(self) -> typing.Iterator[typing.Tuple['CacheKeyType', typing.List['dependencies.Dependency']]]:
        for k, v in self.__cache.items():
            vs = []
            for t in v.types:
                subkey = self.__calculate_subkey(t)
                if subkey in v:
                    vs.append(v[subkey])
            yield k, vs

    def clear(self) -> None:
        self.__cache.clear()
# This class contains all data that must persist over multiple
# invocations of Meson. It is roughly the same thing as
# cmakecache.
@ -248,7 +359,14 @@ class CoreData:
self.cross_files = self.__load_config_files(options.cross_file, 'cross')
self.compilers = OrderedDict()
self.cross_compilers = OrderedDict()
self.deps = OrderedDict()
build_cache = DependencyCache(self.builtins, False)
if self.cross_files:
host_cache = DependencyCache(self.builtins, True)
else:
host_cache = build_cache
self.deps = PerMachine(build_cache, host_cache) # type: PerMachine[DependencyCache]
self.compiler_check_cache = OrderedDict()
# Only to print a warning if it changes between Meson invocations.
self.config_files = self.__load_config_files(options.native_file, 'native')
@ -510,8 +628,11 @@ class CoreData:
# Some options default to environment variables if they are
# unset, set those now. These will either be overwritten
# below, or they won't.
options['pkg_config_path'] = os.environ.get('PKG_CONFIG_PATH', '').split(':')
# below, or they won't. These should only be set on the first run.
if env.first_invocation:
p_env = os.environ.get('PKG_CONFIG_PATH')
if p_env:
options['pkg_config_path'] = p_env.split(':')
for k, v in env.cmd_line_options.items():
if subproject:

@ -2358,8 +2358,8 @@ class ExtraFrameworkDependency(ExternalDependency):
return 'framework'
def get_dep_identifier(name, kwargs, want_cross: bool) -> Tuple:
identifier = (name, want_cross)
def get_dep_identifier(name, kwargs) -> Tuple:
identifier = (name, )
for key, value in kwargs.items():
# 'version' is irrelevant for caching; the caller must check version matches
# 'native' is handled above with `want_cross`

@ -411,8 +411,8 @@ class Environment:
# target machine.)
machines = PerThreeMachineDefaultable()
# Similar to coredata.compilers and build.compilers, but lower level in
# that there is no meta data, only names/paths.
# Similar to coredata.compilers, but lower level in that there is no
# meta data, only names/paths.
binaries = PerMachineDefaultable()
# Misc other properties about each machine.
@ -535,7 +535,7 @@ class Environment:
self.coredata.meson_command = mesonlib.meson_command
self.first_invocation = True
def is_cross_build(self):
def is_cross_build(self) -> bool:
return not self.machines.matches_build_machine(MachineChoice.HOST)
def dump_coredata(self):

@ -1633,7 +1633,7 @@ class CompilerHolder(InterpreterObject):
ModuleState = namedtuple('ModuleState', [
'build_to_src', 'subproject', 'subdir', 'current_lineno', 'environment',
'project_name', 'project_version', 'backend', 'compilers', 'targets',
'project_name', 'project_version', 'backend', 'targets',
'data', 'headers', 'man', 'global_args', 'project_args', 'build_machine',
'host_machine', 'target_machine', 'current_node'])
@ -1668,7 +1668,6 @@ class ModuleHolder(InterpreterObject, ObjectHolder):
# The backend object is under-used right now, but we will need it:
# https://github.com/mesonbuild/meson/issues/1419
backend=self.interpreter.backend,
compilers=self.interpreter.build.compilers,
targets=self.interpreter.build.targets,
data=self.interpreter.build.data,
headers=self.interpreter.build.get_headers(),
@ -1825,9 +1824,9 @@ class MesonMain(InterpreterObject):
if not isinstance(native, bool):
raise InterpreterException('Type of "native" must be a boolean.')
if native:
clist = self.build.compilers
clist = self.interpreter.coredata.compilers
else:
clist = self.build.cross_compilers
clist = self.interpreter.coredata.cross_compilers
if cname in clist:
return CompilerHolder(clist[cname], self.build.environment, self.interpreter.subproject)
raise InterpreterException('Tried to access compiler for unspecified language "%s".' % cname)
@ -1984,7 +1983,12 @@ permitted_kwargs = {'add_global_arguments': {'language', 'native'},
},
'executable': build.known_exe_kwargs,
'find_program': {'required', 'native'},
'generator': {'arguments', 'output', 'depfile', 'capture', 'preserve_path_from'},
'generator': {'arguments',
'output',
'depends',
'depfile',
'capture',
'preserve_path_from'},
'include_directories': {'is_system'},
'install_data': {'install_dir', 'install_mode', 'rename', 'sources'},
'install_headers': {'install_dir', 'install_mode', 'subdir'},
@ -2209,7 +2213,7 @@ class Interpreter(InterpreterBase):
def check_cross_stdlibs(self):
if self.build.environment.is_cross_build():
props = self.build.environment.properties.host
for l in self.build.cross_compilers.keys():
for l in self.coredata.cross_compilers.keys():
try:
di = mesonlib.stringlistify(props.get_stdlib(l))
if len(di) != 2:
@ -2860,14 +2864,13 @@ external dependencies (including libraries) must go to "dependencies".''')
# FIXME: Not all dependencies support such a distinction right now,
# and we repeat this check inside dependencies that do. We need to
# consolidate this somehow.
is_cross = self.environment.is_cross_build()
if 'native' in kwargs and is_cross:
want_cross = not kwargs['native']
if self.environment.is_cross_build() and kwargs.get('native', False):
for_machine = MachineChoice.BUILD
else:
want_cross = is_cross
for_machine = MachineChoice.HOST
identifier = dependencies.get_dep_identifier(name, kwargs, want_cross)
cached_dep = self.coredata.deps.get(identifier)
identifier = dependencies.get_dep_identifier(name, kwargs)
cached_dep = self.coredata.deps[for_machine].get(identifier)
if cached_dep:
if not cached_dep.found():
mlog.log('Dependency', mlog.bold(name),
@ -3019,7 +3022,11 @@ external dependencies (including libraries) must go to "dependencies".''')
# cannot cache them. They must always be evaluated else
# we won't actually read all the build files.
if dep.found():
self.coredata.deps[identifier] = dep
if self.environment.is_cross_build() and kwargs.get('native', False):
for_machine = MachineChoice.BUILD
else:
for_machine = MachineChoice.HOST
self.coredata.deps[for_machine].put(identifier, dep)
return DependencyHolder(dep, self.subproject)
if has_fallback:
@ -3858,7 +3865,7 @@ different subdirectory.
self.print_extra_warnings()
def print_extra_warnings(self):
for c in self.build.compilers.values():
for c in self.coredata.compilers.values():
if c.get_id() == 'clang':
self.check_clang_asan_lundef()
break
@ -4075,7 +4082,7 @@ This will become a hard error in the future.''', location=self.current_node)
def get_used_languages(self, target):
result = {}
for i in target.sources:
for lang, c in self.build.compilers.items():
for lang, c in self.coredata.compilers.items():
if c.can_compile(i):
result[lang] = True
break

@ -63,7 +63,8 @@ class Conf:
raise ConfException('Directory {} is neither a Meson build directory nor a project source directory.'.format(build_dir))
def clear_cache(self):
self.coredata.deps = {}
self.coredata.deps.host.clear()
self.coredata.deps.build.clear()
def set_options(self, options):
self.coredata.set_options(options)

@ -282,7 +282,7 @@ def list_deps_from_source(intr: IntrospectionInterpreter):
def list_deps(coredata: cdata.CoreData):
result = []
for d in coredata.deps.values():
for d in coredata.deps.host.values():
if d.found():
result += [{'name': d.name,
'compile_args': d.get_compile_args(),

@ -52,7 +52,8 @@ class CmakeModule(ExtensionModule):
super().__init__(interpreter)
self.snippets.add('configure_package_config_file')
def detect_voidp_size(self, compilers, env):
def detect_voidp_size(self, env):
compilers = env.coredata.compilers
compiler = compilers.get('c', None)
if not compiler:
compiler = compilers.get('cpp', None)
@ -115,7 +116,7 @@ class CmakeModule(ExtensionModule):
conf = {
'CVF_VERSION': (version, ''),
'CMAKE_SIZEOF_VOID_P': (str(self.detect_voidp_size(state.compilers, state.environment)), '')
'CMAKE_SIZEOF_VOID_P': (str(self.detect_voidp_size(state.environment)), '')
}
mesonlib.do_conf_file(template_file, version_file, conf, 'meson')

@ -83,7 +83,7 @@ class RPMModule(ExtensionModule):
fn.write('BuildRequires: meson\n')
for compiler in required_compilers:
fn.write('BuildRequires: %s\n' % compiler)
for dep in coredata.environment.coredata.deps:
for dep in coredata.environment.coredata.deps.host:
fn.write('BuildRequires: pkgconfig(%s)\n' % dep[0])
# ext_libs and ext_progs have been removed from coredata so the following code
# no longer works. It is kept as a reminder of the idea should anyone wish

@ -50,7 +50,7 @@ class WindowsModule(ExtensionModule):
rescomp = ExternalProgram.from_bin_list(state.environment.binaries.host, 'windres')
if not rescomp or not rescomp.found():
comp = self.detect_compiler(state.compilers)
comp = self.detect_compiler(state.environment.coredata.compilers)
if comp.id in {'msvc', 'clang-cl', 'intel-cl'}:
rescomp = ExternalProgram('rc', silent=True)
else:

@ -97,13 +97,11 @@ def run(options):
print('Cached cross compilers:')
dump_compilers(v)
elif k == 'deps':
native = []
cross = []
for dep_key, dep in sorted(v.items()):
if dep_key[1]:
cross.append((dep_key, dep))
else:
native.append((dep_key, dep))
native = list(sorted(v.build.items()))
if v.host is not v.build:
cross = list(sorted(v.host.items()))
else:
cross = []
def print_dep(dep_key, dep):
print(' ' + dep_key[0] + ": ")
@ -115,12 +113,14 @@ def run(options):
if native:
print('Cached native dependencies:')
for dep_key, dep in native:
print_dep(dep_key, dep)
for dep_key, deps in native:
for dep in deps:
print_dep(dep_key, dep)
if cross:
print('Cached dependencies:')
for dep_key, dep in cross:
print_dep(dep_key, dep)
for dep_key, deps in cross:
for dep in deps:
print_dep(dep_key, dep)
else:
print(k + ':')
print(textwrap.indent(pprint.pformat(v), ' '))

@ -23,7 +23,7 @@ from . import compilers
forbidden_option_names = set(coredata.builtin_options.keys())
forbidden_prefixes = [lang + '_' for lang in compilers.all_languages] + ['b_', 'backend_']
reserved_prefixes = ['cross_']
reserved_prefixes = ['cross_', 'build_']
def is_invalid_name(name: str, *, log: bool = True) -> bool:
if name in forbidden_option_names:

@ -0,0 +1,7 @@
#!/usr/bin/env python3

"""Forward our three CLI arguments (prog, infile, outfile) to *prog*."""

import subprocess
import sys


def main() -> None:
    prog, infile, outfile = sys.argv[1:]
    subprocess.check_call([prog, infile, outfile])


main()

@ -0,0 +1,22 @@
#include<stdio.h>
#include<assert.h>

#define BUFSIZE 1024

/* Copy up to BUFSIZE bytes from argv[1] to argv[2].
 * Test fixture: any I/O failure aborts via assert(). */
int main(int argc, char **argv) {
    char block[BUFSIZE];
    size_t got;
    size_t put;
    FILE *src;
    FILE *dst;

    src = fopen(argv[1], "rb");
    assert(src);
    got = fread(block, 1, BUFSIZE, src);
    assert(got > 0);
    fclose(src);

    dst = fopen(argv[2], "wb");
    assert(dst);
    put = fwrite(block, 1, got, dst);
    assert(put == got);
    fclose(dst);
    return 0;
}

@ -0,0 +1,11 @@
# Regression test for the generator 'depends' keyword: the generator runs a
# wrapper script that in turn invokes a natively built copier executable, so
# the copier must be forced to build before the generator can run.
runner = find_program('copyrunner.py')

# Built for the build machine because the generator executes it at build time.
copier = executable('copier', 'filecopier.c', native: true)

cg = generator(runner,
  output: ['@BASENAME@.c'],
  arguments: [copier.full_path(), '@INPUT@', '@OUTPUT@'],
  depends: copier)

test('generatordep',
  executable('gd', 'prog.c', cg.process('libsrc.c.in')))

@ -0,0 +1,5 @@
/* func() is provided by the generated libsrc.c; see meson.build. */
int func(void);

/* Exit 0 iff the generated function returns the expected value 42. */
int main(int argc, char **argv) {
    if (func() == 42)
        return 0;
    return 1;
}

@ -15,3 +15,9 @@ generated = gen.process(['input_src.dat'])
e2 = executable('prog', 'prog.c', generated)
test('pipelined', e2)
# This is in a subdirectory to make sure
# we write proper subdir paths to output.
subdir('src')
subdir('depends')

@ -1,5 +0,0 @@
project('pipeline test', 'c')
# This is in a subdirectory to make sure
# we write proper subdir paths to output.
subdir('src')
Loading…
Cancel
Save