Fix subprojects logic in cross build scenarios
In a cross build we cannot use the same subproject state for both machines.

We may not even need to use a subproject for both machines, for example
if the build machine has a dependency installed, and we only need to
fall back to a subproject for the host machine.

Fixes: mesonbuild#10947
oleavr committed Mar 14, 2024
1 parent b591340 commit eea60e5
Showing 22 changed files with 405 additions and 98 deletions.
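
To make the scenario from the commit message concrete before the per-file diffs, here is a minimal, self-contained Python sketch (hypothetical names, not Meson's real classes) of the behaviour the change aims for: the build machine satisfies the dependency from the system, only the host machine falls back to a subproject, and subproject state is tracked per machine so the two never share it.

from enum import Enum

class MachineChoice(Enum):
    BUILD = 0
    HOST = 1

# Pretend the build machine has zlib installed, the host machine does not.
system_deps = {('zlib', MachineChoice.BUILD)}

# Subproject state is keyed by (name, machine) rather than by name alone.
subprojects: dict = {}

def lookup(name: str, for_machine: MachineChoice) -> str:
    if (name, for_machine) in system_deps:
        return f'{name}: system dependency for {for_machine.name}'
    key = (name, for_machine)
    if key not in subprojects:
        # Configure the fallback subproject only for the machine that needs it.
        subprojects[key] = f'{name}: subproject fallback for {for_machine.name}'
    return subprojects[key]

print(lookup('zlib', MachineChoice.BUILD))  # system dependency, no subproject
print(lookup('zlib', MachineChoice.HOST))   # subproject configured for the host only
assert ('zlib', MachineChoice.BUILD) not in subprojects
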
6 changes: 6 additions & 0 deletions mesonbuild/backend/backends.py
@@ -371,6 +371,8 @@ def get_target_dir(self, target: T.Union[build.Target, build.CustomTargetIndex])
dirname = target.get_output_subdir()
else:
dirname = 'meson-out'
if target.build_only_subproject:
dirname = 'build.' + dirname
return dirname

def get_target_dir_relative_to(self, t: build.Target, o: build.Target) -> str:
@@ -409,6 +411,10 @@ def get_target_generated_dir(
# target that the GeneratedList is used in
return os.path.join(self.get_target_private_dir(target), src)

@classmethod
def compute_build_subdir(cls, subdir: str, build_only_subproject: bool) -> str:
return build.compute_build_subdir(subdir, build_only_subproject)

def get_unity_source_file(self, target: T.Union[build.BuildTarget, build.CustomTarget, build.CustomTargetIndex],
suffix: str, number: int) -> mesonlib.File:
# There is a potential conflict here, but it is unlikely that
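
The backends.py hunks above are the output side of a naming convention this commit introduces: targets that belong to a build-machine-only subproject get a 'build.'-prefixed output directory so their artifacts cannot collide with host-machine ones. A standalone sketch of that helper, mirroring the compute_build_subdir() added to mesonbuild/build.py further down:

def compute_build_subdir(subdir: str, build_only_subproject: bool) -> str:
    # Build-only subprojects live under a parallel 'build.subprojects/...' tree.
    if build_only_subproject:
        assert subdir.startswith('subprojects')
        return 'build.' + subdir
    return subdir

print(compute_build_subdir('subprojects/zlib', True))   # build.subprojects/zlib
print(compute_build_subdir('subprojects/zlib', False))  # subprojects/zlib
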
9 changes: 5 additions & 4 deletions mesonbuild/backend/ninjabackend.py
@@ -2785,7 +2785,7 @@ def generate_llvm_ir_compile(self, target, src):
return (rel_obj, rel_src)

@lru_cache(maxsize=None)
def generate_inc_dir(self, compiler: 'Compiler', d: str, basedir: str, is_system: bool) -> \
def generate_inc_dir(self, compiler: 'Compiler', d: str, basedir: str, is_system: bool, build_only_subproject: bool) -> \
T.Tuple['ImmutableListProtocol[str]', 'ImmutableListProtocol[str]']:
# Avoid superfluous '/.' at the end of paths when d is '.'
if d not in ('', '.'):
@@ -2800,8 +2800,9 @@ def generate_inc_dir(self, compiler: 'Compiler', d: str, basedir: str, is_system
# inc = include_directories('foo/bar/baz')
#
# But never subdir()s into the actual dir.
if os.path.isdir(os.path.join(self.environment.get_build_dir(), expdir)):
bargs = compiler.get_include_args(expdir, is_system)
subdir = self.compute_build_subdir(expdir, build_only_subproject)
if os.path.isdir(os.path.join(self.environment.get_build_dir(), subdir)):
bargs = compiler.get_include_args(subdir, is_system)
else:
bargs = []
return (sargs, bargs)
@@ -2850,7 +2851,7 @@ def _generate_single_compile_target_args(self, target: build.BuildTarget, compil
# flags will be added in reversed order.
for d in reversed(i.get_incdirs()):
# Add source subdir first so that the build subdir overrides it
(compile_obj, includeargs) = self.generate_inc_dir(compiler, d, basedir, i.is_system)
(compile_obj, includeargs) = self.generate_inc_dir(compiler, d, basedir, i.is_system, i.build_only_subproject)
commands += compile_obj
commands += includeargs
for d in i.get_extra_build_dirs():
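
In the same spirit, when the Ninja backend turns include_directories() entries into compiler flags, the build-tree side of an include path has to use the prefixed directory for build-only subprojects. A rough, simplified sketch of that mapping, assuming GCC-style -I flags and none of the backend's real caching:

import os

def build_tree_include_args(build_root: str, expdir: str,
                            build_only_subproject: bool) -> list:
    # Mirror of the hunk above: the build-tree include path for a build-only
    # subproject lives under the 'build.'-prefixed directory, and it is only
    # emitted when that directory actually exists.
    subdir = 'build.' + expdir if build_only_subproject else expdir
    if os.path.isdir(os.path.join(build_root, subdir)):
        return ['-I' + subdir]
    return []
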
59 changes: 52 additions & 7 deletions mesonbuild/build.py
@@ -291,23 +291,59 @@ def get_custom_targets(self):

def copy(self) -> Build:
other = Build(self.environment)
self._copy_to(other)
return other

def copy_for_build(self) -> Build:
other = Build(self.environment.copy_for_build())
self._copy_to(other)

other.tests = []
other.benchmarks = []
other.test_setups = {}
other.test_setup_default_name = None
other.find_overrides = {}
other.searched_programs = set()

other.dependency_overrides = PerMachineDefaultable.default(False, self.dependency_overrides.build, {})
other.devenv = []
other.modules = []

return other

def _copy_to(self, other: Build) -> None:
for k, v in self.__dict__.items():
if k == 'environment':
continue
if isinstance(v, (list, dict, set, OrderedDict)):
other.__dict__[k] = v.copy()
else:
other.__dict__[k] = v
return other

def merge(self, other: Build) -> None:
is_build_only = other.environment.coredata.is_build_only

for k, v in other.__dict__.items():
if k == 'environment':
continue
if is_build_only and k == 'dependency_overrides':
continue
self.__dict__[k] = v

def ensure_static_linker(self, compiler: Compiler) -> None:
if self.static_linker[compiler.for_machine] is None and compiler.needs_static_linker():
self.static_linker[compiler.for_machine] = detect_static_linker(self.environment, compiler)

def get_project(self):
return self.projects['']
return self.projects[('', MachineChoice.HOST)]

def find_subproject_descriptive_name(self, name: str) -> T.Optional[str]:
for for_machine in iter(MachineChoice):
subp_id = (name, for_machine)
p = self.projects.get(subp_id, None)
if p is not None:
return p
return None

def get_subproject_dir(self):
return self.subproject_dir
@@ -371,6 +407,7 @@ class IncludeDirs(HoldableObject):
curdir: str
incdirs: T.List[str]
is_system: bool
build_only_subproject: bool
# Interpreter has validated that all given directories
# actually exist.
extra_build_dirs: T.List[str] = field(default_factory=list)
@@ -608,7 +645,7 @@ def get_source_subdir(self) -> str:
return self.subdir

def get_output_subdir(self) -> str:
return self.get_source_subdir()
return compute_build_subdir(self.get_source_subdir(), self.build_only_subproject)

def get_typename(self) -> str:
return self.typename
@@ -624,7 +661,7 @@ def _get_id_hash(target_id: str) -> str:
return h.hexdigest()[:7]

@staticmethod
def construct_id_from_path(subdir: str, name: str, type_suffix: str) -> str:
def construct_id_from_path(subdir: str, name: str, type_suffix: str, extra_prefix: str = '') -> str:
"""Construct target ID from subdir, name and type suffix.
This helper function is made public mostly for tests."""
@@ -635,7 +672,7 @@ def construct_id_from_path(subdir: str, name: str, type_suffix: str) -> str:
# FIXME replace with assert when slash in names is prohibited
name_part = name.replace('/', '@').replace('\\', '@')
assert not has_path_sep(type_suffix)
my_id = name_part + type_suffix
my_id = extra_prefix + name_part + type_suffix
if subdir:
subdir_part = Target._get_id_hash(subdir)
# preserve myid for better debuggability
@@ -647,7 +684,7 @@ def get_id(self) -> str:
if getattr(self, 'name_suffix_set', False):
name += '.' + self.suffix
return self.construct_id_from_path(
self.subdir, name, self.type_suffix())
self.subdir, name, self.type_suffix(), 'build.' if self.build_only_subproject else '')

def process_kwargs_base(self, kwargs: T.Dict[str, T.Any]) -> None:
if 'build_by_default' in kwargs:
@@ -1537,7 +1574,8 @@ def add_include_dirs(self, args: T.Sequence['IncludeDirs'], set_is_system: T.Opt
set_is_system = 'preserve'
if set_is_system != 'preserve':
is_system = set_is_system == 'system'
ids = [IncludeDirs(x.get_curdir(), x.get_incdirs(), is_system, x.get_extra_build_dirs()) for x in ids]
ids = [IncludeDirs(x.get_curdir(), x.get_incdirs(), is_system,
self.build_only_subproject, x.get_extra_build_dirs()) for x in ids]
self.include_dirs += ids

def get_aliases(self) -> T.List[T.Tuple[str, str, str]]:
@@ -2968,6 +3006,7 @@ class CustomTargetIndex(CustomTargetBase, HoldableObject):

def __post_init__(self) -> None:
self.for_machine = self.target.for_machine
self.build_only_subproject = self.target.build_only_subproject

@property
def name(self) -> str:
@@ -3112,6 +3151,12 @@ def get_sources_string_names(sources, backend):
raise AssertionError(f'Unknown source type: {s!r}')
return names

def compute_build_subdir(subdir: str, build_only_subproject: bool) -> str:
if build_only_subproject:
assert subdir.startswith('subprojects')
return 'build.' + subdir
return subdir

def load(build_dir: str) -> Build:
filename = os.path.join(build_dir, 'meson-private', 'build.dat')
try:
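
The build.py changes above split the old copy() into a shared _copy_to() plus a copy_for_build() that resets state only meaningful for the host machine (tests, benchmarks, find_program overrides, and so on). A compact, generic sketch of that copy-and-reset pattern, simplified rather than the real Build class:

from collections import OrderedDict

class BuildState:
    def __init__(self) -> None:
        self.targets: OrderedDict = OrderedDict()
        self.tests: list = []
        self.version = '1.0'

    def copy(self) -> 'BuildState':
        other = BuildState.__new__(BuildState)
        self._copy_to(other)
        return other

    def copy_for_build(self) -> 'BuildState':
        other = self.copy()
        # Host-only state is dropped in the build-machine copy; tests and
        # benchmarks are only ever run for the host configuration.
        other.tests = []
        return other

    def _copy_to(self, other: 'BuildState') -> None:
        # Shallow-copy containers so later mutations do not leak between copies.
        for k, v in self.__dict__.items():
            if isinstance(v, (list, dict, set, OrderedDict)):
                other.__dict__[k] = v.copy()
            else:
                other.__dict__[k] = v
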
24 changes: 22 additions & 2 deletions mesonbuild/coredata.py
@@ -605,6 +605,24 @@ def __init__(self, options: SharedCMDOptions, scratch_dir: str, meson_command: T
self.builtin_options_libdir_cross_fixup()
self.init_builtins('')

def copy_for_build(self) -> CoreData:
other = CoreData.__new__(CoreData)
for k, v in self.__dict__.items():
other.__dict__[k] = v

other.cross_files = []

other.compilers = PerMachine(self.compilers.build, self.compilers.build)

other.deps = PerMachineDefaultable.default(
is_cross=False,
build=self.deps.build,
host=self.deps.host)

other.is_build_only = True

return other

@staticmethod
def __load_config_files(options: SharedCMDOptions, scratch_dir: str, ftype: str) -> T.List[str]:
# Need to try and make the passed filenames absolute because when the
@@ -946,7 +964,9 @@ def copy_build_options_from_regular_ones(self) -> bool:
def set_options(self, options: T.Dict[OptionKey, T.Any], subproject: str = '', first_invocation: bool = False) -> bool:
dirty = False
if not self.is_cross_build():
options = {k: v for k, v in options.items() if k.machine is not MachineChoice.BUILD}
other_machine = MachineChoice.HOST if self.is_build_only else MachineChoice.BUILD
options = {k: v for k, v in options.items() if k.machine is not other_machine}

# Set prefix first because it's needed to sanitize other options
pfk = OptionKey('prefix')
if pfk in options:
@@ -969,7 +989,7 @@ def set_options(self, options: T.Dict[OptionKey, T.Any], subproject: str = '', f
sub = f'In subproject {subproject}: ' if subproject else ''
raise MesonException(f'{sub}Unknown options: "{unknown_options_str}"')

if not self.is_cross_build():
if not self.is_cross_build() and not self.is_build_only:
dirty |= self.copy_build_options_from_regular_ones()

return dirty
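
One subtle consequence of the build-only copy shows up in set_options() above: the copy behaves like a native (non-cross) build, but the options it should ignore are the host-machine ones rather than the build-machine ones. A standalone sketch of that filtering rule, using a hypothetical, simplified OptionKey:

from dataclasses import dataclass
from enum import Enum

class MachineChoice(Enum):
    BUILD = 0
    HOST = 1

@dataclass(frozen=True)
class OptionKey:
    name: str
    machine: MachineChoice

def filter_options(options: dict, is_cross_build: bool, is_build_only: bool) -> dict:
    if is_cross_build:
        return dict(options)
    # In a non-cross setup only one machine's options are meaningful; for a
    # build-only copy that machine is BUILD, so HOST options are dropped.
    other = MachineChoice.HOST if is_build_only else MachineChoice.BUILD
    return {k: v for k, v in options.items() if k.machine is not other}

opts = {OptionKey('c_args', MachineChoice.HOST): ['-O2'],
        OptionKey('c_args', MachineChoice.BUILD): ['-O1']}
print(filter_options(opts, is_cross_build=False, is_build_only=True))
# only the BUILD-machine entry survives
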
35 changes: 35 additions & 0 deletions mesonbuild/environment.py
@@ -650,6 +650,33 @@ def __init__(self, source_dir: str, build_dir: str, options: coredata.SharedCMDO
self.default_pkgconfig = ['pkg-config']
self.wrap_resolver: T.Optional['Resolver'] = None

def copy_for_build(self) -> Environment:
other = Environment.__new__(Environment)
for k, v in self.__dict__.items():
other.__dict__[k] = v

other.coredata = self.coredata.copy_for_build()

machines: PerThreeMachineDefaultable[MachineInfo] = PerThreeMachineDefaultable()
machines.build = self.machines.build
other.machines = machines.default_missing()

binaries: PerMachineDefaultable[BinaryTable] = PerMachineDefaultable()
binaries.build = self.binaries.build
other.binaries = binaries.default_missing()

properties: PerMachineDefaultable[Properties] = PerMachineDefaultable()
properties.build = self.properties.build
other.properties = properties.default_missing()

cmakevars: PerMachineDefaultable[CMakeVariables] = PerMachineDefaultable()
cmakevars.build = self.cmakevars.build
other.cmakevars = cmakevars.default_missing()

other.exe_wrapper = None

return other

def _load_machine_file_options(self, config: 'ConfigParser', properties: Properties, machine: MachineChoice) -> None:
"""Read the contents of a Machine file and put it in the options store."""

@@ -871,6 +898,14 @@ def get_source_dir(self) -> str:
def get_build_dir(self) -> str:
return self.build_dir

def build_output_rpath(self, subdir: str, *parts: T.Sequence[str]) -> str:
if self.coredata.is_build_only:
assert subdir.startswith('subprojects')
result = 'build.' + subdir
else:
result = subdir
return os.path.join(result, *parts)

def get_import_lib_dir(self) -> str:
"Install dir for the import library (library used for linking)"
return self.get_libdir()
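
Environment.copy_for_build() above repeatedly uses the same trick: populate only the build-machine slot of a per-machine container and let the missing machines default to it. A simplified, self-contained sketch of that idea (not Meson's real PerMachineDefaultable):

from typing import Generic, Optional, TypeVar

T = TypeVar('T')

class PerMachine(Generic[T]):
    def __init__(self, build: T, host: T) -> None:
        self.build = build
        self.host = host

class PerMachineDefaultable(Generic[T]):
    def __init__(self) -> None:
        self.build: Optional[T] = None
        self.host: Optional[T] = None

    def default_missing(self) -> PerMachine[T]:
        # Any machine left unset falls back to the build-machine value, which
        # is exactly what a build-only environment wants.
        host = self.host if self.host is not None else self.build
        return PerMachine(self.build, host)

machines: PerMachineDefaultable[str] = PerMachineDefaultable()
machines.build = 'x86_64-linux'
per_machine = machines.default_missing()
print(per_machine.host)  # 'x86_64-linux': the host defaults to the build machine
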
25 changes: 16 additions & 9 deletions mesonbuild/interpreter/dependencyfallbacks.py
@@ -74,7 +74,7 @@ def _do_dependency_cache(self, kwargs: TYPE_nkwargs, func_args: TYPE_nvar, func_
name = func_args[0]
cached_dep = self._get_cached_dep(name, kwargs)
if cached_dep:
self._verify_fallback_consistency(cached_dep)
self._verify_fallback_consistency(cached_dep, kwargs.get('native', False))
return cached_dep

def _do_dependency(self, kwargs: TYPE_nkwargs, func_args: TYPE_nvar, func_kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
@@ -95,7 +95,8 @@ def _do_dependency(self, kwargs: TYPE_nkwargs, func_args: TYPE_nvar, func_kwargs
def _do_existing_subproject(self, kwargs: TYPE_nkwargs, func_args: TYPE_nvar, func_kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
subp_name = func_args[0]
varname = self.subproject_varname
if subp_name and self._get_subproject(subp_name):
native = kwargs.get('native', False)
if subp_name and self._get_subproject(subp_name, native):
return self._get_subproject_dep(subp_name, varname, kwargs)
return None

@@ -127,18 +128,21 @@ def _do_subproject(self, kwargs: TYPE_nkwargs, func_args: TYPE_nvar, func_kwargs
func_kwargs.setdefault('version', [])
if 'default_options' in kwargs and isinstance(kwargs['default_options'], str):
func_kwargs['default_options'] = listify(kwargs['default_options'])
func_kwargs.setdefault('native', kwargs.get('native', False))
self.interpreter.do_subproject(subp_name, func_kwargs)
return self._get_subproject_dep(subp_name, varname, kwargs)

def _get_subproject(self, subp_name: str) -> T.Optional[SubprojectHolder]:
sub = self.interpreter.subprojects.get(subp_name)
def _get_subproject(self, subp_name: str, native: bool) -> T.Optional[SubprojectHolder]:
sub = self.interpreter.find_subproject(subp_name, native)
if sub and sub.found():
return sub
return None

def _get_subproject_dep(self, subp_name: str, varname: str, kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
native = kwargs.get('native', False)

# Verify the subproject is found
subproject = self._get_subproject(subp_name)
subproject = self._get_subproject(subp_name, native)
if not subproject:
mlog.log('Dependency', mlog.bold(self._display_name), 'from subproject',
mlog.bold(subp_name), 'found:', mlog.red('NO'),
Expand All @@ -160,7 +164,7 @@ def _get_subproject_dep(self, subp_name: str, varname: str, kwargs: TYPE_nkwargs
# If we have cached_dep we did all the checks and logging already in
# self._get_cached_dep().
if cached_dep:
self._verify_fallback_consistency(cached_dep)
self._verify_fallback_consistency(cached_dep, native)
return cached_dep

# Legacy: Use the variable name if provided instead of relying on the
@@ -256,10 +260,12 @@ def _get_subproject_variable(self, subproject: SubprojectHolder, varname: str) -
return None
return var_dep

def _verify_fallback_consistency(self, cached_dep: Dependency) -> None:
def _verify_fallback_consistency(self, cached_dep: Dependency, native: bool) -> None:
subp_name = self.subproject_name
if subp_name is None:
return
varname = self.subproject_varname
subproject = self._get_subproject(subp_name)
subproject = self._get_subproject(subp_name, native)
if subproject and varname:
var_dep = self._get_subproject_variable(subproject, varname)
if var_dep and cached_dep.found() and var_dep != cached_dep:
@@ -336,7 +342,8 @@ def lookup(self, kwargs: TYPE_nkwargs, force_fallback: bool = False) -> Dependen
subp_name, varname = self.wrap_resolver.find_dep_provider(name)
if subp_name:
self.forcefallback |= subp_name in force_fallback_for
if self.forcefallback or self.allow_fallback is True or required or self._get_subproject(subp_name):
if self.forcefallback or self.allow_fallback is True or required \
or self._get_subproject(subp_name, kwargs.get('native', False)):
self._subproject_impl(subp_name, varname)
break

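
The thread running through the dependencyfallbacks.py changes is that the dependency's native: keyword now selects which machine's subproject state each step consults: the cache check, the existing-subproject lookup, the consistency check, and the do_subproject() call. A rough sketch of that selection, with hypothetical helper names rather than the real DependencyFallbacksHolder API:

from enum import Enum

class MachineChoice(Enum):
    BUILD = 0
    HOST = 1

def machine_from_native(native: bool) -> MachineChoice:
    # native: true means "for the machine doing the build", i.e. BUILD.
    return MachineChoice.BUILD if native else MachineChoice.HOST

# Subprojects already configured in this run, keyed by (name, machine).
configured = {('zlib', MachineChoice.HOST): 'zlib-subproject-for-host'}

def get_subproject(name: str, native: bool):
    return configured.get((name, machine_from_native(native)))

print(get_subproject('zlib', native=False))  # the host-machine fallback
print(get_subproject('zlib', native=True))   # None: no build-machine fallback needed
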