bugfix: nested matrices in spec lists (#12320)
* stack concretization: fix handling of variant names with dashes
* spec_list: bugfix for handling nested matrices
parent 9336a8be38
commit be6ac6ce6d
6 changed files with 88 additions and 27 deletions
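For context: a nested matrix is a spec matrix used as a row of another matrix. Below is a minimal standalone sketch of the YAML-parsed shape and of the expansion the fix is meant to produce; the package, compiler, and variant names are illustrative, chosen to mirror the new unit test further down.

# Illustrative only: a dict whose 'matrix' rows may themselves contain
# matrix dicts, and the expansion the fixed code is expected to produce.
import itertools

inner = {'matrix': [['zlib', 'libelf'], ['%gcc', '%intel']]}   # 2 x 2 combos
outer = {'matrix': [[inner], ['+shared', '~shared']]}          # (2 x 2) x 2 combos

expected = [' '.join(combo) for combo in itertools.product(
    ['zlib', 'libelf'], ['%gcc', '%intel'], ['+shared', '~shared'])]
assert len(expected) == 8   # 'zlib %gcc +shared', ..., 'libelf %intel ~shared'

Before this change, specs_as_constraints passed item['matrix'] directly to itertools.product, so a nested matrix dict was never expanded; the new _expand_matrix_constraints helper below recurses into it.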
@@ -1340,11 +1340,7 @@ def _concretize_from_constraints(spec_constraints):
         try:
             return s.concretized()
         except spack.spec.InvalidDependencyError as e:
-            dep_index = e.message.index('depend on ') + len('depend on ')
-            invalid_msg = e.message[dep_index:]
-            invalid_deps_string = ['^' + d.strip(',')
-                                   for d in invalid_msg.split()
-                                   if d != 'or']
+            invalid_deps_string = ['^' + d for d in e.invalid_deps]
             invalid_deps = [c for c in spec_constraints
                             if any(c.satisfies(invd, strict=True)
                                    for invd in invalid_deps_string)]
@@ -1352,13 +1348,13 @@ def _concretize_from_constraints(spec_constraints):
                 raise e
             invalid_constraints.extend(invalid_deps)
         except UnknownVariantError as e:
-            invalid_variants = re.findall(r"'(\w+)'", e.message)
-            invalid_deps = [c for c in spec_constraints
+            invalid_variants = e.unknown_variants
+            inv_variant_constraints = [c for c in spec_constraints
                                        if any(name in c.variants
                                               for name in invalid_variants)]
-            if len(invalid_deps) != len(invalid_variants):
+            if len(inv_variant_constraints) != len(invalid_variants):
                 raise e
-            invalid_constraints.extend(invalid_deps)
+            invalid_constraints.extend(inv_variant_constraints)


 def make_repo_path(root):
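The hunks above are the consumer side of the fix: instead of regex-parsing exception message text, _concretize_from_constraints now reads structured attributes (InvalidDependencyError.invalid_deps and UnknownVariantError.unknown_variants) that the later hunks add to the exception classes. A small standalone sketch of why the old message parsing broke for variant names containing dashes; the message wording here is an assumption modeled on the format string visible in the variant error class further down.

import re

# Old approach: recover the variant name by regexing the message text.
message = "Package mpileaks has no variant 'shared-libs'!"
print(re.findall(r"'(\w+)'", message))   # [] -- '\w' never matches '-', so the
                                         # offending constraint is never identified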
@@ -963,8 +963,7 @@ def get_dependency(self, name):
         dep = self._dependencies.get(name)
         if dep is not None:
             return dep
-        raise InvalidDependencyError(
-            self.name + " does not depend on " + comma_or(name))
+        raise InvalidDependencyError(self.name, name)

     def _find_deps(self, where, deptype):
         deptype = dp.canonical_deptype(deptype)
@@ -2067,8 +2066,7 @@ def concretize(self, tests=False):

         extra = set(user_spec_deps.keys()).difference(visited_user_specs)
         if extra:
-            raise InvalidDependencyError(
-                self.name + " does not depend on " + comma_or(extra))
+            raise InvalidDependencyError(self.name, extra)

         # This dictionary will store object IDs rather than Specs as keys
         # since the Spec __hash__ will change as patches are added to them
@@ -4156,6 +4154,10 @@ class InconsistentSpecError(SpecError):
 class InvalidDependencyError(SpecError):
     """Raised when a dependency in a spec is not actually a dependency
     of the package."""
+    def __init__(self, pkg, deps):
+        self.invalid_deps = deps
+        super(InvalidDependencyError, self).__init__(
+            'Package {0} does not depend on {1}'.format(pkg, comma_or(deps)))


 class NoProviderError(SpecError):
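A minimal sketch of the new exception contract from the caller's side. The class here is a stand-in with the same shape as the updated spack.spec.InvalidDependencyError, not the real one, and comma_or is approximated with a plain join.

class InvalidDependencyError(Exception):
    """Stand-in with the same shape as the updated spack exception."""
    def __init__(self, pkg, deps):
        self.invalid_deps = deps
        super(InvalidDependencyError, self).__init__(
            'Package {0} does not depend on {1}'.format(pkg, ', '.join(deps)))

try:
    raise InvalidDependencyError('mpileaks', ['not-a-dep'])
except InvalidDependencyError as e:
    # callers such as _concretize_from_constraints can now read the names
    # directly instead of slicing them out of the message string
    assert e.invalid_deps == ['not-a-dep']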
@@ -54,16 +54,7 @@ def specs_as_constraints(self):
             constraints = []
             for item in self.specs_as_yaml_list:
                 if isinstance(item, dict):  # matrix of specs
-                    excludes = item.get('exclude', [])
-                    for combo in itertools.product(*(item['matrix'])):
-                        # Test against the excludes using a single spec
-                        ordered_combo = sorted(combo, key=spec_ordering_key)
-                        test_spec = Spec(' '.join(ordered_combo))
-                        if any(test_spec.satisfies(x) for x in excludes):
-                            continue
-
-                        # Add as list of constraints
-                        constraints.append([Spec(x) for x in ordered_combo])
+                    constraints.extend(_expand_matrix_constraints(item))
                 else:  # individual spec
                     constraints.append([Spec(item)])
             self._constraints = constraints
@@ -161,6 +152,36 @@ def __getitem__(self, key):
         return self.specs[key]


+def _expand_matrix_constraints(object, specify=True):
+    # recurse so we can handle nested matrices
+    expanded_rows = []
+    for row in object['matrix']:
+        new_row = []
+        for r in row:
+            if isinstance(r, dict):
+                new_row.extend(_expand_matrix_constraints(r, specify=False))
+            else:
+                new_row.append([r])
+        expanded_rows.append(new_row)
+
+    results = []
+    excludes = object.get('exclude', [])  # only compute once
+    for combo in itertools.product(*expanded_rows):
+        # Construct a combined spec to test against excludes
+        flat_combo = [constraint for list in combo for constraint in list]
+        ordered_combo = sorted(flat_combo, key=spec_ordering_key)
+        test_spec = Spec(' '.join(ordered_combo))
+        if any(test_spec.satisfies(x) for x in excludes):
+            continue
+
+        # Add to list of constraints
+        if specify:
+            results.append([Spec(x) for x in ordered_combo])
+        else:
+            results.append(ordered_combo)
+    return results
+
+
 class SpecListError(SpackError):
     """Error class for all errors related to SpecList objects."""
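To make the recursion easier to follow, here is a simplified standalone mirror of _expand_matrix_constraints: no exclude handling, no spec ordering, and plain strings instead of Spec objects. A nested matrix is expanded first and its combinations are spliced into the enclosing row, so the outer cross product stays flat.

import itertools

def expand(matrix_dict):
    # simplified mirror of _expand_matrix_constraints (illustration only)
    expanded_rows = []
    for row in matrix_dict['matrix']:
        new_row = []
        for entry in row:
            if isinstance(entry, dict):
                # nested matrix: recurse, then splice its combos into this row
                new_row.extend(expand(entry))
            else:
                new_row.append([entry])
        expanded_rows.append(new_row)
    # cross product over rows, flattening each combination back to one list
    return [[c for group in combo for c in group]
            for combo in itertools.product(*expanded_rows)]

inner = {'matrix': [['zlib', 'libelf'], ['%gcc', '%intel']]}
outer = {'matrix': [[inner], ['+shared', '~shared']]}
assert len(expand(outer)) == 8   # the full cross product survives the nesting

In the real function, the specify flag exists so that recursive calls return raw constraint strings for splicing into the parent row, while only the top-level call converts each combination into Spec objects.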
@@ -1139,6 +1139,33 @@ def test_stack_concretize_extraneous_variants(tmpdir, config, mock_packages):
                     user.variants['shared'].value)


+def test_stack_concretize_extraneous_variants_with_dash(tmpdir, config,
+                                                        mock_packages):
+    filename = str(tmpdir.join('spack.yaml'))
+    with open(filename, 'w') as f:
+        f.write("""\
+env:
+  definitions:
+    - packages: [libelf, mpileaks]
+    - install:
+        - matrix:
+            - [$packages]
+            - ['shared=False', '+shared-libs']
+  specs:
+    - $install
+""")
+    with tmpdir.as_cwd():
+        env('create', 'test', './spack.yaml')
+        with ev.read('test'):
+            concretize()
+
+        ev.read('test')
+
+        # Regression test for handling of variants with dashes in them;
+        # it will fail before this point if the code regresses
+        assert True
+
+
 def test_stack_definition_extension(tmpdir):
     filename = str(tmpdir.join('spack.yaml'))
     with open(filename, 'w') as f:
@@ -2,6 +2,7 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import itertools

 from spack.spec_list import SpecList
 from spack.spec import Spec
@@ -143,3 +144,15 @@ def test_spec_list_extension(self):
                                otherlist.specs_as_yaml_list)
         assert speclist.specs == self.default_specs + otherlist.specs
         assert speclist._reference is new_ref
+
+    def test_spec_list_nested_matrices(self):
+        inner_matrix = [{'matrix': [['zlib', 'libelf'], ['%gcc', '%intel']]}]
+        outer_addition = ['+shared', '~shared']
+        outer_matrix = [{'matrix': [inner_matrix, outer_addition]}]
+        speclist = SpecList('specs', outer_matrix)
+
+        expected_components = itertools.product(['zlib', 'libelf'],
+                                                ['%gcc', '%intel'],
+                                                ['+shared', '~shared'])
+        expected = [Spec(' '.join(combo)) for combo in expected_components]
+        assert set(speclist.specs) == set(expected)
@@ -16,6 +16,7 @@
 import llnl.util.tty.color
 import llnl.util.lang as lang

+from spack.util.string import comma_or
 import spack.directives
 import spack.error as error

@@ -778,9 +779,10 @@ class DuplicateVariantError(error.SpecError):
 class UnknownVariantError(error.SpecError):
     """Raised when an unknown variant occurs in a spec."""

-    def __init__(self, pkg, variant):
+    def __init__(self, pkg, variants):
+        self.unknown_variants = variants
         super(UnknownVariantError, self).__init__(
-            'Package {0} has no variant {1}!'.format(pkg, variant)
+            'Package {0} has no variant {1}!'.format(pkg, comma_or(variants))
         )
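The variant-side counterpart of the earlier dependency-error sketch: again a stand-in rather than the real spack class, with comma_or approximated by a plain join, showing that a dashed variant name now travels through the exception intact as data.

class UnknownVariantError(Exception):
    """Stand-in with the same shape as the updated spack exception."""
    def __init__(self, pkg, variants):
        self.unknown_variants = variants
        super(UnknownVariantError, self).__init__(
            'Package {0} has no variant {1}!'.format(pkg, ', '.join(variants)))

try:
    raise UnknownVariantError('mpileaks', ['shared-libs'])
except UnknownVariantError as e:
    assert e.unknown_variants == ['shared-libs']   # the dash survives untouched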