Environments: unify the spec objects on first concretization (#29948)
Currently, environments are indexed by build hashes. While looking into this bug I noticed a disconnect between environments that are concretized in memory for the first time and environments that are read from a `spack.lock`: specs read from a `spack.lock` don't have a full hash, since they are indexed by a build hash, which is strictly coarser. They are also marked "final" because they are read from a file, so we can't compute additional hashes. This bugfix PR makes "first concretization" equivalent to re-reading the specs from the corresponding `spack.lock`. Doing so revealed a few tests where we were making wrong assumptions and relying on a `spack.lock` file not being there already.

* Add unit test
* Modify mpich to trigger jobs in pipelines
* Fix two failing unit tests
* Fix another full_hash vs. build_hash mismatch in tests
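The essence of the fix, shown in the first hunk below, is to round-trip the freshly concretized environment through its own lockfile representation. A minimal sketch of that idea (not part of the commit), using the method names that appear in the diff (`_to_lockfile_dict`, `_read_lockfile_dict`) on a hypothetical `Environment` instance `env`:

    # Hedged sketch of the unification step, assuming an Environment instance
    # `env` whose roots have just been concretized in memory.

    # Serialize the environment exactly as it would be written to spack.lock;
    # nodes in this dictionary are keyed by build hash.
    lockfile_dict = env._to_lockfile_dict()

    # Re-read the dictionary immediately. This replaces the in-memory spec
    # objects with the unified graph that a later read of spack.lock would
    # yield, so "first concretization" and "read from lockfile" now agree.
    env._read_lockfile_dict(lockfile_dict)

After this round-trip, the in-memory specs are indexed and linked exactly as they would be after reading the `spack.lock` written for the same environment.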
parent dfff935f17 · commit 23e85f4086
5 changed files with 43 additions and 9 deletions
@@ -1271,11 +1271,33 @@ def _concretize_separately(self, tests=False):
         finish = time.time()
         tty.msg('Environment concretized in %.2f seconds.' % (finish - start))
-        results = []
+        by_hash = {}
         for abstract, concrete in zip(root_specs, concretized_root_specs):
             self._add_concrete_spec(abstract, concrete)
-            results.append((abstract, concrete))
+            by_hash[concrete.build_hash()] = concrete
+
+        # Unify the specs objects, so we get correct references to all parents
+        self._read_lockfile_dict(self._to_lockfile_dict())
+
+        # Re-attach information on test dependencies
+        if tests:
+            # This is slow, but the information on test dependency is lost
+            # after unification or when reading from a lockfile.
+            for h in self.specs_by_hash:
+                current_spec, computed_spec = self.specs_by_hash[h], by_hash[h]
+                for node in computed_spec.traverse():
+                    test_deps = node.dependencies(deptype='test')
+                    for test_dependency in test_deps:
+                        if test_dependency in current_spec[node.name]:
+                            continue
+                        current_spec[node.name].add_dependency_edge(
+                            test_dependency.copy(), deptype='test'
+                        )
+
+        results = [
+            (abstract, self.specs_by_hash[h]) for abstract, h in
+            zip(self.concretized_user_specs, self.concretized_order)
+        ]
         return results

     def concretize_and_add(self, user_spec, concrete_spec=None, tests=False):
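As the comment in the hunk notes, information on `test` dependencies is lost after unification or when reading from a lockfile, which is why the pre-unification specs are kept in `by_hash` and their `test` edges copied back. A small illustrative sketch (not from the commit) of how the restored edges can be inspected, assuming an `Environment` instance `e` concretized with tests enabled; `concretized_specs()` is assumed to yield `(abstract, concrete)` pairs as elsewhere in Spack:

    # Illustrative only: walk the concrete specs of environment `e` and print
    # every dependency edge of type 'test' that survived the lockfile round-trip.
    for _, concrete in e.concretized_specs():      # (abstract, concrete) pairs
        for node in concrete.traverse():
            for test_dep in node.dependencies(deptype='test'):
                print('{0} -> {1}  [test]'.format(node.name, test_dep.name))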
@@ -579,3 +579,15 @@ def test_get_spec_filter_list(mutable_mock_env_path, config, mutable_mock_repo):
                                              'libelf'])

     assert affected_pkg_names == expected_affected_pkg_names
+
+
+@pytest.mark.regression('29947')
+def test_affected_specs_on_first_concretization(mutable_mock_env_path, config):
+    e = ev.create('first_concretization')
+    e.add('hdf5~mpi~szip')
+    e.add('hdf5~mpi+szip')
+    e.concretize()
+
+    affected_specs = spack.ci.get_spec_filter_list(e, ['zlib'])
+    hdf5_specs = [s for s in affected_specs if s.name == 'hdf5']
+    assert len(hdf5_specs) == 2
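For orientation, the regression test above drives `spack.ci.get_spec_filter_list`, which (as the surrounding tests suggest) maps a list of changed package names to the environment specs that could be affected by them. A hedged usage sketch, assuming an already concretized environment `e`:

    # Illustrative usage of the function exercised by the test above: collect
    # the specs in environment `e` that may be affected by a change to zlib.
    import spack.ci

    affected = spack.ci.get_spec_filter_list(e, ['zlib'])
    affected_names = sorted({s.name for s in affected})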
@@ -1731,12 +1731,12 @@ def test_ci_reproduce(tmpdir, mutable_mock_env_path,
         job_spec_yaml_path = os.path.join(
             working_dir.strpath, 'archivefiles.yaml')
         with open(job_spec_yaml_path, 'w') as fd:
-            fd.write(job_spec.to_yaml(hash=ht.full_hash))
+            fd.write(job_spec.to_yaml(hash=ht.build_hash))

         root_spec_yaml_path = os.path.join(
             working_dir.strpath, 'root.yaml')
         with open(root_spec_yaml_path, 'w') as fd:
-            fd.write(root_spec.to_yaml(hash=ht.full_hash))
+            fd.write(root_spec.to_yaml(hash=ht.build_hash))

         artifacts_root = os.path.join(working_dir.strpath, 'scratch_dir')
         pipeline_path = os.path.join(artifacts_root, 'pipeline.yml')
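Both this fix and the one below replace `ht.full_hash` with `ht.build_hash` when writing spec YAML, so the hash in the file matches the build hash by which environments index their specs. A minimal sketch, assuming `ht` is `spack.hash_types` (as it is imported in these tests) and `spec` is any concrete spec:

    # Write a concrete spec to YAML keyed by its build hash, i.e. the hash
    # environments use to index specs after this change.
    import spack.hash_types as ht

    with open('spec.yaml', 'w') as fd:
        fd.write(spec.to_yaml(hash=ht.build_hash))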
@@ -851,7 +851,7 @@ def test_install_no_add_in_env(tmpdir, mock_fetch, install_mockery,
     # but not added as a root
     mpi_spec_yaml_path = tmpdir.join('{0}.yaml'.format(mpi_spec.name))
     with open(mpi_spec_yaml_path.strpath, 'w') as fd:
-        fd.write(mpi_spec.to_yaml(hash=ht.full_hash))
+        fd.write(mpi_spec.to_yaml(hash=ht.build_hash))

    install('--no-add', '-f', mpi_spec_yaml_path.strpath)
    assert(mpi_spec not in e.roots())
@@ -174,10 +174,10 @@ class Mpich(AutotoolsPackage):
     depends_on('argobots', when='+argobots')

     # building from git requires regenerating autotools files
-    depends_on('automake@1.15:', when='@develop', type=("build"))
-    depends_on('libtool@2.4.4:', when='@develop', type=("build"))
-    depends_on("m4", when="@develop", type=("build")),
-    depends_on("autoconf@2.67:", when='@develop', type=("build"))
+    depends_on('automake@1.15:', when='@develop', type='build')
+    depends_on('libtool@2.4.4:', when='@develop', type='build')
+    depends_on("m4", when="@develop", type='build'),
+    depends_on("autoconf@2.67:", when='@develop', type='build')

     # building with "+hwloc' also requires regenerating autotools files
     depends_on('automake@1.15:', when='@3.3:3.3.99 +hwloc', type="build")