hashes: remove full_hash and build_hash from spack
parent 512645ff2e
commit f6e7c0b740
28 changed files with 335 additions and 711 deletions
@@ -182,7 +182,6 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
 
         for indexed_spec in spec_list:
             dag_hash = indexed_spec.dag_hash()
-            full_hash = indexed_spec._full_hash
 
             if dag_hash not in self._mirrors_for_spec:
                 self._mirrors_for_spec[dag_hash] = []
@@ -190,11 +189,8 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
             for entry in self._mirrors_for_spec[dag_hash]:
                 # A binary mirror can only have one spec per DAG hash, so
                 # if we already have an entry under this DAG hash for this
-                # mirror url, we may need to replace the spec associated
-                # with it (but only if it has a different full_hash).
+                # mirror url, we're done.
                 if entry['mirror_url'] == mirror_url:
-                    if full_hash and full_hash != entry['spec']._full_hash:
-                        entry['spec'] = indexed_spec
                     break
             else:
                 self._mirrors_for_spec[dag_hash].append({
@@ -403,6 +399,11 @@ def _fetch_and_cache_index(self, mirror_url, expect_hash=None):
         hash_fetch_url = url_util.join(
             mirror_url, _build_cache_relative_path, 'index.json.hash')
 
+        if not web_util.url_exists(index_fetch_url):
+            # A binary mirror is not required to have an index, so avoid
+            # raising FetchCacheError in that case.
+            return False
+
         old_cache_key = None
         fetched_hash = None
 
@@ -790,8 +791,13 @@ def generate_package_index(cache_prefix):
     tty.debug('Retrieving spec descriptor files from {0} to build index'.format(
         cache_prefix))
 
-    all_mirror_specs = {}
+    tmpdir = tempfile.mkdtemp()
+    db_root_dir = os.path.join(tmpdir, 'db_root')
+    db = spack_db.Database(None, db_dir=db_root_dir,
+                           enable_transaction_locking=False,
+                           record_fields=['spec', 'ref_count', 'in_buildcache'])
 
-    for file_path in file_list:
-        try:
-            spec_url = url_util.join(cache_prefix, file_path)
+    try:
+        for file_path in file_list:
+            try:
+                spec_url = url_util.join(cache_prefix, file_path)
@@ -805,84 +811,20 @@ def generate_package_index(cache_prefix):
                 elif spec_url.endswith('.yaml'):
                     spec_dict = syaml.load(spec_file_contents)
                     s = Spec.from_yaml(spec_file_contents)
-                    all_mirror_specs[s.dag_hash()] = {
-                        'spec_url': spec_url,
-                        'spec': s,
-                        'num_deps': len(list(s.traverse(root=False))),
-                        'binary_cache_checksum': spec_dict['binary_cache_checksum'],
-                        'buildinfo': spec_dict['buildinfo'],
-                    }
+                if s:
+                    db.add(s, None)
+                    db.mark(s, 'in_buildcache', True)
             except (URLError, web_util.SpackWebError) as url_err:
                 tty.error('Error reading specfile: {0}'.format(file_path))
                 tty.error(url_err)
 
-    sorted_specs = sorted(all_mirror_specs.keys(),
-                          key=lambda k: all_mirror_specs[k]['num_deps'])
-
-    tmpdir = tempfile.mkdtemp()
-    db_root_dir = os.path.join(tmpdir, 'db_root')
-    db = spack_db.Database(None, db_dir=db_root_dir,
-                           enable_transaction_locking=False,
-                           record_fields=['spec', 'ref_count', 'in_buildcache'])
-
-    try:
-        tty.debug('Specs sorted by number of dependencies:')
-        for dag_hash in sorted_specs:
-            spec_record = all_mirror_specs[dag_hash]
-            s = spec_record['spec']
-            num_deps = spec_record['num_deps']
-            tty.debug(' {0}/{1} -> {2}'.format(
-                s.name, dag_hash[:7], num_deps))
-            if num_deps > 0:
-                # Check each of this spec's dependencies (which we have already
-                # processed), as they are the source of truth for their own
-                # full hash. If the full hash we have for any deps does not
-                # match what those deps have themselves, then we need to splice
-                # this spec with those deps, and push this spliced spec
-                # (spec.json file) back to the mirror, as well as update the
-                # all_mirror_specs dictionary with this spliced spec.
-                to_splice = []
-                for dep in s.dependencies():
-                    dep_dag_hash = dep.dag_hash()
-                    if dep_dag_hash in all_mirror_specs:
-                        true_dep = all_mirror_specs[dep_dag_hash]['spec']
-                        if true_dep.full_hash() != dep.full_hash():
-                            to_splice.append(true_dep)
-
-                if to_splice:
-                    tty.debug(' needs the following deps spliced:')
-                    for true_dep in to_splice:
-                        tty.debug(' {0}/{1}'.format(
-                            true_dep.name, true_dep.dag_hash()[:7]))
-                        s = s.splice(true_dep, True)
-
-                    # Push this spliced spec back to the mirror
-                    spliced_spec_dict = s.to_dict(hash=ht.full_hash)
-                    for key in ['binary_cache_checksum', 'buildinfo']:
-                        spliced_spec_dict[key] = spec_record[key]
-
-                    temp_json_path = os.path.join(tmpdir, 'spliced.spec.json')
-                    with open(temp_json_path, 'w') as fd:
-                        fd.write(sjson.dump(spliced_spec_dict))
-
-                    spliced_spec_url = spec_record['spec_url']
-                    web_util.push_to_url(
-                        temp_json_path, spliced_spec_url, keep_original=False)
-                    tty.debug(' spliced and wrote {0}'.format(
-                        spliced_spec_url))
-                    spec_record['spec'] = s
-
-            db.add(s, None)
-            db.mark(s, 'in_buildcache', True)
-
-        # Now that we have fixed any old specfiles that might have had the wrong
-        # full hash for their dependencies, we can generate the index, compute
-        # the hash, and push those files to the mirror.
+        # Now generate the index, compute its hash, and push the two files to
+        # the mirror.
         index_json_path = os.path.join(db_root_dir, 'index.json')
         with open(index_json_path, 'w') as f:
             db._write_to_file(f)
 
-        # Read the index back in and compute it's hash
+        # Read the index back in and compute its hash
         with open(index_json_path) as f:
             index_string = f.read()
             index_hash = compute_hash(index_string)
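For orientation, here is a minimal, self-contained sketch (not code from this commit) of the indexing flow the hunks above converge on: every readable spec file is added to a throwaway Database keyed by DAG hash, which is then serialized as index.json. The function name build_index_sketch and its spec_file_contents argument are hypothetical stand-ins for the surrounding generate_package_index machinery.

import os
import tempfile

import spack.database as spack_db
from spack.spec import Spec


def build_index_sketch(spec_file_contents):
    """Build an index.json for a mirror from already-fetched *.spec.json contents."""
    db_root_dir = os.path.join(tempfile.mkdtemp(), 'db_root')
    db = spack_db.Database(None, db_dir=db_root_dir,
                           enable_transaction_locking=False,
                           record_fields=['spec', 'ref_count', 'in_buildcache'])

    for contents in spec_file_contents:
        s = Spec.from_json(contents)
        if s:
            db.add(s, None)                    # record the spec under its dag_hash()
            db.mark(s, 'in_buildcache', True)  # flag it as present in the buildcache

    index_json_path = os.path.join(db_root_dir, 'index.json')
    with open(index_json_path, 'w') as f:
        db._write_to_file(f)
    return index_json_path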
@@ -1610,16 +1552,14 @@ def install_single_spec(spec, allow_root=False, unsigned=False, force=False):
         install_root_node(node, allow_root=allow_root, unsigned=unsigned, force=force)
 
 
-def try_direct_fetch(spec, full_hash_match=False, mirrors=None):
+def try_direct_fetch(spec, mirrors=None):
     """
     Try to find the spec directly on the configured mirrors
     """
     deprecated_specfile_name = tarball_name(spec, '.spec.yaml')
     specfile_name = tarball_name(spec, '.spec.json')
     specfile_is_json = True
-    lenient = not full_hash_match
     found_specs = []
-    spec_full_hash = spec.full_hash()
 
     for mirror in spack.mirror.MirrorCollection(mirrors=mirrors).values():
         buildcache_fetch_url_yaml = url_util.join(
@@ -1649,9 +1589,6 @@ def try_direct_fetch(spec, full_hash_match=False, mirrors=None):
             fetched_spec = Spec.from_yaml(specfile_contents)
         fetched_spec._mark_concrete()
 
-        # Do not recompute the full hash for the fetched spec, instead just
-        # read the property.
-        if lenient or fetched_spec._full_hash == spec_full_hash:
         found_specs.append({
             'mirror_url': mirror.fetch_url,
             'spec': fetched_spec,
@@ -1660,18 +1597,13 @@ def try_direct_fetch(spec, full_hash_match=False, mirrors=None):
     return found_specs
 
 
-def get_mirrors_for_spec(spec=None, full_hash_match=False,
-                         mirrors_to_check=None, index_only=False):
+def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
     """
     Check if concrete spec exists on mirrors and return a list
     indicating the mirrors on which it can be found
 
     Args:
         spec (spack.spec.Spec): The spec to look for in binary mirrors
-        full_hash_match (bool): If True, only includes mirrors where the spec
-            full hash matches the locally computed full hash of the ``spec``
-            argument. If False, any mirror which has a matching DAG hash
-            is included in the results.
         mirrors_to_check (dict): Optionally override the configured mirrors
             with the mirrors in this dictionary.
         index_only (bool): Do not attempt direct fetching of ``spec.json``
@@ -1688,29 +1620,14 @@ def get_mirrors_for_spec(spec=None, full_hash_match=False,
         tty.debug("No Spack mirrors are currently configured")
         return {}
 
-    results = []
-    lenient = not full_hash_match
-    spec_full_hash = spec.full_hash()
-
-    def filter_candidates(candidate_list):
-        filtered_candidates = []
-        for candidate in candidate_list:
-            candidate_full_hash = candidate['spec']._full_hash
-            if lenient or spec_full_hash == candidate_full_hash:
-                filtered_candidates.append(candidate)
-        return filtered_candidates
-
-    candidates = binary_index.find_built_spec(spec)
-    if candidates:
-        results = filter_candidates(candidates)
-
+    results = binary_index.find_built_spec(spec)
     # Maybe we just didn't have the latest information from the mirror, so
     # try to fetch directly, unless we are only considering the indices.
     if not results and not index_only:
-        results = try_direct_fetch(spec,
-                                   full_hash_match=full_hash_match,
-                                   mirrors=mirrors_to_check)
+        results = try_direct_fetch(spec, mirrors=mirrors_to_check)
+        # We found a spec by the direct fetch approach, we might as well
+        # add it to our mapping.
 
     if results:
         binary_index.update_spec(spec, results)
 
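Callers now ask only for a DAG-hash match. A hedged usage sketch of the simplified signature follows; the 'zlib' spec is an arbitrary example, and concretization assumes a working Spack installation.

import spack.binary_distribution as bindist
from spack.spec import Spec

# Any concrete spec works the same way; 'zlib' is just an example.
spec = Spec('zlib').concretized()

# With full_hash_match gone, a DAG-hash match (from the cached index, or via a
# direct specfile fetch when index_only=False) is the only matching criterion.
for match in bindist.get_mirrors_for_spec(spec, index_only=False):
    print(match['mirror_url'], match['spec'].dag_hash())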
@@ -1860,124 +1777,35 @@ def push_keys(*mirrors, **kwargs):
         shutil.rmtree(tmpdir)
 
 
-def needs_rebuild(spec, mirror_url, rebuild_on_errors=False):
+def needs_rebuild(spec, mirror_url):
     if not spec.concrete:
         raise ValueError('spec must be concrete to check against mirror')
 
     pkg_name = spec.name
     pkg_version = spec.version
 
     pkg_hash = spec.dag_hash()
-    pkg_full_hash = spec.full_hash()
 
-    tty.debug('Checking {0}-{1}, dag_hash = {2}, full_hash = {3}'.format(
-        pkg_name, pkg_version, pkg_hash, pkg_full_hash))
+    tty.debug('Checking {0}-{1}, dag_hash = {2}'.format(
+        pkg_name, pkg_version, pkg_hash))
     tty.debug(spec.tree())
 
     # Try to retrieve the specfile directly, based on the known
     # format of the name, in order to determine if the package
     # needs to be rebuilt.
     cache_prefix = build_cache_prefix(mirror_url)
-    specfile_is_json = True
     specfile_name = tarball_name(spec, '.spec.json')
-    deprecated_specfile_name = tarball_name(spec, '.spec.yaml')
     specfile_path = os.path.join(cache_prefix, specfile_name)
-    deprecated_specfile_path = os.path.join(cache_prefix,
-                                            deprecated_specfile_name)
 
-    result_of_error = 'Package ({0}) will {1}be rebuilt'.format(
-        spec.short_spec, '' if rebuild_on_errors else 'not ')
-
-    try:
-        _, _, spec_file = web_util.read_from_url(specfile_path)
-    except (URLError, web_util.SpackWebError) as url_err:
-        try:
-            _, _, spec_file = web_util.read_from_url(deprecated_specfile_path)
-            specfile_is_json = False
-        except (URLError, web_util.SpackWebError) as url_err_y:
-            err_msg = [
-                'Unable to determine whether {0} needs rebuilding,',
-                ' caught exception attempting to read from {1} or {2}.',
-            ]
-            tty.error(''.join(err_msg).format(
-                spec.short_spec,
-                specfile_path,
-                deprecated_specfile_path))
-            tty.debug(url_err)
-            tty.debug(url_err_y)
-            tty.warn(result_of_error)
-            return rebuild_on_errors
-
-    spec_file_contents = codecs.getreader('utf-8')(spec_file).read()
-    if not spec_file_contents:
-        tty.error('Reading {0} returned nothing'.format(
-            specfile_path if specfile_is_json else deprecated_specfile_path))
-        tty.warn(result_of_error)
-        return rebuild_on_errors
-
-    spec_dict = (sjson.load(spec_file_contents)
-                 if specfile_is_json else syaml.load(spec_file_contents))
-
-    try:
-        nodes = spec_dict['spec']['nodes']
-    except KeyError:
-        # Prior node dict format omitted 'nodes' key
-        nodes = spec_dict['spec']
-    name = spec.name
-
-    # In the old format:
-    # The "spec" key represents a list of objects, each with a single
-    # key that is the package name. While the list usually just contains
-    # a single object, we iterate over the list looking for the object
-    # with the name of this concrete spec as a key, out of an abundance
-    # of caution.
-    # In format version 2:
-    # ['spec']['nodes'] is still a list of objects, but with a
-    # multitude of keys. The list will commonly contain many objects, and in the
-    # case of build specs, it is highly likely that the same name will occur
-    # once as the actual package, and then again as the build provenance of that
-    # same package. Hence format version 2 matches on the dag hash, not name.
-    if nodes and 'name' not in nodes[0]:
-        # old style
-        cached_pkg_specs = [item[name] for item in nodes if name in item]
-    elif nodes and spec_dict['spec']['_meta']['version'] == 2:
-        cached_pkg_specs = [item for item in nodes
-                            if item[ht.dag_hash.name] == spec.dag_hash()]
-    cached_target = cached_pkg_specs[0] if cached_pkg_specs else None
-
-    # If either the full_hash didn't exist in the specfile, or it
-    # did, but didn't match the one we computed locally, then we should
-    # just rebuild. This can be simplified once the dag_hash and the
-    # full_hash become the same thing.
-    rebuild = False
-
-    if not cached_target:
-        reason = 'did not find spec in specfile contents'
-        rebuild = True
-    elif ht.full_hash.name not in cached_target:
-        reason = 'full_hash was missing from remote specfile'
-        rebuild = True
-    else:
-        full_hash = cached_target[ht.full_hash.name]
-        if full_hash != pkg_full_hash:
-            reason = 'hash mismatch, remote = {0}, local = {1}'.format(
-                full_hash, pkg_full_hash)
-            rebuild = True
-
-    if rebuild:
-        tty.msg('Rebuilding {0}, reason: {1}'.format(
-            spec.short_spec, reason))
-        tty.msg(spec.tree())
-
-    return rebuild
+    # Only check for the presence of the json version of the spec. If the
+    # mirror only has the yaml version, or doesn't have the spec at all, we
+    # need to rebuild.
+    return not web_util.url_exists(specfile_path)
 
 
-def check_specs_against_mirrors(mirrors, specs, output_file=None,
-                                rebuild_on_errors=False):
+def check_specs_against_mirrors(mirrors, specs, output_file=None):
     """Check all the given specs against buildcaches on the given mirrors and
-    determine if any of the specs need to be rebuilt. Reasons for needing to
-    rebuild include binary cache for spec isn't present on a mirror, or it is
-    present but the full_hash has changed since last time spec was built.
+    determine if any of the specs need to be rebuilt. Specs need to be rebuilt
+    when their hash doesn't exist in the mirror.
 
     Arguments:
         mirrors (dict): Mirrors to check against
@@ -1985,8 +1813,6 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None,
         output_file (str): Path to output file to be written. If provided,
             mirrors with missing or out-of-date specs will be formatted as a
             JSON object and written to this file.
-        rebuild_on_errors (bool): Treat any errors encountered while
-            checking specs as a signal to rebuild package.
 
     Returns: 1 if any spec was out-of-date on any mirror, 0 otherwise.
 
@@ -1998,7 +1824,7 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None,
     rebuild_list = []
 
     for spec in specs:
-        if needs_rebuild(spec, mirror.fetch_url, rebuild_on_errors):
+        if needs_rebuild(spec, mirror.fetch_url):
             rebuild_list.append({
                 'short_spec': spec.short_spec,
                 'hash': spec.dag_hash()
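The new needs_rebuild above boils down to a single existence check. A condensed restatement as a standalone sketch, assuming the helpers keep the signatures shown in this diff:

import os

import spack.util.web as web_util
from spack.binary_distribution import build_cache_prefix, tarball_name


def needs_rebuild_sketch(spec, mirror_url):
    # After this change, "needs rebuild" just means "no .spec.json on the mirror";
    # there is no full_hash comparison against a downloaded specfile anymore.
    specfile_path = os.path.join(build_cache_prefix(mirror_url),
                                 tarball_name(spec, '.spec.json'))
    return not web_util.url_exists(specfile_path)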
@@ -399,7 +399,7 @@ def append_dep(s, d):
             continue
 
         up_to_date_mirrors = bindist.get_mirrors_for_spec(
-            spec=s, full_hash_match=True, index_only=check_index_only)
+            spec=s, index_only=check_index_only)
 
         skey = spec_deps_key(s)
         spec_labels[skey] = {
@@ -801,7 +801,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
     max_needs_job = ''
 
     # If this is configured, spack will fail "spack ci generate" if it
-    # generates any full hash which exists under the broken specs url.
+    # generates any hash which exists under the broken specs url.
     broken_spec_urls = None
     if broken_specs_url:
         if broken_specs_url.startswith('http'):
@@ -829,9 +829,8 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                 root_spec = spec_record['rootSpec']
                 pkg_name = pkg_name_from_spec_label(spec_label)
                 release_spec = root_spec[pkg_name]
-                release_spec_full_hash = release_spec.full_hash()
                 release_spec_dag_hash = release_spec.dag_hash()
-                release_spec_build_hash = release_spec.build_hash()
+                release_spec_runtime_hash = release_spec.runtime_hash()
 
                 if prune_untouched_packages:
                     if release_spec not in affected_specs:
@@ -901,8 +900,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                     'SPACK_ROOT_SPEC': format_root_spec(
                         root_spec, main_phase, strip_compilers),
                     'SPACK_JOB_SPEC_DAG_HASH': release_spec_dag_hash,
-                    'SPACK_JOB_SPEC_BUILD_HASH': release_spec_build_hash,
-                    'SPACK_JOB_SPEC_FULL_HASH': release_spec_full_hash,
+                    'SPACK_JOB_SPEC_RUNTIME_HASH': release_spec_runtime_hash,
                     'SPACK_JOB_SPEC_PKG_NAME': release_spec.name,
                     'SPACK_COMPILER_ACTION': compiler_action
                 }
@@ -1006,9 +1004,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                     continue
 
                 if (broken_spec_urls is not None and
-                        release_spec_full_hash in broken_spec_urls):
+                        release_spec_dag_hash in broken_spec_urls):
                     known_broken_specs_encountered.append('{0} ({1})'.format(
-                        release_spec, release_spec_full_hash))
+                        release_spec, release_spec_dag_hash))
 
             if artifacts_root:
                 job_dependencies.append({
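The per-job variables emitted by generate_gitlab_ci_yaml now carry the runtime hash in place of the old build and full hashes. A small illustrative sketch with hypothetical values:

# Hypothetical stand-ins; in generate_gitlab_ci_yaml these come from the
# concrete release_spec.
release_spec_name = 'zlib'
release_spec_dag_hash = 'abcdef1234567890abcdef1234567890'
release_spec_runtime_hash = '1234567890abcdef1234567890abcdef'

job_vars = {
    'SPACK_JOB_SPEC_DAG_HASH': release_spec_dag_hash,
    'SPACK_JOB_SPEC_RUNTIME_HASH': release_spec_runtime_hash,  # replaces BUILD_HASH
    'SPACK_JOB_SPEC_PKG_NAME': release_spec_name,
    # SPACK_JOB_SPEC_FULL_HASH is gone: the DAG hash is the full hash now.
}
print(job_vars)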
@@ -161,11 +161,6 @@ def setup_parser(subparser):
         help=('Check single spec from json or yaml file instead of release ' +
               'specs file'))
 
-    check.add_argument(
-        '--rebuild-on-error', default=False, action='store_true',
-        help="Default to rebuilding packages if errors are encountered " +
-             "during the process of checking whether rebuilding is needed")
-
     check.set_defaults(func=check_fn)
 
     # Download tarball and specfile
@@ -361,7 +356,7 @@ def list_fn(args):
     try:
         specs = bindist.update_cache_and_get_specs()
     except bindist.FetchCacheError as e:
-        tty.error(e)
+        tty.die(e)
 
     if not args.allarch:
         arch = spack.spec.Spec.default_arch()
@@ -430,7 +425,7 @@ def check_fn(args):
         sys.exit(0)
 
     sys.exit(bindist.check_specs_against_mirrors(
-        configured_mirrors, specs, args.output_file, args.rebuild_on_error))
+        configured_mirrors, specs, args.output_file))
 
 
 def download_fn(args):
@@ -486,7 +481,7 @@ def save_specfile_fn(args):
     else:
         root_spec = Spec(args.root_spec)
         root_spec.concretize()
-    root_spec_as_json = root_spec.to_json(hash=ht.build_hash)
+    root_spec_as_json = root_spec.to_json(hash=ht.dag_hash)
    spec_format = 'yaml' if args.root_specfile.endswith('yaml') else 'json'
    save_dependency_specfiles(
        root_spec_as_json, args.specfile_dir, args.specs.split(), spec_format)
@@ -167,8 +167,7 @@ def ci_reindex(args):
 
 def ci_rebuild(args):
     """Check a single spec against the remote mirror, and rebuild it from
-    source if the mirror does not contain the full hash match of the spec
-    as computed locally. """
+    source if the mirror does not contain the hash. """
     env = spack.cmd.require_active_env(cmd_name='ci rebuild')
 
     # Make sure the environment is "gitlab-enabled", or else there's nothing
@@ -280,8 +279,8 @@ def ci_rebuild(args):
         env, root_spec, job_spec_pkg_name, compiler_action)
     job_spec = spec_map[job_spec_pkg_name]
 
-    job_spec_yaml_file = '{0}.yaml'.format(job_spec_pkg_name)
-    job_spec_yaml_path = os.path.join(repro_dir, job_spec_yaml_file)
+    job_spec_json_file = '{0}.json'.format(job_spec_pkg_name)
+    job_spec_json_path = os.path.join(repro_dir, job_spec_json_file)
 
     # To provide logs, cdash reports, etc for developer download/perusal,
     # these things have to be put into artifacts. This means downstream
@@ -335,23 +334,23 @@ def ci_rebuild(args):
     # using a compiler already installed on the target system).
     spack_ci.configure_compilers(compiler_action)
 
-    # Write this job's spec yaml into the reproduction directory, and it will
+    # Write this job's spec json into the reproduction directory, and it will
     # also be used in the generated "spack install" command to install the spec
-    tty.debug('job concrete spec path: {0}'.format(job_spec_yaml_path))
-    with open(job_spec_yaml_path, 'w') as fd:
-        fd.write(job_spec.to_yaml(hash=ht.build_hash))
+    tty.debug('job concrete spec path: {0}'.format(job_spec_json_path))
+    with open(job_spec_json_path, 'w') as fd:
+        fd.write(job_spec.to_json(hash=ht.dag_hash))
 
-    # Write the concrete root spec yaml into the reproduction directory
-    root_spec_yaml_path = os.path.join(repro_dir, 'root.yaml')
-    with open(root_spec_yaml_path, 'w') as fd:
-        fd.write(spec_map['root'].to_yaml(hash=ht.build_hash))
+    # Write the concrete root spec json into the reproduction directory
+    root_spec_json_path = os.path.join(repro_dir, 'root.json')
+    with open(root_spec_json_path, 'w') as fd:
+        fd.write(spec_map['root'].to_json(hash=ht.dag_hash))
 
     # Write some other details to aid in reproduction into an artifact
     repro_file = os.path.join(repro_dir, 'repro.json')
     repro_details = {
         'job_name': ci_job_name,
-        'job_spec_yaml': job_spec_yaml_file,
-        'root_spec_yaml': 'root.yaml',
+        'job_spec_json': job_spec_json_file,
+        'root_spec_json': 'root.json',
         'ci_project_dir': ci_project_dir
     }
     with open(repro_file, 'w') as fd:
@@ -366,25 +365,24 @@ def ci_rebuild(args):
         fd.write(b'\n')
 
     # If we decided there should be a temporary storage mechanism, add that
-    # mirror now so it's used when we check for a full hash match already
+    # mirror now so it's used when we check for a hash match already
     # built for this spec.
     if pipeline_mirror_url:
         spack.mirror.add(spack_ci.TEMP_STORAGE_MIRROR_NAME,
                          pipeline_mirror_url,
                          cfg.default_modify_scope())
 
-    # Check configured mirrors for a built spec with a matching full hash
-    matches = bindist.get_mirrors_for_spec(
-        job_spec, full_hash_match=True, index_only=False)
+    # Check configured mirrors for a built spec with a matching hash
+    matches = bindist.get_mirrors_for_spec(job_spec, index_only=False)
 
     if matches:
-        # Got a full hash match on at least one configured mirror. All
+        # Got a hash match on at least one configured mirror. All
         # matches represent the fully up-to-date spec, so should all be
         # equivalent. If artifacts mirror is enabled, we just pick one
         # of the matches and download the buildcache files from there to
         # the artifacts, so they're available to be used by dependent
         # jobs in subsequent stages.
-        tty.msg('No need to rebuild {0}, found full hash match at: '.format(
+        tty.msg('No need to rebuild {0}, found hash match at: '.format(
            job_spec_pkg_name))
         for match in matches:
            tty.msg(' {0}'.format(match['mirror_url']))
@@ -403,7 +401,7 @@ def ci_rebuild(args):
         # Now we are done and successful
         sys.exit(0)
 
-    # No full hash match anywhere means we need to rebuild spec
+    # No hash match anywhere means we need to rebuild spec
 
     # Start with spack arguments
     install_args = [base_arg for base_arg in CI_REBUILD_INSTALL_BASE_ARGS]
@@ -415,7 +413,6 @@ def ci_rebuild(args):
     install_args.extend([
         'install',
         '--keep-stage',
-        '--require-full-hash-match',
     ])
 
     can_verify = spack_ci.can_verify_binaries()
@@ -443,8 +440,8 @@ def ci_rebuild(args):
 
     # TODO: once we have the concrete spec registry, use the DAG hash
     # to identify the spec to install, rather than the concrete spec
-    # yaml file.
-    install_args.extend(['-f', job_spec_yaml_path])
+    # json file.
+    install_args.extend(['-f', job_spec_json_path])
 
     tty.debug('Installing {0} from source'.format(job_spec.name))
     tty.debug('spack install arguments: {0}'.format(
@@ -477,13 +474,13 @@ def ci_rebuild(args):
     tty.debug('spack install exited {0}'.format(install_exit_code))
 
     # If a spec fails to build in a spack develop pipeline, we add it to a
-    # list of known broken full hashes. This allows spack PR pipelines to
+    # list of known broken hashes. This allows spack PR pipelines to
     # avoid wasting compute cycles attempting to build those hashes.
     if install_exit_code == INSTALL_FAIL_CODE and spack_is_develop_pipeline:
         tty.debug('Install failed on develop')
         if 'broken-specs-url' in gitlab_ci:
             broken_specs_url = gitlab_ci['broken-specs-url']
-            dev_fail_hash = job_spec.full_hash()
+            dev_fail_hash = job_spec.dag_hash()
             broken_spec_path = url_util.join(broken_specs_url, dev_fail_hash)
             tty.msg('Reporting broken develop build as: {0}'.format(
                 broken_spec_path))
@@ -494,7 +491,7 @@ def ci_rebuild(args):
                 'broken-spec': {
                     'job-url': get_env_var('CI_JOB_URL'),
                     'pipeline-url': get_env_var('CI_PIPELINE_URL'),
-                    'concrete-spec-yaml': job_spec.to_dict(hash=ht.full_hash)
+                    'concrete-spec-dict': job_spec.to_dict(hash=ht.dag_hash)
                 }
             }
 
@@ -539,7 +536,7 @@ def ci_rebuild(args):
     # per-PR mirror, if this is a PR pipeline
     if buildcache_mirror_url:
         spack_ci.push_mirror_contents(
-            env, job_spec_yaml_path, buildcache_mirror_url, sign_binaries
+            env, job_spec_json_path, buildcache_mirror_url, sign_binaries
         )
 
     # Create another copy of that buildcache in the per-pipeline
@@ -548,14 +545,14 @@ def ci_rebuild(args):
     # prefix is set)
     if pipeline_mirror_url:
         spack_ci.push_mirror_contents(
-            env, job_spec_yaml_path, pipeline_mirror_url, sign_binaries
+            env, job_spec_json_path, pipeline_mirror_url, sign_binaries
         )
 
     # If this is a develop pipeline, check if the spec that we just built is
     # on the broken-specs list. If so, remove it.
     if spack_is_develop_pipeline and 'broken-specs-url' in gitlab_ci:
         broken_specs_url = gitlab_ci['broken-specs-url']
-        just_built_hash = job_spec.full_hash()
+        just_built_hash = job_spec.dag_hash()
         broken_spec_path = url_util.join(broken_specs_url, just_built_hash)
         if web_util.url_exists(broken_spec_path):
             tty.msg('Removing {0} from the list of broken specs'.format(
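After the yaml-to-json switch, the reproduction artifact written by ci_rebuild looks roughly like this sketch; all values are hypothetical examples.

import json

repro_details = {
    'job_name': 'zlib/abcdef1 build',       # ci_job_name
    'job_spec_json': 'zlib.json',            # was 'job_spec_yaml': 'zlib.yaml'
    'root_spec_json': 'root.json',           # was 'root_spec_yaml': 'root.yaml'
    'ci_project_dir': '/builds/spack/spack',
}
with open('repro.json', 'w') as fd:
    json.dump(repro_details, fd)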
@@ -47,7 +47,6 @@ def update_kwargs_from_args(args, kwargs):
         'explicit': True, # Always true for install command
         'stop_at': args.until,
         'unsigned': args.unsigned,
-        'full_hash_match': args.full_hash_match,
     })
 
     kwargs.update({
@@ -117,11 +116,6 @@ def setup_parser(subparser):
         '--no-check-signature', action='store_true',
         dest='unsigned', default=False,
         help="do not check signatures of binary packages")
-    subparser.add_argument(
-        '--require-full-hash-match', action='store_true',
-        dest='full_hash_match', default=False, help="""when installing from
-binary mirrors, do not install binary package unless the full hash of the
-remote spec matches that of the local spec""")
     subparser.add_argument(
         '--show-log-on-error', action='store_true',
         help="print full build log to stderr if build fails")
@@ -470,7 +464,7 @@ def get_tests(specs):
         })
 
     # If we are using the monitor, we send configs. and create build
-    # The full_hash is the main package id, the build_hash for others
+    # The dag_hash is the main package id
     if args.use_monitor and specs:
         monitor.new_configuration(specs)
     install_specs(args, kwargs, zip(abstract_specs, specs))
@@ -151,9 +151,9 @@ def solve(parser, args):
         # With -y, just print YAML to output.
         if args.format == 'yaml':
             # use write because to_yaml already has a newline.
-            sys.stdout.write(spec.to_yaml(hash=ht.build_hash))
+            sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
         elif args.format == 'json':
-            sys.stdout.write(spec.to_json(hash=ht.build_hash))
+            sys.stdout.write(spec.to_json(hash=ht.dag_hash))
         else:
             sys.stdout.write(
                 spec.tree(color=sys.stdout.isatty(), **kwargs))
@@ -52,8 +52,8 @@ def setup_parser(subparser):
         '-N', '--namespaces', action='store_true', default=False,
         help='show fully qualified package names')
     subparser.add_argument(
-        '--hash-type', default="build_hash",
-        choices=['build_hash', 'full_hash', 'dag_hash'],
+        '--hash-type', default="dag_hash",
+        choices=['runtime_hash', 'dag_hash'],
         help='generate spec with a particular hash type.')
     subparser.add_argument(
         '-t', '--types', action='store_true', default=False,
@@ -332,13 +332,18 @@ def _report_suite_results(test_suite, args, constraints):
               .format(results_desc, test_suite.name, matching))
 
     results = {}
+    tty.msg('test results')
     with open(test_suite.results_file, 'r') as f:
         for line in f:
             pkg_id, status = line.split()
             results[pkg_id] = status
+            tty.msg(' {0}'.format(pkg_id))
 
+    tty.msg('test specs:')
+
     failed, skipped, untested = 0, 0, 0
     for pkg_id in test_specs:
+        tty.msg(' {0}'.format(pkg_id))
         if pkg_id in results:
             status = results[pkg_id]
             if status == 'FAILED':
@@ -433,7 +433,7 @@ def _failed_spec_path(self, spec):
                              .format(spec.name))
 
         return os.path.join(self._failure_dir,
-                            '{0}-{1}'.format(spec.name, spec.full_hash()))
+                            '{0}-{1}'.format(spec.name, spec.dag_hash()))
 
     def clear_all_failures(self):
         """Force remove install failure tracking files."""
@@ -645,8 +645,12 @@ def _write_to_file(self, stream):
         # TODO: fix this before we support multiple install locations.
         database = {
             'database': {
+                # TODO: move this to a top-level _meta section if we ever
+                # TODO: bump the DB version to 7
+                'version': str(_db_version),
+
+                # dictionary of installation records, keyed by DAG hash
                 'installs': installs,
-                'version': str(_db_version)
             }
         }
 
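With the version field moved to the top of the 'database' object, the serialized index has roughly this shape; the record shown is a trimmed, hypothetical example.

import json

database = {
    'database': {
        'version': '6',   # str(_db_version); the hunk above notes a future bump to 7
        'installs': {
            'abcdef1234567890abcdef1234567890': {   # keyed by DAG hash
                'spec': {'name': 'zlib'},           # trimmed node dict for illustration
                'ref_count': 0,
                'in_buildcache': True,
            },
        },
    },
}
print(json.dumps(database, indent=2))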
@@ -1092,6 +1096,8 @@ def _add(
                 "Specs added to DB must be concrete.")
 
         key = spec.dag_hash()
+        spec_run_hash = spec._runtime_hash
+        spec_pkg_hash = spec._package_hash
         upstream, record = self.query_by_spec_hash(key)
         if upstream:
             return
@@ -1153,10 +1159,11 @@ def _add(
                 record.ref_count += 1
 
             # Mark concrete once everything is built, and preserve
-            # the original hash of concrete specs.
+            # the original hashes of concrete specs.
             new_spec._mark_concrete()
             new_spec._hash = key
-            new_spec._full_hash = spec._full_hash
+            new_spec._runtime_hash = spec_run_hash
+            new_spec._package_hash = spec_pkg_hash
 
         else:
             # It is already in the database
@@ -110,13 +110,9 @@ def write_spec(self, spec, path):
         """Write a spec out to a file."""
         _check_concrete(spec)
         with open(path, 'w') as f:
-            # The hash the the projection is the DAG hash but we write out the
-            # full provenance by full hash so it's availabe if we want it later
-            # extension = os.path.splitext(path)[-1].lower()
-            # if 'json' in extension:
-            spec.to_json(f, hash=ht.full_hash)
-            # elif 'yaml' in extension:
-            # spec.to_yaml(f, hash=ht.full_hash)
+            # The hash of the projection is the DAG hash which contains
+            # the full provenance, so it's availabe if we want it later
+            spec.to_json(f, hash=ht.dag_hash)
 
     def write_host_environment(self, spec):
         """The host environment is a json file with os, kernel, and spack
@@ -1819,6 +1819,9 @@ def _read_lockfile_dict(self, d):
         json_specs_by_hash = d['concrete_specs']
         root_hashes = set(self.concretized_order)
 
+        import pdb
+        pdb.set_trace()
+
         specs_by_hash = {}
         for dag_hash, node_dict in json_specs_by_hash.items():
             spec = Spec.from_node_dict(node_dict)
@@ -1856,12 +1859,20 @@ def _read_lockfile_dict(self, d):
             # trees and binary mirrors, and as such, must be considered the
             # permanent id of the spec.
             dag_hash = spec.dag_hash() # == full_hash()
-            build_hash = spec.build_hash()
             runtime_hash = spec.runtime_hash() # == old dag_hash()
 
-            if runtime_hash in root_hashes:
+            if dag_hash in root_hashes:
+                # This spec's dag hash (now computed with build deps and pkg
+                # hash) is in the keys found in the file, so we're looking at
+                # the current format
+                pass
+            elif runtime_hash in root_hashes:
+                # This spec's runtime hash (the old dag hash w/out build deps,
+                # etc) is a key in this lockfile, so this is the oldest format
                 old_hash_to_new[runtime_hash] = dag_hash
-            elif build_hash in root_hashes:
+            else:
+                # Neither of this spec's hashes appeared as a key in the lock
+                # file, so
                 old_hash_to_new[build_hash] = dag_hash
 
             if (runtime_hash in root_hashes or
@@ -39,16 +39,6 @@ def attr(self):
     deptype=('build', 'link', 'run'), package_hash=True, name='hash')
 
 
-#: Same as dag_hash; old name.
-full_hash = SpecHashDescriptor(
-    deptype=('build', 'link', 'run'), package_hash=True, name='full_hash')
-
-
-#: Hash descriptor that includes build dependencies.
-build_hash = SpecHashDescriptor(
-    deptype=('build', 'link', 'run'), package_hash=False, name='build_hash')
-
-
 #: Hash descriptor used only to transfer a DAG, as is, across processes
 process_hash = SpecHashDescriptor(
     deptype=('build', 'link', 'run', 'test'),
@@ -57,7 +47,18 @@ def attr(self):
 )
 
 
-#: Package hash used as part of full hash
+#: Package hash used as part of dag hash
 package_hash = SpecHashDescriptor(
     deptype=(), package_hash=True, name='package_hash',
     override=lambda s: s.package.content_hash())
+
+
+# Deprecated hash types, no longer used, but needed to understand old serialized
+# spec formats
+
+full_hash = SpecHashDescriptor(
+    deptype=('build', 'link', 'run'), package_hash=True, name='full_hash')
+
+
+build_hash = SpecHashDescriptor(
+    deptype=('build', 'link', 'run'), package_hash=False, name='build_hash')
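After this change only dag_hash, process_hash, and package_hash remain in active use, with full_hash and build_hash kept solely for reading old serialized specs. A hedged sketch of the common call; the 'zlib' spec is an arbitrary example and requires a working Spack installation to concretize.

import spack.hash_types as ht
from spack.spec import Spec

spec = Spec('zlib').concretized()
print(spec.dag_hash())                 # identity used by the database and mirrors
print(spec.to_json(hash=ht.dag_hash))  # what the buildcache and CI code now write
# ht.full_hash and ht.build_hash still exist only to read old serialized specs.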
@@ -260,8 +260,7 @@ def _hms(seconds):
     return ' '.join(parts)
 
 
-def _install_from_cache(pkg, cache_only, explicit, unsigned=False,
-                        full_hash_match=False):
+def _install_from_cache(pkg, cache_only, explicit, unsigned=False):
     """
     Extract the package from binary cache
 
@@ -278,7 +277,7 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False,
         ``False`` otherwise
     """
     installed_from_cache = _try_install_from_binary_cache(
-        pkg, explicit, unsigned=unsigned, full_hash_match=full_hash_match)
+        pkg, explicit, unsigned=unsigned)
     pkg_id = package_id(pkg)
     if not installed_from_cache:
         pre = 'No binary for {0} found'.format(pkg_id)
@@ -390,8 +389,7 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,
     return True
 
 
-def _try_install_from_binary_cache(pkg, explicit, unsigned=False,
-                                   full_hash_match=False):
+def _try_install_from_binary_cache(pkg, explicit, unsigned=False):
     """
     Try to extract the package from binary cache.
 
@@ -403,8 +401,7 @@ def _try_install_from_binary_cache(pkg, explicit, unsigned=False,
     """
     pkg_id = package_id(pkg)
     tty.debug('Searching for binary cache of {0}'.format(pkg_id))
-    matches = binary_distribution.get_mirrors_for_spec(
-        pkg.spec, full_hash_match=full_hash_match)
+    matches = binary_distribution.get_mirrors_for_spec(pkg.spec)
 
     if not matches:
         return False
@@ -1204,7 +1201,6 @@ def _install_task(self, task):
         install_args = task.request.install_args
         cache_only = install_args.get('cache_only')
         explicit = task.explicit
-        full_hash_match = install_args.get('full_hash_match')
         tests = install_args.get('tests')
         unsigned = install_args.get('unsigned')
         use_cache = install_args.get('use_cache')
@@ -1217,8 +1213,7 @@ def _install_task(self, task):
 
         # Use the binary cache if requested
         if use_cache and \
-                _install_from_cache(pkg, cache_only, explicit, unsigned,
-                                    full_hash_match):
+                _install_from_cache(pkg, cache_only, explicit, unsigned):
             self._update_installed(task)
             if task.compiler:
                 spack.compilers.add_compilers_to_config(
@@ -2306,7 +2301,6 @@ def _add_default_args(self):
             ('dirty', False),
             ('fail_fast', False),
             ('fake', False),
-            ('full_hash_match', False),
             ('install_deps', True),
             ('install_package', True),
             ('install_source', False),
@ -132,7 +132,7 @@ def __init__(self, host=None, prefix="ms1", allow_fail=False, tags=None,
|
||||||
self.tags = tags
|
self.tags = tags
|
||||||
self.save_local = save_local
|
self.save_local = save_local
|
||||||
|
|
||||||
# We keey lookup of build_id by full_hash
|
# We keey lookup of build_id by dag_hash
|
||||||
self.build_ids = {}
|
self.build_ids = {}
|
||||||
self.setup_save()
|
self.setup_save()
|
||||||
|
|
||||||
|
@ -412,7 +412,7 @@ def new_configuration(self, specs):
|
||||||
spec.concretize()
|
spec.concretize()
|
||||||
|
|
||||||
# Remove extra level of nesting
|
# Remove extra level of nesting
|
||||||
as_dict = {"spec": spec.to_dict(hash=ht.full_hash)['spec'],
|
as_dict = {"spec": spec.to_dict(hash=ht.dag_hash)['spec'],
|
||||||
"spack_version": self.spack_version}
|
"spack_version": self.spack_version}
|
||||||
|
|
||||||
if self.save_local:
|
if self.save_local:
|
||||||
|
@ -437,7 +437,7 @@ def failed_concretization(self, specs):
|
||||||
meta = spec.to_dict()['spec']
|
meta = spec.to_dict()['spec']
|
||||||
nodes = []
|
nodes = []
|
||||||
for node in meta.get("nodes", []):
|
for node in meta.get("nodes", []):
|
||||||
for hashtype in ["build_hash", "full_hash"]:
|
for hashtype in ["hash", "runtime_hash"]:
|
||||||
node[hashtype] = "FAILED_CONCRETIZATION"
|
node[hashtype] = "FAILED_CONCRETIZATION"
|
||||||
nodes.append(node)
|
nodes.append(node)
|
||||||
meta['nodes'] = nodes
|
meta['nodes'] = nodes
|
||||||
|
@ -470,13 +470,13 @@ def get_build_id(self, spec, return_response=False, spec_exists=True):
|
||||||
"""
|
"""
|
||||||
Retrieve a build id, either in the local cache, or query the server.
|
Retrieve a build id, either in the local cache, or query the server.
|
||||||
"""
|
"""
|
||||||
full_hash = spec.full_hash()
|
dag_hash = spec.dag_hash()
|
||||||
if full_hash in self.build_ids:
|
if dag_hash in self.build_ids:
|
||||||
return self.build_ids[full_hash]
|
return self.build_ids[dag_hash]
|
||||||
|
|
||||||
# Prepare build environment data (including spack version)
|
# Prepare build environment data (including spack version)
|
||||||
data = self.build_environment.copy()
|
data = self.build_environment.copy()
|
||||||
data['full_hash'] = full_hash
|
data['hash'] = dag_hash
|
||||||
|
|
||||||
# If the build should be tagged, add it
|
# If the build should be tagged, add it
|
||||||
if self.tags:
|
if self.tags:
|
||||||
|
@ -494,10 +494,10 @@ def get_build_id(self, spec, return_response=False, spec_exists=True):
|
||||||
data['spec'] = syaml.load(read_file(spec_file))
|
data['spec'] = syaml.load(read_file(spec_file))
|
||||||
|
|
||||||
if self.save_local:
|
if self.save_local:
|
||||||
return self.get_local_build_id(data, full_hash, return_response)
|
return self.get_local_build_id(data, dag_hash, return_response)
|
||||||
return self.get_server_build_id(data, full_hash, return_response)
|
return self.get_server_build_id(data, dag_hash, return_response)
|
||||||
|
|
||||||
def get_local_build_id(self, data, full_hash, return_response):
|
def get_local_build_id(self, data, dag_hash, return_response):
|
||||||
"""
|
"""
|
||||||
Generate a local build id based on hashing the expected data
|
Generate a local build id based on hashing the expected data
|
||||||
"""
|
"""
|
||||||
|
@ -510,15 +510,15 @@ def get_local_build_id(self, data, full_hash, return_response):
|
||||||
return response
|
return response
|
||||||
return bid
|
return bid
|
||||||
|
|
||||||
def get_server_build_id(self, data, full_hash, return_response=False):
|
def get_server_build_id(self, data, dag_hash, return_response=False):
|
||||||
"""
|
"""
|
||||||
Retrieve a build id from the spack monitor server
|
Retrieve a build id from the spack monitor server
|
||||||
"""
|
"""
|
||||||
response = self.do_request("builds/new/", data=sjson.dump(data))
|
response = self.do_request("builds/new/", data=sjson.dump(data))
|
||||||
|
|
||||||
# Add the build id to the lookup
|
# Add the build id to the lookup
|
||||||
bid = self.build_ids[full_hash] = response['data']['build']['build_id']
|
bid = self.build_ids[dag_hash] = response['data']['build']['build_id']
|
||||||
self.build_ids[full_hash] = bid
|
self.build_ids[dag_hash] = bid
|
||||||
|
|
||||||
# If the function is called directly, the user might want output
|
# If the function is called directly, the user might want output
|
||||||
if return_response:
|
if return_response:
|
||||||
|
|
|
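For orientation, the lookup pattern the monitor client uses after this change fits in a few lines. The sketch below is hedged: `do_request` and the response layout are assumptions modeled on the hunks above, not the real spack.monitor API.

# Sketch only: cache server build ids keyed by a spec's DAG hash.
class BuildIdCache(object):
    def __init__(self, do_request):
        self.do_request = do_request
        self.build_ids = {}  # dag_hash -> build_id

    def get_build_id(self, spec):
        dag_hash = spec.dag_hash()
        if dag_hash in self.build_ids:        # cached: no server round-trip
            return self.build_ids[dag_hash]
        response = self.do_request('builds/new/', data={'hash': dag_hash})
        bid = response['data']['build']['build_id']
        self.build_ids[dag_hash] = bid
        return bid

The point of keying on `dag_hash()` alone is that, after this change, a single hash both identifies the installation and captures package and build inputs, so a second lookup key is no longer needed.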
@@ -110,8 +110,7 @@
    'properties': {
        'name': {'type': 'string'},
        'hash': {'type': 'string'},
-        'full_hash': {'type': 'string'},
-        'build_hash': {'type': 'string'},
+        'runtime_hash': {'type': 'string'},
        'package_hash': {'type': 'string'},
        'version': {
            'oneOf': [
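To illustrate what the schema fragment above now accepts, here is a minimal node that would validate. All values are invented, and the `jsonschema` usage is only a sketch of the idea, not how spack actually runs its validation.

import jsonschema  # assumed available for this sketch

# Invented example; 'full_hash' and 'build_hash' are intentionally absent.
node = {
    'name': 'zlib',
    'hash': 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',          # the DAG hash
    'runtime_hash': 'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb',  # link/run-only hash
    'package_hash': 'cccccccccccccccccccccccccccccccc',
    'version': '1.0',
}
schema = {'type': 'object', 'properties': {
    'name': {'type': 'string'},
    'hash': {'type': 'string'},
    'runtime_hash': {'type': 'string'},
    'package_hash': {'type': 'string'},
}}
jsonschema.validate(node, schema)  # raises if a listed field has the wrong type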
@@ -1192,8 +1192,7 @@ def __init__(self, spec_like=None, normal=False,
        self.namespace = None

        self._hash = None
-        self._build_hash = None
-        self._full_hash = None
+        self._runtime_hash = None
        self._package_hash = None
        self._dunder_hash = None
        self._package = None

@@ -1210,12 +1209,12 @@ def __init__(self, spec_like=None, normal=False,
        self.external_path = external_path
        self.external_modules = Spec._format_module_list(external_modules)

-        # Older spack versions did not compute full_hash or build_hash,
-        # and we may not have the necessary information to recompute them
-        # if we read in old specs. Old concrete specs are marked "final"
-        # when read in to indicate that we shouldn't recompute full_hash
-        # or build_hash. New specs are not final; we can lazily compute
-        # their hashes.
+        # Older spack versions may have computed the hashes differently, and
+        # we may not have the necessary information to recompute them if we
+        # read in old specs.
+        # Old concrete specs are marked "final" when read in to indicate
+        # that we shouldn't recompute the current dag_hash. New specs are
+        # not final; we can lazily compute their hashes.
        self._hashes_final = False

        # This attribute is used to store custom information for

@@ -1804,27 +1803,13 @@ def dag_hash(self, length=None):
        """This is Spack's default hash, used to identify installations.

        Same as the full hash (includes package hash and build/link/run deps).
+        Tells us when package files and any dependencies have changed.

        NOTE: Versions of Spack prior to 0.18 only included link and run deps.

        """
        return self._cached_hash(ht.dag_hash, length)

-    def full_hash(self, length=None):
-        """Hash that includes all build and run inputs for a spec.
-
-        Inputs are: the package hash (to identify the package.py),
-        build, link, and run dependencies.
-        """
-        return self._cached_hash(ht.full_hash, length)
-
-    def build_hash(self, length=None):
-        """Hash used to store specs in environments.
-
-        This hash includes build dependencies, and we need to preserve
-        them to be able to rebuild an entire environment for a user.
-        """
-        return self._cached_hash(ht.build_hash, length)
-
    def process_hash(self, length=None):
        """Hash used to transfer specs among processes.

@@ -1833,6 +1818,7 @@ def process_hash(self, length=None):
        """
        return self._cached_hash(ht.process_hash, length)

+
    def dag_hash_bit_prefix(self, bits):
        """Get the first <bits> bits of the DAG hash as an integer type."""
        return spack.util.hash.base32_prefix_bits(self.dag_hash(), bits)
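The `_cached_hash` calls above rest on a simple memoization pattern: compute the hash once, stash it on a private attribute, and reuse it afterwards. The following is a rough, self-contained sketch of that idea; the attribute and descriptor names are illustrative, not the exact spack.spec internals.

import hashlib

class HashDescriptor(object):
    """Stand-in for a hash type: knows its name and its cache attribute."""
    def __init__(self, name, attr):
        self.name, self.attr = name, attr

dag_hash_type = HashDescriptor(name='hash', attr='_hash')

def cached_hash(spec, hash_type, length=None):
    value = getattr(spec, hash_type.attr, None)
    if value is None:
        # The real code hashes the spec's node dict; here we just hash str().
        value = hashlib.sha1(str(spec).encode('utf-8')).hexdigest()
        setattr(spec, hash_type.attr, value)  # remember it for next time
    return value[:length] if length else value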
@@ -2002,7 +1988,7 @@ def to_dict(self, hash=ht.dag_hash):
            "dependencies": [
                {
                    "name": "readline",
-                    "build_hash": "4f47cggum7p4qmp3xna4hi547o66unva",
+                    "hash": "4f47cggum7p4qmp3xna4hi547o66unva",
                    "type": [
                        "build",
                        "link"

@@ -2010,16 +1996,15 @@ def to_dict(self, hash=ht.dag_hash):
                },
                {
                    "name": "zlib",
-                    "build_hash": "uvgh6p7rhll4kexqnr47bvqxb3t33jtq",
+                    "hash": "uvgh6p7rhll4kexqnr47bvqxb3t33jtq",
                    "type": [
                        "build",
                        "link"
                    ]
                }
            ],
-            "hash": "d2yzqp2highd7sn4nr5ndkw3ydcrlhtk",
-            "full_hash": "tve45xfqkfgmzwcyfetze2z6syrg7eaf",
-            "build_hash": "tsjnz7lgob7bu2wd4sqzzjenxewc2zha"
+            "runtime_hash": "d2yzqp2highd7sn4nr5ndkw3ydcrlhtk",
+            "hash": "tve45xfqkfgmzwcyfetze2z6syrg7eaf",
        },
        # ... more node dicts for readline and its dependencies ...
    ]
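As a quick sanity check of the node format shown in the docstring, an entry keyed the new way round-trips through JSON with only `hash` and `runtime_hash` present; the values below are made up for illustration.

import json

node = {
    "name": "readline",
    "dependencies": [
        {"name": "ncurses", "hash": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
         "type": ["build", "link"]},
    ],
    "hash": "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb",
    "runtime_hash": "cccccccccccccccccccccccccccccccc",
}
assert json.loads(json.dumps(node)) == node  # nothing lost in the round trip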
@@ -2071,37 +2056,29 @@ def node_dict_with_hashes(self, hash=ht.dag_hash):
        node = self.to_node_dict(hash)
        node[ht.dag_hash.name] = self.dag_hash()

-        # full_hash and build_hash are lazily computed -- but if we write
-        # a spec out, we want them to be included. This is effectively
-        # the last chance we get to compute them accurately.
+        # dag_hash is lazily computed -- but if we write a spec out, we want it
+        # to be included. This is effectively the last chance we get to compute
+        # it accurately.
        if self.concrete:
-            # build and full hashes can be written out if:
-            # 1. they're precomputed (i.e. we read them from somewhere
-            #    and they were already on the spec
-            # 2. we can still compute them lazily (i.e. we just made them and
+            # dag_hash can be written out if:
+            # 1. it's precomputed (i.e. we read it from somewhere
+            #    and it was already on the spec)
+            # 2. we can still compute it lazily (i.e. we just made the spec and
            #    have the full dependency graph on-hand)
            #
-            # we want to avoid recomputing either hash for specs we read
+            # we want to avoid recomputing the dag_hash for specs we read
            # in from the DB or elsewhere, as we may not have the info
            # (like patches, package versions, etc.) that we need to
-            # compute them. Unknown hashes are better than wrong hashes.
-            write_full_hash = (
-                self._hashes_final and self._full_hash or   # cached and final
+            # compute it. Unknown hashes are better than wrong hashes.
+            write_dag_hash = (
+                self._hashes_final and self._hash or   # cached and final
                not self._hashes_final)              # lazily compute
-            if write_full_hash:
-                node[ht.full_hash.name] = self.full_hash()
+            if write_dag_hash:
+                node[ht.dag_hash.name] = self.dag_hash()

-            write_build_hash = 'build' in hash.deptype and (
-                self._hashes_final and self._build_hash or  # cached and final
-                not self._hashes_final)              # lazily compute
-            if write_build_hash:
-                node[ht.build_hash.name] = self.build_hash()
        else:
            node['concrete'] = False

-        if hash.name == 'build_hash':
-            node[hash.name] = self.build_hash()
-        elif hash.name == 'process_hash':
+        if hash.name == 'process_hash':
            node[hash.name] = self.process_hash()
        elif hash.name == 'runtime_hash':
            node[hash.name] = self.runtime_hash()
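The conditional above encodes a small decision table: a hash is written only when it is either already cached on a "final" spec, or still recomputable because the spec is fresh. A compact sketch of that rule, using plain booleans instead of real Spec objects:

def should_write_hash(hashes_final, cached_value):
    # 1. final specs (read from a DB or an old file): write only what was cached
    # 2. non-final specs (just concretized): we can always compute lazily
    return bool(cached_value) if hashes_final else True

assert should_write_hash(hashes_final=False, cached_value=None)       # fresh spec
assert should_write_hash(hashes_final=True, cached_value='abc123')    # cached + final
assert not should_write_hash(hashes_final=True, cached_value=None)    # unknown > wrong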
@@ -2187,7 +2164,7 @@ def from_node_dict(node):

        # this spec may have been built with older packages than we have
        # on-hand, and we may not have the build dependencies, so mark it
-        # so we don't recompute full_hash and build_hash.
+        # so we don't recompute dag_hash.
        spec._hashes_final = spec._concrete

        if 'patches' in node:

@@ -2200,7 +2177,7 @@ def from_node_dict(node):
            # FIXME: Monkey patches mvar to store patches order
            mvar._patches_in_order_of_appearance = patches

-        # Don't read dependencies here; from_node_dict() is used by
+        # Don't read dependencies here; from_dict() is used by
        # from_yaml() and from_json() to read the root *and* each dependency
        # spec.

@@ -2227,7 +2204,6 @@ def dependencies_from_node_dict(node):
    @staticmethod
    def read_yaml_dep_specs(deps, hash_type=ht.dag_hash.name):
        """Read the DependencySpec portion of a YAML-formatted Spec.

        This needs to be backward-compatible with older spack spec
        formats so that reindex will work on old specs/databases.
        """

@@ -2247,16 +2223,11 @@ def read_yaml_dep_specs(deps, hash_type=ht.dag_hash.name):
            elif isinstance(elt, dict):
                # new format: elements of dependency spec are keyed.
                for key in (ht.dag_hash.name,
-                            ht.full_hash.name,
                            ht.build_hash.name,
+                            ht.full_hash.name,
                            ht.runtime_hash.name,
                            ht.process_hash.name):
                    if key in elt:
-                        # FIXME: if the key is 'hash' it could mean the old
-                        # dag hash without build deps, or the new dag hash which
-                        # is equivalent to the full hash. If this was the old
-                        # dag hash, we need to keep the hash value but set the
-                        # key hash type to "runtime_hash".
                        dep_hash, deptypes = elt[key], elt['type']
                        hash_type = key
                        break
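Reading old dependency entries boils down to probing a fixed list of possible hash keys, newest first. Below is a hedged, standalone version of that loop; the key spellings are assumed conventional names ('hash', 'build_hash', ...), and the sample entries are invented.

HASH_KEYS = ('hash', 'build_hash', 'full_hash', 'runtime_hash', 'process_hash')

def read_dep_entry(entry):
    for key in HASH_KEYS:
        if key in entry:
            return entry[key], entry['type'], key
    raise ValueError('dependency entry carries no recognized hash key')

# A new-style entry and a legacy one both resolve, each reporting its key.
print(read_dep_entry({'hash': 'aaaa', 'type': ['link']}))
print(read_dep_entry({'build_hash': 'bbbb', 'type': ['build']}))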
@@ -3797,20 +3768,17 @@ def _dup(self, other, deps=True, cleardeps=True):

        if self._concrete:
            self._hash = other._hash
-            self._build_hash = other._build_hash
            self._dunder_hash = other._dunder_hash
-            self._normal = True
+            self._normal = other._normal
            self._full_hash = other._full_hash
            self._runtime_hash = other._runtime_hash
            self._package_hash = other._package_hash
        else:
            self._hash = None
-            self._build_hash = None
            self._dunder_hash = None
            # Note, we could use other._normal if we are copying all deps, but
            # always set it False here to avoid the complexity of checking
            self._normal = False
-            self._full_hash = None
            self._runtime_hash = None
            self._package_hash = None

@@ -4751,12 +4719,9 @@ def from_self(name, transitive):
        for dep in ret.traverse(root=True, order='post'):
            opposite = other_nodes if dep.name in self_nodes else self_nodes
            if any(name in dep for name in opposite.keys()):

                # package hash cannot be affected by splice
                dep.clear_cached_hashes(ignore=['package_hash'])

-                dep.build_hash()
-                dep.full_hash()
                dep.dag_hash()

        return nodes[self.name]

@@ -4844,7 +4809,7 @@ def _spec_from_old_dict(data):
        if 'dependencies' not in node[name]:
            continue

-        for dname, dhash, dtypes, _ in Spec.dependencies_from_node_dict(node):
+        for dname, _, dtypes, _ in Spec.dependencies_from_node_dict(node):
            deps[name]._add_dependency(deps[dname], dtypes)

    return spec

@@ -4878,7 +4843,7 @@ def _spec_from_dict(data):
            break

    if not any_deps:  # If we never see a dependency...
-        hash_type = ht.dag_hash.name  # use the full_hash provenance
+        hash_type = ht.dag_hash.name
    elif not hash_type:  # Seen a dependency, still don't know hash_type
        raise spack.error.SpecError("Spec dictionary contains malformed "
                                    "dependencies. Old format?")

@@ -4888,9 +4853,6 @@ def _spec_from_dict(data):

    # Pass 1: Create a single lookup dictionary by hash
    for i, node in enumerate(nodes):
-        if 'build_spec' in node.keys():
-            node_hash = node[hash_type]
-        else:
-            node_hash = node[hash_type]
+        node_hash = node[hash_type]
        node_spec = Spec.from_node_dict(node)
        hash_dict[node_hash] = node
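The surrounding logic first scans the nodes to decide which key the file uses for hashes, then builds a lookup table keyed by that hash. A rough sketch of that indexing pass under the same assumption, standalone and with toy nodes:

def index_nodes(nodes, hash_type='hash'):
    # Pass 1: one lookup dictionary, keyed by whichever hash type the file uses.
    by_hash = {node[hash_type]: node for node in nodes}
    # Pass 2 (in the real code) wires dependencies together via this table.
    return by_hash

nodes = [
    {'name': 'zlib', 'hash': 'aaaa'},
    {'name': 'readline', 'hash': 'bbbb'},
]
table = index_nodes(nodes)
assert table['aaaa']['name'] == 'zlib'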
@@ -5350,7 +5312,7 @@ def save_dependency_specfiles(
    json_path = os.path.join(output_directory, '{0}.json'.format(dep_name))

    with open(json_path, 'w') as fd:
-        fd.write(dep_spec.to_json(hash=ht.build_hash))
+        fd.write(dep_spec.to_json(hash=ht.dag_hash))


class SpecParseError(spack.error.SpecError):
@@ -394,31 +394,12 @@ def test_built_spec_cache(mirror_dir):

    gspec, cspec = Spec('garply').concretized(), Spec('corge').concretized()

-    full_hash_map = {
-        'garply': gspec.full_hash(),
-        'corge': cspec.full_hash(),
-    }
-
-    gspec_results = bindist.get_mirrors_for_spec(gspec)
-
-    gspec_mirrors = {}
-    for result in gspec_results:
-        s = result['spec']
-        assert(s._full_hash == full_hash_map[s.name])
-        assert(result['mirror_url'] not in gspec_mirrors)
-        gspec_mirrors[result['mirror_url']] = True
-
-    cspec_results = bindist.get_mirrors_for_spec(cspec, full_hash_match=True)
-
-    cspec_mirrors = {}
-    for result in cspec_results:
-        s = result['spec']
-        assert(s._full_hash == full_hash_map[s.name])
-        assert(result['mirror_url'] not in cspec_mirrors)
-        cspec_mirrors[result['mirror_url']] = True
+    for s in [gspec, cspec]:
+        results = bindist.get_mirrors_for_spec(s)
+        assert(any([r['spec'] == s for r in results]))


-def fake_full_hash(spec):
+def fake_dag_hash(spec):
    # Generate an arbitrary hash that is intended to be different than
    # whatever a Spec reported before (to test actions that trigger when
    # the hash changes)

@@ -430,7 +411,7 @@ def fake_full_hash(spec):
    'test_mirror'
)
def test_spec_needs_rebuild(monkeypatch, tmpdir):
-    """Make sure needs_rebuild properly compares remote full_hash
+    """Make sure needs_rebuild properly compares remote hash
    against locally computed one, avoiding unnecessary rebuilds"""

    # Create a temp mirror directory for buildcache usage

@@ -450,7 +431,7 @@ def test_spec_needs_rebuild(monkeypatch, tmpdir):
    assert not rebuild

    # Now monkey patch Spec to change the full hash on the package
-    monkeypatch.setattr(spack.spec.Spec, 'full_hash', fake_full_hash)
+    monkeypatch.setattr(spack.spec.Spec, 'dag_hash', fake_dag_hash)

    rebuild = bindist.needs_rebuild(s, mirror_url, rebuild_on_errors=True)
@@ -624,57 +605,6 @@ def test_install_legacy_yaml(test_legacy_mirror, install_mockery_mutable_config,
    uninstall_cmd('-y', '/t5mczux3tfqpxwmg7egp7axy2jvyulqk')


-@pytest.mark.usefixtures(
-    'install_mockery_mutable_config', 'mock_packages', 'mock_fetch',
-)
-def test_update_index_fix_deps(monkeypatch, tmpdir, mutable_config):
-    """Ensure spack buildcache update-index properly fixes up spec descriptor
-    files on the mirror when updating the buildcache index."""
-
-    # Create a temp mirror directory for buildcache usage
-    mirror_dir = tmpdir.join('mirror_dir')
-    mirror_url = 'file://{0}'.format(mirror_dir.strpath)
-    spack.config.set('mirrors', {'test': mirror_url})
-
-    a = Spec('a').concretized()
-    b = Spec('b').concretized()
-    new_b_full_hash = 'abcdef'
-
-    # Install package a with dep b
-    install_cmd('--no-cache', a.name)
-
-    # Create a buildcache for a and its dep b, and update index
-    buildcache_cmd('create', '-uad', mirror_dir.strpath, a.name)
-    buildcache_cmd('update-index', '-d', mirror_dir.strpath)
-
-    # Simulate an update to b that only affects full hash by simply overwriting
-    # the full hash in the spec.json file on the mirror
-    b_spec_json_name = bindist.tarball_name(b, '.spec.json')
-    b_spec_json_path = os.path.join(mirror_dir.strpath,
-                                    bindist.build_cache_relative_path(),
-                                    b_spec_json_name)
-    fs.filter_file(r'"full_hash":\s"\S+"',
-                   '"full_hash": "{0}"'.format(new_b_full_hash),
-                   b_spec_json_path)
-    # When we update the index, spack should notice that a's notion of the
-    # full hash of b doesn't match b's notion of it's own full hash, and as
-    # a result, spack should fix the spec.json for a
-    buildcache_cmd('update-index', '-d', mirror_dir.strpath)
-
-    # Read in the concrete spec json of a
-    a_spec_json_name = bindist.tarball_name(a, '.spec.json')
-    a_spec_json_path = os.path.join(mirror_dir.strpath,
-                                    bindist.build_cache_relative_path(),
-                                    a_spec_json_name)
-
-    # Turn concrete spec json into a concrete spec (a)
-    with open(a_spec_json_path) as fd:
-        a_prime = spec.Spec.from_json(fd.read())
-
-    # Make sure the full hash of b in a's spec json matches the new value
-    assert(a_prime[b.name].full_hash() == new_b_full_hash)
-
-
 def test_FetchCacheError_only_accepts_lists_of_errors():
    with pytest.raises(TypeError, match="list"):
        bindist.FetchCacheError("error")
@@ -767,19 +767,13 @@ def test_ci_rebuild(tmpdir, mutable_mock_env_path,
            shutil.copyfile(env.lock_path,
                            os.path.join(env_dir.strpath, 'spack.lock'))

-            root_spec_build_hash = None
-            job_spec_dag_hash = None
-            job_spec_full_hash = None
+            root_spec_dag_hash = None

            for h, s in env.specs_by_hash.items():
                if s.name == 'archive-files':
-                    root_spec_build_hash = h
-                    job_spec_dag_hash = s.dag_hash()
-                    job_spec_full_hash = s.full_hash()
+                    root_spec_dag_hash = h

-            assert root_spec_build_hash
-            assert job_spec_dag_hash
-            assert job_spec_full_hash
+            assert root_spec_dag_hash

def fake_cdash_register(build_name, base_url, project, site, track):
    return ('fakebuildid', 'fakestamp')

@@ -801,8 +795,8 @@ def fake_cdash_register(build_name, base_url, project, site, track):
        'SPACK_CONCRETE_ENV_DIR': env_dir.strpath,
        'CI_PIPELINE_ID': '7192',
        'SPACK_SIGNING_KEY': signing_key,
-        'SPACK_ROOT_SPEC': root_spec_build_hash,
-        'SPACK_JOB_SPEC_DAG_HASH': job_spec_dag_hash,
+        'SPACK_ROOT_SPEC': root_spec_dag_hash,
+        'SPACK_JOB_SPEC_DAG_HASH': root_spec_dag_hash,
        'SPACK_JOB_SPEC_PKG_NAME': 'archive-files',
        'SPACK_COMPILER_ACTION': 'NONE',
        'SPACK_CDASH_BUILD_NAME': '(specs) archive-files',

@@ -816,8 +810,8 @@ def fake_cdash_register(build_name, base_url, project, site, track):

    expected_repro_files = [
        'install.sh',
-        'root.yaml',
-        'archive-files.yaml',
+        'root.json',
+        'archive-files.json',
        'spack.yaml',
        'spack.lock'
    ]

@@ -839,14 +833,13 @@ def mystrip(s):
    install_parts = [mystrip(s) for s in install_line.split(' ')]

    assert('--keep-stage' in install_parts)
-    assert('--require-full-hash-match' in install_parts)
    assert('--no-check-signature' not in install_parts)
    assert('--no-add' in install_parts)
    assert('-f' in install_parts)
    flag_index = install_parts.index('-f')
-    assert('archive-files.yaml' in install_parts[flag_index + 1])
+    assert('archive-files.json' in install_parts[flag_index + 1])

-    broken_spec_file = os.path.join(broken_specs_path, job_spec_full_hash)
+    broken_spec_file = os.path.join(broken_specs_path, root_spec_dag_hash)
    with open(broken_spec_file) as fd:
        broken_spec_content = fd.read()
        assert(ci_job_url in broken_spec_content)
@@ -894,13 +887,11 @@ def test_ci_nothing_to_rebuild(tmpdir, mutable_mock_env_path,
        env_cmd('create', 'test', './spack.yaml')
        with ev.read('test') as env:
            env.concretize()
-            root_spec_build_hash = None
-            job_spec_dag_hash = None
+            root_spec_dag_hash = None

            for h, s in env.specs_by_hash.items():
                if s.name == 'archive-files':
-                    root_spec_build_hash = h
-                    job_spec_dag_hash = s.dag_hash()
+                    root_spec_dag_hash = h

            # Create environment variables as gitlab would do it
            os.environ.update({

@@ -909,8 +900,8 @@ def test_ci_nothing_to_rebuild(tmpdir, mutable_mock_env_path,
                'SPACK_JOB_REPRO_DIR': 'repro_dir',
                'SPACK_LOCAL_MIRROR_DIR': mirror_dir.strpath,
                'SPACK_CONCRETE_ENV_DIR': tmpdir.strpath,
-                'SPACK_ROOT_SPEC': root_spec_build_hash,
-                'SPACK_JOB_SPEC_DAG_HASH': job_spec_dag_hash,
+                'SPACK_ROOT_SPEC': root_spec_dag_hash,
+                'SPACK_JOB_SPEC_DAG_HASH': root_spec_dag_hash,
                'SPACK_JOB_SPEC_PKG_NAME': 'archive-files',
                'SPACK_COMPILER_ACTION': 'NONE',
                'SPACK_REMOTE_MIRROR_URL': mirror_url,
|
@ -980,7 +971,7 @@ def test_push_mirror_contents(tmpdir, mutable_mock_env_path,
|
||||||
spec_map = ci.get_concrete_specs(
|
spec_map = ci.get_concrete_specs(
|
||||||
env, 'patchelf', 'patchelf', 'FIND_ANY')
|
env, 'patchelf', 'patchelf', 'FIND_ANY')
|
||||||
concrete_spec = spec_map['patchelf']
|
concrete_spec = spec_map['patchelf']
|
||||||
spec_json = concrete_spec.to_json(hash=ht.build_hash)
|
spec_json = concrete_spec.to_json(hash=ht.dag_hash)
|
||||||
json_path = str(tmpdir.join('spec.json'))
|
json_path = str(tmpdir.join('spec.json'))
|
||||||
with open(json_path, 'w') as ypfd:
|
with open(json_path, 'w') as ypfd:
|
||||||
ypfd.write(spec_json)
|
ypfd.write(spec_json)
|
||||||
|
@ -1323,12 +1314,12 @@ def test_ci_rebuild_index(tmpdir, mutable_mock_env_path,
|
||||||
spec_map = ci.get_concrete_specs(
|
spec_map = ci.get_concrete_specs(
|
||||||
env, 'callpath', 'callpath', 'FIND_ANY')
|
env, 'callpath', 'callpath', 'FIND_ANY')
|
||||||
concrete_spec = spec_map['callpath']
|
concrete_spec = spec_map['callpath']
|
||||||
spec_yaml = concrete_spec.to_yaml(hash=ht.build_hash)
|
spec_json = concrete_spec.to_json(hash=ht.dag_hash)
|
||||||
yaml_path = str(tmpdir.join('spec.yaml'))
|
json_path = str(tmpdir.join('spec.json'))
|
||||||
with open(yaml_path, 'w') as ypfd:
|
with open(json_path, 'w') as ypfd:
|
||||||
ypfd.write(spec_yaml)
|
ypfd.write(spec_json)
|
||||||
|
|
||||||
install_cmd('--keep-stage', '-f', yaml_path)
|
install_cmd('--keep-stage', '-f', json_path)
|
||||||
buildcache_cmd('create', '-u', '-a', '-f', '--mirror-url',
|
buildcache_cmd('create', '-u', '-a', '-f', '--mirror-url',
|
||||||
mirror_url, 'callpath')
|
mirror_url, 'callpath')
|
||||||
ci_cmd('rebuild-index')
|
ci_cmd('rebuild-index')
|
||||||
|
@ -1412,8 +1403,8 @@ def test_ci_generate_bootstrap_prune_dag(
|
||||||
# nothing in the environment needs rebuilding. With the monkeypatch, the
|
# nothing in the environment needs rebuilding. With the monkeypatch, the
|
||||||
# process sees the compiler as needing a rebuild, which should then result
|
# process sees the compiler as needing a rebuild, which should then result
|
||||||
# in the specs built with that compiler needing a rebuild too.
|
# in the specs built with that compiler needing a rebuild too.
|
||||||
def fake_get_mirrors_for_spec(spec=None, full_hash_match=False,
|
def fake_get_mirrors_for_spec(spec=None, mirrors_to_check=None,
|
||||||
mirrors_to_check=None, index_only=False):
|
index_only=False):
|
||||||
if spec.name == 'gcc':
|
if spec.name == 'gcc':
|
||||||
return []
|
return []
|
||||||
else:
|
else:
|
||||||
|
@ -1674,14 +1665,14 @@ def test_ci_generate_read_broken_specs_url(tmpdir, mutable_mock_env_path,
|
||||||
"""Verify that `broken-specs-url` works as intended"""
|
"""Verify that `broken-specs-url` works as intended"""
|
||||||
spec_a = Spec('a')
|
spec_a = Spec('a')
|
||||||
spec_a.concretize()
|
spec_a.concretize()
|
||||||
a_full_hash = spec_a.full_hash()
|
a_dag_hash = spec_a.dag_hash()
|
||||||
|
|
||||||
spec_flattendeps = Spec('flatten-deps')
|
spec_flattendeps = Spec('flatten-deps')
|
||||||
spec_flattendeps.concretize()
|
spec_flattendeps.concretize()
|
||||||
flattendeps_full_hash = spec_flattendeps.full_hash()
|
flattendeps_dag_hash = spec_flattendeps.dag_hash()
|
||||||
|
|
||||||
# Mark 'a' as broken (but not 'flatten-deps')
|
# Mark 'a' as broken (but not 'flatten-deps')
|
||||||
broken_spec_a_path = str(tmpdir.join(a_full_hash))
|
broken_spec_a_path = str(tmpdir.join(a_dag_hash))
|
||||||
with open(broken_spec_a_path, 'w') as bsf:
|
with open(broken_spec_a_path, 'w') as bsf:
|
||||||
bsf.write('')
|
bsf.write('')
|
||||||
|
|
||||||
|
@ -1718,10 +1709,10 @@ def test_ci_generate_read_broken_specs_url(tmpdir, mutable_mock_env_path,
|
||||||
output = ci_cmd('generate', output=str, fail_on_error=False)
|
output = ci_cmd('generate', output=str, fail_on_error=False)
|
||||||
assert('known to be broken' in output)
|
assert('known to be broken' in output)
|
||||||
|
|
||||||
ex = '({0})'.format(a_full_hash)
|
ex = '({0})'.format(a_dag_hash)
|
||||||
assert(ex in output)
|
assert(ex in output)
|
||||||
|
|
||||||
ex = '({0})'.format(flattendeps_full_hash)
|
ex = '({0})'.format(flattendeps_dag_hash)
|
||||||
assert(ex not in output)
|
assert(ex not in output)
|
||||||
|
|
||||||
|
|
||||||
|
@ -1776,15 +1767,15 @@ def test_ci_reproduce(tmpdir, mutable_mock_env_path,
|
||||||
root_spec = s
|
root_spec = s
|
||||||
job_spec = s
|
job_spec = s
|
||||||
|
|
||||||
job_spec_yaml_path = os.path.join(
|
job_spec_json_path = os.path.join(
|
||||||
working_dir.strpath, 'archivefiles.yaml')
|
working_dir.strpath, 'archivefiles.json')
|
||||||
with open(job_spec_yaml_path, 'w') as fd:
|
with open(job_spec_json_path, 'w') as fd:
|
||||||
fd.write(job_spec.to_yaml(hash=ht.build_hash))
|
fd.write(job_spec.to_json(hash=ht.dag_hash))
|
||||||
|
|
||||||
root_spec_yaml_path = os.path.join(
|
root_spec_json_path = os.path.join(
|
||||||
working_dir.strpath, 'root.yaml')
|
working_dir.strpath, 'root.json')
|
||||||
with open(root_spec_yaml_path, 'w') as fd:
|
with open(root_spec_json_path, 'w') as fd:
|
||||||
fd.write(root_spec.to_yaml(hash=ht.build_hash))
|
fd.write(root_spec.to_json(hash=ht.dag_hash))
|
||||||
|
|
||||||
artifacts_root = os.path.join(working_dir.strpath, 'scratch_dir')
|
artifacts_root = os.path.join(working_dir.strpath, 'scratch_dir')
|
||||||
pipeline_path = os.path.join(artifacts_root, 'pipeline.yml')
|
pipeline_path = os.path.join(artifacts_root, 'pipeline.yml')
|
||||||
|
@ -1798,8 +1789,8 @@ def test_ci_reproduce(tmpdir, mutable_mock_env_path,
|
||||||
repro_file = os.path.join(working_dir.strpath, 'repro.json')
|
repro_file = os.path.join(working_dir.strpath, 'repro.json')
|
||||||
repro_details = {
|
repro_details = {
|
||||||
'job_name': job_name,
|
'job_name': job_name,
|
||||||
'job_spec_yaml': 'archivefiles.yaml',
|
'job_spec_json': 'archivefiles.json',
|
||||||
'root_spec_yaml': 'root.yaml',
|
'root_spec_json': 'root.json',
|
||||||
'ci_project_dir': working_dir.strpath
|
'ci_project_dir': working_dir.strpath
|
||||||
}
|
}
|
||||||
with open(repro_file, 'w') as fd:
|
with open(repro_file, 'w') as fd:
|
||||||
|
|
|
@ -982,7 +982,7 @@ def create_v1_lockfile_dict(roots, all_specs):
|
||||||
# Version one lockfiles use the dag hash without build deps as keys,
|
# Version one lockfiles use the dag hash without build deps as keys,
|
||||||
# but they write out the full node dict (including build deps)
|
# but they write out the full node dict (including build deps)
|
||||||
"concrete_specs": dict(
|
"concrete_specs": dict(
|
||||||
(s.runtime_hash(), s.to_node_dict(hash=ht.build_hash))
|
(s.runtime_hash(), s.to_node_dict(hash=ht.dag_hash))
|
||||||
for s in all_specs
|
for s in all_specs
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
@ -2469,19 +2469,19 @@ def test_newline_in_commented_sequence_is_not_an_issue(tmpdir):
|
||||||
abspath = tmpdir.join('spack.yaml')
|
abspath = tmpdir.join('spack.yaml')
|
||||||
abspath.write(spack_yaml)
|
abspath.write(spack_yaml)
|
||||||
|
|
||||||
def extract_build_hash(environment):
|
def extract_dag_hash(environment):
|
||||||
_, dyninst = next(iter(environment.specs_by_hash.items()))
|
_, dyninst = next(iter(environment.specs_by_hash.items()))
|
||||||
return dyninst['libelf'].build_hash()
|
return dyninst['libelf'].dag_hash()
|
||||||
|
|
||||||
# Concretize a first time and create a lockfile
|
# Concretize a first time and create a lockfile
|
||||||
with ev.Environment(str(tmpdir)) as e:
|
with ev.Environment(str(tmpdir)) as e:
|
||||||
concretize()
|
concretize()
|
||||||
libelf_first_hash = extract_build_hash(e)
|
libelf_first_hash = extract_dag_hash(e)
|
||||||
|
|
||||||
# Check that a second run won't error
|
# Check that a second run won't error
|
||||||
with ev.Environment(str(tmpdir)) as e:
|
with ev.Environment(str(tmpdir)) as e:
|
||||||
concretize()
|
concretize()
|
||||||
libelf_second_hash = extract_build_hash(e)
|
libelf_second_hash = extract_dag_hash(e)
|
||||||
|
|
||||||
assert libelf_first_hash == libelf_second_hash
|
assert libelf_first_hash == libelf_second_hash
|
||||||
|
|
||||||
|
|
|
@ -7,7 +7,6 @@
|
||||||
import filecmp
|
import filecmp
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import shutil
|
|
||||||
import sys
|
import sys
|
||||||
import time
|
import time
|
||||||
|
|
||||||
|
@ -621,20 +620,20 @@ def test_cdash_buildstamp_param(tmpdir, mock_fetch, install_mockery, capfd):
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.disable_clean_stage_check
|
@pytest.mark.disable_clean_stage_check
|
||||||
def test_cdash_install_from_spec_yaml(tmpdir, mock_fetch, install_mockery,
|
def test_cdash_install_from_spec_json(tmpdir, mock_fetch, install_mockery,
|
||||||
capfd, mock_packages, mock_archive,
|
capfd, mock_packages, mock_archive,
|
||||||
config):
|
config):
|
||||||
# capfd interferes with Spack's capturing
|
# capfd interferes with Spack's capturing
|
||||||
with capfd.disabled():
|
with capfd.disabled():
|
||||||
with tmpdir.as_cwd():
|
with tmpdir.as_cwd():
|
||||||
|
|
||||||
spec_yaml_path = str(tmpdir.join('spec.yaml'))
|
spec_json_path = str(tmpdir.join('spec.json'))
|
||||||
|
|
||||||
pkg_spec = Spec('a')
|
pkg_spec = Spec('a')
|
||||||
pkg_spec.concretize()
|
pkg_spec.concretize()
|
||||||
|
|
||||||
with open(spec_yaml_path, 'w') as fd:
|
with open(spec_json_path, 'w') as fd:
|
||||||
fd.write(pkg_spec.to_yaml(hash=ht.build_hash))
|
fd.write(pkg_spec.to_json(hash=ht.dag_hash))
|
||||||
|
|
||||||
install(
|
install(
|
||||||
'--log-format=cdash',
|
'--log-format=cdash',
|
||||||
|
@ -642,7 +641,7 @@ def test_cdash_install_from_spec_yaml(tmpdir, mock_fetch, install_mockery,
|
||||||
'--cdash-build=my_custom_build',
|
'--cdash-build=my_custom_build',
|
||||||
'--cdash-site=my_custom_site',
|
'--cdash-site=my_custom_site',
|
||||||
'--cdash-track=my_custom_track',
|
'--cdash-track=my_custom_track',
|
||||||
'-f', spec_yaml_path)
|
'-f', spec_json_path)
|
||||||
|
|
||||||
report_dir = tmpdir.join('cdash_reports')
|
report_dir = tmpdir.join('cdash_reports')
|
||||||
assert report_dir in tmpdir.listdir()
|
assert report_dir in tmpdir.listdir()
|
||||||
|
@ -846,14 +845,14 @@ def test_install_no_add_in_env(tmpdir, mock_fetch, install_mockery,
|
||||||
post_install_specs = e.all_specs()
|
post_install_specs = e.all_specs()
|
||||||
assert all([s in env_specs for s in post_install_specs])
|
assert all([s in env_specs for s in post_install_specs])
|
||||||
|
|
||||||
# Make sure we can install a concrete dependency spec from a spec.yaml
|
# Make sure we can install a concrete dependency spec from a spec.json
|
||||||
# file on disk, using the ``--no-add` option, and the spec is installed
|
# file on disk, using the ``--no-add` option, and the spec is installed
|
||||||
# but not added as a root
|
# but not added as a root
|
||||||
mpi_spec_yaml_path = tmpdir.join('{0}.yaml'.format(mpi_spec.name))
|
mpi_spec_json_path = tmpdir.join('{0}.json'.format(mpi_spec.name))
|
||||||
with open(mpi_spec_yaml_path.strpath, 'w') as fd:
|
with open(mpi_spec_json_path.strpath, 'w') as fd:
|
||||||
fd.write(mpi_spec.to_yaml(hash=ht.build_hash))
|
fd.write(mpi_spec.to_json(hash=ht.dag_hash))
|
||||||
|
|
||||||
install('--no-add', '-f', mpi_spec_yaml_path.strpath)
|
install('--no-add', '-f', mpi_spec_json_path.strpath)
|
||||||
assert(mpi_spec not in e.roots())
|
assert(mpi_spec not in e.roots())
|
||||||
|
|
||||||
find_output = find('-l', output=str)
|
find_output = find('-l', output=str)
|
||||||
|
@@ -1016,76 +1015,6 @@ def test_install_fails_no_args_suggests_env_activation(tmpdir):
    assert 'using the `spack.yaml` in this directory' in output


-default_full_hash = spack.spec.Spec.full_hash
-
-
-def fake_full_hash(spec):
-    # Generate an arbitrary hash that is intended to be different than
-    # whatever a Spec reported before (to test actions that trigger when
-    # the hash changes)
-    if spec.name == 'libdwarf':
-        return 'tal4c7h4z0gqmixb1eqa92mjoybxn5l6'
-    return default_full_hash(spec)
-
-
-def test_cache_install_full_hash_match(
-        install_mockery_mutable_config, mock_packages, mock_fetch,
-        mock_archive, mutable_config, monkeypatch, tmpdir):
-    """Make sure installing from cache respects full hash argument"""
-
-    # Create a temp mirror directory for buildcache usage
-    mirror_dir = tmpdir.join('mirror_dir')
-    mirror_url = 'file://{0}'.format(mirror_dir.strpath)
-
-    s = Spec('libdwarf').concretized()
-    package_id = spack.installer.package_id(s.package)
-
-    # Install a package
-    install(s.name)
-
-    # Put installed package in the buildcache
-    buildcache('create', '-u', '-a', '-f', '-d', mirror_dir.strpath, s.name)
-
-    # Now uninstall the package
-    uninstall('-y', s.name)
-
-    # Configure the mirror with the binary package in it
-    mirror('add', 'test-mirror', mirror_url)
-
-    # Make sure we get the binary version by default
-    install_output = install('--no-check-signature', s.name, output=str)
-    expect_extract_msg = 'Extracting {0} from binary cache'.format(package_id)
-
-    assert expect_extract_msg in install_output
-
-    uninstall('-y', s.name)
-
-    # Now monkey patch Spec to change the full hash on the package
-    monkeypatch.setattr(spack.spec.Spec, 'full_hash', fake_full_hash)
-
-    # Check that even if the full hash changes, we install from binary when
-    # we don't explicitly require the full hash to match
-    install_output = install('--no-check-signature', s.name, output=str)
-    assert expect_extract_msg in install_output
-
-    uninstall('-y', s.name)
-
-    # Finally, make sure that if we insist on the full hash match, spack
-    # installs from source.
-    install_output = install('--require-full-hash-match', s.name, output=str)
-    expect_msg = 'No binary for {0} found: installing from source'.format(
-        package_id)
-
-    assert expect_msg in install_output
-
-    uninstall('-y', s.name)
-    mirror('rm', 'test-mirror')
-
-    # Get rid of that libdwarf binary in the mirror so other tests don't try to
-    # use it and fail because of NoVerifyException
-    shutil.rmtree(mirror_dir.strpath)
-
-
 def test_install_env_with_tests_all(tmpdir, mock_packages, mock_fetch,
                                    install_mockery, mutable_mock_env_path):
    env('create', 'test')
|
@ -256,11 +256,11 @@ def test_hash_change(mock_test_stage, mock_packages, mock_archive, mock_fetch,
|
||||||
outfile = os.path.join(testdir, 'test_suite.lock')
|
outfile = os.path.join(testdir, 'test_suite.lock')
|
||||||
with open(outfile, 'r') as f:
|
with open(outfile, 'r') as f:
|
||||||
output = f.read()
|
output = f.read()
|
||||||
val_replace = '"full_hash": "{0}"'.format(
|
val_replace = '"hash": "{0}"'.format(
|
||||||
spack.store.db.query('printing-package')[0].full_hash())
|
spack.store.db.query('printing-package')[0].dag_hash())
|
||||||
changed_hash = output.replace(
|
changed_hash = output.replace(
|
||||||
val_replace,
|
val_replace,
|
||||||
'"full_hash": "fakehash492ucwhwvzhxfbmcc45x49ha"')
|
'"hash": "fakehash492ucwhwvzhxfbmcc45x49ha"')
|
||||||
with open(outfile, 'w') as f:
|
with open(outfile, 'w') as f:
|
||||||
f.write(changed_hash)
|
f.write(changed_hash)
|
||||||
|
|
||||||
|
@ -271,6 +271,8 @@ def test_hash_change(mock_test_stage, mock_packages, mock_archive, mock_fetch,
|
||||||
results_output = spack_test('results')
|
results_output = spack_test('results')
|
||||||
assert 'PASSED' in results_output
|
assert 'PASSED' in results_output
|
||||||
|
|
||||||
|
assert(False)
|
||||||
|
|
||||||
|
|
||||||
def test_test_results_none(mock_packages, mock_test_stage):
|
def test_test_results_none(mock_packages, mock_test_stage):
|
||||||
name = 'trivial'
|
name = 'trivial'
|
||||||
|
|
|
@ -1080,8 +1080,7 @@ def test_external_packages_have_consistent_hash(self):
|
||||||
s._old_concretize(), t._new_concretize()
|
s._old_concretize(), t._new_concretize()
|
||||||
|
|
||||||
assert s.dag_hash() == t.dag_hash()
|
assert s.dag_hash() == t.dag_hash()
|
||||||
assert s.build_hash() == t.build_hash()
|
assert s.runtime_hash() == t.runtime_hash()
|
||||||
assert s.full_hash() == t.full_hash()
|
|
||||||
|
|
||||||
def test_external_that_would_require_a_virtual_dependency(self):
|
def test_external_that_would_require_a_virtual_dependency(self):
|
||||||
s = Spec('requires-virtual').concretized()
|
s = Spec('requires-virtual').concretized()
|
||||||
|
|
|
@ -118,13 +118,7 @@ def test_read_and_write_spec(temporary_store, config, mock_packages):
|
||||||
# Make sure spec file can be read back in to get the original spec
|
# Make sure spec file can be read back in to get the original spec
|
||||||
spec_from_file = layout.read_spec(spec_path)
|
spec_from_file = layout.read_spec(spec_path)
|
||||||
|
|
||||||
# currently we don't store build dependency information when
|
stored_deptypes = spack.hash_types.dag_hash
|
||||||
# we write out specs to the filesystem.
|
|
||||||
|
|
||||||
# TODO: fix this when we can concretize more loosely based on
|
|
||||||
# TODO: what is installed. We currently omit these to
|
|
||||||
# TODO: increase reuse of build dependencies.
|
|
||||||
stored_deptypes = spack.hash_types.full_hash
|
|
||||||
expected = spec.copy(deps=stored_deptypes)
|
expected = spec.copy(deps=stored_deptypes)
|
||||||
expected._mark_concrete()
|
expected._mark_concrete()
|
||||||
|
|
||||||
|
|
|
@ -49,7 +49,7 @@ def mock_monitor_request(monkeypatch):
|
||||||
def mock_do_request(self, endpoint, *args, **kwargs):
|
def mock_do_request(self, endpoint, *args, **kwargs):
|
||||||
|
|
||||||
build = {"build_id": 1,
|
build = {"build_id": 1,
|
||||||
"spec_full_hash": "bpfvysmqndtmods4rmy6d6cfquwblngp",
|
"spec_hash": "bpfvysmqndtmods4rmy6d6cfquwblngp",
|
||||||
"spec_name": "dttop"}
|
"spec_name": "dttop"}
|
||||||
|
|
||||||
# Service Info
|
# Service Info
|
||||||
|
@ -111,7 +111,7 @@ def mock_do_request(self, endpoint, *args, **kwargs):
|
||||||
elif endpoint == "specs/new/":
|
elif endpoint == "specs/new/":
|
||||||
return {"message": "success",
|
return {"message": "success",
|
||||||
"data": {
|
"data": {
|
||||||
"full_hash": "bpfvysmqndtmods4rmy6d6cfquwblngp",
|
"hash": "bpfvysmqndtmods4rmy6d6cfquwblngp",
|
||||||
"name": "dttop",
|
"name": "dttop",
|
||||||
"version": "1.0",
|
"version": "1.0",
|
||||||
"spack_version": "0.16.0-1379-7a5351d495",
|
"spack_version": "0.16.0-1379-7a5351d495",
|
||||||
|
@ -264,12 +264,12 @@ def test_install_monitor_save_local(install_mockery_mutable_config,
|
||||||
# Get the spec name
|
# Get the spec name
|
||||||
spec = spack.spec.Spec("dttop")
|
spec = spack.spec.Spec("dttop")
|
||||||
spec.concretize()
|
spec.concretize()
|
||||||
full_hash = spec.full_hash()
|
dag_hash = spec.dag_hash()
|
||||||
|
|
||||||
# Ensure we have monitor results saved
|
# Ensure we have monitor results saved
|
||||||
for dirname in os.listdir(str(reports_dir)):
|
for dirname in os.listdir(str(reports_dir)):
|
||||||
dated_dir = os.path.join(str(reports_dir), dirname)
|
dated_dir = os.path.join(str(reports_dir), dirname)
|
||||||
build_metadata = "build-metadata-%s.json" % full_hash
|
build_metadata = "build-metadata-%s.json" % dag_hash
|
||||||
assert build_metadata in os.listdir(dated_dir)
|
assert build_metadata in os.listdir(dated_dir)
|
||||||
spec_file = "spec-dttop-%s-config.json" % spec.version
|
spec_file = "spec-dttop-%s-config.json" % spec.version
|
||||||
assert spec_file in os.listdir(dated_dir)
|
assert spec_file in os.listdir(dated_dir)
|
||||||
|
|
|
@ -1001,7 +1001,7 @@ def test_splice(self, transitive):
|
||||||
dep.concretize()
|
dep.concretize()
|
||||||
# Sanity checking that these are not the same thing.
|
# Sanity checking that these are not the same thing.
|
||||||
assert dep.dag_hash() != spec['splice-h'].dag_hash()
|
assert dep.dag_hash() != spec['splice-h'].dag_hash()
|
||||||
assert dep.build_hash() != spec['splice-h'].build_hash()
|
assert dep.runtime_hash() != spec['splice-h'].runtime_hash()
|
||||||
# Do the splice.
|
# Do the splice.
|
||||||
out = spec.splice(dep, transitive)
|
out = spec.splice(dep, transitive)
|
||||||
# Returned spec should still be concrete.
|
# Returned spec should still be concrete.
|
||||||
|
@ -1009,18 +1009,18 @@ def test_splice(self, transitive):
|
||||||
# Traverse the spec and assert that all dependencies are accounted for.
|
# Traverse the spec and assert that all dependencies are accounted for.
|
||||||
for node in spec.traverse():
|
for node in spec.traverse():
|
||||||
assert node.name in out
|
assert node.name in out
|
||||||
# If the splice worked, then the full hash of the spliced dep should
|
# If the splice worked, then the dag hash of the spliced dep should
|
||||||
# now match the full hash of the build spec of the dependency from the
|
# now match the dag hash of the build spec of the dependency from the
|
||||||
# returned spec.
|
# returned spec.
|
||||||
out_h_build = out['splice-h'].build_spec
|
out_h_build = out['splice-h'].build_spec
|
||||||
assert out_h_build.full_hash() == dep.full_hash()
|
assert out_h_build.dag_hash() == dep.dag_hash()
|
||||||
# Transitivity should determine whether the transitive dependency was
|
# Transitivity should determine whether the transitive dependency was
|
||||||
# changed.
|
# changed.
|
||||||
expected_z = dep['splice-z'] if transitive else spec['splice-z']
|
expected_z = dep['splice-z'] if transitive else spec['splice-z']
|
||||||
assert out['splice-z'].full_hash() == expected_z.full_hash()
|
assert out['splice-z'].dag_hash() == expected_z.dag_hash()
|
||||||
# Sanity check build spec of out should be the original spec.
|
# Sanity check build spec of out should be the original spec.
|
||||||
assert (out['splice-t'].build_spec.full_hash() ==
|
assert (out['splice-t'].build_spec.dag_hash() ==
|
||||||
spec['splice-t'].full_hash())
|
spec['splice-t'].dag_hash())
|
||||||
# Finally, the spec should know it's been spliced:
|
# Finally, the spec should know it's been spliced:
|
||||||
assert out.spliced
|
assert out.spliced
|
||||||
|
|
||||||
|
@ -1032,39 +1032,39 @@ def test_splice_with_cached_hashes(self, transitive):
|
||||||
dep.concretize()
|
dep.concretize()
|
||||||
|
|
||||||
# monkeypatch hashes so we can test that they are cached
|
# monkeypatch hashes so we can test that they are cached
|
||||||
spec._full_hash = 'aaaaaa'
|
spec._hash = 'aaaaaa'
|
||||||
spec._build_hash = 'aaaaaa'
|
spec._runtime_hash = 'aaaaaa'
|
||||||
dep._full_hash = 'bbbbbb'
|
dep._hash = 'bbbbbb'
|
||||||
dep._build_hash = 'bbbbbb'
|
dep._runtime_hash = 'bbbbbb'
|
||||||
spec['splice-h']._full_hash = 'cccccc'
|
spec['splice-h']._hash = 'cccccc'
|
||||||
spec['splice-h']._build_hash = 'cccccc'
|
spec['splice-h']._runtime_hash = 'cccccc'
|
||||||
spec['splice-z']._full_hash = 'dddddd'
|
spec['splice-z']._hash = 'dddddd'
|
||||||
spec['splice-z']._build_hash = 'dddddd'
|
spec['splice-z']._runtime_hash = 'dddddd'
|
||||||
dep['splice-z']._full_hash = 'eeeeee'
|
dep['splice-z']._hash = 'eeeeee'
|
||||||
dep['splice-z']._build_hash = 'eeeeee'
|
dep['splice-z']._runtime_hash = 'eeeeee'
|
||||||
|
|
||||||
out = spec.splice(dep, transitive=transitive)
|
out = spec.splice(dep, transitive=transitive)
|
||||||
out_z_expected = (dep if transitive else spec)['splice-z']
|
out_z_expected = (dep if transitive else spec)['splice-z']
|
||||||
|
|
||||||
assert out.full_hash() != spec.full_hash()
|
assert out.dag_hash() != spec.dag_hash()
|
||||||
assert (out['splice-h'].full_hash() == dep.full_hash()) == transitive
|
assert (out['splice-h'].dag_hash() == dep.dag_hash()) == transitive
|
||||||
assert out['splice-z'].full_hash() == out_z_expected.full_hash()
|
assert out['splice-z'].dag_hash() == out_z_expected.dag_hash()
|
||||||
|
|
||||||
assert out.build_hash() != spec.build_hash()
|
assert out.runtime_hash() != spec.runtime_hash()
|
||||||
assert (out['splice-h'].build_hash() == dep.build_hash()) == transitive
|
assert (out['splice-h'].runtime_hash() == dep.runtime_hash()) == transitive
|
||||||
assert out['splice-z'].build_hash() == out_z_expected.build_hash()
|
assert out['splice-z'].runtime_hash() == out_z_expected.runtime_hash()
|
||||||
|
|
||||||
@pytest.mark.parametrize('transitive', [True, False])
|
@pytest.mark.parametrize('transitive', [True, False])
|
||||||
def test_splice_input_unchanged(self, transitive):
|
def test_splice_input_unchanged(self, transitive):
|
||||||
spec = Spec('splice-t').concretized()
|
spec = Spec('splice-t').concretized()
|
||||||
dep = Spec('splice-h+foo').concretized()
|
dep = Spec('splice-h+foo').concretized()
|
||||||
orig_spec_hash = spec.full_hash()
|
orig_spec_hash = spec.dag_hash()
|
||||||
orig_dep_hash = dep.full_hash()
|
orig_dep_hash = dep.dag_hash()
|
||||||
spec.splice(dep, transitive)
|
spec.splice(dep, transitive)
|
||||||
# Post-splice, dag hash should still be different; no changes should be
|
# Post-splice, dag hash should still be different; no changes should be
|
||||||
# made to these specs.
|
# made to these specs.
|
||||||
assert spec.full_hash() == orig_spec_hash
|
assert spec.dag_hash() == orig_spec_hash
|
||||||
assert dep.full_hash() == orig_dep_hash
|
assert dep.dag_hash() == orig_dep_hash
|
||||||
|
|
||||||
@pytest.mark.parametrize('transitive', [True, False])
|
@pytest.mark.parametrize('transitive', [True, False])
|
||||||
def test_splice_subsequent(self, transitive):
|
def test_splice_subsequent(self, transitive):
|
||||||
|
@ -1079,12 +1079,12 @@ def test_splice_subsequent(self, transitive):
|
||||||
# Transitivity shouldn't matter since Splice Z has no dependencies.
|
# Transitivity shouldn't matter since Splice Z has no dependencies.
|
||||||
out2 = out.splice(dep, transitive)
|
out2 = out.splice(dep, transitive)
|
||||||
assert out2.concrete
|
assert out2.concrete
|
||||||
assert out2['splice-z'].build_hash() != spec['splice-z'].build_hash()
|
assert out2['splice-z'].runtime_hash() != spec['splice-z'].runtime_hash()
|
||||||
assert out2['splice-z'].build_hash() != out['splice-z'].build_hash()
|
assert out2['splice-z'].runtime_hash() != out['splice-z'].runtime_hash()
|
||||||
assert out2['splice-z'].full_hash() != spec['splice-z'].full_hash()
|
assert out2['splice-z'].dag_hash() != spec['splice-z'].dag_hash()
|
||||||
assert out2['splice-z'].full_hash() != out['splice-z'].full_hash()
|
assert out2['splice-z'].dag_hash() != out['splice-z'].dag_hash()
|
||||||
assert (out2['splice-t'].build_spec.full_hash() ==
|
assert (out2['splice-t'].build_spec.dag_hash() ==
|
||||||
spec['splice-t'].full_hash())
|
spec['splice-t'].dag_hash())
|
||||||
assert out2.spliced
|
assert out2.spliced
|
||||||
|
|
||||||
@pytest.mark.parametrize('transitive', [True, False])
|
@pytest.mark.parametrize('transitive', [True, False])
|
||||||
@@ -1096,13 +1096,13 @@ def test_splice_dict(self, transitive):
         out = spec.splice(dep, transitive)

         # Sanity check all hashes are unique...
-        assert spec.full_hash() != dep.full_hash()
+        assert spec.dag_hash() != dep.dag_hash()
-        assert out.full_hash() != dep.full_hash()
+        assert out.dag_hash() != dep.dag_hash()
-        assert out.full_hash() != spec.full_hash()
+        assert out.dag_hash() != spec.dag_hash()
         node_list = out.to_dict()['spec']['nodes']
-        root_nodes = [n for n in node_list if n['full_hash'] == out.full_hash()]
+        root_nodes = [n for n in node_list if n['hash'] == out.dag_hash()]
-        build_spec_nodes = [n for n in node_list if n['full_hash'] == spec.full_hash()]
+        build_spec_nodes = [n for n in node_list if n['hash'] == spec.dag_hash()]
-        assert spec.full_hash() == out.build_spec.full_hash()
+        assert spec.dag_hash() == out.build_spec.dag_hash()
         assert len(root_nodes) == 1
         assert len(build_spec_nodes) == 1

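A minimal sketch of the serialized layout this test relies on: after this change, each node under spec -> nodes in Spec.to_dict() carries its DAG hash in the 'hash' key, so a serialized node can be matched back to a live spec by dag_hash(). Only the 'spec', 'nodes', and 'hash' keys exercised above are assumed here:

# Sketch; assumes a test environment where the mock 'splice-t' concretizes.
from spack.spec import Spec

s = Spec('splice-t').concretized()
d = s.to_dict()
root = [n for n in d['spec']['nodes'] if n['hash'] == s.dag_hash()]
assert len(root) == 1   # exactly one node matches the root's DAG hash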
@@ -1115,28 +1115,28 @@ def test_splice_dict_roundtrip(self, transitive):
         out = spec.splice(dep, transitive)

         # Sanity check all hashes are unique...
-        assert spec.full_hash() != dep.full_hash()
+        assert spec.dag_hash() != dep.dag_hash()
-        assert out.full_hash() != dep.full_hash()
+        assert out.dag_hash() != dep.dag_hash()
-        assert out.full_hash() != spec.full_hash()
+        assert out.dag_hash() != spec.dag_hash()
         out_rt_spec = Spec.from_dict(out.to_dict())  # rt is "round trip"
-        assert out_rt_spec.full_hash() == out.full_hash()
+        assert out_rt_spec.dag_hash() == out.dag_hash()
-        out_rt_spec_bld_hash = out_rt_spec.build_spec.full_hash()
+        out_rt_spec_bld_hash = out_rt_spec.build_spec.dag_hash()
-        out_rt_spec_h_bld_hash = out_rt_spec['splice-h'].build_spec.full_hash()
+        out_rt_spec_h_bld_hash = out_rt_spec['splice-h'].build_spec.dag_hash()
-        out_rt_spec_z_bld_hash = out_rt_spec['splice-z'].build_spec.full_hash()
+        out_rt_spec_z_bld_hash = out_rt_spec['splice-z'].build_spec.dag_hash()

         # In any case, the build spec for splice-t (root) should point to the
         # original spec, preserving build provenance.
-        assert spec.full_hash() == out_rt_spec_bld_hash
+        assert spec.dag_hash() == out_rt_spec_bld_hash
-        assert out_rt_spec.full_hash() != out_rt_spec_bld_hash
+        assert out_rt_spec.dag_hash() != out_rt_spec_bld_hash

         # The build spec for splice-h should always point to the introduced
         # spec, since that is the spec spliced in.
-        assert dep['splice-h'].full_hash() == out_rt_spec_h_bld_hash
+        assert dep['splice-h'].dag_hash() == out_rt_spec_h_bld_hash

         # The build spec for splice-z will depend on whether or not the splice
         # was transitive.
-        expected_z_bld_hash = (dep['splice-z'].full_hash() if transitive else
-                               spec['splice-z'].full_hash())
+        expected_z_bld_hash = (dep['splice-z'].dag_hash() if transitive else
+                               spec['splice-z'].dag_hash())
         assert expected_z_bld_hash == out_rt_spec_z_bld_hash

     @pytest.mark.parametrize('spec,constraint,expected_result', [
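A short sketch of the round-trip property the hunk above checks: serializing a spliced spec with to_dict() and reloading it with Spec.from_dict() preserves both its DAG hash and the build-spec provenance recorded during the splice. As before, the mock packages are an assumption of the sketch, not a requirement of the API:

# Sketch only; assumes Spack's mock packages 'splice-t' and 'splice-h'.
from spack.spec import Spec

spec = Spec('splice-t').concretized()
dep = Spec('splice-h+foo').concretized()
out = spec.splice(dep, transitive=False)

rt = Spec.from_dict(out.to_dict())             # serialize and reload

assert rt.dag_hash() == out.dag_hash()         # hash survives the round trip
assert rt.build_spec.dag_hash() == spec.dag_hash()                 # root provenance
assert rt['splice-h'].build_spec.dag_hash() == dep['splice-h'].dag_hash()  # spliced-in dep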
@@ -499,7 +499,7 @@ def test_parse_yaml_simple(self, mock_packages, tmpdir):
         specfile = tmpdir.join('libdwarf.yaml')

         with specfile.open('w') as f:
-            f.write(s.to_yaml(hash=ht.build_hash))
+            f.write(s.to_yaml(hash=ht.dag_hash))

         # Check an absolute path to spec.yaml by itself:
         # "spack spec /path/to/libdwarf.yaml"
@@ -522,7 +522,7 @@ def test_parse_filename_missing_slash_as_spec(self, mock_packages, tmpdir):
         # write the file to the current directory to make sure it exists,
         # and that we still do not parse the spec as a file.
         with specfile.open('w') as f:
-            f.write(s.to_yaml(hash=ht.build_hash))
+            f.write(s.to_yaml(hash=ht.dag_hash))

         # Check the spec `libelf.yaml` in the working directory, which
         # should evaluate to a spec called `yaml` in the `libelf`
@@ -562,7 +562,7 @@ def test_parse_yaml_dependency(self, mock_packages, tmpdir):
         specfile = tmpdir.join('libelf.yaml')

         with specfile.open('w') as f:
-            f.write(s['libelf'].to_yaml(hash=ht.build_hash))
+            f.write(s['libelf'].to_yaml(hash=ht.dag_hash))

         # Make sure we can use yaml path as dependency, e.g.:
         # "spack spec libdwarf ^ /path/to/libelf.yaml"
@@ -577,7 +577,7 @@ def test_parse_yaml_relative_paths(self, mock_packages, tmpdir):
         specfile = tmpdir.join('libdwarf.yaml')

         with specfile.open('w') as f:
-            f.write(s.to_yaml(hash=ht.build_hash))
+            f.write(s.to_yaml(hash=ht.dag_hash))

         file_name = specfile.basename
         parent_dir = os.path.basename(specfile.dirname)
@@ -612,7 +612,7 @@ def test_parse_yaml_relative_subdir_path(self, mock_packages, tmpdir):
         specfile = tmpdir.mkdir('subdir').join('libdwarf.yaml')

         with specfile.open('w') as f:
-            f.write(s.to_yaml(hash=ht.build_hash))
+            f.write(s.to_yaml(hash=ht.dag_hash))

         file_name = specfile.basename

@@ -632,7 +632,7 @@ def test_parse_yaml_dependency_relative_paths(self, mock_packages, tmpdir):
         specfile = tmpdir.join('libelf.yaml')

         with specfile.open('w') as f:
-            f.write(s['libelf'].to_yaml(hash=ht.build_hash))
+            f.write(s['libelf'].to_yaml(hash=ht.dag_hash))

         file_name = specfile.basename
         parent_dir = os.path.basename(specfile.dirname)
@@ -691,7 +691,7 @@ def test_parse_yaml_variant_error(self, mock_packages, tmpdir):
         specfile = tmpdir.join('a.yaml')

         with specfile.open('w') as f:
-            f.write(s.to_yaml(hash=ht.build_hash))
+            f.write(s.to_yaml(hash=ht.dag_hash))

         with pytest.raises(RedundantSpecError):
             # Trying to change a variant on a concrete spec is an error
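Each of the hunks above makes the same substitution: spec files fed to the spec parser are now written with ht.dag_hash instead of the removed ht.build_hash. A hedged sketch of that pattern, using a mock package name from the tests (libdwarf) and a temporary path chosen purely for illustration:

# Illustration only; 'libdwarf' is one of Spack's mock packages and the
# output path below is arbitrary for this sketch.
import spack.hash_types as ht
from spack.spec import Spec

s = Spec('libdwarf').concretized()

with open('/tmp/libdwarf.yaml', 'w') as f:   # hypothetical location
    f.write(s.to_yaml(hash=ht.dag_hash))     # serialize with the DAG hash

# The file can then be read back into an equivalent concrete spec.
with open('/tmp/libdwarf.yaml') as f:
    assert Spec.from_yaml(f.read()).dag_hash() == s.dag_hash()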
@@ -152,13 +152,8 @@ def descend_and_check(iterable, level=0):
     assert level >= 5


-@pytest.mark.parametrize("hash_type", [
-    ht.dag_hash,
-    ht.build_hash,
-    ht.full_hash
-])
 def test_ordered_read_not_required_for_consistent_dag_hash(
-        hash_type, config, mock_packages
+        config, mock_packages
 ):
     """Make sure ordered serialization isn't required to preserve hashes.

@@ -175,15 +170,15 @@ def test_ordered_read_not_required_for_consistent_dag_hash(
     #
     # Dict & corresponding YAML & JSON from the original spec.
     #
-    spec_dict = spec.to_dict(hash=hash_type)
+    spec_dict = spec.to_dict()
-    spec_yaml = spec.to_yaml(hash=hash_type)
+    spec_yaml = spec.to_yaml()
-    spec_json = spec.to_json(hash=hash_type)
+    spec_json = spec.to_json()

     #
     # Make a spec with reversed OrderedDicts for every
     # OrderedDict in the original.
     #
-    reversed_spec_dict = reverse_all_dicts(spec.to_dict(hash=hash_type))
+    reversed_spec_dict = reverse_all_dicts(spec.to_dict())

     #
     # Dump to YAML and JSON
@@ -218,7 +213,7 @@ def test_ordered_read_not_required_for_consistent_dag_hash(
     )

     # Strip spec if we stripped the yaml
-    spec = spec.copy(deps=hash_type.deptype)
+    spec = spec.copy(deps=ht.dag_hash.deptype)

     # specs are equal to the original
     assert spec == round_trip_yaml_spec
@@ -234,17 +229,16 @@ def test_ordered_read_not_required_for_consistent_dag_hash(
     assert spec.dag_hash() == round_trip_reversed_yaml_spec.dag_hash()
     assert spec.dag_hash() == round_trip_reversed_json_spec.dag_hash()

-    # full_hashes are equal if we round-tripped by build_hash or full_hash
+    # dag_hash is equal after round-trip by dag_hash
-    if hash_type in (ht.build_hash, ht.full_hash):
     spec.concretize()
     round_trip_yaml_spec.concretize()
     round_trip_json_spec.concretize()
     round_trip_reversed_yaml_spec.concretize()
     round_trip_reversed_json_spec.concretize()
-    assert spec.full_hash() == round_trip_yaml_spec.full_hash()
+    assert spec.dag_hash() == round_trip_yaml_spec.dag_hash()
-    assert spec.full_hash() == round_trip_json_spec.full_hash()
+    assert spec.dag_hash() == round_trip_json_spec.dag_hash()
-    assert spec.full_hash() == round_trip_reversed_yaml_spec.full_hash()
+    assert spec.dag_hash() == round_trip_reversed_yaml_spec.dag_hash()
-    assert spec.full_hash() == round_trip_reversed_json_spec.full_hash()
+    assert spec.dag_hash() == round_trip_reversed_json_spec.dag_hash()


 @pytest.mark.parametrize("module", [
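With the hash-type parametrization gone, serialization no longer takes a choice of hash: the default to_dict/to_yaml/to_json output records the DAG hash, and round-tripping through any of them preserves it. A small sketch of that property, assuming a Spack test environment where the mock package libelf concretizes and that Spec.from_json/Spec.from_yaml accept the serialized string, as they do elsewhere in this changeset:

# Sketch; package name and environment are assumptions of the example.
from spack.spec import Spec

s = Spec('libelf').concretized()

# No hash= argument is needed any more; the DAG hash is the only hash.
for rt in (Spec.from_json(s.to_json()), Spec.from_yaml(s.to_yaml())):
    assert rt.dag_hash() == s.dag_hash()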
@@ -350,7 +344,7 @@ def test_save_dependency_spec_jsons_subset(tmpdir, config):
     spec_a.concretize()
     b_spec = spec_a['b']
     c_spec = spec_a['c']
-    spec_a_json = spec_a.to_json(hash=ht.build_hash)
+    spec_a_json = spec_a.to_json()

     save_dependency_specfiles(spec_a_json, output_path, ['b', 'c'])

@@ -544,7 +544,7 @@ _spack_buildcache_preview() {
 }

 _spack_buildcache_check() {
-    SPACK_COMPREPLY="-h --help -m --mirror-url -o --output-file --scope -s --spec --spec-file --rebuild-on-error"
+    SPACK_COMPREPLY="-h --help -m --mirror-url -o --output-file --scope -s --spec --spec-file"
 }

 _spack_buildcache_download() {
@@ -1174,7 +1174,7 @@ _spack_info() {
 _spack_install() {
     if $list_options
     then
-        SPACK_COMPREPLY="-h --help --only -u --until -j --jobs --overwrite --fail-fast --keep-prefix --keep-stage --dont-restage --use-cache --no-cache --cache-only --monitor --monitor-save-local --monitor-tags --monitor-keep-going --monitor-host --monitor-prefix --include-build-deps --no-check-signature --require-full-hash-match --show-log-on-error --source -n --no-checksum --deprecated -v --verbose --fake --only-concrete --no-add -f --file --clean --dirty --test --log-format --log-file --help-cdash --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp -y --yes-to-all -U --fresh --reuse"
+        SPACK_COMPREPLY="-h --help --only -u --until -j --jobs --overwrite --fail-fast --keep-prefix --keep-stage --dont-restage --use-cache --no-cache --cache-only --monitor --monitor-save-local --monitor-tags --monitor-keep-going --monitor-host --monitor-prefix --include-build-deps --no-check-signature --show-log-on-error --source -n --no-checksum --deprecated -v --verbose --fake --only-concrete --no-add -f --file --clean --dirty --test --log-format --log-file --help-cdash --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp -y --yes-to-all -U --fresh --reuse"
     else
         _all_packages
     fi