buildcache: Update layout and signing (#30750)

This PR introduces a new build cache layout and package format, with improvements for
both efficiency and security.

## Old Format
Currently, a binary package consists of a `spec.json` file at the root and a `.spack` file,
which is a `tar` archive containing a copy of the `spec.json` file, possibly a detached
signature (`.asc`) file, and a tar-gzip compressed archive containing the install tree.

```
build_cache/
  # metadata (for indexing)
  <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json
  <arch>/
    <compiler>/
      <name>-<ver>/
        # tar archive
        <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spack
          # tar archive contents:
          # metadata (contains sha256 of internal .tar.gz)
          <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json
          # signature
          <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json.asc
          # tar.gz-compressed prefix
          <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.tar.gz
```

After this change, the nesting has been removed so that the `.spack` file is the
compressed archive of the install tree. Signed binary packages now take the
form of a clearsigned `spec.json` file (a `spec.json.sig`) at the root, while unsigned
binary packages contain a plain `spec.json` at the root.
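Producing such a file is an ordinary GnuPG clearsign operation. As a rough sketch (illustrative only; `clearsign_specfile` is a hypothetical helper, while Spack itself goes through `spack.util.gpg.sign(..., clearsign=True)` as shown in the diff below):

```
import subprocess

def clearsign_specfile(key_id, specfile_path, signed_specfile_path):
    # Clearsigning keeps the spec.json payload human-readable and
    # appends the PGP signature in-line, so no detached .asc file
    # is needed.
    subprocess.run(['gpg', '--local-user', key_id, '--clearsign',
                    '--output', signed_specfile_path, specfile_path],
                   check=True)
```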

## New Format

```
build_cache/
  # metadata (for indexing, contains sha256 of .spack file)
  <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json
  # clearsigned spec.json metadata
  <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json.sig
  <arch>/
    <compiler>/
      <name>-<ver>/
        # tar.gz-compressed prefix (may support more compression formats later)
        <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spack
```
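Because the clearsigned envelope is plain text, the spec dictionary can be recovered by slicing out the payload between the PGP markers. A minimal sketch of the reader side (the regex is the `CLEARSIGN_FILE_REGEX` this PR adds to `spack/spec.py`; the standard-library `json` module stands in here for Spack's JSON wrapper):

```
import json
import re

# Captures everything between the "Hash:" header of a clearsigned
# message and the start of its PGP signature block.
CLEARSIGN_FILE_REGEX = re.compile(
    (r"^-----BEGIN PGP SIGNED MESSAGE-----"
     r"\s+Hash:\s+[^\s]+\s+(.+)-----BEGIN PGP SIGNATURE-----"),
    re.MULTILINE | re.DOTALL)

def extract_json_from_clearsig(data):
    m = CLEARSIGN_FILE_REGEX.search(data)
    if m:
        # Clearsigned spec.json.sig: parse the captured payload.
        return json.loads(m.group(1))
    # Plain (unsigned) spec.json: parse the whole document.
    return json.loads(data)
```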

## Benefits
The major benefit of this change is that the signatures on binary packages can be
verified without:

1. having to download the tarball, or
2. having to extract an unknown tarball.

(1) is an improvement in efficiency; (2) is a security fix: we now verify that we trust the
binary before running it through `tar`, closing off attacks that exploit the extraction step.
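In rough pseudocode, the client-side flow becomes the following (a sketch only; `fetch` is a hypothetical helper that stages a URL locally and returns a path, while the real logic lives in `download_tarball()` and `try_verify()` in the diff below):

```
import subprocess

def fetch_if_trusted(fetch, signed_spec_url, spackfile_url):
    # 1. Fetch only the small clearsigned metadata file.
    specfile_path = fetch(signed_spec_url)
    # 2. Verify it; clearsigned input needs no detached signature,
    #    so a single argument to gpg --verify suffices.
    verified = subprocess.run(['gpg', '--verify', specfile_path])
    if verified.returncode != 0:
        # Refuse to download (let alone untar) the binary package.
        return None
    # 3. Only now fetch the (potentially large) .spack tarball.
    return fetch(spackfile_url)
```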

## Backward compatibility
After this change, Spack can still read the previous buildcache
structure, as well as binary mirrors containing a mix of old and new layouts.
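The two code paths are selected by the `buildcache_layout_version` key that this PR starts writing into the spec metadata; roughly (a sketch of the check added to `extract_tarball()` in the diff below):

```
def is_legacy_layout(spec_dict):
    # Old-format packages predate the key entirely, so a missing or
    # pre-1 version means the nested legacy .spack layout.
    return int(spec_dict.get('buildcache_layout_version', 0)) < 1
```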
Commit 70824e4a5e by Scott Wittenburg, 2022-05-24 15:39:20 -06:00 (parent 0fe5e72744).
13 changed files with 561 additions and 148 deletions.


@@ -4,6 +4,7 @@ Set-Location spack
git config --global user.email "spack@example.com"
git config --global user.name "Test User"
git config --global core.longpaths true
if ($(git branch --show-current) -ne "develop")
{


@@ -563,6 +563,13 @@ def __init__(self, msg):
super(NewLayoutException, self).__init__(msg)
class UnsignedPackageException(spack.error.SpackError):
"""
Raised if installation of an unsigned package is attempted without
the use of ``--no-check-signature``.
"""
def compute_hash(data):
return hashlib.sha256(data.encode('utf-8')).hexdigest()
@@ -751,15 +758,16 @@ def select_signing_key(key=None):
return key
def sign_tarball(key, force, specfile_path):
if os.path.exists('%s.asc' % specfile_path):
def sign_specfile(key, force, specfile_path):
signed_specfile_path = '%s.sig' % specfile_path
if os.path.exists(signed_specfile_path):
if force:
os.remove('%s.asc' % specfile_path)
os.remove(signed_specfile_path)
else:
raise NoOverwriteException('%s.asc' % specfile_path)
raise NoOverwriteException(signed_specfile_path)
key = select_signing_key(key)
spack.util.gpg.sign(key, specfile_path, '%s.asc' % specfile_path)
spack.util.gpg.sign(key, specfile_path, signed_specfile_path, clearsign=True)
def _fetch_spec_from_mirror(spec_url):
@@ -768,7 +776,10 @@ def _fetch_spec_from_mirror(spec_url):
_, _, spec_file = web_util.read_from_url(spec_url)
spec_file_contents = codecs.getreader('utf-8')(spec_file).read()
# Need full spec.json name or this gets confused with index.json.
if spec_url.endswith('.json'):
if spec_url.endswith('.json.sig'):
specfile_json = Spec.extract_json_from_clearsig(spec_file_contents)
s = Spec.from_dict(specfile_json)
elif spec_url.endswith('.json'):
s = Spec.from_json(spec_file_contents)
elif spec_url.endswith('.yaml'):
s = Spec.from_yaml(spec_file_contents)
@@ -829,7 +840,9 @@ def generate_package_index(cache_prefix):
file_list = (
entry
for entry in web_util.list_url(cache_prefix)
if entry.endswith('.yaml') or entry.endswith('spec.json'))
if entry.endswith('.yaml') or
entry.endswith('spec.json') or
entry.endswith('spec.json.sig'))
except KeyError as inst:
msg = 'No packages at {0}: {1}'.format(cache_prefix, inst)
tty.warn(msg)
@@ -944,7 +957,7 @@ def _build_tarball(
tmpdir = tempfile.mkdtemp()
cache_prefix = build_cache_prefix(tmpdir)
tarfile_name = tarball_name(spec, '.tar.gz')
tarfile_name = tarball_name(spec, '.spack')
tarfile_dir = os.path.join(cache_prefix, tarball_directory_name(spec))
tarfile_path = os.path.join(tarfile_dir, tarfile_name)
spackfile_path = os.path.join(
@@ -967,10 +980,12 @@ def _build_tarball(
spec_file = spack.store.layout.spec_file_path(spec)
specfile_name = tarball_name(spec, '.spec.json')
specfile_path = os.path.realpath(os.path.join(cache_prefix, specfile_name))
signed_specfile_path = '{0}.sig'.format(specfile_path)
deprecated_specfile_path = specfile_path.replace('.spec.json', '.spec.yaml')
remote_specfile_path = url_util.join(
outdir, os.path.relpath(specfile_path, os.path.realpath(tmpdir)))
remote_signed_specfile_path = '{0}.sig'.format(remote_specfile_path)
remote_specfile_path_deprecated = url_util.join(
outdir, os.path.relpath(deprecated_specfile_path,
os.path.realpath(tmpdir)))
@@ -979,9 +994,12 @@ def _build_tarball(
if force:
if web_util.url_exists(remote_specfile_path):
web_util.remove_url(remote_specfile_path)
if web_util.url_exists(remote_signed_specfile_path):
web_util.remove_url(remote_signed_specfile_path)
if web_util.url_exists(remote_specfile_path_deprecated):
web_util.remove_url(remote_specfile_path_deprecated)
elif (web_util.url_exists(remote_specfile_path) or
web_util.url_exists(remote_signed_specfile_path) or
web_util.url_exists(remote_specfile_path_deprecated)):
raise NoOverwriteException(url_util.format(remote_specfile_path))
@@ -1043,6 +1061,7 @@ def _build_tarball(
raise ValueError(
'{0} not a valid spec file type (json or yaml)'.format(
spec_file))
spec_dict['buildcache_layout_version'] = 1
bchecksum = {}
bchecksum['hash_algorithm'] = 'sha256'
bchecksum['hash'] = checksum
@@ -1061,25 +1080,15 @@ def _build_tarball(
# sign the tarball and spec file with gpg
if not unsigned:
key = select_signing_key(key)
sign_tarball(key, force, specfile_path)
# put tarball, spec and signature files in .spack archive
with closing(tarfile.open(spackfile_path, 'w')) as tar:
tar.add(name=tarfile_path, arcname='%s' % tarfile_name)
tar.add(name=specfile_path, arcname='%s' % specfile_name)
if not unsigned:
tar.add(name='%s.asc' % specfile_path,
arcname='%s.asc' % specfile_name)
# cleanup file moved to archive
os.remove(tarfile_path)
if not unsigned:
os.remove('%s.asc' % specfile_path)
sign_specfile(key, force, specfile_path)
# push tarball and signed spec json to remote mirror
web_util.push_to_url(
spackfile_path, remote_spackfile_path, keep_original=False)
web_util.push_to_url(
specfile_path, remote_specfile_path, keep_original=False)
signed_specfile_path if not unsigned else specfile_path,
remote_signed_specfile_path if not unsigned else remote_specfile_path,
keep_original=False)
tty.debug('Buildcache for "{0}" written to \n {1}'
.format(spec, remote_spackfile_path))
@@ -1162,48 +1171,174 @@ def push(specs, push_url, specs_kwargs=None, **kwargs):
warnings.warn(str(e))
def download_tarball(spec, preferred_mirrors=None):
def try_verify(specfile_path):
"""Utility function to attempt to verify a local file. Assumes the
file is a clearsigned signature file.
Args:
specfile_path (str): Path to file to be verified.
Returns:
``True`` if the signature could be verified, ``False`` otherwise.
"""
suppress = config.get('config:suppress_gpg_warnings', False)
try:
spack.util.gpg.verify(specfile_path, suppress_warnings=suppress)
except Exception:
return False
return True
def try_fetch(url_to_fetch):
"""Utility function to try and fetch a file from a url, stage it
locally, and return the path to the staged file.
Args:
url_to_fetch (str): Url pointing to remote resource to fetch
Returns:
The ``Stage`` wrapping the locally staged resource, or ``None`` if it could not be fetched.
"""
stage = Stage(url_to_fetch, keep=True)
stage.create()
try:
stage.fetch()
except fs.FetchError:
stage.destroy()
return None
return stage
def _delete_staged_downloads(download_result):
"""Clean up stages used to download tarball and specfile"""
download_result['tarball_stage'].destroy()
download_result['specfile_stage'].destroy()
def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
"""
Download binary tarball for given package into stage area, returning
path to downloaded tarball if successful, None otherwise.
Args:
spec (spack.spec.Spec): Concrete spec
preferred_mirrors (list): If provided, this is a list of preferred
mirror urls. Other configured mirrors will only be used if the
tarball can't be retrieved from one of these.
unsigned (bool): Whether or not to require signed binaries
mirrors_for_spec (list): Optional list of concrete specs and mirrors
obtained by calling binary_distribution.get_mirrors_for_spec().
These will be checked in order first before looking in other
configured mirrors.
Returns:
Path to the downloaded tarball, or ``None`` if the tarball could not
be downloaded from any configured mirrors.
``None`` if the tarball could not be downloaded (maybe also verified,
depending on whether new-style signed binary packages were found).
Otherwise, return an object indicating the path to the downloaded
tarball, the path to the downloaded specfile (in the case of new-style
buildcache), and whether or not the tarball is already verified.
.. code-block:: JSON
{
"tarball_path": "path-to-locally-saved-tarfile",
"specfile_path": "none-or-path-to-locally-saved-specfile",
"signature_verified": "true-if-binary-pkg-was-already-verified"
}
"""
if not spack.mirror.MirrorCollection():
tty.die("Please add a spack mirror to allow " +
"download of pre-compiled packages.")
tarball = tarball_path_name(spec, '.spack')
specfile_prefix = tarball_name(spec, '.spec')
urls_to_try = []
mirrors_to_try = []
if preferred_mirrors:
for preferred_url in preferred_mirrors:
urls_to_try.append(url_util.join(
preferred_url, _build_cache_relative_path, tarball))
# Note on try_first and try_next:
# mirrors_for_spec most likely came from spack caching remote
# mirror indices locally and adding their specs to a local data
# structure supporting quick lookup of concrete specs. Those
# mirrors are likely a subset of all configured mirrors, and
# we'll probably find what we need in one of them. But we'll
# look in all configured mirrors if needed, as maybe the spec
# we need was in an un-indexed mirror. No need to check any
# mirror for the spec twice though.
try_first = [i['mirror_url'] for i in mirrors_for_spec] if mirrors_for_spec else []
try_next = [
i.fetch_url for i in spack.mirror.MirrorCollection().values()
if i.fetch_url not in try_first
]
for mirror in spack.mirror.MirrorCollection().values():
if not preferred_mirrors or mirror.fetch_url not in preferred_mirrors:
urls_to_try.append(url_util.join(
mirror.fetch_url, _build_cache_relative_path, tarball))
for url in try_first + try_next:
mirrors_to_try.append({
'specfile': url_util.join(url,
_build_cache_relative_path, specfile_prefix),
'spackfile': url_util.join(url,
_build_cache_relative_path, tarball)
})
for try_url in urls_to_try:
# stage the tarball into standard place
stage = Stage(try_url, name="build_cache", keep=True)
stage.create()
try:
stage.fetch()
return stage.save_filename
except fs.FetchError:
continue
tried_to_verify_sigs = []
# Assumes we care more about finding a spec file by preferred ext
# than by mirror priority. This can be made less complicated as
# we remove support for deprecated spec formats and buildcache layouts.
for ext in ['json.sig', 'json', 'yaml']:
for mirror_to_try in mirrors_to_try:
specfile_url = '{0}.{1}'.format(mirror_to_try['specfile'], ext)
spackfile_url = mirror_to_try['spackfile']
local_specfile_stage = try_fetch(specfile_url)
if local_specfile_stage:
local_specfile_path = local_specfile_stage.save_filename
signature_verified = False
if ext.endswith('.sig') and not unsigned:
# If we found a signed specfile at the root, try to verify
# the signature immediately. We will not download the
# tarball if we could not verify the signature.
tried_to_verify_sigs.append(specfile_url)
signature_verified = try_verify(local_specfile_path)
if not signature_verified:
tty.warn("Failed to verify: {0}".format(specfile_url))
if unsigned or signature_verified or not ext.endswith('.sig'):
# We will download the tarball in one of three cases:
# 1. user asked for --no-check-signature
# 2. user didn't ask for --no-check-signature, but we
# found a spec.json.sig and verified the signature already
# 3. neither of the first two cases are true, but this file
# is *not* a signed json (not a spec.json.sig file). That
# means we already looked at all the mirrors and either didn't
# find any .sig files or couldn't verify any of them. But it
# is still possible to find an old style binary package where
# the signature is a detached .asc file in the outer archive
# of the tarball, and in that case, the only way to know is to
# download the tarball. This is a deprecated use case, so if
# something goes wrong during the extraction process (can't
# verify signature, checksum doesn't match) we will fail at
# that point instead of trying to download more tarballs from
# the remaining mirrors, looking for one we can use.
tarball_stage = try_fetch(spackfile_url)
if tarball_stage:
return {
'tarball_stage': tarball_stage,
'specfile_stage': local_specfile_stage,
'signature_verified': signature_verified,
}
local_specfile_stage.destroy()
# Falling through the nested loops means we exhaustively searched
# for all known kinds of spec files on all mirrors and did not find
# an acceptable one for which we could download a tarball.
if tried_to_verify_sigs:
raise NoVerifyException(("Spack found new style signed binary packages, "
"but was unable to verify any of them. Please "
"obtain and trust the correct public key. If "
"these are public spack binaries, please see the "
"spack docs for locations where keys can be found."))
tty.warn("download_tarball() was unable to download " +
"{0} from any configured mirrors".format(spec))
@@ -1377,7 +1512,55 @@ def is_backup_file(file):
relocate.relocate_text(text_names, prefix_to_prefix_text)
def extract_tarball(spec, filename, allow_root=False, unsigned=False,
def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum):
stagepath = os.path.dirname(filename)
spackfile_name = tarball_name(spec, '.spack')
spackfile_path = os.path.join(stagepath, spackfile_name)
tarfile_name = tarball_name(spec, '.tar.gz')
tarfile_path = os.path.join(extract_to, tarfile_name)
deprecated_yaml_name = tarball_name(spec, '.spec.yaml')
deprecated_yaml_path = os.path.join(extract_to, deprecated_yaml_name)
json_name = tarball_name(spec, '.spec.json')
json_path = os.path.join(extract_to, json_name)
with closing(tarfile.open(spackfile_path, 'r')) as tar:
tar.extractall(extract_to)
# some buildcache tarfiles use bzip2 compression
if not os.path.exists(tarfile_path):
tarfile_name = tarball_name(spec, '.tar.bz2')
tarfile_path = os.path.join(extract_to, tarfile_name)
if os.path.exists(json_path):
specfile_path = json_path
elif os.path.exists(deprecated_yaml_path):
specfile_path = deprecated_yaml_path
else:
raise ValueError('Cannot find spec file for {0}.'.format(extract_to))
if not unsigned:
if os.path.exists('%s.asc' % specfile_path):
suppress = config.get('config:suppress_gpg_warnings', False)
try:
spack.util.gpg.verify('%s.asc' % specfile_path, specfile_path, suppress)
except Exception:
raise NoVerifyException("Spack was unable to verify package "
"signature, please obtain and trust the "
"correct public key.")
else:
raise UnsignedPackageException(
"To install unsigned packages, use the --no-check-signature option.")
# get the sha256 checksum of the tarball
local_checksum = checksum_tarball(tarfile_path)
# if the checksums don't match don't install
if local_checksum != remote_checksum['hash']:
raise NoChecksumException(
"Package tarball failed checksum verification.\n"
"It cannot be installed.")
return tarfile_path
def extract_tarball(spec, download_result, allow_root=False, unsigned=False,
force=False):
"""
extract binary tarball for given package into install area
@@ -1388,66 +1571,59 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
else:
raise NoOverwriteException(str(spec.prefix))
tmpdir = tempfile.mkdtemp()
stagepath = os.path.dirname(filename)
spackfile_name = tarball_name(spec, '.spack')
spackfile_path = os.path.join(stagepath, spackfile_name)
tarfile_name = tarball_name(spec, '.tar.gz')
tarfile_path = os.path.join(tmpdir, tarfile_name)
specfile_is_json = True
deprecated_yaml_name = tarball_name(spec, '.spec.yaml')
deprecated_yaml_path = os.path.join(tmpdir, deprecated_yaml_name)
json_name = tarball_name(spec, '.spec.json')
json_path = os.path.join(tmpdir, json_name)
with closing(tarfile.open(spackfile_path, 'r')) as tar:
tar.extractall(tmpdir)
# some buildcache tarfiles use bzip2 compression
if not os.path.exists(tarfile_path):
tarfile_name = tarball_name(spec, '.tar.bz2')
tarfile_path = os.path.join(tmpdir, tarfile_name)
specfile_path = download_result['specfile_stage'].save_filename
if os.path.exists(json_path):
specfile_path = json_path
elif os.path.exists(deprecated_yaml_path):
specfile_is_json = False
specfile_path = deprecated_yaml_path
else:
raise ValueError('Cannot find spec file for {0}.'.format(tmpdir))
if not unsigned:
if os.path.exists('%s.asc' % specfile_path):
try:
suppress = config.get('config:suppress_gpg_warnings', False)
spack.util.gpg.verify(
'%s.asc' % specfile_path, specfile_path, suppress)
except Exception as e:
shutil.rmtree(tmpdir)
raise e
else:
shutil.rmtree(tmpdir)
raise NoVerifyException(
"Package spec file failed signature verification.\n"
"Use spack buildcache keys to download "
"and install a key for verification from the mirror.")
# get the sha256 checksum of the tarball
checksum = checksum_tarball(tarfile_path)
# get the sha256 checksum recorded at creation
spec_dict = {}
with open(specfile_path, 'r') as inputfile:
content = inputfile.read()
if specfile_is_json:
if specfile_path.endswith('.json.sig'):
spec_dict = Spec.extract_json_from_clearsig(content)
elif specfile_path.endswith('.json'):
spec_dict = sjson.load(content)
else:
spec_dict = syaml.load(content)
bchecksum = spec_dict['binary_cache_checksum']
# if the checksums don't match don't install
if bchecksum['hash'] != checksum:
shutil.rmtree(tmpdir)
raise NoChecksumException(
"Package tarball failed checksum verification.\n"
"It cannot be installed.")
bchecksum = spec_dict['binary_cache_checksum']
filename = download_result['tarball_stage'].save_filename
signature_verified = download_result['signature_verified']
tmpdir = None
if ('buildcache_layout_version' not in spec_dict or
int(spec_dict['buildcache_layout_version']) < 1):
# Handle the older buildcache layout where the .spack file
# contains a spec json/yaml, maybe an .asc file (signature),
# and another tarball containing the actual install tree.
tty.warn("This binary package uses a deprecated layout, "
"and support for it will eventually be removed "
"from spack.")
tmpdir = tempfile.mkdtemp()
try:
tarfile_path = _extract_inner_tarball(
spec, filename, tmpdir, unsigned, bchecksum)
except Exception as e:
_delete_staged_downloads(download_result)
shutil.rmtree(tmpdir)
raise e
else:
# Newer buildcache layout: the .spack file contains just
# the install tree; the signature, if it exists, is
# wrapped around the spec.json at the root. If sig verify
# was required, it was already done before downloading
# the tarball.
tarfile_path = filename
if not unsigned and not signature_verified:
raise UnsignedPackageException(
"To install unsigned packages, use the --no-check-signature option.")
# compute the sha256 checksum of the tarball
local_checksum = checksum_tarball(tarfile_path)
# if the checksums don't match don't install
if local_checksum != bchecksum['hash']:
_delete_staged_downloads(download_result)
raise NoChecksumException(
"Package tarball failed checksum verification.\n"
"It cannot be installed.")
new_relative_prefix = str(os.path.relpath(spec.prefix,
spack.store.layout.root))
@@ -1472,11 +1648,13 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
try:
tar.extractall(path=extract_tmp)
except Exception as e:
_delete_staged_downloads(download_result)
shutil.rmtree(extracted_dir)
raise e
try:
shutil.move(extracted_dir, spec.prefix)
except Exception as e:
_delete_staged_downloads(download_result)
shutil.rmtree(extracted_dir)
raise e
os.remove(tarfile_path)
@@ -1495,9 +1673,11 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
spec_id = spec.format('{name}/{hash:7}')
tty.warn('No manifest file in tarball for spec %s' % spec_id)
finally:
shutil.rmtree(tmpdir)
if tmpdir:
shutil.rmtree(tmpdir)
if os.path.exists(filename):
os.remove(filename)
_delete_staged_downloads(download_result)
def install_root_node(spec, allow_root, unsigned=False, force=False, sha256=None):
@@ -1525,21 +1705,23 @@ def install_root_node(spec, allow_root, unsigned=False, force=False, sha256=None
warnings.warn("Package for spec {0} already installed.".format(spec.format()))
return
tarball = download_tarball(spec)
if not tarball:
download_result = download_tarball(spec, unsigned)
if not download_result:
msg = 'download of binary cache file for spec "{0}" failed'
raise RuntimeError(msg.format(spec.format()))
if sha256:
checker = spack.util.crypto.Checker(sha256)
msg = 'cannot verify checksum for "{0}" [expected={1}]'
msg = msg.format(tarball, sha256)
if not checker.check(tarball):
tarball_path = download_result['tarball_stage'].save_filename
msg = msg.format(tarball_path, sha256)
if not checker.check(tarball_path):
_delete_staged_downloads(download_result)
raise spack.binary_distribution.NoChecksumException(msg)
tty.debug('Verified SHA256 checksum of the build cache')
tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
extract_tarball(spec, tarball, allow_root, unsigned, force)
extract_tarball(spec, download_result, allow_root, unsigned, force)
spack.hooks.post_install(spec)
spack.store.db.add(spec, spack.store.layout)
@@ -1565,6 +1747,8 @@ def try_direct_fetch(spec, mirrors=None):
"""
deprecated_specfile_name = tarball_name(spec, '.spec.yaml')
specfile_name = tarball_name(spec, '.spec.json')
signed_specfile_name = tarball_name(spec, '.spec.json.sig')
specfile_is_signed = False
specfile_is_json = True
found_specs = []
@@ -1573,24 +1757,35 @@
mirror.fetch_url, _build_cache_relative_path, deprecated_specfile_name)
buildcache_fetch_url_json = url_util.join(
mirror.fetch_url, _build_cache_relative_path, specfile_name)
buildcache_fetch_url_signed_json = url_util.join(
mirror.fetch_url, _build_cache_relative_path, signed_specfile_name)
try:
_, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
_, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
specfile_is_signed = True
except (URLError, web_util.SpackWebError, HTTPError) as url_err:
try:
_, _, fs = web_util.read_from_url(buildcache_fetch_url_yaml)
specfile_is_json = False
except (URLError, web_util.SpackWebError, HTTPError) as url_err_y:
tty.debug('Did not find {0} on {1}'.format(
specfile_name, buildcache_fetch_url_json), url_err)
tty.debug('Did not find {0} on {1}'.format(
specfile_name, buildcache_fetch_url_yaml), url_err_y)
continue
_, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
except (URLError, web_util.SpackWebError, HTTPError) as url_err_x:
try:
_, _, fs = web_util.read_from_url(buildcache_fetch_url_yaml)
specfile_is_json = False
except (URLError, web_util.SpackWebError, HTTPError) as url_err_y:
tty.debug('Did not find {0} on {1}'.format(
specfile_name, buildcache_fetch_url_signed_json), url_err)
tty.debug('Did not find {0} on {1}'.format(
specfile_name, buildcache_fetch_url_json), url_err_x)
tty.debug('Did not find {0} on {1}'.format(
specfile_name, buildcache_fetch_url_yaml), url_err_y)
continue
specfile_contents = codecs.getreader('utf-8')(fs).read()
# read the spec from the build cache file. All specs in build caches
# are concrete (as they are built) so we need to mark this spec
# concrete on read-in.
if specfile_is_json:
if specfile_is_signed:
specfile_json = Spec.extract_json_from_clearsig(specfile_contents)
fetched_spec = Spec.from_dict(specfile_json)
elif specfile_is_json:
fetched_spec = Spec.from_json(specfile_contents)
else:
fetched_spec = Spec.from_yaml(specfile_contents)
@@ -1917,7 +2112,8 @@ def download_single_spec(
'path': local_tarball_path,
'required': True,
}, {
'url': [tarball_name(concrete_spec, '.spec.json'),
'url': [tarball_name(concrete_spec, '.spec.json.sig'),
tarball_name(concrete_spec, '.spec.json'),
tarball_name(concrete_spec, '.spec.yaml')],
'path': destination,
'required': True,


@@ -350,7 +350,7 @@ def _process_external_package(pkg, explicit):
def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,
preferred_mirrors=None):
mirrors_for_spec=None):
"""
Process the binary cache tarball.
@@ -360,17 +360,17 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,
explicit (bool): the package was explicitly requested by the user
unsigned (bool): ``True`` if binary package signatures should not
be checked, otherwise ``False``
preferred_mirrors (list): Optional list of urls to prefer when
attempting to download the tarball
mirrors_for_spec (list): Optional list of concrete specs and mirrors
obtained by calling binary_distribution.get_mirrors_for_spec().
Return:
bool: ``True`` if the package was extracted from binary cache,
else ``False``
"""
tarball = binary_distribution.download_tarball(
binary_spec, preferred_mirrors=preferred_mirrors)
download_result = binary_distribution.download_tarball(
binary_spec, unsigned, mirrors_for_spec=mirrors_for_spec)
# see #10063 : install from source if tarball doesn't exist
if tarball is None:
if download_result is None:
tty.msg('{0} exists in binary cache but with different hash'
.format(pkg.name))
return False
@@ -380,9 +380,9 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,
# don't print long padded paths while extracting/relocating binaries
with spack.util.path.filter_padding():
binary_distribution.extract_tarball(
binary_spec, tarball, allow_root=False, unsigned=unsigned, force=False
)
binary_distribution.extract_tarball(binary_spec, download_result,
allow_root=False, unsigned=unsigned,
force=False)
pkg.installed_from_binary_cache = True
spack.store.db.add(pkg.spec, spack.store.layout, explicit=explicit)
@@ -406,12 +406,8 @@ def _try_install_from_binary_cache(pkg, explicit, unsigned=False):
if not matches:
return False
# In the absence of guidance from user or some other reason to prefer one
# mirror over another, any match will suffice, so just pick the first one.
preferred_mirrors = [match['mirror_url'] for match in matches]
binary_spec = matches[0]['spec']
return _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,
preferred_mirrors=preferred_mirrors)
return _process_binary_cache_tarball(pkg, pkg.spec, explicit, unsigned,
mirrors_for_spec=matches)
def clear_failures():


@@ -37,5 +37,6 @@
'hash': {'type': 'string'},
},
},
'buildcache_layout_version': {'type': 'number'}
},
}


@@ -183,6 +183,13 @@
default_format += '{%compiler.name}{@compiler.version}{compiler_flags}'
default_format += '{variants}{arch=architecture}'
#: Regular expression to pull spec contents out of clearsigned signature
#: file.
CLEARSIGN_FILE_REGEX = re.compile(
(r"^-----BEGIN PGP SIGNED MESSAGE-----"
r"\s+Hash:\s+[^\s]+\s+(.+)-----BEGIN PGP SIGNATURE-----"),
re.MULTILINE | re.DOTALL)
#: specfile format version. Must increase monotonically
specfile_format_version = 3
@@ -2395,8 +2402,8 @@ def spec_and_dependency_types(s):
def from_dict(data):
"""Construct a spec from JSON/YAML.
Parameters:
data -- a nested dict/list data structure read from YAML or JSON.
Args:
data: a nested dict/list data structure read from YAML or JSON.
"""
return _spec_from_dict(data)
@@ -2405,8 +2412,8 @@ def from_dict(data):
def from_yaml(stream):
"""Construct a spec from YAML.
Parameters:
stream -- string or file object to read from.
Args:
stream: string or file object to read from.
"""
try:
data = yaml.load(stream)
@@ -2421,8 +2428,8 @@ def from_yaml(stream):
def from_json(stream):
"""Construct a spec from JSON.
Parameters:
stream -- string or file object to read from.
Args:
stream: string or file object to read from.
"""
try:
data = sjson.load(stream)
@@ -2433,6 +2440,27 @@ def from_json(stream):
e,
)
@staticmethod
def extract_json_from_clearsig(data):
m = CLEARSIGN_FILE_REGEX.search(data)
if m:
return sjson.load(m.group(1))
return sjson.load(data)
@staticmethod
def from_signed_json(stream):
"""Construct a spec from clearsigned json spec file.
Args:
stream: string or file object to read from.
"""
data = stream
if hasattr(stream, 'read'):
data = stream.read()
extracted_json = Spec.extract_json_from_clearsig(data)
return Spec.from_dict(extracted_json)
@staticmethod
def from_detection(spec_str, extra_attributes=None):
"""Construct a spec from a spec string determined during external


@@ -602,6 +602,25 @@ def test_install_legacy_yaml(test_legacy_mirror, install_mockery_mutable_config,
uninstall_cmd('-y', '/t5mczux3tfqpxwmg7egp7axy2jvyulqk')
def test_install_legacy_buildcache_layout(install_mockery_mutable_config):
"""Legacy buildcache layout involved a nested archive structure
where the .spack file contained a repeated spec.json and another
compressed archive file containing the install tree. This test
makes sure we can still read that layout."""
legacy_layout_dir = os.path.join(test_path, 'data', 'mirrors', 'legacy_layout')
mirror_url = "file://{0}".format(legacy_layout_dir)
filename = ("test-debian6-core2-gcc-4.5.0-archive-files-2.0-"
"l3vdiqvbobmspwyb4q2b62fz6nitd4hk.spec.json")
spec_json_path = os.path.join(legacy_layout_dir, 'build_cache', filename)
mirror_cmd('add', '--scope', 'site', 'test-legacy-layout', mirror_url)
output = install_cmd(
'--no-check-signature', '--cache-only', '-f', spec_json_path, output=str)
mirror_cmd('rm', '--scope=site', 'test-legacy-layout')
expect_line = ("Extracting archive-files-2.0-"
"l3vdiqvbobmspwyb4q2b62fz6nitd4hk from binary cache")
assert(expect_line in output)
def test_FetchCacheError_only_accepts_lists_of_errors():
with pytest.raises(TypeError, match="list"):
bindist.FetchCacheError("error")

View file

@@ -25,7 +25,6 @@
import spack.paths as spack_paths
import spack.repo as repo
import spack.util.gpg
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
from spack.schema.buildcache_spec import schema as specfile_schema
@@ -925,7 +924,7 @@ def fake_dl_method(spec, *args, **kwargs):
def test_push_mirror_contents(tmpdir, mutable_mock_env_path,
install_mockery_mutable_config, mock_packages,
mock_fetch, mock_stage, mock_gnupghome,
ci_base_environment):
ci_base_environment, mock_binary_index):
working_dir = tmpdir.join('working_dir')
mirror_dir = working_dir.join('mirror')
@@ -1038,10 +1037,10 @@ def test_push_mirror_contents(tmpdir, mutable_mock_env_path,
# Also test buildcache_spec schema
bc_files_list = os.listdir(buildcache_path)
for file_name in bc_files_list:
if file_name.endswith('.spec.json'):
if file_name.endswith('.spec.json.sig'):
spec_json_path = os.path.join(buildcache_path, file_name)
with open(spec_json_path) as json_fd:
json_object = sjson.load(json_fd)
json_object = Spec.extract_json_from_clearsig(json_fd.read())
validate(json_object, specfile_schema)
logs_dir = working_dir.join('logs_dir')


@@ -0,0 +1,54 @@
{
"spec": {
"_meta": {
"version": 3
},
"nodes": [
{
"name": "archive-files",
"version": "2.0",
"arch": {
"platform": "test",
"platform_os": "debian6",
"target": {
"name": "core2",
"vendor": "GenuineIntel",
"features": [
"mmx",
"sse",
"sse2",
"ssse3"
],
"generation": 0,
"parents": [
"nocona"
]
}
},
"compiler": {
"name": "gcc",
"version": "4.5.0"
},
"namespace": "builtin.mock",
"parameters": {
"cflags": [],
"cppflags": [],
"cxxflags": [],
"fflags": [],
"ldflags": [],
"ldlibs": []
},
"package_hash": "ncv2pr4o2yemepsa4h7u4p4dsgieul5fxvh6s5am5fsb65ebugaa====",
"hash": "l3vdiqvbobmspwyb4q2b62fz6nitd4hk"
}
]
},
"binary_cache_checksum": {
"hash_algorithm": "sha256",
"hash": "c226b51d88876746efd6f9737cc6dfdd349870b6c0b9c045d9bad0f2764a40b9"
},
"buildinfo": {
"relative_prefix": "test-debian6-core2/gcc-4.5.0/archive-files-2.0-l3vdiqvbobmspwyb4q2b62fz6nitd4hk",
"relative_rpaths": false
}
}


@@ -0,0 +1,93 @@
-----BEGIN PGP SIGNED MESSAGE-----
Hash: SHA512
{
"spec": {
"_meta": {
"version": 2
},
"nodes": [
{
"name": "zlib",
"version": "1.2.12",
"arch": {
"platform": "linux",
"platform_os": "ubuntu18.04",
"target": {
"name": "haswell",
"vendor": "GenuineIntel",
"features": [
"aes",
"avx",
"avx2",
"bmi1",
"bmi2",
"f16c",
"fma",
"mmx",
"movbe",
"pclmulqdq",
"popcnt",
"rdrand",
"sse",
"sse2",
"sse4_1",
"sse4_2",
"ssse3"
],
"generation": 0,
"parents": [
"ivybridge",
"x86_64_v3"
]
}
},
"compiler": {
"name": "gcc",
"version": "8.4.0"
},
"namespace": "builtin",
"parameters": {
"optimize": true,
"patches": [
"0d38234384870bfd34dfcb738a9083952656f0c766a0f5990b1893076b084b76"
],
"pic": true,
"shared": true,
"cflags": [],
"cppflags": [],
"cxxflags": [],
"fflags": [],
"ldflags": [],
"ldlibs": []
},
"patches": [
"0d38234384870bfd34dfcb738a9083952656f0c766a0f5990b1893076b084b76"
],
"package_hash": "bm7rut622h3yt5mpm4kvf7pmh7tnmueezgk5yquhr2orbmixwxuq====",
"hash": "g7otk5dra3hifqxej36m5qzm7uyghqgb",
"full_hash": "fx2fyri7bv3vpz2rhke6g3l3dwxda4t6",
"build_hash": "g7otk5dra3hifqxej36m5qzm7uyghqgb"
}
]
},
"binary_cache_checksum": {
"hash_algorithm": "sha256",
"hash": "5b9a180f14e0d04b17b1b0c2a26cf3beae448d77d1bda4279283ca4567d0be90"
},
"buildinfo": {
"relative_prefix": "linux-ubuntu18.04-haswell/gcc-8.4.0/zlib-1.2.12-g7otk5dra3hifqxej36m5qzm7uyghqgb",
"relative_rpaths": false
}
}
-----BEGIN PGP SIGNATURE-----
iQEzBAEBCgAdFiEEz8AGj4zHZe4OaI2OQ0Tg92UAr50FAmJ5lD4ACgkQQ0Tg92UA
r52LEggAl/wXlOlHDnjWvqBlqAn3gaJEZ5PDVPczk6k0w+SNfDGrHfWJnL2c23Oq
CssbHylSgAFvaPT1frbiLfZj6L4j4Ym1qsxIlGNsVfW7Pbc4yNF0flqYMdWKXbgY
2sQoPegIKK7EBtpjDf0+VRYfJTMqjsSgjT/o+nTkg9oAnvU23EqXI5uiY84Z5z6l
CKLBm0GWg7MzI0u8NdiQMVNYVatvvZ8EQpblEUQ7jD4Bo0yoSr33Qdq8uvu4ZdlW
bvbIgeY3pTPF13g9uNznHLxW4j9BWQnOtHFI5UKQnYRner504Yoz9k+YxhwuDlaY
TrOxvHe9hG1ox1AP4tqQc+HsNpm5Kg==
=gi2R
-----END PGP SIGNATURE-----


@@ -215,7 +215,7 @@ def test_process_binary_cache_tarball_none(install_mockery, monkeypatch,
def test_process_binary_cache_tarball_tar(install_mockery, monkeypatch, capfd):
"""Tests of _process_binary_cache_tarball with a tar file."""
def _spec(spec, preferred_mirrors=None):
def _spec(spec, unsigned=False, mirrors_for_spec=None):
return spec
# Skip binary distribution functionality since assume tested elsewhere


@@ -17,6 +17,7 @@
from llnl.util.compat import Iterable, Mapping
import spack.hash_types as ht
import spack.paths
import spack.spec
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
@@ -45,6 +46,27 @@ def test_simple_spec():
check_json_round_trip(spec)
def test_read_spec_from_signed_json():
spec_dir = os.path.join(
spack.paths.test_path, 'data', 'mirrors', 'signed_json')
file_name = (
'linux-ubuntu18.04-haswell-gcc-8.4.0-'
'zlib-1.2.12-g7otk5dra3hifqxej36m5qzm7uyghqgb.spec.json.sig')
spec_path = os.path.join(spec_dir, file_name)
def check_spec(spec_to_check):
assert(spec_to_check.name == 'zlib')
assert(spec_to_check._hash == 'g7otk5dra3hifqxej36m5qzm7uyghqgb')
with open(spec_path) as fd:
s = Spec.from_signed_json(fd)
check_spec(s)
with open(spec_path) as fd:
s = Spec.from_signed_json(fd.read())
check_spec(s)
def test_normal_spec(mock_packages):
spec = Spec('mpileaks+debug~opt')
spec.normalize()


@@ -292,17 +292,21 @@ def sign(key, file, output, clearsign=False):
@_autoinit
def verify(signature, file, suppress_warnings=False):
def verify(signature, file=None, suppress_warnings=False):
"""Verify the signature on a file.
Args:
signature (str): signature of the file
file (str): file to be verified
signature (str): signature of the file (or clearsigned file)
file (str): file to be verified. If None, then signature is
assumed to be a clearsigned file.
suppress_warnings (bool): whether or not to suppress warnings
from GnuPG
"""
args = [signature]
if file:
args.append(file)
kwargs = {'error': str} if suppress_warnings else {}
GPG('--verify', signature, file, **kwargs)
GPG('--verify', *args, **kwargs)
@_autoinit