Streamline key management for build caches (#17792)
* Rework spack.util.web.list_url()

  list_url() now accepts an optional recursive argument (default: False)
  that controls whether to return only the files directly within the
  prefix url or all files whose paths start with the prefix url. This
  allows the most efficient implementation for each prefix url scheme.
  For example, only recursive queries are supported for S3 prefixes, so
  the returned list is trimmed down if recursive == False, but the native
  search is returned as-is when recursive == True. Suitable
  implementations for each case are also used for file system URLs.

* Switch to using an explicit index for public keys

  A build cache's keys are now maintained under build_cache/_pgp. Within
  this directory is an index.json file listing all the available keys and
  a <fingerprint>.pub file for each such key (see the layout sketch
  below).

  - Adds spack.binary_distribution.generate_key_index()
    - (re)generates a build cache's key index
  - Modifies spack.binary_distribution.build_tarball()
    - if the tarball is signed, automatically pushes the key used for
      signing along with the tarball
    - if regenerate_index == True, automatically (re)generates the build
      cache's key index along with the build cache's package index, as in
      spack.binary_distribution.generate_key_index()
  - Modifies spack.binary_distribution.get_keys()
    - a build cache's key index is now used instead of programmatic
      listing
  - Adds spack.binary_distribution.push_keys()
    - publishes keys from Spack's keyring to a given list of mirrors
  - Adds a new spack subcommand: spack gpg publish
    - publishes keys from Spack's keyring to a given list of mirrors
  - Modifies spack.util.gpg.Gpg.signing_keys()
    - accepts optional positional arguments for filtering the set of keys
      returned
  - Adds spack.util.gpg.Gpg.public_keys()
    - like spack.util.gpg.Gpg.signing_keys(), except public keys are
      returned
  - Modifies spack.util.gpg.Gpg.export_keys()
    - fixes an issue where GnuPG would prompt for user input when trying
      to overwrite an existing file
  - Modifies spack.util.gpg.Gpg.untrust()
    - fixes an issue where GnuPG would fail for inputs that were not key
      fingerprints
  - Modifies spack.util.web.url_exists()
    - fixes an issue where url_exists() would throw instead of returning
      False

* rework gpg module/fix error with very long GNUPGHOME dir

* add a shim for functools.cached_property

* handle permission denied error in gpg util

* fix tests/make gpgconf optional if no socket dir is available
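For orientation, a sketch of the mirror layout and key index this change introduces; the fingerprint shown is a placeholder, and per the diff the only per-key attribute currently stored is an empty dict:

```python
# build_cache/
# ├── index.json                 <- package index (existing)
# └── _pgp/
#     ├── index.json             <- new key index
#     └── <FINGERPRINT>.pub      <- one exported public key per fingerprint
#
# The key index written by generate_key_index() has the shape:
key_index = {
    'keys': {
        '<FINGERPRINT>': {},  # placeholder; attributes are empty for now
    }
}
```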
Parent: 421f4e12a7 · Commit: 2d93154119
14 changed files with 734 additions and 214 deletions
@@ -26,17 +26,18 @@
 import spack.config as config
 import spack.database as spack_db
 import spack.fetch_strategy as fs
-import spack.util.gpg
 import spack.relocate as relocate
+import spack.util.gpg
+import spack.util.spack_json as sjson
 import spack.util.spack_yaml as syaml
 import spack.mirror
 import spack.util.url as url_util
 import spack.util.web as web_util
 from spack.spec import Spec
 from spack.stage import Stage
-from spack.util.gpg import Gpg

 _build_cache_relative_path = 'build_cache'
+_build_cache_keys_relative_path = '_pgp'

 BUILD_CACHE_INDEX_TEMPLATE = '''
 <html>
@@ -247,15 +248,9 @@ def checksum_tarball(file):
     return hasher.hexdigest()


-def sign_tarball(key, force, specfile_path):
-    # Sign the packages if keys available
-    if spack.util.gpg.Gpg.gpg() is None:
-        raise NoGpgException(
-            "gpg2 is not available in $PATH .\n"
-            "Use spack install gnupg and spack load gnupg.")
-
+def select_signing_key(key=None):
     if key is None:
-        keys = Gpg.signing_keys()
+        keys = spack.util.gpg.signing_keys()
         if len(keys) == 1:
             key = keys[0]

@@ -263,26 +258,30 @@ def sign_tarball(key, force, specfile_path):
             raise PickKeyException(str(keys))

         if len(keys) == 0:
-            msg = "No default key available for signing.\n"
-            msg += "Use spack gpg init and spack gpg create"
-            msg += " to create a default key."
-            raise NoKeyException(msg)
+            raise NoKeyException(
+                "No default key available for signing.\n"
+                "Use spack gpg init and spack gpg create"
+                " to create a default key.")
+    return key
+
+
+def sign_tarball(key, force, specfile_path):
     if os.path.exists('%s.asc' % specfile_path):
         if force:
             os.remove('%s.asc' % specfile_path)
         else:
             raise NoOverwriteException('%s.asc' % specfile_path)
-    Gpg.sign(key, specfile_path, '%s.asc' % specfile_path)
+    key = select_signing_key(key)
+    spack.util.gpg.sign(key, specfile_path, '%s.asc' % specfile_path)


 def generate_package_index(cache_prefix):
     """Create the build cache index page.

     Creates (or replaces) the "index.json" page at the location given in
-    cache_prefix. This page contains a link for each binary package (*.yaml)
-    and public key (*.key) under cache_prefix.
+    cache_prefix. This page contains a link for each binary package (.yaml)
+    under cache_prefix.
     """
     tmpdir = tempfile.mkdtemp()
     db_root_dir = os.path.join(tmpdir, 'db_root')
@@ -325,6 +324,45 @@ def generate_package_index(cache_prefix):
     shutil.rmtree(tmpdir)


+def generate_key_index(key_prefix, tmpdir=None):
+    """Create the key index page.
+
+    Creates (or replaces) the "index.json" page at the location given in
+    key_prefix. This page contains an entry for each key (.pub) under
+    key_prefix.
+    """
+
+    tty.debug(' '.join(('Retrieving key.pub files from',
+                        url_util.format(key_prefix),
+                        'to build key index')))
+
+    fingerprints = (
+        entry[:-4]
+        for entry in web_util.list_url(key_prefix, recursive=False)
+        if entry.endswith('.pub'))
+
+    keys_local = url_util.local_file_path(key_prefix)
+    if keys_local:
+        target = os.path.join(keys_local, 'index.json')
+    else:
+        target = os.path.join(tmpdir, 'index.json')
+
+    index = {
+        'keys': dict(
+            (fingerprint, {}) for fingerprint
+            in sorted(set(fingerprints)))
+    }
+    with open(target, 'w') as f:
+        sjson.dump(index, f)
+
+    if not keys_local:
+        web_util.push_to_url(
+            target,
+            url_util.join(key_prefix, 'index.json'),
+            keep_original=False,
+            extra_args={'ContentType': 'application/json'})
+
+
 def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
                   allow_root=False, key=None, regenerate_index=False):
     """
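A minimal sketch of how generate_key_index() might be invoked against a local mirror; the mirror path here is a placeholder for this example, not a path used anywhere in the change itself:

```python
import spack.binary_distribution as bindist
import spack.util.url as url_util

# Hypothetical local mirror prefix for the keys directory.
key_prefix = url_util.join('file:///tmp/my-mirror', 'build_cache', '_pgp')

# Scans key_prefix for <fingerprint>.pub files (non-recursively) and
# (re)writes build_cache/_pgp/index.json listing each fingerprint.
bindist.generate_key_index(key_prefix)
```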
@@ -445,7 +483,9 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,

     # sign the tarball and spec file with gpg
     if not unsigned:
+        key = select_signing_key(key)
         sign_tarball(key, force, specfile_path)

     # put tarball, spec and signature files in .spack archive
     with closing(tarfile.open(spackfile_path, 'w')) as tar:
         tar.add(name=tarfile_path, arcname='%s' % tarfile_name)
@@ -468,7 +508,15 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
            .format(spec, remote_spackfile_path))

     try:
-        # create an index.html for the build_cache directory so specs can be
+        # push the key to the build cache's _pgp directory so it can be
+        # imported
+        if not unsigned:
+            push_keys(outdir,
+                      keys=[key],
+                      regenerate_index=regenerate_index,
+                      tmpdir=tmpdir)
+
+        # create an index.json for the build_cache directory so specs can be
         # found
         if regenerate_index:
             generate_package_index(url_util.join(
@@ -695,7 +743,8 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
     if os.path.exists('%s.asc' % specfile_path):
         try:
             suppress = config.get('config:suppress_gpg_warnings', False)
-            Gpg.verify('%s.asc' % specfile_path, specfile_path, suppress)
+            spack.util.gpg.verify(
+                '%s.asc' % specfile_path, specfile_path, suppress)
         except Exception as e:
             shutil.rmtree(tmpdir)
             raise e
@@ -898,41 +947,46 @@ def get_specs():
     return _cached_specs


-def get_keys(install=False, trust=False, force=False):
-    """
-    Get pgp public keys available on mirror
-    with suffix .key or .pub
-    """
-    if not spack.mirror.MirrorCollection():
+def get_keys(install=False, trust=False, force=False, mirrors=None):
+    """Get pgp public keys available on mirror with suffix .pub
+    """
+    mirror_collection = (mirrors or spack.mirror.MirrorCollection())
+
+    if not mirror_collection:
         tty.die("Please add a spack mirror to allow " +
                 "download of build caches.")

-    keys = set()
-
-    for mirror in spack.mirror.MirrorCollection().values():
-        fetch_url_build_cache = url_util.join(
-            mirror.fetch_url, _build_cache_relative_path)
-
-        mirror_dir = url_util.local_file_path(fetch_url_build_cache)
-        if mirror_dir:
-            tty.debug('Finding public keys in {0}'.format(mirror_dir))
-            files = os.listdir(str(mirror_dir))
-            for file in files:
-                if re.search(r'\.key', file) or re.search(r'\.pub', file):
-                    link = url_util.join(fetch_url_build_cache, file)
-                    keys.add(link)
-        else:
-            tty.debug('Finding public keys at {0}'
-                      .format(url_util.format(fetch_url_build_cache)))
-            # For s3 mirror need to request index.html directly
-            p, links = web_util.spider(
-                url_util.join(fetch_url_build_cache, 'index.html'))
-
-            for link in links:
-                if re.search(r'\.key', link) or re.search(r'\.pub', link):
-                    keys.add(link)
-
-    for link in keys:
-        with Stage(link, name="build_cache", keep=True) as stage:
-            if os.path.exists(stage.save_filename) and force:
-                os.remove(stage.save_filename)
+    for mirror in mirror_collection.values():
+        fetch_url = mirror.fetch_url
+        keys_url = url_util.join(fetch_url,
+                                 _build_cache_relative_path,
+                                 _build_cache_keys_relative_path)
+        keys_index = url_util.join(keys_url, 'index.json')
+
+        tty.debug('Finding public keys in {0}'.format(
+            url_util.format(fetch_url)))
+
+        try:
+            _, _, json_file = web_util.read_from_url(keys_index)
+            json_index = sjson.load(codecs.getreader('utf-8')(json_file))
+        except (URLError, web_util.SpackWebError) as url_err:
+            if web_util.url_exists(keys_index):
+                err_msg = [
+                    'Unable to find public keys in {0},',
+                    ' caught exception attempting to read from {1}.',
+                ]
+
+                tty.error(''.join(err_msg).format(
+                    url_util.format(fetch_url),
+                    url_util.format(keys_index)))
+
+                tty.debug(url_err)
+
+            continue
+
+        for fingerprint, key_attributes in json_index['keys'].items():
+            link = os.path.join(keys_url, fingerprint + '.pub')
+
+            with Stage(link, name="build_cache", keep=True) as stage:
+                if os.path.exists(stage.save_filename) and force:
+                    os.remove(stage.save_filename)
@@ -941,16 +995,80 @@ def get_keys(install=False, trust=False, force=False):
                 stage.fetch()
             except fs.FetchError:
                 continue
-            tty.debug('Found key {0}'.format(link))
+
+            tty.debug('Found key {0}'.format(fingerprint))
             if install:
                 if trust:
-                    Gpg.trust(stage.save_filename)
+                    spack.util.gpg.trust(stage.save_filename)
                     tty.debug('Added this key to trusted keys.')
                 else:
                     tty.debug('Will not add this key to trusted keys.'
                               'Use -t to install all downloaded keys')


+def push_keys(*mirrors, **kwargs):
+    """
+    Upload pgp public keys to the given mirrors
+    """
+    keys = kwargs.get('keys')
+    regenerate_index = kwargs.get('regenerate_index', False)
+    tmpdir = kwargs.get('tmpdir')
+    remove_tmpdir = False
+
+    keys = spack.util.gpg.public_keys(*(keys or []))
+
+    try:
+        for mirror in mirrors:
+            push_url = getattr(mirror, 'push_url', mirror)
+            keys_url = url_util.join(push_url,
+                                     _build_cache_relative_path,
+                                     _build_cache_keys_relative_path)
+            keys_local = url_util.local_file_path(keys_url)
+
+            verb = 'Writing' if keys_local else 'Uploading'
+            tty.debug('{0} public keys to {1}'.format(
+                verb, url_util.format(push_url)))
+
+            if keys_local:  # mirror is local, don't bother with the tmpdir
+                prefix = keys_local
+                mkdirp(keys_local)
+            else:
+                # A tmp dir is created for the first mirror that is non-local.
+                # On the off-hand chance that all the mirrors are local, then
+                # we can avoid the need to create a tmp dir.
+                if tmpdir is None:
+                    tmpdir = tempfile.mkdtemp()
+                    remove_tmpdir = True
+                prefix = tmpdir
+
+            for fingerprint in keys:
+                tty.debug('    ' + fingerprint)
+                filename = fingerprint + '.pub'
+
+                export_target = os.path.join(prefix, filename)
+                spack.util.gpg.export_keys(export_target, fingerprint)
+
+                # If mirror is local, the above export writes directly to the
+                # mirror (export_target points directly to the mirror).
+                #
+                # If not, then export_target is a tmpfile that needs to be
+                # uploaded to the mirror.
+                if not keys_local:
+                    spack.util.web.push_to_url(
+                        export_target,
+                        url_util.join(keys_url, filename),
+                        keep_original=False)
+
+            if regenerate_index:
+                if keys_local:
+                    generate_key_index(keys_url)
+                else:
+                    generate_key_index(keys_url, tmpdir)
+    finally:
+        if remove_tmpdir:
+            shutil.rmtree(tmpdir)
+
+
 def needs_rebuild(spec, mirror_url, rebuild_on_errors=False):
     if not spec.concrete:
         raise ValueError('spec must be concrete to check against mirror')
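A sketch of how push_keys() might be called directly; the mirror URL and the decision to push every key are placeholders for this example:

```python
import spack.binary_distribution as bindist
import spack.util.gpg

# Publish every public key in Spack's keyring to one mirror and rebuild
# the key index afterward. 's3://my-bucket/mirror' is a made-up URL; a
# spack.mirror.Mirror object works here as well, since push_keys() falls
# back to the value itself when there is no push_url attribute.
fingerprints = spack.util.gpg.public_keys()
bindist.push_keys('s3://my-bucket/mirror',
                  keys=fingerprints,
                  regenerate_index=True)
```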
@@ -6,9 +6,10 @@
 import os
 import argparse

+import spack.binary_distribution
 import spack.cmd.common.arguments as arguments
 import spack.paths
-from spack.util.gpg import Gpg
+import spack.util.gpg

 description = "handle GPG actions for spack"
 section = "packaging"
@@ -81,37 +82,64 @@ def setup_parser(subparser):
                         'all secret keys if unspecified')
     export.set_defaults(func=gpg_export)

+    publish = subparsers.add_parser('publish', help=gpg_publish.__doc__)
+
+    output = publish.add_mutually_exclusive_group(required=True)
+    output.add_argument('-d', '--directory',
+                        metavar='directory',
+                        type=str,
+                        help="local directory where " +
+                             "keys will be published.")
+    output.add_argument('-m', '--mirror-name',
+                        metavar='mirror-name',
+                        type=str,
+                        help="name of the mirror where " +
+                             "keys will be published.")
+    output.add_argument('--mirror-url',
+                        metavar='mirror-url',
+                        type=str,
+                        help="URL of the mirror where " +
+                             "keys will be published.")
+    publish.add_argument('--rebuild-index', action='store_true',
+                         default=False, help=(
+                             "Regenerate buildcache key index "
+                             "after publishing key(s)"))
+    publish.add_argument('keys', nargs='*',
+                         help='the keys to publish; '
+                              'all public keys if unspecified')
+    publish.set_defaults(func=gpg_publish)
+

 def gpg_create(args):
     """create a new key"""
     if args.export:
-        old_sec_keys = Gpg.signing_keys()
-    Gpg.create(name=args.name, email=args.email,
-               comment=args.comment, expires=args.expires)
+        old_sec_keys = spack.util.gpg.signing_keys()
+    spack.util.gpg.create(name=args.name, email=args.email,
+                          comment=args.comment, expires=args.expires)
     if args.export:
-        new_sec_keys = set(Gpg.signing_keys())
+        new_sec_keys = set(spack.util.gpg.signing_keys())
         new_keys = new_sec_keys.difference(old_sec_keys)
-        Gpg.export_keys(args.export, *new_keys)
+        spack.util.gpg.export_keys(args.export, *new_keys)


 def gpg_export(args):
     """export a secret key"""
     keys = args.keys
     if not keys:
-        keys = Gpg.signing_keys()
-    Gpg.export_keys(args.location, *keys)
+        keys = spack.util.gpg.signing_keys()
+    spack.util.gpg.export_keys(args.location, *keys)


 def gpg_list(args):
     """list keys available in the keyring"""
-    Gpg.list(args.trusted, args.signing)
+    spack.util.gpg.list(args.trusted, args.signing)


 def gpg_sign(args):
     """sign a package"""
     key = args.key
     if key is None:
-        keys = Gpg.signing_keys()
+        keys = spack.util.gpg.signing_keys()
         if len(keys) == 1:
             key = keys[0]
         elif not keys:
@@ -123,12 +151,12 @@ def gpg_sign(args):
     if not output:
         output = args.spec[0] + '.asc'
     # TODO: Support the package format Spack creates.
-    Gpg.sign(key, ' '.join(args.spec), output, args.clearsign)
+    spack.util.gpg.sign(key, ' '.join(args.spec), output, args.clearsign)


 def gpg_trust(args):
     """add a key to the keyring"""
-    Gpg.trust(args.keyfile)
+    spack.util.gpg.trust(args.keyfile)


 def gpg_init(args):
@@ -141,12 +169,12 @@ def gpg_init(args):
     for filename in filenames:
         if not filename.endswith('.key'):
             continue
-        Gpg.trust(os.path.join(root, filename))
+        spack.util.gpg.trust(os.path.join(root, filename))


 def gpg_untrust(args):
     """remove a key from the keyring"""
-    Gpg.untrust(args.signing, *args.keys)
+    spack.util.gpg.untrust(args.signing, *args.keys)


 def gpg_verify(args):
@@ -155,7 +183,17 @@ def gpg_verify(args):
     signature = args.signature
     if signature is None:
         signature = args.spec[0] + '.asc'
-    Gpg.verify(signature, ' '.join(args.spec))
+    spack.util.gpg.verify(signature, ' '.join(args.spec))
+
+
+def gpg_publish(args):
+    """publish public keys to a build cache"""
+
+    # TODO(opadron): switch to using the mirror args once #17547 is merged
+    mirror = args.directory
+
+    spack.binary_distribution.push_keys(
+        mirror, keys=args.keys, regenerate_index=args.rebuild_index)


 def gpg(parser, args):
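A sketch of how the new subcommand is expected to be exercised; the mirror path is a placeholder, and per the TODO above only the --directory form is wired up until #17547 lands:

```python
# Command-line usage (shell):
#
#   spack gpg publish --directory /path/to/mirror --rebuild-index [KEY ...]
#
# which is equivalent to calling the underlying API directly:
import spack.binary_distribution

spack.binary_distribution.push_keys(
    '/path/to/mirror',   # placeholder directory
    keys=[],             # an empty list means "all public keys"
    regenerate_index=True)
```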
@@ -19,8 +19,10 @@
 import spack.cmd.install as install
 import spack.cmd.uninstall as uninstall
 import spack.cmd.mirror as mirror
-from spack.spec import Spec
+import spack.mirror
+import spack.util.gpg
 from spack.directory_layout import YamlDirectoryLayout
+from spack.spec import Spec


 def_install_path_scheme = '${ARCHITECTURE}/${COMPILERNAME}-${COMPILERVER}/${PACKAGE}-${VERSION}-${HASH}'  # noqa: E501
@@ -469,3 +471,40 @@ def test_relative_rpaths_install_nondefault(tmpdir,
     margs = mparser.parse_args(
         ['rm', '--scope', 'site', 'test-mirror-rel'])
     mirror.mirror(mparser, margs)
+
+
+def test_push_and_fetch_keys(mock_gnupghome):
+    testpath = str(mock_gnupghome)
+
+    mirror = os.path.join(testpath, 'mirror')
+    mirrors = {'test-mirror': mirror}
+    mirrors = spack.mirror.MirrorCollection(mirrors)
+    mirror = spack.mirror.Mirror('file://' + mirror)
+
+    gpg_dir1 = os.path.join(testpath, 'gpg1')
+    gpg_dir2 = os.path.join(testpath, 'gpg2')
+
+    # dir 1: create a new key, record its fingerprint, and push it to a new
+    #        mirror
+    with spack.util.gpg.gnupg_home_override(gpg_dir1):
+        spack.util.gpg.create(name='test-key',
+                              email='fake@test.key',
+                              expires='0',
+                              comment=None)
+
+        keys = spack.util.gpg.public_keys()
+        assert len(keys) == 1
+        fpr = keys[0]
+
+        bindist.push_keys(mirror, keys=[fpr], regenerate_index=True)
+
+    # dir 2: import the key from the mirror, and confirm that its fingerprint
+    #        matches the one created above
+    with spack.util.gpg.gnupg_home_override(gpg_dir2):
+        assert len(spack.util.gpg.public_keys()) == 0
+
+        bindist.get_keys(mirrors=mirrors, install=True, trust=True, force=True)
+
+        new_keys = spack.util.gpg.public_keys()
+        assert len(new_keys) == 1
+        assert new_keys[0] == fpr
@@ -53,15 +53,8 @@ def test_urlencode_string():
     assert(s_enc == 'Spack+Test+Project')


-def has_gpg():
-    try:
-        gpg = spack.util.gpg.Gpg.gpg()
-    except spack.util.gpg.SpackGPGError:
-        gpg = None
-    return bool(gpg)
-
-
-@pytest.mark.skipif(not has_gpg(), reason='This test requires gpg')
+@pytest.mark.skipif(not spack.util.gpg.has_gpg(),
+                    reason='This test requires gpg')
 def test_import_signing_key(mock_gnupghome):
     signing_key_dir = spack_paths.mock_gpg_keys_path
     signing_key_path = os.path.join(signing_key_dir, 'package-signing-key')
@@ -35,14 +35,6 @@
 git = exe.which('git', required=True)


-def has_gpg():
-    try:
-        gpg = spack.util.gpg.Gpg.gpg()
-    except spack.util.gpg.SpackGPGError:
-        gpg = None
-    return bool(gpg)
-
-
 @pytest.fixture()
 def env_deactivate():
     yield
@@ -690,7 +682,8 @@ def test_ci_rebuild_basic(tmpdir, mutable_mock_env_path, env_deactivate,


 @pytest.mark.disable_clean_stage_check
-@pytest.mark.skipif(not has_gpg(), reason='This test requires gpg')
+@pytest.mark.skipif(not spack.util.gpg.has_gpg(),
+                    reason='This test requires gpg')
 def test_push_mirror_contents(tmpdir, mutable_mock_env_path, env_deactivate,
                               install_mockery, mock_packages, mock_fetch,
                               mock_stage, mock_gnupghome):
@@ -29,39 +29,35 @@
     ('gpg2', 'gpg (GnuPG) 2.2.19'),  # gpg2 command
 ])
 def test_find_gpg(cmd_name, version, tmpdir, mock_gnupghome, monkeypatch):
+    TEMPLATE = ('#!/bin/sh\n'
+                'echo "{version}"\n')
+
     with tmpdir.as_cwd():
-        with open(cmd_name, 'w') as f:
-            f.write("""\
-#!/bin/sh
-echo "{version}"
-""".format(version=version))
-        fs.set_executable(cmd_name)
+        for fname in (cmd_name, 'gpgconf'):
+            with open(fname, 'w') as f:
+                f.write(TEMPLATE.format(version=version))
+            fs.set_executable(fname)

     monkeypatch.setitem(os.environ, "PATH", str(tmpdir))
     if version == 'undetectable' or version.endswith('1.3.4'):
         with pytest.raises(spack.util.gpg.SpackGPGError):
-            exe = spack.util.gpg.Gpg.gpg()
+            spack.util.gpg.ensure_gpg(reevaluate=True)
     else:
-        exe = spack.util.gpg.Gpg.gpg()
-        assert isinstance(exe, spack.util.executable.Executable)
+        spack.util.gpg.ensure_gpg(reevaluate=True)
+        gpg_exe = spack.util.gpg.get_global_gpg_instance().gpg_exe
+        assert isinstance(gpg_exe, spack.util.executable.Executable)
+        gpgconf_exe = spack.util.gpg.get_global_gpg_instance().gpgconf_exe
+        assert isinstance(gpgconf_exe, spack.util.executable.Executable)


 def test_no_gpg_in_path(tmpdir, mock_gnupghome, monkeypatch):
     monkeypatch.setitem(os.environ, "PATH", str(tmpdir))
     with pytest.raises(spack.util.gpg.SpackGPGError):
-        spack.util.gpg.Gpg.gpg()
-
-
-def has_gpg():
-    try:
-        gpg = spack.util.gpg.Gpg.gpg()
-    except spack.util.gpg.SpackGPGError:
-        gpg = None
-    return bool(gpg)
+        spack.util.gpg.ensure_gpg(reevaluate=True)


 @pytest.mark.maybeslow
-@pytest.mark.skipif(not has_gpg(),
+@pytest.mark.skipif(not spack.util.gpg.has_gpg(),
                     reason='These tests require gnupg2')
 def test_gpg(tmpdir, mock_gnupghome):
     # Verify a file with an empty keyring.
@@ -103,7 +99,7 @@ def test_gpg(tmpdir, mock_gnupghome):
         '--export', str(keypath),
         'Spack testing 1',
         'spack@googlegroups.com')
-    keyfp = spack.util.gpg.Gpg.signing_keys()[0]
+    keyfp = spack.util.gpg.signing_keys()[0]

     # List the keys.
     # TODO: Test the output here.
@@ -781,10 +781,8 @@ def mock_gnupghome(monkeypatch):
     # This comes up because tmp paths on macOS are already long-ish, and
     # pytest makes them longer.
     short_name_tmpdir = tempfile.mkdtemp()
-    monkeypatch.setattr(spack.util.gpg, 'GNUPGHOME', short_name_tmpdir)
-    monkeypatch.setattr(spack.util.gpg.Gpg, '_gpg', None)
-
-    yield
+    with spack.util.gpg.gnupg_home_override(short_name_tmpdir):
+        yield short_name_tmpdir

     # clean up, since we are doing this manually
     shutil.rmtree(short_name_tmpdir)
@@ -20,6 +20,7 @@
 import spack.store
 import spack.binary_distribution as bindist
 import spack.cmd.buildcache as buildcache
+import spack.util.gpg
 from spack.spec import Spec
 from spack.paths import mock_gpg_keys_path
 from spack.fetch_strategy import URLFetchStrategy, FetchStrategyComposite
@@ -31,14 +32,6 @@
 from spack.relocate import file_is_relocatable


-def has_gpg():
-    try:
-        gpg = spack.util.gpg.Gpg.gpg()
-    except spack.util.gpg.SpackGPGError:
-        gpg = None
-    return bool(gpg)
-
-
 def fake_fetchify(url, pkg):
     """Fake the URL for a package so it downloads from a file."""
     fetcher = FetchStrategyComposite()
@@ -46,7 +39,8 @@ def fake_fetchify(url, pkg):
     pkg.fetcher = fetcher


-@pytest.mark.skipif(not has_gpg(), reason='This test requires gpg')
+@pytest.mark.skipif(not spack.util.gpg.has_gpg(),
+                    reason='This test requires gpg')
 @pytest.mark.usefixtures('install_mockery', 'mock_gnupghome')
 def test_buildcache(mock_archive, tmpdir):
     # tweak patchelf to only do a download
@@ -101,12 +95,9 @@ def test_buildcache(mock_archive, tmpdir):

     create_args = ['create', '-a', '-f', '-d', mirror_path, pkghash]
     # Create a private key to sign package with if gpg2 available
-    if spack.util.gpg.Gpg.gpg():
-        spack.util.gpg.Gpg.create(name='test key 1', expires='0',
-                                  email='spack@googlegroups.com',
-                                  comment='Spack test key')
-    else:
-        create_args.insert(create_args.index('-a'), '-u')
+    spack.util.gpg.create(name='test key 1', expires='0',
+                          email='spack@googlegroups.com',
+                          comment='Spack test key')

     create_args.insert(create_args.index('-a'), '--rebuild-index')

@@ -119,8 +110,6 @@ def test_buildcache(mock_archive, tmpdir):
     pkg.do_uninstall(force=True)

     install_args = ['install', '-a', '-f', pkghash]
-    if not spack.util.gpg.Gpg.gpg():
-        install_args.insert(install_args.index('-a'), '-u')
     args = parser.parse_args(install_args)
     # Test install
     buildcache.buildcache(parser, args)
@@ -144,8 +133,6 @@ def test_buildcache(mock_archive, tmpdir):
     # Uninstall the package
     pkg.do_uninstall(force=True)

-    if not spack.util.gpg.Gpg.gpg():
-        install_args.insert(install_args.index('-a'), '-u')
     args = parser.parse_args(install_args)
     buildcache.buildcache(parser, args)

@@ -839,7 +839,8 @@ def test_get_stage_root_in_spack(self, clear_stage_root):
         assert 'spack' in path.split(os.path.sep)

         # Make sure cached stage path value was changed appropriately
-        assert spack.stage._stage_root == test_path
+        assert spack.stage._stage_root in (
+            test_path, os.path.join(test_path, getpass.getuser()))

         # Make sure the directory exists
         assert os.path.isdir(spack.stage._stage_root)
@@ -3,7 +3,10 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import spack.util.gpg as gpg
+import os
+
+import pytest
+
+import spack.util.gpg


 def test_parse_gpg_output_case_one():
|
||||||
uid:::::::AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA::Joe (Test) <j.s@s.com>:
|
uid:::::::AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA::Joe (Test) <j.s@s.com>:
|
||||||
ssb::2048:1:AAAAAAAAAAAAAAAA:AAAAAAAAAA::::::::::
|
ssb::2048:1:AAAAAAAAAAAAAAAA:AAAAAAAAAA::::::::::
|
||||||
"""
|
"""
|
||||||
keys = gpg.parse_keys_output(output)
|
keys = spack.util.gpg.parse_secret_keys_output(output)
|
||||||
|
|
||||||
assert len(keys) == 2
|
assert len(keys) == 2
|
||||||
assert keys[0] == 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
|
assert keys[0] == 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
|
||||||
|
@@ -34,7 +37,7 @@ def test_parse_gpg_output_case_two():
 fpr:::::::::YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY:
 grp:::::::::AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA:
 """
-    keys = gpg.parse_keys_output(output)
+    keys = spack.util.gpg.parse_secret_keys_output(output)

     assert len(keys) == 1
     assert keys[0] == 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
@@ -53,8 +56,29 @@ def test_parse_gpg_output_case_three():
 ssb::2048:1:AAAAAAAAAAAAAAAA:AAAAAAAAAA::::::::::
 fpr:::::::::ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ:"""

-    keys = gpg.parse_keys_output(output)
+    keys = spack.util.gpg.parse_secret_keys_output(output)

     assert len(keys) == 2
     assert keys[0] == 'WWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWW'
     assert keys[1] == 'YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY'
+
+
+@pytest.mark.skipif(not spack.util.gpg.GpgConstants.user_run_dir,
+                    reason='This test requires /var/run/user/$(id -u)')
+def test_really_long_gnupg_home_dir(tmpdir):
+    N = 960
+
+    tdir = str(tmpdir)
+    while len(tdir) < N:
+        tdir = os.path.join(tdir, 'filler')
+
+    tdir = tdir[:N].rstrip(os.sep)
+    tdir += '0' * (N - len(tdir))
+
+    with spack.util.gpg.gnupg_home_override(tdir):
+        spack.util.gpg.create(name='Spack testing 1',
+                              email='test@spack.io',
+                              comment='Spack testing key',
+                              expires='0')
+
+        spack.util.gpg.list(True, True)
@@ -167,3 +167,31 @@ def test_get_header():
     # If there isn't even a fuzzy match, raise KeyError
     with pytest.raises(KeyError):
         spack.util.web.get_header(headers, 'ContentLength')
+
+
+def test_list_url(tmpdir):
+    testpath = str(tmpdir)
+
+    os.mkdir(os.path.join(testpath, 'dir'))
+
+    with open(os.path.join(testpath, 'file-0.txt'), 'w'):
+        pass
+    with open(os.path.join(testpath, 'file-1.txt'), 'w'):
+        pass
+    with open(os.path.join(testpath, 'file-2.txt'), 'w'):
+        pass
+
+    with open(os.path.join(testpath, 'dir', 'another-file.txt'), 'w'):
+        pass
+
+    list_url = lambda recursive: list(sorted(
+        spack.util.web.list_url(testpath, recursive=recursive)))
+
+    assert list_url(False) == ['file-0.txt',
+                               'file-1.txt',
+                               'file-2.txt']
+
+    assert list_url(True) == ['dir/another-file.txt',
+                              'file-0.txt',
+                              'file-1.txt',
+                              'file-2.txt']
@@ -3,20 +3,73 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

+import contextlib
+import errno
+import functools
 import os
 import re

+import llnl.util.lang
+
 import spack.error
 import spack.paths
+import spack.util.executable
 import spack.version
-from spack.util.executable import which

-_gnupg_version_re = r"^gpg \(GnuPG\) (.*)$"
-
-GNUPGHOME = os.getenv('SPACK_GNUPGHOME', spack.paths.gpg_path)
+_gnupg_version_re = r"^gpg(conf)? \(GnuPG\) (.*)$"

+_gnupg_home_override = None
+_global_gpg_instance = None

-def parse_keys_output(output):
+
+def get_gnupg_home(gnupg_home=None):
+    """Returns the directory that should be used as the GNUPGHOME environment
+    variable when calling gpg.
+
+    If a [gnupg_home] is passed directly (and not None), that value will be
+    used.
+
+    Otherwise, if there is an override set (and it is not None), then that
+    value will be used.
+
+    Otherwise, if the environment variable "SPACK_GNUPGHOME" is set, then that
+    value will be used.
+
+    Otherwise, the default gpg path for Spack will be used.
+
+    See also: gnupg_home_override()
+    """
+    return (gnupg_home or
+            _gnupg_home_override or
+            os.getenv('SPACK_GNUPGHOME') or
+            spack.paths.gpg_path)
+
+
+@contextlib.contextmanager
+def gnupg_home_override(new_gnupg_home):
+    global _gnupg_home_override
+    global _global_gpg_instance
+
+    old_gnupg_home_override = _gnupg_home_override
+    old_global_gpg_instance = _global_gpg_instance
+
+    _gnupg_home_override = new_gnupg_home
+    _global_gpg_instance = None
+
+    yield
+
+    _gnupg_home_override = old_gnupg_home_override
+    _global_gpg_instance = old_global_gpg_instance
+
+
+def get_global_gpg_instance():
+    global _global_gpg_instance
+    if _global_gpg_instance is None:
+        _global_gpg_instance = Gpg()
+    return _global_gpg_instance
+
+
+def parse_secret_keys_output(output):
     keys = []
     found_sec = False
     for line in output.split('\n'):
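A sketch of the intended use of gnupg_home_override(); the directory is a placeholder, and the pattern mirrors how the new tests use it:

```python
import spack.util.gpg

# Temporarily point all gpg operations at an alternate keyring directory.
# The cached global Gpg instance is dropped on entry and restored on exit.
with spack.util.gpg.gnupg_home_override('/tmp/alt-gnupg-home'):
    spack.util.gpg.create(name='throwaway', email='x@example.com',
                          comment=None, expires='0')
    print(spack.util.gpg.public_keys())
```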
@@ -31,43 +84,230 @@ def parse_keys_output(output):
     return keys


-class Gpg(object):
-    _gpg = None
+def parse_public_keys_output(output):
+    keys = []
+    found_pub = False
+    for line in output.split('\n'):
+        if found_pub:
+            if line.startswith('fpr'):
+                keys.append(line.split(':')[9])
+                found_pub = False
+            elif line.startswith('ssb'):
+                found_pub = False
+        elif line.startswith('pub'):
+            found_pub = True
+    return keys
+
+
+cached_property = getattr(functools, 'cached_property', None)
+
+# If older python version has no cached_property, emulate it here.
+# TODO(opadron): maybe this shim should be moved to llnl.util.lang?
+if not cached_property:
+    def cached_property(*args, **kwargs):
+        result = property(llnl.util.lang.memoized(*args, **kwargs))
+        attr = result.fget.__name__
+
+        @result.deleter
+        def result(self):
+            getattr(type(self), attr).fget.cache.pop((self,), None)
+
+        return result
+
+
+class GpgConstants(object):
+    @cached_property
+    def target_version(self):
+        return spack.version.Version('2')
+
+    @cached_property
+    def gpgconf_string(self):
+        exe_str = spack.util.executable.which_string(
+            'gpgconf', 'gpg2conf', 'gpgconf2')
+
+        no_gpgconf_msg = (
+            'Spack requires gpgconf version >= 2\n'
+            '  To install a suitable version using Spack, run\n'
+            '    spack install gnupg@2:\n'
+            '  and load it by running\n'
+            '    spack load gnupg@2:')
+
+        if not exe_str:
+            raise SpackGPGError(no_gpgconf_msg)
+
+        exe = spack.util.executable.Executable(exe_str)
+        output = exe('--version', output=str)
+        match = re.search(_gnupg_version_re, output, re.M)
+
+        if not match:
+            raise SpackGPGError('Could not determine gpgconf version')
+
+        if spack.version.Version(match.group(2)) < self.target_version:
+            raise SpackGPGError(no_gpgconf_msg)
+
+        # ensure that the gpgconf we found can run
+        # "gpgconf --create-socketdir"
+        try:
+            exe('--dry-run', '--create-socketdir')
+        except spack.util.executable.ProcessError:
+            # no dice
+            exe_str = None
+
+        return exe_str
+
+    @cached_property
+    def gpg_string(self):
+        exe_str = spack.util.executable.which_string('gpg2', 'gpg')
+
+        no_gpg_msg = (
+            'Spack requires gpg version >= 2\n'
+            '  To install a suitable version using Spack, run\n'
+            '    spack install gnupg@2:\n'
+            '  and load it by running\n'
+            '    spack load gnupg@2:')
+
+        if not exe_str:
+            raise SpackGPGError(no_gpg_msg)
+
+        exe = spack.util.executable.Executable(exe_str)
+        output = exe('--version', output=str)
+        match = re.search(_gnupg_version_re, output, re.M)
+
+        if not match:
+            raise SpackGPGError('Could not determine gpg version')
+
+        if spack.version.Version(match.group(2)) < self.target_version:
+            raise SpackGPGError(no_gpg_msg)
+
+        return exe_str
+
+    @cached_property
+    def user_run_dir(self):
+        # Try to ensure that (/var)/run/user/$(id -u) exists so that
+        # `gpgconf --create-socketdir` can be run later.
+        #
+        # NOTE(opadron): This action helps prevent a large class of
+        #                "file-name-too-long" errors in gpg.
+
+        try:
+            has_suitable_gpgconf = bool(GpgConstants.gpgconf_string)
+        except SpackGPGError:
+            has_suitable_gpgconf = False
+
+        # If there is no suitable gpgconf, don't even bother trying to
+        # precreate a user run dir.
+        if not has_suitable_gpgconf:
+            return None
+
+        result = None
+        for var_run in ('/run', '/var/run'):
+            if not os.path.exists(var_run):
+                continue
+
+            var_run_user = os.path.join(var_run, 'user')
+            try:
+                if not os.path.exists(var_run_user):
+                    os.mkdir(var_run_user)
+                    os.chmod(var_run_user, 0o777)
+
+                user_dir = os.path.join(var_run_user, str(os.getuid()))
+
+                if not os.path.exists(user_dir):
+                    os.mkdir(user_dir)
+                    os.chmod(user_dir, 0o700)
+
+            # If the above operation fails due to lack of permissions, then
+            # just carry on without running gpgconf and hope for the best.
+            #
+            # NOTE(opadron): Without a dir in which to create a socket for
+            #                IPC, gnupg may fail if GNUPGHOME is set to a
+            #                path that is too long, where "too long" in this
+            #                context is actually quite short; somewhere in
+            #                the neighborhood of more than 100 characters.
+            #
+            # TODO(opadron): Maybe a warning should be printed in this case?
+            except OSError as exc:
+                if exc.errno not in (errno.EPERM, errno.EACCES):
+                    raise
+                user_dir = None
+
+            # return the last iteration that provides a usable user run dir
+            if user_dir is not None:
+                result = user_dir
+
+        return result
+
+    def clear(self):
+        for attr in ('gpgconf_string', 'gpg_string', 'user_run_dir'):
+            try:
+                delattr(self, attr)
+            except AttributeError:
+                pass
+
+
+GpgConstants = GpgConstants()
+
+
+def ensure_gpg(reevaluate=False):
+    if reevaluate:
+        GpgConstants.clear()
+
+    if GpgConstants.user_run_dir is not None:
+        GpgConstants.gpgconf_string
+
+    GpgConstants.gpg_string
+    return True
+
+
+def has_gpg(*args, **kwargs):
+    try:
+        return ensure_gpg(*args, **kwargs)
+    except SpackGPGError:
+        return False

-    @staticmethod
-    def gpg():
-        # TODO: Support loading up a GPG environment from a built gpg.
-        if Gpg._gpg is None:
-            gpg = which('gpg2', 'gpg')
-
-            if not gpg:
-                raise SpackGPGError("Spack requires gpg version 2 or higher.")
-
-            # ensure that the version is actually >= 2 if we find 'gpg'
-            if gpg.name == 'gpg':
-                output = gpg('--version', output=str)
-                match = re.search(_gnupg_version_re, output, re.M)
-
-                if not match:
-                    raise SpackGPGError("Couldn't determine version of gpg")
-
-                v = spack.version.Version(match.group(1))
-                if v < spack.version.Version('2'):
-                    raise SpackGPGError("Spack requires GPG version >= 2")
-
-            # make the GNU PG path if we need to
-            # TODO: does this need to be in the spack directory?
-            # we should probably just use GPG's regular conventions
-            if not os.path.exists(GNUPGHOME):
-                os.makedirs(GNUPGHOME)
-                os.chmod(GNUPGHOME, 0o700)
-            gpg.add_default_env('GNUPGHOME', GNUPGHOME)
-
-            Gpg._gpg = gpg
-        return Gpg._gpg
-
-    @classmethod
-    def create(cls, **kwargs):
+
+# NOTE(opadron): When adding methods to this class, consider adding
+#                convenience wrapper functions further down in this file.
+class Gpg(object):
+    def __init__(self, gnupg_home=None):
+        self.gnupg_home = get_gnupg_home(gnupg_home)
+
+    @cached_property
+    def prep(self):
+        # Make sure that suitable versions of gpgconf and gpg are available
+        ensure_gpg()
+
+        # Make sure that the GNUPGHOME exists
+        if not os.path.exists(self.gnupg_home):
+            os.makedirs(self.gnupg_home)
+            os.chmod(self.gnupg_home, 0o700)
+
+        if not os.path.isdir(self.gnupg_home):
+            raise SpackGPGError(
+                'GNUPGHOME "{0}" exists and is not a directory'.format(
+                    self.gnupg_home))
+
+        if GpgConstants.user_run_dir is not None:
+            self.gpgconf_exe('--create-socketdir')
+
+        return True
+
+    @cached_property
+    def gpgconf_exe(self):
+        exe = spack.util.executable.Executable(GpgConstants.gpgconf_string)
+        exe.add_default_env('GNUPGHOME', self.gnupg_home)
+        return exe
+
+    @cached_property
+    def gpg_exe(self):
+        exe = spack.util.executable.Executable(GpgConstants.gpg_string)
+        exe.add_default_env('GNUPGHOME', self.gnupg_home)
+        return exe
+
+    def __call__(self, *args, **kwargs):
+        if self.prep:
+            return self.gpg_exe(*args, **kwargs)
+
+    def create(self, **kwargs):
         r, w = os.pipe()
         r = os.fdopen(r, 'r')
         w = os.fdopen(w, 'w')
@@ -83,64 +323,108 @@ def create(cls, **kwargs):
 %%commit
 ''' % kwargs)
         w.close()
-        cls.gpg()('--gen-key', '--batch', input=r)
+        self('--gen-key', '--batch', input=r)
         r.close()

-    @classmethod
-    def signing_keys(cls):
-        output = cls.gpg()('--list-secret-keys', '--with-colons',
-                           '--fingerprint', '--fingerprint', output=str)
-        return parse_keys_output(output)
+    def signing_keys(self, *args):
+        output = self('--list-secret-keys', '--with-colons', '--fingerprint',
+                      *args, output=str)
+        return parse_secret_keys_output(output)

-    @classmethod
-    def export_keys(cls, location, *keys):
-        cls.gpg()('--armor', '--export', '--output', location, *keys)
+    def public_keys(self, *args):
+        output = self('--list-public-keys', '--with-colons', '--fingerprint',
+                      *args, output=str)
+        return parse_public_keys_output(output)

-    @classmethod
-    def trust(cls, keyfile):
-        cls.gpg()('--import', keyfile)
+    def export_keys(self, location, *keys):
+        self('--batch', '--yes',
+             '--armor', '--export',
+             '--output', location, *keys)

-    @classmethod
-    def untrust(cls, signing, *keys):
-        args = [
-            '--yes',
-            '--batch',
-        ]
+    def trust(self, keyfile):
+        self('--import', keyfile)
+
+    def untrust(self, signing, *keys):
         if signing:
-            signing_args = args + ['--delete-secret-keys'] + list(keys)
-            cls.gpg()(*signing_args)
-        args.append('--delete-keys')
-        args.extend(keys)
-        cls.gpg()(*args)
+            skeys = self.signing_keys(*keys)
+            self('--batch', '--yes', '--delete-secret-keys', *skeys)

-    @classmethod
-    def sign(cls, key, file, output, clearsign=False):
-        args = [
-            '--armor',
-            '--default-key', key,
-            '--output', output,
-            file,
-        ]
-        if clearsign:
-            args.insert(0, '--clearsign')
-        else:
-            args.insert(0, '--detach-sign')
-        cls.gpg()(*args)
+        pkeys = self.public_keys(*keys)
+        self('--batch', '--yes', '--delete-keys', *pkeys)

-    @classmethod
-    def verify(cls, signature, file, suppress_warnings=False):
-        if suppress_warnings:
-            cls.gpg()('--verify', signature, file, error=str)
-        else:
-            cls.gpg()('--verify', signature, file)
+    def sign(self, key, file, output, clearsign=False):
+        self(('--clearsign' if clearsign else '--detach-sign'),
+             '--armor', '--default-key', key,
+             '--output', output, file)

-    @classmethod
-    def list(cls, trusted, signing):
+    def verify(self, signature, file, suppress_warnings=False):
+        self('--verify', signature, file,
+             **({'error': str} if suppress_warnings else {}))
+
+    def list(self, trusted, signing):
         if trusted:
-            cls.gpg()('--list-public-keys')
+            self('--list-public-keys')
+
         if signing:
-            cls.gpg()('--list-secret-keys')
+            self('--list-secret-keys')


 class SpackGPGError(spack.error.SpackError):
     """Class raised when GPG errors are detected."""
+
+
+# Convenience wrappers for methods of the Gpg class
+
+# __call__ is a bit of a special case, since the Gpg instance is, itself, the
+# "thing" that is being called.
+@functools.wraps(Gpg.__call__)
+def gpg(*args, **kwargs):
+    return get_global_gpg_instance()(*args, **kwargs)
+
+
+gpg.name = 'gpg'
+
+
+@functools.wraps(Gpg.create)
+def create(*args, **kwargs):
+    return get_global_gpg_instance().create(*args, **kwargs)
+
+
+@functools.wraps(Gpg.signing_keys)
+def signing_keys(*args, **kwargs):
+    return get_global_gpg_instance().signing_keys(*args, **kwargs)
+
+
+@functools.wraps(Gpg.public_keys)
+def public_keys(*args, **kwargs):
+    return get_global_gpg_instance().public_keys(*args, **kwargs)
+
+
+@functools.wraps(Gpg.export_keys)
+def export_keys(*args, **kwargs):
+    return get_global_gpg_instance().export_keys(*args, **kwargs)
+
+
+@functools.wraps(Gpg.trust)
+def trust(*args, **kwargs):
+    return get_global_gpg_instance().trust(*args, **kwargs)
+
+
+@functools.wraps(Gpg.untrust)
+def untrust(*args, **kwargs):
+    return get_global_gpg_instance().untrust(*args, **kwargs)
+
+
+@functools.wraps(Gpg.sign)
+def sign(*args, **kwargs):
+    return get_global_gpg_instance().sign(*args, **kwargs)
+
+
+@functools.wraps(Gpg.verify)
+def verify(*args, **kwargs):
+    return get_global_gpg_instance().verify(*args, **kwargs)
+
+
+@functools.wraps(Gpg.list)
+def list(*args, **kwargs):
+    return get_global_gpg_instance().list(*args, **kwargs)
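With these wrappers, callers use module-level functions instead of classmethods; a brief sketch (file names here are placeholders):

```python
import spack.util.gpg

# Old style (removed):  spack.util.gpg.Gpg.signing_keys()
# New style:            spack.util.gpg.signing_keys()
fingerprints = spack.util.gpg.signing_keys()

# Sign and then verify a file with the first available key.
if fingerprints:
    spack.util.gpg.sign(fingerprints[0], 'spec.yaml', 'spec.yaml.asc')
    spack.util.gpg.verify('spec.yaml.asc', 'spec.yaml')
```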
@@ -224,7 +224,7 @@ def url_exists(url):
     try:
         read_from_url(url)
         return True
-    except URLError:
+    except (SpackWebError, URLError):
         return False

@@ -295,15 +295,27 @@ def _iter_s3_prefix(client, url, num_entries=1024):
             break


-def list_url(url):
+def _iter_local_prefix(path):
+    for root, _, files in os.walk(path):
+        for f in files:
+            yield os.path.relpath(os.path.join(root, f), path)
+
+
+def list_url(url, recursive=False):
     url = url_util.parse(url)

     local_path = url_util.local_file_path(url)
     if local_path:
-        return os.listdir(local_path)
+        if recursive:
+            return list(_iter_local_prefix(local_path))
+        return [subpath for subpath in os.listdir(local_path)
+                if os.path.isfile(os.path.join(local_path, subpath))]

     if url.scheme == 's3':
         s3 = s3_util.create_s3_session(url)
+        if recursive:
+            return list(_iter_s3_prefix(s3, url))
+
         return list(set(
             key.split('/', 1)[0]
             for key in _iter_s3_prefix(s3, url)))
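The recursive flag in practice, as a sketch mirroring the new test_list_url above (the prefix path and file names are placeholders):

```python
import spack.util.web as web_util

# Given a prefix containing file-0.txt and dir/another-file.txt:
web_util.list_url('/tmp/prefix', recursive=False)
# -> ['file-0.txt']                          (files directly under the prefix)

web_util.list_url('/tmp/prefix', recursive=True)
# -> ['dir/another-file.txt', 'file-0.txt']  (all files under the prefix)
```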
@@ -906,7 +906,7 @@ _spack_gpg() {
     then
         SPACK_COMPREPLY="-h --help"
     else
-        SPACK_COMPREPLY="verify trust untrust sign create list init export"
+        SPACK_COMPREPLY="verify trust untrust sign create list init export publish"
     fi
 }
@@ -972,6 +972,15 @@ _spack_gpg_export() {
     fi
 }

+_spack_gpg_publish() {
+    if $list_options
+    then
+        SPACK_COMPREPLY="-h --help -d --directory -m --mirror-name --mirror-url --rebuild-index"
+    else
+        _keys
+    fi
+}
+
 _spack_graph() {
     if $list_options
     then