Merge pull request #16 in SCALE/spack from features/mirror to develop
# By Todd Gamblin # Via Todd Gamblin * commit '3c3f272280c530553322142d9d836c91b1b01137': spack mirror now checksums fetched archives. New spack mirror command, configuration. Add more output; don't re-add existing compilers
This commit is contained in:
commit
bb4cbd008d
8 changed files with 227 additions and 66 deletions
|
@ -105,4 +105,4 @@ except SpackError, e:
|
|||
tty.die(e.message)
|
||||
|
||||
except KeyboardInterrupt:
|
||||
tty.die("Got a keyboard interrupt from the user.")
|
||||
tty.die("Keyboard interrupt.")
|
||||
|
|
|
@ -137,24 +137,6 @@
|
|||
#
|
||||
sys_type = None
|
||||
|
||||
#
|
||||
# Places to download tarballs from.
|
||||
#
|
||||
# TODO: move to configuration.
|
||||
#
|
||||
# Examples:
|
||||
#
|
||||
# For a local directory:
|
||||
# mirrors = ['file:///Users/gamblin2/spack-mirror']
|
||||
#
|
||||
# For a website:
|
||||
# mirrors = ['http://spackports.org/spack-mirror/']
|
||||
#
|
||||
# For no mirrors:
|
||||
# mirrors = []
|
||||
#
|
||||
mirrors = []
|
||||
|
||||
#
|
||||
# Extra imports that should be generally usable from package.py files.
|
||||
#
|
||||
|
|
|
@ -60,8 +60,17 @@ def compiler_add(args):
|
|||
if not paths:
|
||||
paths = get_path('PATH')
|
||||
|
||||
compilers = spack.compilers.find_compilers(*args.add_paths)
|
||||
spack.compilers.add_compilers_to_config('user', *compilers)
|
||||
compilers = [c for c in spack.compilers.find_compilers(*args.add_paths)
|
||||
if c.spec not in spack.compilers.all_compilers()]
|
||||
|
||||
if compilers:
|
||||
spack.compilers.add_compilers_to_config('user', *compilers)
|
||||
n = len(compilers)
|
||||
tty.msg("Added %d new compiler%s to %s" % (
|
||||
n, 's' if n > 1 else '', spack.config.get_filename('user')))
|
||||
colify(reversed(sorted(c.spec for c in compilers)), indent=4)
|
||||
else:
|
||||
tty.msg("Found no new compilers")
|
||||
|
||||
|
||||
def compiler_remove(args):
|
||||
|
|
|
@ -25,69 +25,197 @@
|
|||
import os
|
||||
import shutil
|
||||
import argparse
|
||||
from datetime import datetime
|
||||
from contextlib import closing
|
||||
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.tty.colify import colify
|
||||
from llnl.util.filesystem import mkdirp, join_path
|
||||
|
||||
import spack
|
||||
import spack.cmd
|
||||
import spack.config
|
||||
from spack.spec import Spec
|
||||
from spack.error import SpackError
|
||||
from spack.stage import Stage
|
||||
from spack.util.compression import extension
|
||||
|
||||
|
||||
description = "Create a directory full of package tarballs that can be used as a spack mirror."
|
||||
description = "Manage spack mirrors."
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'directory', help="Directory in which to create mirror.")
|
||||
subparser.add_argument(
|
||||
'packages', nargs=argparse.REMAINDER, help="names of packages to put in mirror")
|
||||
'-n', '--no-checksum', action='store_true', dest='no_checksum',
|
||||
help="Do not check fetched packages against checksum")
|
||||
|
||||
sp = subparser.add_subparsers(
|
||||
metavar='SUBCOMMAND', dest='mirror_command')
|
||||
|
||||
create_parser = sp.add_parser('create', help=mirror_create.__doc__)
|
||||
create_parser.add_argument('-d', '--directory', default=None,
|
||||
help="Directory in which to create mirror.")
|
||||
create_parser.add_argument(
|
||||
'specs', nargs=argparse.REMAINDER, help="Specs of packages to put in mirror")
|
||||
create_parser.add_argument(
|
||||
'-f', '--file', help="File with specs of packages to put in mirror.")
|
||||
|
||||
add_parser = sp.add_parser('add', help=mirror_add.__doc__)
|
||||
add_parser.add_argument('name', help="Mnemonic name for mirror.")
|
||||
add_parser.add_argument(
|
||||
'url', help="URL of mirror directory created by 'spack mirror create'.")
|
||||
|
||||
remove_parser = sp.add_parser('remove', help=mirror_remove.__doc__)
|
||||
remove_parser.add_argument('name')
|
||||
|
||||
list_parser = sp.add_parser('list', help=mirror_list.__doc__)
|
||||
|
||||
|
||||
def mirror(parser, args):
|
||||
if not args.packages:
|
||||
args.packages = [p for p in spack.db.all_package_names()]
|
||||
def mirror_add(args):
    """Add a mirror to Spack."""
    # Persist the mirror as a named section in the user-level config file.
    user_config = spack.config.get_config('user')
    user_config.set_value('mirror', args.name, 'url', args.url)
    user_config.write()
|
||||
|
||||
|
||||
def mirror_remove(args):
    """Remove a mirror by name."""
    user_config = spack.config.get_config('user')
    mirror_name = args.name

    # Refuse to touch the config if the named mirror isn't actually there.
    if not user_config.has_named_section('mirror', mirror_name):
        tty.die("No such mirror: %s" % mirror_name)
    user_config.remove_named_section('mirror', mirror_name)
    user_config.write()
|
||||
|
||||
|
||||
def mirror_list(args):
|
||||
"""Print out available mirrors to the console."""
|
||||
config = spack.config.get_config()
|
||||
sec_names = config.get_section_names('mirror')
|
||||
|
||||
if not sec_names:
|
||||
tty.msg("No mirrors configured.")
|
||||
return
|
||||
|
||||
max_len = max(len(s) for s in sec_names)
|
||||
fmt = "%%-%ds%%s" % (max_len + 4)
|
||||
|
||||
for name in sec_names:
|
||||
val = config.get_value('mirror', name, 'url')
|
||||
print fmt % (name, val)
|
||||
|
||||
|
||||
def mirror_create(args):
|
||||
"""Create a directory to be used as a spack mirror, and fill it with
|
||||
package archives."""
|
||||
# try to parse specs from the command line first.
|
||||
args.specs = spack.cmd.parse_specs(args.specs)
|
||||
|
||||
# If there is a file, parse each line as a spec and add it to the list.
|
||||
if args.file:
|
||||
with closing(open(args.file, "r")) as stream:
|
||||
for i, string in enumerate(stream):
|
||||
try:
|
||||
s = Spec(string)
|
||||
s.package
|
||||
args.specs.append(s)
|
||||
except SpackError, e:
|
||||
tty.die("Parse error in %s, line %d:" % (args.file, i+1),
|
||||
">>> " + string, str(e))
|
||||
|
||||
if not args.specs:
|
||||
args.specs = spack.db.all_package_names()
|
||||
|
||||
# Default name for directory is spack-mirror-<DATESTAMP>
|
||||
if not args.directory:
|
||||
timestamp = datetime.now().strftime("%Y-%m-%d")
|
||||
args.directory = 'spack-mirror-' + timestamp
|
||||
|
||||
# Make sure nothing is in the way.
|
||||
if os.path.isfile(args.directory):
|
||||
tty.error("%s already exists and is a file." % args.directory)
|
||||
|
||||
# Create a directory if none exists
|
||||
if not os.path.isdir(args.directory):
|
||||
mkdirp(args.directory)
|
||||
tty.msg("Created new mirror in %s" % args.directory)
|
||||
else:
|
||||
tty.msg("Adding to existing mirror in %s" % args.directory)
|
||||
|
||||
# save working directory
|
||||
# Things to keep track of while parsing specs.
|
||||
working_dir = os.getcwd()
|
||||
num_mirrored = 0
|
||||
num_error = 0
|
||||
|
||||
# Iterate through packages and download all the safe tarballs for each of them
|
||||
for pkg_name in args.packages:
|
||||
pkg = spack.db.get(pkg_name)
|
||||
for spec in args.specs:
|
||||
pkg = spec.package
|
||||
|
||||
# Skip any package that has no checksummed versions.
|
||||
if not pkg.versions:
|
||||
tty.msg("No safe (checksummed) versions for package %s. Skipping."
|
||||
% pkg_name)
|
||||
tty.msg("No safe (checksummed) versions for package %s."
|
||||
% pkg.name)
|
||||
continue
|
||||
|
||||
# create a subdir for the current package.
|
||||
pkg_path = join_path(args.directory, pkg_name)
|
||||
pkg_path = join_path(args.directory, pkg.name)
|
||||
mkdirp(pkg_path)
|
||||
|
||||
# Download all the tarballs using Stages, then move them into place
|
||||
for version in pkg.versions:
|
||||
# Skip versions that don't match the spec
|
||||
vspec = Spec('%s@%s' % (pkg.name, version))
|
||||
if not vspec.satisfies(spec):
|
||||
continue
|
||||
|
||||
mirror_path = "%s/%s-%s.%s" % (
|
||||
pkg.name, pkg.name, version, extension(pkg.url))
|
||||
|
||||
os.chdir(working_dir)
|
||||
mirror_file = join_path(args.directory, mirror_path)
|
||||
if os.path.exists(mirror_file):
|
||||
tty.msg("Already fetched %s." % mirror_file)
|
||||
num_mirrored += 1
|
||||
continue
|
||||
|
||||
# Get the URL for the version and set up a stage to download it.
|
||||
url = pkg.url_for_version(version)
|
||||
stage = Stage(url)
|
||||
try:
|
||||
# fetch changes directory into the stage
|
||||
stage.fetch()
|
||||
basename = os.path.basename(stage.archive_file)
|
||||
final_dst = join_path(pkg_path, basename)
|
||||
|
||||
if not args.no_checksum and version in pkg.versions:
|
||||
digest = pkg.versions[version]
|
||||
stage.check(digest)
|
||||
tty.msg("Checksum passed for %s@%s" % (pkg.name, version))
|
||||
|
||||
# change back and move the new archive into place.
|
||||
os.chdir(working_dir)
|
||||
shutil.move(stage.archive_file, final_dst)
|
||||
tty.msg("Added %s to mirror" % final_dst)
|
||||
shutil.move(stage.archive_file, mirror_file)
|
||||
tty.msg("Added %s to mirror" % mirror_file)
|
||||
num_mirrored += 1
|
||||
|
||||
except Exception, e:
|
||||
tty.warn("Error while fetching %s. Skipping." % url, e.message)
|
||||
tty.warn("Error while fetching %s." % url, e.message)
|
||||
num_error += 1
|
||||
|
||||
finally:
|
||||
stage.destroy()
|
||||
|
||||
# Success!
|
||||
tty.msg("Created Spack mirror in %s" % args.directory)
|
||||
# If nothing happened, try to say why.
|
||||
if not num_mirrored:
|
||||
if num_error:
|
||||
tty.error("No packages added to mirror.",
|
||||
"All packages failed to fetch.")
|
||||
else:
|
||||
tty.error("No packages added to mirror. No versions matched specs:")
|
||||
colify(args.specs, indent=4)
|
||||
|
||||
|
||||
def mirror(parser, args):
    """Entry point: dispatch to the handler named by args.mirror_command."""
    subcommands = {
        'create': mirror_create,
        'add': mirror_add,
        'remove': mirror_remove,
        'list': mirror_list,
    }
    subcommands[args.mirror_command](args)
|
||||
|
|
|
@ -225,8 +225,7 @@ class SpackConfigParser(cp.RawConfigParser):
|
|||
OPTCRE_NV = re.compile(r'\s*' + cp.RawConfigParser.OPTCRE_NV.pattern)
|
||||
|
||||
def __init__(self, file_or_files):
|
||||
cp.RawConfigParser.__init__(
|
||||
self, dict_type=OrderedDict, allow_no_value=True)
|
||||
cp.RawConfigParser.__init__(self, dict_type=OrderedDict)
|
||||
|
||||
if isinstance(file_or_files, basestring):
|
||||
self.read([file_or_files])
|
||||
|
@ -286,6 +285,16 @@ def has_value(self, section, name, option):
|
|||
return self.has_option(sn, option)
|
||||
|
||||
|
||||
def has_named_section(self, section, name):
|
||||
sn = _make_section_name(section, name)
|
||||
return self.has_section(sn)
|
||||
|
||||
|
||||
def remove_named_section(self, section, name):
|
||||
sn = _make_section_name(section, name)
|
||||
self.remove_section(sn)
|
||||
|
||||
|
||||
def get_section_names(self, sectype):
|
||||
"""Get all named sections with the specified type.
|
||||
A named section looks like this:
|
||||
|
|
|
@ -50,11 +50,10 @@
|
|||
import spack.error
|
||||
import spack.build_environment as build_env
|
||||
import spack.url as url
|
||||
import spack.util.crypto as crypto
|
||||
from spack.version import *
|
||||
from spack.stage import Stage
|
||||
from spack.util.web import get_pages
|
||||
from spack.util.compression import allowed_archive
|
||||
from spack.util.compression import allowed_archive, extension
|
||||
|
||||
"""Allowed URL schemes for spack packages."""
|
||||
_ALLOWED_URL_SCHEMES = ["http", "https", "ftp", "file"]
|
||||
|
@ -399,7 +398,9 @@ def stage(self):
|
|||
raise ValueError("Can only get a stage for a concrete package.")
|
||||
|
||||
if self._stage is None:
|
||||
mirror_path = "%s/%s" % (self.name, os.path.basename(self.url))
|
||||
# TODO: move this logic into a mirror module.
|
||||
mirror_path = "%s/%s" % (self.name, "%s-%s.%s" % (
|
||||
self.name, self.version, extension(self.url)))
|
||||
self._stage = Stage(
|
||||
self.url, mirror_path=mirror_path, name=self.spec.short_spec)
|
||||
return self._stage
|
||||
|
@ -537,7 +538,7 @@ def do_fetch(self):
|
|||
raise ValueError("Can only fetch concrete packages.")
|
||||
|
||||
if spack.do_checksum and not self.version in self.versions:
|
||||
raise ChecksumError(
|
||||
raise FetchError(
|
||||
"Cannot fetch %s safely; there is no checksum on file for version %s."
|
||||
% (self.name, self.version),
|
||||
"Add a checksum to the package file, or use --no-checksum to "
|
||||
|
@ -547,13 +548,8 @@ def do_fetch(self):
|
|||
|
||||
if spack.do_checksum and self.version in self.versions:
|
||||
digest = self.versions[self.version]
|
||||
checker = crypto.Checker(digest)
|
||||
if checker.check(self.stage.archive_file):
|
||||
tty.msg("Checksum passed for %s" % self.name)
|
||||
else:
|
||||
raise ChecksumError(
|
||||
"%s checksum failed for %s." % (checker.hash_name, self.name),
|
||||
"Expected %s but got %s." % (digest, checker.sum))
|
||||
self.stage.check(digest)
|
||||
tty.msg("Checksum passed for %s@%s" % (self.name, self.version))
|
||||
|
||||
|
||||
def do_stage(self):
|
||||
|
@ -866,12 +862,6 @@ def __init__(self, message, long_msg=None):
|
|||
super(FetchError, self).__init__(message, long_msg)
|
||||
|
||||
|
||||
class ChecksumError(FetchError):
|
||||
"""Raised when archive fails to checksum."""
|
||||
def __init__(self, message, long_msg):
|
||||
super(ChecksumError, self).__init__(message, long_msg)
|
||||
|
||||
|
||||
class InstallError(spack.error.SpackError):
|
||||
"""Raised when something goes wrong during install or uninstall."""
|
||||
def __init__(self, message, long_msg=None):
|
||||
|
|
|
@ -31,9 +31,12 @@
|
|||
from llnl.util.filesystem import *
|
||||
|
||||
import spack
|
||||
import spack.error as serr
|
||||
import spack.config
|
||||
import spack.error
|
||||
import spack.util.crypto as crypto
|
||||
from spack.util.compression import decompressor_for
|
||||
|
||||
|
||||
STAGE_PREFIX = 'spack-stage-'
|
||||
|
||||
|
||||
|
@ -185,9 +188,13 @@ def _setup(self):
|
|||
@property
|
||||
def archive_file(self):
|
||||
"""Path to the source archive within this stage directory."""
|
||||
path = os.path.join(self.path, os.path.basename(self.url))
|
||||
if os.path.exists(path):
|
||||
return path
|
||||
paths = [os.path.join(self.path, os.path.basename(self.url))]
|
||||
if self.mirror_path:
|
||||
paths.append(os.path.join(self.path, os.path.basename(self.mirror_path)))
|
||||
|
||||
for path in paths:
|
||||
if os.path.exists(path):
|
||||
return path
|
||||
return None
|
||||
|
||||
|
||||
|
@ -247,6 +254,7 @@ def fetch_from_url(self, url):
|
|||
"'spack clean --dist' to remove the bad archive, then fix",
|
||||
"your internet gateway issue and install again.")
|
||||
|
||||
|
||||
def fetch(self):
|
||||
"""Downloads the file at URL to the stage. Returns true if it was downloaded,
|
||||
false if it already existed."""
|
||||
|
@ -257,7 +265,7 @@ def fetch(self):
|
|||
else:
|
||||
urls = [self.url]
|
||||
if self.mirror_path:
|
||||
urls = ["%s/%s" % (m, self.mirror_path) for m in spack.mirrors] + urls
|
||||
urls = ["%s/%s" % (m, self.mirror_path) for m in _get_mirrors()] + urls
|
||||
|
||||
for url in urls:
|
||||
tty.msg("Trying to fetch from %s" % url)
|
||||
|
@ -271,6 +279,15 @@ def fetch(self):
|
|||
return self.archive_file
|
||||
|
||||
|
||||
def check(self, digest):
    """Check the downloaded archive against a checksum digest.

    Raises ChecksumError (with the hash name, expected digest, and the
    digest actually computed) if the archive does not match.
    """
    # Checker computes the archive's hash and compares it to `digest`.
    checker = crypto.Checker(digest)
    if not checker.check(self.archive_file):
        raise ChecksumError(
            "%s checksum failed for %s." % (checker.hash_name, self.archive_file),
            "Expected %s but got %s." % (digest, checker.sum))
|
||||
|
||||
|
||||
def expand_archive(self):
|
||||
"""Changes to the stage directory and attempt to expand the downloaded
|
||||
archive. Fail if the stage is not set up or if the archive is not yet
|
||||
|
@ -320,6 +337,17 @@ def destroy(self):
|
|||
os.chdir(os.path.dirname(self.path))
|
||||
|
||||
|
||||
def _get_mirrors():
    """Return the list of mirror URLs from the spack configuration."""
    config = spack.config.get_config()
    # One named [mirror "<name>"] section exists per configured mirror.
    return [config.get_value('mirror', name, 'url')
            for name in config.get_section_names('mirror')]
|
||||
|
||||
|
||||
def ensure_access(file=spack.stage_path):
|
||||
"""Ensure we can access a directory and die with an error if we can't."""
|
||||
if not can_access(file):
|
||||
|
@ -366,9 +394,15 @@ def find_tmp_root():
|
|||
return None
|
||||
|
||||
|
||||
class FailedDownloadError(serr.SpackError):
|
||||
class FailedDownloadError(spack.error.SpackError):
    """Raised when a download fails."""
    def __init__(self, url, msg=""):
        super(FailedDownloadError, self).__init__(
            "Failed to fetch file from URL: %s" % url, msg)
        # Keep the offending URL so callers can report or retry it.
        self.url = url
|
||||
|
||||
|
||||
class ChecksumError(spack.error.SpackError):
    """Raised when an archive fails to checksum."""
    def __init__(self, message, long_msg):
        super(ChecksumError, self).__init__(message, long_msg)
|
||||
|
|
|
@ -56,3 +56,12 @@ def stem(path):
|
|||
if re.search(suffix, path):
|
||||
return re.sub(suffix, "", path)
|
||||
return path
|
||||
|
||||
|
||||
def extension(path):
    """Get the archive extension for a path.

    Returns the matching entry from ALLOWED_ARCHIVE_TYPES (e.g. 'tar.gz')
    if `path` ends with one of the known archive suffixes, else None.
    """
    # Renamed loop variable: the original shadowed the builtin `type`.
    for archive_type in ALLOWED_ARCHIVE_TYPES:
        suffix = r'\.%s$' % archive_type
        if re.search(suffix, path):
            return archive_type
    return None
|
||||
|
|
Loading…
Reference in a new issue