Merge pull request #16 in SCALE/spack from features/mirror to develop

# By Todd Gamblin
# Via Todd Gamblin
* commit '3c3f272280c530553322142d9d836c91b1b01137':
  spack mirror now checksums fetched archives.
  New spack mirror command, configuration.
  Add more output; don't re-add existing compilers
This commit is contained in:
George Todd Gamblin 2014-06-24 13:38:13 -07:00
commit bb4cbd008d
8 changed files with 227 additions and 66 deletions

View file

@ -105,4 +105,4 @@ except SpackError, e:
tty.die(e.message) tty.die(e.message)
except KeyboardInterrupt: except KeyboardInterrupt:
tty.die("Got a keyboard interrupt from the user.") tty.die("Keyboard interrupt.")

View file

@ -137,24 +137,6 @@
# #
sys_type = None sys_type = None
#
# Places to download tarballs from.
#
# TODO: move to configuration.
#
# Examples:
#
# For a local directory:
# mirrors = ['file:///Users/gamblin2/spack-mirror']
#
# For a website:
# mirrors = ['http://spackports.org/spack-mirror/']
#
# For no mirrors:
# mirrors = []
#
mirrors = []
# #
# Extra imports that should be generally usable from package.py files. # Extra imports that should be generally usable from package.py files.
# #

View file

@ -60,8 +60,17 @@ def compiler_add(args):
if not paths: if not paths:
paths = get_path('PATH') paths = get_path('PATH')
compilers = spack.compilers.find_compilers(*args.add_paths) compilers = [c for c in spack.compilers.find_compilers(*args.add_paths)
spack.compilers.add_compilers_to_config('user', *compilers) if c.spec not in spack.compilers.all_compilers()]
if compilers:
spack.compilers.add_compilers_to_config('user', *compilers)
n = len(compilers)
tty.msg("Added %d new compiler%s to %s" % (
n, 's' if n > 1 else '', spack.config.get_filename('user')))
colify(reversed(sorted(c.spec for c in compilers)), indent=4)
else:
tty.msg("Found no new compilers")
def compiler_remove(args): def compiler_remove(args):

View file

@ -25,69 +25,197 @@
import os import os
import shutil import shutil
import argparse import argparse
from datetime import datetime
from contextlib import closing
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.tty.colify import colify
from llnl.util.filesystem import mkdirp, join_path from llnl.util.filesystem import mkdirp, join_path
import spack import spack
import spack.cmd import spack.cmd
import spack.config
from spack.spec import Spec
from spack.error import SpackError
from spack.stage import Stage from spack.stage import Stage
from spack.util.compression import extension
description = "Create a directory full of package tarballs that can be used as a spack mirror." description = "Manage spack mirrors."
def setup_parser(subparser): def setup_parser(subparser):
subparser.add_argument( subparser.add_argument(
'directory', help="Directory in which to create mirror.") '-n', '--no-checksum', action='store_true', dest='no_checksum',
subparser.add_argument( help="Do not check fetched packages against checksum")
'packages', nargs=argparse.REMAINDER, help="names of packages to put in mirror")
sp = subparser.add_subparsers(
metavar='SUBCOMMAND', dest='mirror_command')
create_parser = sp.add_parser('create', help=mirror_create.__doc__)
create_parser.add_argument('-d', '--directory', default=None,
help="Directory in which to create mirror.")
create_parser.add_argument(
'specs', nargs=argparse.REMAINDER, help="Specs of packages to put in mirror")
create_parser.add_argument(
'-f', '--file', help="File with specs of packages to put in mirror.")
add_parser = sp.add_parser('add', help=mirror_add.__doc__)
add_parser.add_argument('name', help="Mnemonic name for mirror.")
add_parser.add_argument(
'url', help="URL of mirror directory created by 'spack mirror create'.")
remove_parser = sp.add_parser('remove', help=mirror_remove.__doc__)
remove_parser.add_argument('name')
list_parser = sp.add_parser('list', help=mirror_list.__doc__)
def mirror(parser, args): def mirror_add(args):
if not args.packages: """Add a mirror to Spack."""
args.packages = [p for p in spack.db.all_package_names()] config = spack.config.get_config('user')
config.set_value('mirror', args.name, 'url', args.url)
config.write()
def mirror_remove(args):
"""Remove a mirror by name."""
config = spack.config.get_config('user')
name = args.name
if not config.has_named_section('mirror', name):
tty.die("No such mirror: %s" % name)
config.remove_named_section('mirror', name)
config.write()
def mirror_list(args):
"""Print out available mirrors to the console."""
config = spack.config.get_config()
sec_names = config.get_section_names('mirror')
if not sec_names:
tty.msg("No mirrors configured.")
return
max_len = max(len(s) for s in sec_names)
fmt = "%%-%ds%%s" % (max_len + 4)
for name in sec_names:
val = config.get_value('mirror', name, 'url')
print fmt % (name, val)
def mirror_create(args):
"""Create a directory to be used as a spack mirror, and fill it with
package archives."""
# try to parse specs from the command line first.
args.specs = spack.cmd.parse_specs(args.specs)
# If there is a file, parse each line as a spec and add it to the list.
if args.file:
with closing(open(args.file, "r")) as stream:
for i, string in enumerate(stream):
try:
s = Spec(string)
s.package
args.specs.append(s)
except SpackError, e:
tty.die("Parse error in %s, line %d:" % (args.file, i+1),
">>> " + string, str(e))
if not args.specs:
args.specs = spack.db.all_package_names()
# Default name for directory is spack-mirror-<DATESTAMP>
if not args.directory:
timestamp = datetime.now().strftime("%Y-%m-%d")
args.directory = 'spack-mirror-' + timestamp
# Make sure nothing is in the way.
if os.path.isfile(args.directory): if os.path.isfile(args.directory):
tty.error("%s already exists and is a file." % args.directory) tty.error("%s already exists and is a file." % args.directory)
# Create a directory if none exists
if not os.path.isdir(args.directory): if not os.path.isdir(args.directory):
mkdirp(args.directory) mkdirp(args.directory)
tty.msg("Created new mirror in %s" % args.directory)
else:
tty.msg("Adding to existing mirror in %s" % args.directory)
# save working directory # Things to keep track of while parsing specs.
working_dir = os.getcwd() working_dir = os.getcwd()
num_mirrored = 0
num_error = 0
# Iterate through packages and download all the safe tarballs for each of them # Iterate through packages and download all the safe tarballs for each of them
for pkg_name in args.packages: for spec in args.specs:
pkg = spack.db.get(pkg_name) pkg = spec.package
# Skip any package that has no checksummed versions. # Skip any package that has no checksummed versions.
if not pkg.versions: if not pkg.versions:
tty.msg("No safe (checksummed) versions for package %s. Skipping." tty.msg("No safe (checksummed) versions for package %s."
% pkg_name) % pkg.name)
continue continue
# create a subdir for the current package. # create a subdir for the current package.
pkg_path = join_path(args.directory, pkg_name) pkg_path = join_path(args.directory, pkg.name)
mkdirp(pkg_path) mkdirp(pkg_path)
# Download all the tarballs using Stages, then move them into place # Download all the tarballs using Stages, then move them into place
for version in pkg.versions: for version in pkg.versions:
# Skip versions that don't match the spec
vspec = Spec('%s@%s' % (pkg.name, version))
if not vspec.satisfies(spec):
continue
mirror_path = "%s/%s-%s.%s" % (
pkg.name, pkg.name, version, extension(pkg.url))
os.chdir(working_dir)
mirror_file = join_path(args.directory, mirror_path)
if os.path.exists(mirror_file):
tty.msg("Already fetched %s." % mirror_file)
num_mirrored += 1
continue
# Get the URL for the version and set up a stage to download it.
url = pkg.url_for_version(version) url = pkg.url_for_version(version)
stage = Stage(url) stage = Stage(url)
try: try:
# fetch changes directory into the stage
stage.fetch() stage.fetch()
basename = os.path.basename(stage.archive_file)
final_dst = join_path(pkg_path, basename)
if not args.no_checksum and version in pkg.versions:
digest = pkg.versions[version]
stage.check(digest)
tty.msg("Checksum passed for %s@%s" % (pkg.name, version))
# change back and move the new archive into place.
os.chdir(working_dir) os.chdir(working_dir)
shutil.move(stage.archive_file, final_dst) shutil.move(stage.archive_file, mirror_file)
tty.msg("Added %s to mirror" % final_dst) tty.msg("Added %s to mirror" % mirror_file)
num_mirrored += 1
except Exception, e: except Exception, e:
tty.warn("Error while fetching %s. Skipping." % url, e.message) tty.warn("Error while fetching %s." % url, e.message)
num_error += 1
finally: finally:
stage.destroy() stage.destroy()
# Success! # If nothing happened, try to say why.
tty.msg("Created Spack mirror in %s" % args.directory) if not num_mirrored:
if num_error:
tty.error("No packages added to mirror.",
"All packages failed to fetch.")
else:
tty.error("No packages added to mirror. No versions matched specs:")
colify(args.specs, indent=4)
def mirror(parser, args):
action = { 'create' : mirror_create,
'add' : mirror_add,
'remove' : mirror_remove,
'list' : mirror_list }
action[args.mirror_command](args)

View file

@ -225,8 +225,7 @@ class SpackConfigParser(cp.RawConfigParser):
OPTCRE_NV = re.compile(r'\s*' + cp.RawConfigParser.OPTCRE_NV.pattern) OPTCRE_NV = re.compile(r'\s*' + cp.RawConfigParser.OPTCRE_NV.pattern)
def __init__(self, file_or_files): def __init__(self, file_or_files):
cp.RawConfigParser.__init__( cp.RawConfigParser.__init__(self, dict_type=OrderedDict)
self, dict_type=OrderedDict, allow_no_value=True)
if isinstance(file_or_files, basestring): if isinstance(file_or_files, basestring):
self.read([file_or_files]) self.read([file_or_files])
@ -286,6 +285,16 @@ def has_value(self, section, name, option):
return self.has_option(sn, option) return self.has_option(sn, option)
def has_named_section(self, section, name):
sn = _make_section_name(section, name)
return self.has_section(sn)
def remove_named_section(self, section, name):
sn = _make_section_name(section, name)
self.remove_section(sn)
def get_section_names(self, sectype): def get_section_names(self, sectype):
"""Get all named sections with the specified type. """Get all named sections with the specified type.
A named section looks like this: A named section looks like this:

View file

@ -50,11 +50,10 @@
import spack.error import spack.error
import spack.build_environment as build_env import spack.build_environment as build_env
import spack.url as url import spack.url as url
import spack.util.crypto as crypto
from spack.version import * from spack.version import *
from spack.stage import Stage from spack.stage import Stage
from spack.util.web import get_pages from spack.util.web import get_pages
from spack.util.compression import allowed_archive from spack.util.compression import allowed_archive, extension
"""Allowed URL schemes for spack packages.""" """Allowed URL schemes for spack packages."""
_ALLOWED_URL_SCHEMES = ["http", "https", "ftp", "file"] _ALLOWED_URL_SCHEMES = ["http", "https", "ftp", "file"]
@ -399,7 +398,9 @@ def stage(self):
raise ValueError("Can only get a stage for a concrete package.") raise ValueError("Can only get a stage for a concrete package.")
if self._stage is None: if self._stage is None:
mirror_path = "%s/%s" % (self.name, os.path.basename(self.url)) # TODO: move this logic into a mirror module.
mirror_path = "%s/%s" % (self.name, "%s-%s.%s" % (
self.name, self.version, extension(self.url)))
self._stage = Stage( self._stage = Stage(
self.url, mirror_path=mirror_path, name=self.spec.short_spec) self.url, mirror_path=mirror_path, name=self.spec.short_spec)
return self._stage return self._stage
@ -537,7 +538,7 @@ def do_fetch(self):
raise ValueError("Can only fetch concrete packages.") raise ValueError("Can only fetch concrete packages.")
if spack.do_checksum and not self.version in self.versions: if spack.do_checksum and not self.version in self.versions:
raise ChecksumError( raise FetchError(
"Cannot fetch %s safely; there is no checksum on file for version %s." "Cannot fetch %s safely; there is no checksum on file for version %s."
% (self.name, self.version), % (self.name, self.version),
"Add a checksum to the package file, or use --no-checksum to " "Add a checksum to the package file, or use --no-checksum to "
@ -547,13 +548,8 @@ def do_fetch(self):
if spack.do_checksum and self.version in self.versions: if spack.do_checksum and self.version in self.versions:
digest = self.versions[self.version] digest = self.versions[self.version]
checker = crypto.Checker(digest) self.stage.check(digest)
if checker.check(self.stage.archive_file): tty.msg("Checksum passed for %s@%s" % (self.name, self.version))
tty.msg("Checksum passed for %s" % self.name)
else:
raise ChecksumError(
"%s checksum failed for %s." % (checker.hash_name, self.name),
"Expected %s but got %s." % (digest, checker.sum))
def do_stage(self): def do_stage(self):
@ -866,12 +862,6 @@ def __init__(self, message, long_msg=None):
super(FetchError, self).__init__(message, long_msg) super(FetchError, self).__init__(message, long_msg)
class ChecksumError(FetchError):
"""Raised when archive fails to checksum."""
def __init__(self, message, long_msg):
super(ChecksumError, self).__init__(message, long_msg)
class InstallError(spack.error.SpackError): class InstallError(spack.error.SpackError):
"""Raised when something goes wrong during install or uninstall.""" """Raised when something goes wrong during install or uninstall."""
def __init__(self, message, long_msg=None): def __init__(self, message, long_msg=None):

View file

@ -31,9 +31,12 @@
from llnl.util.filesystem import * from llnl.util.filesystem import *
import spack import spack
import spack.error as serr import spack.config
import spack.error
import spack.util.crypto as crypto
from spack.util.compression import decompressor_for from spack.util.compression import decompressor_for
STAGE_PREFIX = 'spack-stage-' STAGE_PREFIX = 'spack-stage-'
@ -185,9 +188,13 @@ def _setup(self):
@property @property
def archive_file(self): def archive_file(self):
"""Path to the source archive within this stage directory.""" """Path to the source archive within this stage directory."""
path = os.path.join(self.path, os.path.basename(self.url)) paths = [os.path.join(self.path, os.path.basename(self.url))]
if os.path.exists(path): if self.mirror_path:
return path paths.append(os.path.join(self.path, os.path.basename(self.mirror_path)))
for path in paths:
if os.path.exists(path):
return path
return None return None
@ -247,6 +254,7 @@ def fetch_from_url(self, url):
"'spack clean --dist' to remove the bad archive, then fix", "'spack clean --dist' to remove the bad archive, then fix",
"your internet gateway issue and install again.") "your internet gateway issue and install again.")
def fetch(self): def fetch(self):
"""Downloads the file at URL to the stage. Returns true if it was downloaded, """Downloads the file at URL to the stage. Returns true if it was downloaded,
false if it already existed.""" false if it already existed."""
@ -257,7 +265,7 @@ def fetch(self):
else: else:
urls = [self.url] urls = [self.url]
if self.mirror_path: if self.mirror_path:
urls = ["%s/%s" % (m, self.mirror_path) for m in spack.mirrors] + urls urls = ["%s/%s" % (m, self.mirror_path) for m in _get_mirrors()] + urls
for url in urls: for url in urls:
tty.msg("Trying to fetch from %s" % url) tty.msg("Trying to fetch from %s" % url)
@ -271,6 +279,15 @@ def fetch(self):
return self.archive_file return self.archive_file
def check(self, digest):
"""Check the downloaded archive against a checksum digest"""
checker = crypto.Checker(digest)
if not checker.check(self.archive_file):
raise ChecksumError(
"%s checksum failed for %s." % (checker.hash_name, self.archive_file),
"Expected %s but got %s." % (digest, checker.sum))
def expand_archive(self): def expand_archive(self):
"""Changes to the stage directory and attempt to expand the downloaded """Changes to the stage directory and attempt to expand the downloaded
archive. Fail if the stage is not set up or if the archive is not yet archive. Fail if the stage is not set up or if the archive is not yet
@ -320,6 +337,17 @@ def destroy(self):
os.chdir(os.path.dirname(self.path)) os.chdir(os.path.dirname(self.path))
def _get_mirrors():
"""Get mirrors from spack configuration."""
config = spack.config.get_config()
mirrors = []
sec_names = config.get_section_names('mirror')
for name in sec_names:
mirrors.append(config.get_value('mirror', name, 'url'))
return mirrors
def ensure_access(file=spack.stage_path): def ensure_access(file=spack.stage_path):
"""Ensure we can access a directory and die with an error if we can't.""" """Ensure we can access a directory and die with an error if we can't."""
if not can_access(file): if not can_access(file):
@ -366,9 +394,15 @@ def find_tmp_root():
return None return None
class FailedDownloadError(serr.SpackError): class FailedDownloadError(spack.error.SpackError):
"""Raised when a download fails.""" """Raised when a download fails."""
def __init__(self, url, msg=""): def __init__(self, url, msg=""):
super(FailedDownloadError, self).__init__( super(FailedDownloadError, self).__init__(
"Failed to fetch file from URL: %s" % url, msg) "Failed to fetch file from URL: %s" % url, msg)
self.url = url self.url = url
class ChecksumError(spack.error.SpackError):
"""Raised when archive fails to checksum."""
def __init__(self, message, long_msg):
super(ChecksumError, self).__init__(message, long_msg)

View file

@ -56,3 +56,12 @@ def stem(path):
if re.search(suffix, path): if re.search(suffix, path):
return re.sub(suffix, "", path) return re.sub(suffix, "", path)
return path return path
def extension(path):
"""Get the archive extension for a path."""
for type in ALLOWED_ARCHIVE_TYPES:
suffix = r'\.%s$' % type
if re.search(suffix, path):
return type
return None