Merge branch 'features/cache-archives' of https://github.com/scheibelp/spack into scheibelp-features/cache-archives

Commit bc1320d83a
5 changed files with 84 additions and 3 deletions
@@ -48,6 +48,10 @@
 stage_path = join_path(var_path, "stage")
 repos_path = join_path(var_path, "repos")
 share_path = join_path(spack_root, "share", "spack")
+cache_path = join_path(var_path, "cache")
+
+import spack.fetch_strategy
+cache = spack.fetch_strategy.FsCache(cache_path)
 
 prefix = spack_root
 opt_path = join_path(prefix, "opt")
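Together with the FsCache class added further down in this diff, this exposes the download cache as a module-level singleton, spack.cache, rooted at the "cache" directory under var_path. A minimal sketch of how the rest of this diff reaches it ('fetcher' stands for any fetch strategy that has already completed a download; the relative path and digest are invented for illustration):

    # Illustrative only; these two calls are how the stage code below uses the singleton.
    import spack

    # Store an already-downloaded fetcher's archive under a relative path ...
    spack.cache.store(fetcher, "zlib/zlib-1.2.8.tar.gz")

    # ... and later get a fetch strategy back that reads the cached copy.
    cached = spack.cache.fetcher("zlib/zlib-1.2.8.tar.gz",
                                 "d41d8cd98f00b204e9800998ecf8427e")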
@@ -31,6 +31,7 @@
 
 import spack
 import spack.test
+from spack.fetch_strategy import FetchError
 
 description ="Run unit tests"
 
@@ -50,6 +51,24 @@ def setup_parser(subparser):
         help="verbose output")
 
 
+class MockCache(object):
+    def store(self, copyCmd, relativeDst):
+        pass
+
+    def fetcher(self, targetPath, digest):
+        return MockCacheFetcher()
+
+
+class MockCacheFetcher(object):
+    def set_stage(self, stage):
+        pass
+
+    def fetch(self):
+        raise FetchError("Mock cache always fails for tests")
+
+    def __str__(self):
+        return "[mock fetcher]"
+
+
 def test(parser, args):
     if args.list:
         print "Available tests:"
@@ -66,4 +85,5 @@ def test(parser, args):
 
     if not os.path.exists(outputDir):
         mkdirp(outputDir)
+    spack.cache = MockCache()
     spack.test.run(args.names, outputDir, args.verbose)
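The cache is consumed purely through duck typing, so the test command can swap in MockCache before spack.test.run and keep unit tests away from the real file-system cache. Any object with the same two methods works; a minimal hypothetical stand-in (NullCache is not part of this diff):

    # Hypothetical stand-in illustrating the interface spack.cache must provide.
    class NullCache(object):
        def store(self, fetcher, relativeDst):
            pass  # a real cache archives the fetcher's file under relativeDst

        def fetcher(self, targetPath, digest):
            return None  # a real cache returns a fetch strategy for the cached file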
@@ -310,7 +310,7 @@ def archive(self, destination):
         if not extension(destination) == extension(self.archive_file):
             raise ValueError("Cannot archive without matching extensions.")
 
-        shutil.move(self.archive_file, destination)
+        shutil.copy(self.archive_file, destination)
 
     @_needs_stage
     def check(self):
@@ -348,7 +348,7 @@ def reset(self):
 
     def __repr__(self):
         url = self.url if self.url else "no url"
-        return "URLFetchStrategy<%s>" % url
+        return "%s<%s>" % (self.__class__.__name__, url)
 
     def __str__(self):
         if self.url:
@@ -357,6 +357,24 @@ def __str__(self):
             return "[no url]"
 
 
+class CacheURLFetchStrategy(URLFetchStrategy):
+    """The resource associated with a cache URL may be out of date."""
+    def __init__(self, *args, **kwargs):
+        super(CacheURLFetchStrategy, self).__init__(*args, **kwargs)
+
+    @_needs_stage
+    def fetch(self):
+        super(CacheURLFetchStrategy, self).fetch()
+        if self.digest:
+            try:
+                self.check()
+            except ChecksumError:
+                # Future fetchers will assume they don't need to download if the
+                # file remains
+                os.remove(self.archive_file)
+                raise
+
+
 class VCSFetchStrategy(FetchStrategy):
 
     def __init__(self, name, *rev_types, **kwargs):
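CacheURLFetchStrategy is an ordinary URLFetchStrategy pointed at a file:// URL, with one twist: if the checksum of the cached file no longer matches, the stale file is removed before the error propagates, so a later fetcher gets a clean retry. A hedged usage sketch (the path and digest are invented; set_stage and ChecksumError come from the surrounding fetch_strategy module):

    # Illustrative only; assumes an existing spack Stage bound to 'stage'.
    from spack.fetch_strategy import CacheURLFetchStrategy, ChecksumError

    fetcher = CacheURLFetchStrategy("file:///var/spack/cache/zlib/zlib-1.2.8.tar.gz",
                                    digest="d41d8cd98f00b204e9800998ecf8427e")
    fetcher.set_stage(stage)
    try:
        fetcher.fetch()        # copies the cached archive in, then verifies the digest
    except ChecksumError:
        pass                   # stale copy was already deleted; try the next fetcher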
@@ -815,6 +833,32 @@ def for_package_version(pkg, version):
     raise InvalidArgsError(pkg, version)
 
 
+class FsCache(object):
+    def __init__(self, root):
+        self.root = os.path.abspath(root)
+
+    def store(self, fetcher, relativeDst):
+        unique = False
+        uidGroups = [['tag', 'commit'], ['digest'], ['revision']]
+        for grp in uidGroups:
+            try:
+                unique |= any(getattr(fetcher, x) for x in grp)
+            except AttributeError:
+                pass
+            if unique:
+                break
+        if not unique:
+            return
+
+        dst = join_path(self.root, relativeDst)
+        mkdirp(os.path.dirname(dst))
+        fetcher.archive(dst)
+
+    def fetcher(self, targetPath, digest):
+        url = "file://" + join_path(self.root, targetPath)
+        return CacheURLFetchStrategy(url, digest)
+
+
 class FetchError(spack.error.SpackError):
 
     def __init__(self, msg, long_msg=None):
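FsCache.store deliberately skips fetchers that carry no unique identifier (no digest, tag, commit, or revision), so sources that cannot be pinned to a version are never cached. A hedged sketch of that behaviour, using only classes from this diff (the URL, cache root, path, and digest are invented):

    # Illustrative only; exercises FsCache directly, outside of a Stage.
    import spack.fetch_strategy as fs

    cache = fs.FsCache("/tmp/spack-cache-demo")

    # No digest/tag/commit/revision: store() returns without caching anything.
    plain = fs.URLFetchStrategy("http://example.com/zlib-1.2.8.tar.gz")
    cache.store(plain, "zlib/zlib-1.2.8.tar.gz")

    # A fetcher with a digest would be archived instead (its archive() call
    # needs a stage that has already downloaded the file), and the cached
    # copy can later be retrieved as a CacheURLFetchStrategy:
    cached = cache.fetcher("zlib/zlib-1.2.8.tar.gz",
                           "d41d8cd98f00b204e9800998ecf8427e")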
@@ -748,6 +748,9 @@ def do_fetch(self, mirror_only=False):
         if spack.do_checksum and self.version in self.versions:
             self.stage.check()
 
+        self.stage.cache_local()
+
+
     def do_stage(self, mirror_only=False):
         """Unpacks the fetched tarball, then changes into the expanded tarball
         directory."""
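The net effect on the package fetch path: download, verify, then populate the local cache. A rough sketch of the order of operations implied by this hunk (not a verbatim copy of do_fetch, whose other details are unchanged here):

    # Sketch of the flow implied by this hunk; the rest of do_fetch is elided.
    def do_fetch(self, mirror_only=False):
        self.stage.fetch(mirror_only)        # try cache, mirrors, then the upstream URL
        if spack.do_checksum and self.version in self.versions:
            self.stage.check()               # verify the downloaded archive's digest
        self.stage.cache_local()             # archive the verified download into spack.cache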
@@ -304,6 +304,7 @@ def fetch(self, mirror_only=False):
             # Add URL strategies for all the mirrors with the digest
             for url in urls:
                 fetchers.insert(0, fs.URLFetchStrategy(url, digest))
+            fetchers.insert(0, spack.cache.fetcher(self.mirror_path, digest))
 
         for fetcher in fetchers:
             try:
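Each insert(0) pushes a strategy onto the front of the list, so the order of attempts ends up: local cache first, then the mirrors, then the package's default fetcher. A small stand-alone illustration of that ordering (the strings are placeholders for fetch strategies):

    # Stand-alone illustration of the ordering produced by the insert(0) calls.
    fetchers = ["default_fetcher"]            # the package's own URL, added first
    for url in ["mirror-a", "mirror-b"]:      # placeholder mirror URLs
        fetchers.insert(0, "mirror:" + url)
    fetchers.insert(0, "cache_fetcher")       # inserted last, so it is tried first

    print(fetchers)
    # ['cache_fetcher', 'mirror:mirror-b', 'mirror:mirror-a', 'default_fetcher']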
@@ -320,6 +321,7 @@ def fetch(self, mirror_only=False):
             self.fetcher = self.default_fetcher
             raise fs.FetchError(errMessage, None)
 
+
     def check(self):
         """Check the downloaded archive against a checksum digest.
            No-op if this stage checks code out of a repository."""
@@ -333,6 +335,11 @@ def check(self):
         else:
             self.fetcher.check()
 
+
+    def cache_local(self):
+        spack.cache.store(self.fetcher, self.mirror_path)
+
+
     def expand_archive(self):
         """Changes to the stage directory and attempt to expand the downloaded
         archive. Fail if the stage is not set up or if the archive is not yet
@@ -436,7 +443,7 @@ def expand_archive(self):
             shutil.move(source_path, destination_path)
 
 
-@pattern.composite(method_list=['fetch', 'create', 'check', 'expand_archive', 'restage', 'destroy'])
+@pattern.composite(method_list=['fetch', 'create', 'check', 'expand_archive', 'restage', 'destroy', 'cache_local'])
 class StageComposite:
     """
     Composite for Stage type objects. The first item in this composite is considered to be the root package, and
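Adding 'cache_local' to method_list means the composite stage forwards cache_local() to every stage it holds, just as it already does for fetch, check, and destroy. A generic sketch of that forwarding idea (this is not spack's actual pattern.composite implementation):

    # Generic illustration of composite forwarding; not spack's pattern module.
    class SimpleStageComposite(object):
        def __init__(self, stages):
            self.stages = list(stages)

        def cache_local(self):
            # Forward the call to each child stage, as the decorator arranges
            # for every name listed in method_list.
            for stage in self.stages:
                stage.cache_local()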
@@ -511,6 +518,9 @@ def destroy(self):
         # No need to destroy DIY stage.
         pass
 
+    def cache_local(self):
+        tty.msg("Sources for DIY stages are not cached")
+
 
 def _get_mirrors():
     """Get mirrors from spack configuration."""