Mirrors: avoid re-downloading patches

When updating a mirror, Spack was re-retrieving every patch, since the
fetch logic for patches is separate from the package fetch logic. This
updates the patch logic so that the mirror logic can avoid re-downloading
patches it already has.
Peter Josef Scheibel 2019-11-26 19:35:58 -08:00 committed by Massimiliano Culpo
parent 754dd6eb1f
commit b62ba7609d
2 changed files with 28 additions and 20 deletions
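In essence, the diff below turns the patch's download stage into a lazily
constructed, memoized property, so its consumers (fetch(), cache(), and the
mirror code) share one stage object instead of rebuilding and re-fetching it
each time. A minimal sketch of that pattern, with hypothetical names
(LazyPatch, _build_stage) standing in for the real Spack classes:

    class LazyPatch(object):
        """Sketch of the memoized-stage pattern this commit introduces."""

        def __init__(self, url, sha256):
            self.url = url
            self.sha256 = sha256
            self._stage = None  # built on first access, then reused

        @property
        def stage(self):
            # First access builds the stage; every later access returns
            # the same object, so nothing is set up (or fetched) twice.
            if self._stage is None:
                self._stage = self._build_stage()
            return self._stage

        def _build_stage(self):
            # Stand-in for constructing a spack.stage.Stage with mirror paths.
            return {'url': self.url, 'digest': self.sha256[:7]}

        def fetch(self):
            # Only fetch() touches the network; cache() and the mirror
            # logic can inspect the stage without triggering a download.
            print('fetching {0}'.format(self.stage['url']))

        def cache(self):
            return self.stage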

lib/spack/spack/mirror.py

@@ -505,7 +505,6 @@ def add_single_spec(spec, mirror_root, mirror_stats):
         with spec.package.stage as pkg_stage:
             pkg_stage.cache_mirror(mirror_stats)
             for patch in spec.package.all_patches():
-                patch.fetch(pkg_stage)
                 if patch.cache():
                     patch.cache().cache_mirror(mirror_stats)
                 patch.clean()

lib/spack/spack/patch.py

@@ -171,6 +171,7 @@ def __init__(self, pkg, url, level=1, working_dir='.', ordering_key=None,
         super(UrlPatch, self).__init__(pkg, url, level, working_dir)
         self.url = url
+        self._stage = None

         self.ordering_key = ordering_key
@@ -191,25 +192,6 @@ def fetch(self, stage):
         Args:
             stage: stage for the package that needs to be patched
         """
-        # use archive digest for compressed archives
-        fetch_digest = self.sha256
-        if self.archive_sha256:
-            fetch_digest = self.archive_sha256
-
-        fetcher = fs.URLFetchStrategy(self.url, fetch_digest,
-                                      expand=bool(self.archive_sha256))
-
-        # The same package can have multiple patches with the same name but
-        # with different contents, therefore apply a subset of the hash.
-        name = '{0}-{1}'.format(os.path.basename(self.url), fetch_digest[:7])
-        per_package_ref = os.path.join(self.owner.split('.')[-1], name)
-
-        # Reference starting with "spack." is required to avoid cyclic imports
-        mirror_ref = spack.mirror.mirror_archive_paths(
-            fetcher,
-            per_package_ref)
-
-        self.stage = spack.stage.Stage(fetcher, mirror_paths=mirror_ref)
         self.stage.create()
         self.stage.fetch()
         self.stage.check()
@@ -243,6 +225,33 @@ def fetch(self, stage):
                 "sha256 checksum failed for %s" % self.path,
                 "Expected %s but got %s" % (self.sha256, checker.sum))

+    @property
+    def stage(self):
+        if self._stage:
+            return self._stage
+
+        # use archive digest for compressed archives
+        fetch_digest = self.sha256
+        if self.archive_sha256:
+            fetch_digest = self.archive_sha256
+
+        fetcher = fs.URLFetchStrategy(self.url, fetch_digest,
+                                      expand=bool(self.archive_sha256))
+
+        # The same package can have multiple patches with the same name but
+        # with different contents, therefore apply a subset of the hash.
+        name = '{0}-{1}'.format(os.path.basename(self.url), fetch_digest[:7])
+        per_package_ref = os.path.join(self.owner.split('.')[-1], name)
+
+        # Reference starting with "spack." is required to avoid cyclic imports
+        mirror_ref = spack.mirror.mirror_archive_paths(
+            fetcher,
+            per_package_ref)
+
+        self._stage = spack.stage.Stage(fetcher, mirror_paths=mirror_ref)
+        self._stage.create()
+        return self._stage
+
     def cache(self):
         return self.stage
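With the stage memoized, the mirror loop in the first hunk no longer needs an
explicit patch.fetch(pkg_stage): cache() hands back the lazily created stage,
presumably leaving it to cache_mirror to decide whether anything actually has
to be copied. A hypothetical run of the LazyPatch sketch above:

    patch = LazyPatch('https://example.com/fix.patch', 'cafebabe' * 8)
    s1 = patch.cache()   # first access builds the stage
    s2 = patch.cache()   # second access reuses it
    assert s1 is s2      # one stage object, no duplicate setup
    patch.fetch()        # downloading happens only when asked for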