Update the binary index before attempting direct fetches (#32137)

"spack install" will not update the binary index if given a concrete
spec, which causes it to fall back to direct fetches when a simple
index update would have helped. For S3 buckets in particular, this
significantly and needlessly slows down the install process.

This commit alters the logic so that the binary index is updated
whenever a by-hash lookup fails. The lookup is attempted again with
the updated index before falling back to direct fetches. To avoid
updating too frequently (potentially once for each spec being
installed), BinaryCacheIndex.update now includes a "cooldown"
option, and when this option is enabled it will not update more
than once in a cooldown window (set in config.yaml).

Co-authored-by: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com>
Jonathon Anderson 2022-10-19 10:36:27 -05:00 committed by GitHub
parent 3ec7304699
commit a423dc646a
4 changed files with 48 additions and 19 deletions
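
In outline, the new flow is: look the spec up by hash in the cached index; on a miss, refresh the index (at most once per cooldown window) and retry; only then fall back to direct fetches. A minimal sketch of that flow, where fetch_remote_index and direct_fetch are hypothetical stand-ins for the real Spack internals:

    import time

    _index = {}          # cached index: DAG hash -> list of mirror entries
    _last_update = None  # time.time() of the last index refresh

    def fetch_remote_index():
        # Stand-in: download and parse each mirror's index.json.
        return {}

    def direct_fetch(dag_hash):
        # Stand-in: probe each mirror's build_cache/ URLs for this hash directly.
        return None

    def update_index(ttl=600, with_cooldown=False):
        global _last_update
        now = time.time()
        if with_cooldown and ttl > 0 and _last_update is not None and now - _last_update < ttl:
            return  # refreshed recently enough; keep the cached index
        _index.update(fetch_remote_index())
        _last_update = now

    def get_mirrors_for_spec(dag_hash, index_only=False):
        entries = _index.get(dag_hash)
        if entries is None:
            # Miss: refresh the index (respecting the cooldown), then retry.
            update_index(with_cooldown=True)
            entries = _index.get(dag_hash)
        if entries is None and not index_only:
            # Still unknown to every index; fall back to direct fetches.
            entries = direct_fetch(dag_hash)
        return entries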

etc/spack/defaults/config.yaml

@@ -201,3 +201,7 @@ config:
# building and installing packages. This gives information about Spack's
# current progress as well as the current and total number of packages.
terminal_title: false
# Number of seconds a buildcache's index.json is cached locally before probing
# for updates, within a single Spack invocation. Defaults to 10 minutes.
binary_index_ttl: 600
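
The new key is read with a 600-second fallback, and setting it to 0 disables the cooldown so every lookup probes the mirrors again (the pattern the test below relies on). Both calls appear in the diffs that follow; for reference:

    import spack.config

    # Read the TTL; fall back to 600 seconds if the key is unset.
    ttl = spack.config.get("config:binary_index_ttl", 600)

    # A TTL of 0 turns the cooldown off, forcing a fresh index probe.
    with spack.config.override("config:binary_index_ttl", 0):
        ...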

lib/spack/spack/binary_distribution.py

@@ -12,6 +12,7 @@
import sys
import tarfile
import tempfile
import time
import traceback
import warnings
from contextlib import closing
@@ -105,6 +106,9 @@ def __init__(self, cache_root):
# cache (_mirrors_for_spec)
self._specs_already_associated = set()
# mapping from mirror urls to the time.time() of the last index fetch.
self._last_fetch_times = {}
# _mirrors_for_spec is a dictionary mapping DAG hashes to lists of
# entries indicating mirrors where that concrete spec can be found.
# Each entry is a dictionary consisting of:
@@ -137,6 +141,7 @@ def clear(self):
self._index_file_cache = None
self._local_index_cache = None
self._specs_already_associated = set()
self._last_fetch_times = {}
self._mirrors_for_spec = {}
def _write_local_index_cache(self):
@@ -242,7 +247,6 @@ def find_built_spec(self, spec, mirrors_to_check=None):
}
]
"""
self.regenerate_spec_cache()
return self.find_by_hash(spec.dag_hash(), mirrors_to_check=mirrors_to_check)
def find_by_hash(self, find_hash, mirrors_to_check=None):
@@ -253,6 +257,9 @@ def find_by_hash(self, find_hash, mirrors_to_check=None):
mirrors_to_check: Optional mapping containing mirrors to check. If
None, just assumes all configured mirrors.
"""
if find_hash not in self._mirrors_for_spec:
# Not found in the cached index, pull the latest from the server.
self.update(with_cooldown=True)
if find_hash not in self._mirrors_for_spec:
return None
results = self._mirrors_for_spec[find_hash]
@@ -283,7 +290,7 @@ def update_spec(self, spec, found_list):
"spec": new_entry["spec"],
}
-def update(self):
+def update(self, with_cooldown=False):
"""Make sure local cache of buildcache index files is up to date.
If the same mirrors are configured as the last time this was called
and none of the remote buildcache indices have changed, calling this
@@ -325,17 +332,30 @@ def update(self):
fetch_errors = []
all_methods_failed = True
ttl = spack.config.get("config:binary_index_ttl", 600)
now = time.time()
for cached_mirror_url in self._local_index_cache:
cache_entry = self._local_index_cache[cached_mirror_url]
cached_index_hash = cache_entry["index_hash"]
cached_index_path = cache_entry["index_path"]
if cached_mirror_url in configured_mirror_urls:
# Only do a fetch if the last fetch was longer than TTL ago
if (
with_cooldown
and ttl > 0
and cached_mirror_url in self._last_fetch_times
and now - self._last_fetch_times[cached_mirror_url] < ttl
):
# The fetch worked last time, so don't error
all_methods_failed = False
else:
# May need to fetch the index and update the local caches
try:
needs_regen = self._fetch_and_cache_index(
cached_mirror_url, expect_hash=cached_index_hash
)
self._last_fetch_times[cached_mirror_url] = now
all_methods_failed = False
except FetchCacheError as fetch_error:
needs_regen = False
@@ -351,6 +371,8 @@ def update(self):
"cache_key": os.path.join(self._index_cache_root, cached_index_path),
}
)
if cached_mirror_url in self._last_fetch_times:
del self._last_fetch_times[cached_mirror_url]
spec_cache_clear_needed = True
spec_cache_regenerate_needed = True
@@ -369,6 +391,7 @@ def update(self):
# Need to fetch the index and update the local caches
try:
needs_regen = self._fetch_and_cache_index(mirror_url)
self._last_fetch_times[mirror_url] = now
all_methods_failed = False
except FetchCacheError as fetch_error:
fetch_errors.extend(fetch_error.errors)
@@ -1878,8 +1901,8 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
results = binary_index.find_built_spec(spec, mirrors_to_check=mirrors_to_check)
-# Maybe we just didn't have the latest information from the mirror, so
-# try to fetch directly, unless we are only considering the indices.
+# The index may be out-of-date. If we aren't only considering indices, try
+# to fetch directly since we know where the file should be.
if not results and not index_only:
results = try_direct_fetch(spec, mirrors=mirrors_to_check)
# We found a spec by the direct fetch approach, we might as well
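
The _last_fetch_times bookkeeping above follows a simple pattern: record time.time() only after a successful fetch, skip mirrors fetched within the TTL when the cooldown is requested, and drop entries for mirrors that leave the configuration. A condensed sketch of just that pattern, where fetch_index and CooldownTracker are hypothetical stand-ins for _fetch_and_cache_index and the surrounding BinaryCacheIndex logic:

    import time

    def fetch_index(url):
        # Stand-in for _fetch_and_cache_index; may raise on failure.
        return True

    class CooldownTracker:
        def __init__(self, ttl=600):
            self.ttl = ttl
            self.last_fetch_times = {}  # mirror url -> time.time() of last success

        def maybe_fetch(self, url, with_cooldown=False):
            now = time.time()
            last = self.last_fetch_times.get(url)
            if with_cooldown and self.ttl > 0 and last is not None and now - last < self.ttl:
                # Within the cooldown window: the fetch worked last time, so don't error.
                return False
            fetch_index(url)
            # Only reached on success, so a failed fetch never starts a cooldown window.
            self.last_fetch_times[url] = now
            return True

        def forget(self, url):
            # Mirror removed from the configuration: drop its timestamp too.
            self.last_fetch_times.pop(url, None)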

lib/spack/spack/schema/config.py

@@ -73,6 +73,7 @@
"binary_index_root": {"type": "string"},
"url_fetch_method": {"type": "string", "enum": ["urllib", "curl"]},
"additional_external_search_paths": {"type": "array", "items": {"type": "string"}},
"binary_index_ttl": {"type": "integer", "minimum": 0},
},
"deprecatedProperties": {
"properties": ["module_roots"],

lib/spack/spack/test/bindist.py

@@ -453,6 +453,7 @@ def test_generate_index_missing(monkeypatch, tmpdir, mutable_config):
# Update index
buildcache_cmd("update-index", "-d", mirror_dir.strpath)
with spack.config.override("config:binary_index_ttl", 0):
# Check dependency not in buildcache
cache_list = buildcache_cmd("list", "--allarch")
assert "libdwarf" in cache_list