binary_distribution.py: fix type annotation singleton (#40572)

Convince the language server that the module-level binary index really is a
BinaryCacheIndex; otherwise it falls back to the Singleton wrapper type and
cannot offer autocompletion and similar help.
Harmen Stoppels 2023-10-30 12:52:47 +01:00 committed by GitHub
parent d03289c38b
commit 2f3801196d
4 changed files with 21 additions and 27 deletions
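The pattern the commit relies on is a lazily constructed module-level singleton whose name is annotated with the concrete class it will eventually hold. Below is a minimal, self-contained sketch of that idea; the _LazySingleton proxy, the stubbed BinaryCacheIndex, and the "/tmp/spack-index-cache" default are illustrative stand-ins, not Spack's actual llnl.util.lang.Singleton or the real index class.

from typing import Callable, List, Optional


class _LazySingleton:
    """Illustrative lazy proxy: builds the wrapped object on first attribute access."""

    def __init__(self, factory: Callable[[], object]) -> None:
        self._factory = factory
        self._instance: Optional[object] = None

    def __getattr__(self, name: str):
        # Only called when normal lookup fails, i.e. for attributes of the
        # wrapped object, never for _factory/_instance themselves.
        if self._instance is None:
            self._instance = self._factory()
        return getattr(self._instance, name)


class BinaryCacheIndex:
    """Stand-in for the real class, just to keep the sketch self-contained."""

    def __init__(self, cache_root: Optional[str] = None) -> None:
        self.cache_root = cache_root or "/tmp/spack-index-cache"

    def get_all_built_specs(self) -> List[str]:
        return []


# Annotating the module-level name with the concrete class (and silencing the
# resulting assignment error) is what lets language servers resolve
# BINARY_INDEX.get_all_built_specs() and friends, even though at runtime the
# name is bound to the proxy rather than to a BinaryCacheIndex.
BINARY_INDEX: BinaryCacheIndex = _LazySingleton(BinaryCacheIndex)  # type: ignore

print(BINARY_INDEX.get_all_built_specs())  # proxy builds the index lazily -> []

Running the sketch prints an empty list; the point is that editors and type checkers now treat BINARY_INDEX exactly as they would a plain BinaryCacheIndex instance.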


@@ -25,7 +25,7 @@
 import warnings
 from contextlib import closing, contextmanager
 from gzip import GzipFile
-from typing import Dict, List, NamedTuple, Optional, Tuple, Union
+from typing import Dict, List, NamedTuple, Optional, Set, Tuple
 from urllib.error import HTTPError, URLError

 import llnl.util.filesystem as fsys
@@ -53,6 +53,7 @@
 import spack.util.crypto
 import spack.util.file_cache as file_cache
 import spack.util.gpg
+import spack.util.path
 import spack.util.spack_json as sjson
 import spack.util.spack_yaml as syaml
 import spack.util.timer as timer
@@ -130,25 +131,25 @@ class BinaryCacheIndex:
         mean we should have paid the price to update the cache earlier?
     """

-    def __init__(self, cache_root):
-        self._index_cache_root = cache_root
+    def __init__(self, cache_root: Optional[str] = None):
+        self._index_cache_root: str = cache_root or binary_index_location()

         # the key associated with the serialized _local_index_cache
         self._index_contents_key = "contents.json"

         # a FileCache instance storing copies of remote binary cache indices
-        self._index_file_cache = None
+        self._index_file_cache: Optional[file_cache.FileCache] = None

         # stores a map of mirror URL to index hash and cache key (index path)
-        self._local_index_cache = None
+        self._local_index_cache: Optional[dict] = None

         # hashes of remote indices already ingested into the concrete spec
         # cache (_mirrors_for_spec)
-        self._specs_already_associated = set()
+        self._specs_already_associated: Set[str] = set()

         # mapping from mirror urls to the time.time() of the last index fetch and a bool indicating
         # whether the fetch succeeded or not.
-        self._last_fetch_times = {}
+        self._last_fetch_times: Dict[str, float] = {}

         # _mirrors_for_spec is a dictionary mapping DAG hashes to lists of
         # entries indicating mirrors where that concrete spec can be found.
@@ -158,7 +159,7 @@ def __init__(self, cache_root):
         #   - the concrete spec itself, keyed by ``spec`` (including the
         #     full hash, since the dag hash may match but we want to
         #     use the updated source if available)
-        self._mirrors_for_spec = {}
+        self._mirrors_for_spec: Dict[str, dict] = {}

     def _init_local_index_cache(self):
         if not self._index_file_cache:
@@ -529,15 +530,8 @@ def binary_index_location():
     return spack.util.path.canonicalize_path(cache_root)


-def _binary_index():
-    """Get the singleton store instance."""
-    return BinaryCacheIndex(binary_index_location())
-
-
-#: Singleton binary_index instance
-binary_index: Union[BinaryCacheIndex, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(
-    _binary_index
-)
+#: Default binary cache index instance
+BINARY_INDEX: BinaryCacheIndex = llnl.util.lang.Singleton(BinaryCacheIndex)  # type: ignore


 class NoOverwriteException(spack.error.SpackError):
@@ -2255,7 +2249,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
         tty.debug("No Spack mirrors are currently configured")
         return {}

-    results = binary_index.find_built_spec(spec, mirrors_to_check=mirrors_to_check)
+    results = BINARY_INDEX.find_built_spec(spec, mirrors_to_check=mirrors_to_check)

     # The index may be out-of-date. If we aren't only considering indices, try
     # to fetch directly since we know where the file should be.
@@ -2264,7 +2258,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
         # We found a spec by the direct fetch approach, we might as well
         # add it to our mapping.
         if results:
-            binary_index.update_spec(spec, results)
+            BINARY_INDEX.update_spec(spec, results)

     return results
@@ -2280,12 +2274,12 @@ def update_cache_and_get_specs():
     Throws:
         FetchCacheError
     """
-    binary_index.update()
-    return binary_index.get_all_built_specs()
+    BINARY_INDEX.update()
+    return BINARY_INDEX.get_all_built_specs()


 def clear_spec_cache():
-    binary_index.clear()
+    BINARY_INDEX.clear()


 def get_keys(install=False, trust=False, force=False, mirrors=None):


@@ -214,7 +214,7 @@ def _install_and_test(
         with spack.config.override(self.mirror_scope):
             # This index is currently needed to get the compiler used to build some
             # specs that we know by dag hash.
-            spack.binary_distribution.binary_index.regenerate_spec_cache()
+            spack.binary_distribution.BINARY_INDEX.regenerate_spec_cache()
             index = spack.binary_distribution.update_cache_and_get_specs()

             if not index:


@@ -932,7 +932,7 @@ def generate_gitlab_ci_yaml(
     # Speed up staging by first fetching binary indices from all mirrors
     try:
-        bindist.binary_index.update()
+        bindist.BINARY_INDEX.update()
     except bindist.FetchCacheError as e:
         tty.warn(e)


@@ -495,7 +495,7 @@ def mock_binary_index(monkeypatch, tmpdir_factory):
     tmpdir = tmpdir_factory.mktemp("mock_binary_index")
     index_path = tmpdir.join("binary_index").strpath
     mock_index = spack.binary_distribution.BinaryCacheIndex(index_path)
-    monkeypatch.setattr(spack.binary_distribution, "binary_index", mock_index)
+    monkeypatch.setattr(spack.binary_distribution, "BINARY_INDEX", mock_index)

     yield
@@ -1710,8 +1710,8 @@ def inode_cache():
 @pytest.fixture(autouse=True)
 def brand_new_binary_cache():
     yield
-    spack.binary_distribution.binary_index = llnl.util.lang.Singleton(
-        spack.binary_distribution._binary_index
+    spack.binary_distribution.BINARY_INDEX = llnl.util.lang.Singleton(
+        spack.binary_distribution.BinaryCacheIndex
     )
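
For code that consumes the index, the net effect is that static tooling treats the module attribute as the concrete class. A small usage sketch, assuming a Spack checkout containing these changes is on the Python path; the calls mirror the update() and get_all_built_specs() methods touched in the diff above:

import spack.binary_distribution as bindist

# Both calls now autocomplete and type-check as BinaryCacheIndex methods
# instead of falling back to the opaque Singleton wrapper.
bindist.BINARY_INDEX.update()
all_specs = bindist.BINARY_INDEX.get_all_built_specs()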