init: Move file cache initialization out of __init__.py

Todd Gamblin 2018-04-13 21:39:39 -07:00 committed by scheibelp
parent d3a1ccd2fa
commit eee502cc3b
9 changed files with 102 additions and 42 deletions


@@ -23,12 +23,9 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-import os
 import sys
 import multiprocessing
-from spack.paths import var_path, user_config_path
 #-----------------------------------------------------------------------------
 # Below code imports spack packages.
 #-----------------------------------------------------------------------------
@@ -44,20 +41,8 @@
 from spack.util.path import canonicalize_path
-# handle basic configuration first
 _config = spack.config.get_config('config')
-# Path where downloaded source code is cached
-cache_path = canonicalize_path(
-    _config.get('source_cache', os.path.join(var_path, "cache")))
-# cache for miscellaneous stuff.
-misc_cache_path = canonicalize_path(
-    _config.get('misc_cache', os.path.join(user_config_path, 'cache')))
 # TODO: get this out of __init__.py
 binary_cache_retrieved_specs = set()
@@ -102,13 +87,6 @@
 spack_version = Version("0.11.2")
-# set up the caches after getting all config options
-import spack.fetch_strategy
-from spack.file_cache import FileCache
-misc_cache = FileCache(misc_cache_path)
-fetch_cache = spack.fetch_strategy.FsCache(cache_path)
 # Set up the default packages database.
 import spack.error
 try:

lib/spack/spack/caches.py (new file)

@@ -0,0 +1,73 @@
+##############################################################################
+# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/spack/spack
+# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""Caches used by Spack to store data"""
+import os
+
+import spack.paths
+import spack.config
+import spack.fetch_strategy
+from spack.util.path import canonicalize_path
+from spack.util.file_cache import FileCache
+
+
+_misc_cache = None
+_fetch_cache = None
+
+
+def misc_cache():
+    """The ``misc_cache`` is Spack's cache for small data.
+
+    Currently the ``misc_cache`` stores indexes for virtual dependency
+    providers and for which packages provide which tags.
+    """
+    global _misc_cache
+
+    if _misc_cache is None:
+        config = spack.config.get_config('config')
+        path = config.get('misc_cache')
+        if not path:
+            path = os.path.join(spack.paths.user_config_path, 'cache')
+        path = canonicalize_path(path)
+        _misc_cache = FileCache(path)
+
+    return _misc_cache
+
+
+def fetch_cache():
+    """Filesystem cache of downloaded archives.
+
+    This prevents Spack from repeatedly fetch the same files when
+    building the same package different ways or multiple times.
+    """
+    global _fetch_cache
+
+    if _fetch_cache is None:
+        config = spack.config.get_config('config')
+        path = config.get('source_cache')
+        if not path:
+            path = os.path.join(spack.paths.var_path, "cache")
+        path = canonicalize_path(path)
+        _fetch_cache = spack.fetch_strategy.FsCache(path)
+
+    return _fetch_cache
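
The new module is a lazy module-level singleton: nothing is built at import time, the cache object is constructed on the first accessor call (after configuration has been read), and later calls return the same object. A minimal, self-contained sketch of that pattern, separate from the diff and using illustrative names only:

_cache = None


def get_cache(factory=dict):
    """Return the process-wide cache, building it on first use."""
    global _cache
    if _cache is None:
        _cache = factory()   # deferred until someone actually needs the cache
    return _cache


# Call sites go through the accessor, as spack.caches.misc_cache() does below:
cache = get_cache()
cache['providers/builtin-index.yaml'] = 'cached index data'
assert get_cache() is cache   # later calls reuse the same object

Because initialization is deferred to the accessor, importing the module no longer forces Spack's configuration to be loaded, which is the point of moving this code out of __init__.py.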


@@ -27,6 +27,7 @@
 import llnl.util.tty as tty
 import spack
+import spack.caches
 import spack.cmd
 description = "remove temporary build files and/or downloaded archives"
@@ -81,8 +82,8 @@ def clean(parser, args):
     if args.downloads:
         tty.msg('Removing cached downloads')
-        spack.fetch_cache.destroy()
+        spack.caches.fetch_cache().destroy()
     if args.misc_cache:
         tty.msg('Removing cached information on repositories')
-        spack.misc_cache.destroy()
+        spack.caches.misc_cache().destroy()


@@ -48,6 +48,7 @@
 from llnl.util.filesystem import mkdirp, join_path, install
 import spack
+import spack.caches
 import spack.error
 import spack.spec
 from spack.provider_index import ProviderIndex
@@ -252,7 +253,8 @@ def make_provider_index_cache(packages_path, namespace):
     cache_filename = 'providers/{0}-index.yaml'.format(namespace)
     # Compute which packages needs to be updated in the cache
-    index_mtime = spack.misc_cache.mtime(cache_filename)
+    misc_cache = spack.caches.misc_cache()
+    index_mtime = misc_cache.mtime(cache_filename)
     needs_update = [
         x for x, sinfo in fast_package_checker.items()
@@ -260,19 +262,19 @@ def make_provider_index_cache(packages_path, namespace):
     ]
     # Read the old ProviderIndex, or make a new one.
-    index_existed = spack.misc_cache.init_entry(cache_filename)
+    index_existed = misc_cache.init_entry(cache_filename)
     if index_existed and not needs_update:
         # If the provider index exists and doesn't need an update
         # just read from it
-        with spack.misc_cache.read_transaction(cache_filename) as f:
+        with misc_cache.read_transaction(cache_filename) as f:
             index = ProviderIndex.from_yaml(f)
     else:
         # Otherwise we need a write transaction to update it
-        with spack.misc_cache.write_transaction(cache_filename) as (old, new):
+        with misc_cache.write_transaction(cache_filename) as (old, new):
             index = ProviderIndex.from_yaml(old) if old else ProviderIndex()
@@ -305,7 +307,8 @@ def make_tag_index_cache(packages_path, namespace):
     cache_filename = 'tags/{0}-index.json'.format(namespace)
     # Compute which packages needs to be updated in the cache
-    index_mtime = spack.misc_cache.mtime(cache_filename)
+    misc_cache = spack.caches.misc_cache()
+    index_mtime = misc_cache.mtime(cache_filename)
     needs_update = [
         x for x, sinfo in fast_package_checker.items()
@@ -313,19 +316,19 @@ def make_tag_index_cache(packages_path, namespace):
     ]
     # Read the old ProviderIndex, or make a new one.
-    index_existed = spack.misc_cache.init_entry(cache_filename)
+    index_existed = misc_cache.init_entry(cache_filename)
     if index_existed and not needs_update:
         # If the provider index exists and doesn't need an update
         # just read from it
-        with spack.misc_cache.read_transaction(cache_filename) as f:
+        with misc_cache.read_transaction(cache_filename) as f:
             index = TagIndex.from_json(f)
     else:
         # Otherwise we need a write transaction to update it
-        with spack.misc_cache.write_transaction(cache_filename) as (old, new):
+        with misc_cache.write_transaction(cache_filename) as (old, new):
             index = TagIndex.from_json(old) if old else TagIndex()
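
Both index builders above follow the same FileCache discipline: compare the cache entry's mtime against the package sources, read the entry inside a read transaction when it is still fresh, and rebuild it inside a write transaction otherwise. A condensed sketch of that flow, not part of the commit, assuming a Spack checkout with lib/spack on sys.path; the key, mtime list, and build/parse callables are placeholders:

import spack.caches


def load_or_rebuild(key, source_mtimes, build, parse):
    """Return a cached index, rebuilding it when any source is newer."""
    cache = spack.caches.misc_cache()

    stale = any(m > cache.mtime(key) for m in source_mtimes)
    existed = cache.init_entry(key)

    if existed and not stale:
        # Entry is present and up to date: read it in a read transaction.
        with cache.read_transaction(key) as f:
            return parse(f)

    # Entry is missing or stale: rebuild it in a write transaction.
    with cache.write_transaction(key) as (old, new):
        index = parse(old) if old else build()
        # ... refresh stale entries and write the index out through `new` ...
        return index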


@@ -39,6 +39,7 @@
 from llnl.util.filesystem import remove_if_dead_link, remove_linked_tree
 import spack.paths
+import spack.caches
 import spack.config
 import spack.error
 import spack.fetch_strategy as fs
@@ -408,7 +409,7 @@ def fetch(self, mirror_only=False):
                         url, digest, expand=expand, extension=extension))
             if self.default_fetcher.cachable:
                 fetchers.insert(
-                    0, spack.fetch_cache.fetcher(
+                    0, spack.caches.fetch_cache().fetcher(
                         self.mirror_path, digest, expand=expand,
                         extension=extension))
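
The hunk above preserves Stage.fetch's ordering: when the default fetcher is cachable, a fetcher for the local source cache is inserted ahead of it, so the cache is consulted before any network fetch. A toy, self-contained illustration of that fallback ordering, with plain callables standing in for Spack's fetcher objects:

def fetch_with_cache(url_fetcher, cache_fetcher=None):
    """Try fetchers in order, preferring the local cache when one exists."""
    fetchers = [url_fetcher]
    if cache_fetcher is not None:
        fetchers.insert(0, cache_fetcher)   # cache first, like Stage.fetch

    errors = []
    for fetcher in fetchers:
        try:
            return fetcher()
        except IOError as e:
            errors.append(e)                # fall through to the next source
    raise IOError("all fetchers failed: {0}".format(errors))


# The cache "hits" here, so the URL fetcher is never called.
print(fetch_with_cache(
    url_fetcher=lambda: "fetched from the original URL",
    cache_fetcher=lambda: "read from the local source cache"))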
@@ -455,7 +456,7 @@ def check(self):
             self.fetcher.check()
     def cache_local(self):
-        spack.fetch_cache.store(self.fetcher, self.mirror_path)
+        spack.caches.fetch_cache().store(self.fetcher, self.mirror_path)
     def expand_archive(self):
         """Changes to the stage directory and attempt to expand the downloaded


@@ -23,7 +23,8 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 import pytest
-import spack
+import spack.stage
+import spack.caches
 import spack.main
 import spack.package
@@ -42,8 +43,10 @@ def __call__(self, *args, **kwargs):
     monkeypatch.setattr(spack.package.PackageBase, 'do_clean', Counter())
     monkeypatch.setattr(spack.stage, 'purge', Counter())
-    monkeypatch.setattr(spack.fetch_cache, 'destroy', Counter(), raising=False)
-    monkeypatch.setattr(spack.misc_cache, 'destroy', Counter())
+    monkeypatch.setattr(
+        spack.caches._fetch_cache, 'destroy', Counter(), raising=False)
+    monkeypatch.setattr(
+        spack.caches._misc_cache, 'destroy', Counter())
 @pytest.mark.usefixtures(
@@ -66,5 +69,5 @@ def test_function_calls(command_line, counters):
     # number of times
     assert spack.package.PackageBase.do_clean.call_count == counters[0]
     assert spack.stage.purge.call_count == counters[1]
-    assert spack.fetch_cache.destroy.call_count == counters[2]
-    assert spack.misc_cache.destroy.call_count == counters[3]
+    assert spack.caches.fetch_cache().destroy.call_count == counters[2]
+    assert spack.caches.misc_cache().destroy.call_count == counters[3]
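
The Counter stub being monkeypatched in above is simply a callable that remembers how many times it was invoked, which is what the call_count assertions check. A self-contained sketch of that helper, with the spack.caches fixture wiring omitted:

class Counter(object):
    """Callable stub that counts its invocations."""

    def __init__(self):
        self.call_count = 0

    def __call__(self, *args, **kwargs):
        self.call_count += 1


def test_counter_counts_calls():
    destroy = Counter()         # stands in for fetch_cache().destroy
    destroy()
    destroy('extra', arg=True)  # arguments are accepted and ignored
    assert destroy.call_count == 2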


@@ -36,10 +36,11 @@
 from llnl.util.filesystem import remove_linked_tree
 import spack
-import spack.paths
 import spack.architecture
+import spack.caches
 import spack.database
 import spack.directory_layout
+import spack.paths
 import spack.platforms.test
 import spack.repository
 import spack.stage
@@ -155,7 +156,7 @@ def fetch(self):
         def __str__(self):
             return "[mock fetch cache]"
-    monkeypatch.setattr(spack, 'fetch_cache', MockCache())
+    monkeypatch.setattr(spack.caches, '_fetch_cache', MockCache())
     # FIXME: The lines below should better be added to a fixture with


@@ -26,7 +26,7 @@
 import os
 import pytest
-from spack.file_cache import FileCache
+from spack.util.file_cache import FileCache
 @pytest.fixture()