Refactor a few classes related to package repositories (#32273)
- Caches used by repositories no longer reference the global spack.repo.path instance; they receive the repository they refer to at initialization.
- Spec.virtual now uses the provider index, and the computations that build the index use Repository.is_virtual_safe.
- Code to construct mock packages and mock repositories has been factored into a single MockRepositoryBuilder that is used throughout the codebase.
- Added debug prints for pushing and popping config scopes.
- Changed spack.repo.use_repositories so that it can either override or extend the previously configured repositories; it now updates spack.config.config to reflect the change.
- Removed a peculiar behavior of spack.config.Configuration where push_scope would always bubble a scope named "command_line" back to the top if it existed.
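The intended test workflow is to build a throw-away repository with MockRepositoryBuilder and activate it with use_repositories(..., override=False). The sketch below is illustrative rather than taken from this commit; the package names are made up, but the calls mirror the API introduced here:

```python
import spack.repo
import spack.spec


def test_with_temporary_repo(tmpdir):
    # Build a throw-away package repository on disk; a random namespace is
    # generated unless one is passed explicitly.
    builder = spack.repo.MockRepositoryBuilder(tmpdir)
    builder.add_package("z")
    builder.add_package("y", dependencies=[("z", None, None)])

    # override=False keeps the repositories already configured and puts the
    # new one on top; the context manager also pushes (and later removes) an
    # internal config scope so spack.config.config stays consistent.
    with spack.repo.use_repositories(builder.root, override=False):
        spec = spack.spec.Spec("y").concretized()
        assert "z" in spec
```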
Parent: b594c0aee0
Commit: de8c827983
29 changed files with 476 additions and 675 deletions
@@ -54,7 +54,6 @@
r"^share/spack/.*\.fish$",
r"^share/spack/qa/run-[^/]*$",
r"^share/spack/bash/spack-completion.in$",
r"^share/spack/templates/misc/coconcretization.pyt$",
# action workflows
r"^.github/actions/.*\.py$",
# all packages
@ -17,7 +17,6 @@
|
|||
from __future__ import print_function
|
||||
|
||||
import functools
|
||||
import os.path
|
||||
import platform
|
||||
import tempfile
|
||||
from contextlib import contextmanager
|
||||
|
@ -25,7 +24,6 @@
|
|||
|
||||
import archspec.cpu
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.lang
|
||||
import llnl.util.tty as tty
|
||||
|
||||
|
@@ -753,37 +751,20 @@ def _concretize_specs_together_new(*abstract_specs, **kwargs):


def _concretize_specs_together_original(*abstract_specs, **kwargs):
def make_concretization_repository(abstract_specs):
"""Returns the path to a temporary repository created to contain
a fake package that depends on all of the abstract specs.
"""
abstract_specs = [spack.spec.Spec(s) for s in abstract_specs]
tmpdir = tempfile.mkdtemp()
repo_path, _ = spack.repo.create_repo(tmpdir)

debug_msg = "[CONCRETIZATION]: Creating helper repository in {0}"
tty.debug(debug_msg.format(repo_path))

pkg_dir = os.path.join(repo_path, "packages", "concretizationroot")
fs.mkdirp(pkg_dir)
environment = spack.tengine.make_environment()
template = environment.get_template("misc/coconcretization.pyt")

builder = spack.repo.MockRepositoryBuilder(tmpdir)
# Split recursive specs, as it seems the concretizer has issue
# respecting conditions on dependents expressed like
# depends_on('foo ^bar@1.0'), see issue #11160
split_specs = [
dep.copy(deps=False) for spec in abstract_specs for dep in spec.traverse(root=True)
dep.copy(deps=False) for spec1 in abstract_specs for dep in spec1.traverse(root=True)
]
builder.add_package(
"concretizationroot", dependencies=[(str(x), None, None) for x in split_specs]
)

with open(os.path.join(pkg_dir, "package.py"), "w") as f:
f.write(template.render(specs=[str(s) for s in split_specs]))

return spack.repo.Repo(repo_path)

abstract_specs = [spack.spec.Spec(s) for s in abstract_specs]
concretization_repository = make_concretization_repository(abstract_specs)

with spack.repo.additional_repository(concretization_repository):
with spack.repo.use_repositories(builder.root, override=False):
# Spec from a helper package that depends on all the abstract_specs
concretization_root = spack.spec.Spec("concretizationroot")
concretization_root.concretize(tests=kwargs.get("tests", False))
@@ -409,28 +409,22 @@ def __init__(self, *scopes):
@_config_mutator
def push_scope(self, scope):
"""Add a higher precedence scope to the Configuration."""
cmd_line_scope = None
if self.scopes:
highest_precedence_scope = list(self.scopes.values())[-1]
if highest_precedence_scope.name == "command_line":
# If the command-line scope is present, it should always
# be the scope of highest precedence
cmd_line_scope = self.pop_scope()

tty.debug("[CONFIGURATION: PUSH SCOPE]: {}".format(str(scope)), level=2)
self.scopes[scope.name] = scope
if cmd_line_scope:
self.scopes["command_line"] = cmd_line_scope

@_config_mutator
def pop_scope(self):
"""Remove the highest precedence scope and return it."""
name, scope = self.scopes.popitem(last=True)
tty.debug("[CONFIGURATION: POP SCOPE]: {}".format(str(scope)), level=2)
return scope

@_config_mutator
def remove_scope(self, scope_name):
"""Remove scope by name; has no effect when ``scope_name`` does not exist"""
return self.scopes.pop(scope_name, None)
scope = self.scopes.pop(scope_name, None)
tty.debug("[CONFIGURATION: POP SCOPE]: {}".format(str(scope)), level=2)
return scope

@property
def file_scopes(self):
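With the special-casing of the "command_line" scope gone, push_scope and pop_scope behave as a plain stack, and both now log at debug level 2. A minimal illustrative sketch (the scope name and data below are made up, not from this commit):

```python
import spack.config

scope = spack.config.InternalConfigScope("temporary-overrides", {"config": {"verify_ssl": False}})
spack.config.config.push_scope(scope)  # logs "[CONFIGURATION: PUSH SCOPE]: ..." when run with -dd
try:
    assert not spack.config.get("config:verify_ssl")
finally:
    spack.config.config.remove_scope(scope.name)  # logs "[CONFIGURATION: POP SCOPE]: ..."
```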
@@ -577,7 +577,14 @@ def setup_main_options(args):
spack.config.set("config:locks", args.locks, scope="command_line")

if args.mock:
spack.repo.path = spack.repo.RepoPath(spack.paths.mock_packages_path)
import spack.util.spack_yaml as syaml

key = syaml.syaml_str("repos")
key.override = True
spack.config.config.scopes["command_line"].sections["repos"] = syaml.syaml_dict(
[(key, [spack.paths.mock_packages_path])]
)
spack.repo.path = spack.repo.create(spack.config.config)

# If the user asked for it, don't check ssl certs.
if args.insecure:
@@ -271,12 +271,13 @@ def to_dict(self):
return data


def from_dict(dictionary):
def from_dict(dictionary, repository=None):
"""Create a patch from json dictionary."""
repository = repository or spack.repo.path
owner = dictionary.get("owner")
if "owner" not in dictionary:
raise ValueError("Invalid patch dictionary: %s" % dictionary)
pkg_cls = spack.repo.path.get_pkg_class(owner)
pkg_cls = repository.get_pkg_class(owner)

if "url" in dictionary:
return UrlPatch(
@ -329,7 +330,7 @@ class PatchCache(object):
|
|||
|
||||
"""
|
||||
|
||||
def __init__(self, data=None):
|
||||
def __init__(self, repository, data=None):
|
||||
if data is None:
|
||||
self.index = {}
|
||||
else:
|
||||
|
@ -337,9 +338,11 @@ def __init__(self, data=None):
|
|||
raise IndexError("invalid patch index; try `spack clean -m`")
|
||||
self.index = data["patches"]
|
||||
|
||||
self.repository = repository
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, stream):
|
||||
return PatchCache(sjson.load(stream))
|
||||
def from_json(cls, stream, repository):
|
||||
return PatchCache(repository=repository, data=sjson.load(stream))
|
||||
|
||||
def to_json(self, stream):
|
||||
sjson.dump({"patches": self.index}, stream)
|
||||
|
@ -375,7 +378,7 @@ def patch_for_package(self, sha256, pkg):
|
|||
# because it's the index key)
|
||||
patch_dict = dict(patch_dict)
|
||||
patch_dict["sha256"] = sha256
|
||||
return from_dict(patch_dict)
|
||||
return from_dict(patch_dict, repository=self.repository)
|
||||
|
||||
def update_package(self, pkg_fullname):
|
||||
# remove this package from any patch entries that reference it.
|
||||
|
@ -397,8 +400,8 @@ def update_package(self, pkg_fullname):
|
|||
del self.index[sha256]
|
||||
|
||||
# update the index with per-package patch indexes
|
||||
pkg_cls = spack.repo.path.get_pkg_class(pkg_fullname)
|
||||
partial_index = self._index_patches(pkg_cls)
|
||||
pkg_cls = self.repository.get_pkg_class(pkg_fullname)
|
||||
partial_index = self._index_patches(pkg_cls, self.repository)
|
||||
for sha256, package_to_patch in partial_index.items():
|
||||
p2p = self.index.setdefault(sha256, {})
|
||||
p2p.update(package_to_patch)
|
||||
|
@ -410,7 +413,7 @@ def update(self, other):
|
|||
p2p.update(package_to_patch)
|
||||
|
||||
@staticmethod
|
||||
def _index_patches(pkg_class):
|
||||
def _index_patches(pkg_class, repository):
|
||||
index = {}
|
||||
|
||||
# Add patches from the class
|
||||
|
@ -425,7 +428,7 @@ def _index_patches(pkg_class):
|
|||
for cond, dependency in conditions.items():
|
||||
for pcond, patch_list in dependency.patches.items():
|
||||
for patch in patch_list:
|
||||
dspec_cls = spack.repo.path.get_pkg_class(dependency.spec.name)
|
||||
dspec_cls = repository.get_pkg_class(dependency.spec.name)
|
||||
patch_dict = patch.to_dict()
|
||||
patch_dict.pop("sha256") # save some space
|
||||
index[patch.sha256] = {dspec_cls.fullname: patch_dict}
|
||||
|
|
|
@ -129,7 +129,7 @@ def __repr__(self):
|
|||
|
||||
|
||||
class ProviderIndex(_IndexBase):
|
||||
def __init__(self, specs=None, restrict=False):
|
||||
def __init__(self, repository, specs=None, restrict=False):
|
||||
"""Provider index based on a single mapping of providers.
|
||||
|
||||
Args:
|
||||
|
@ -143,17 +143,16 @@ def __init__(self, specs=None, restrict=False):
|
|||
TODO: as possible without overly restricting results, so it is
|
||||
TODO: not the best name.
|
||||
"""
|
||||
if specs is None:
|
||||
specs = []
|
||||
|
||||
self.repository = repository
|
||||
self.restrict = restrict
|
||||
self.providers = {}
|
||||
|
||||
specs = specs or []
|
||||
for spec in specs:
|
||||
if not isinstance(spec, spack.spec.Spec):
|
||||
spec = spack.spec.Spec(spec)
|
||||
|
||||
if spec.virtual:
|
||||
if self.repository.is_virtual_safe(spec.name):
|
||||
continue
|
||||
|
||||
self.update(spec)
|
||||
|
@ -171,9 +170,10 @@ def update(self, spec):
|
|||
# Empty specs do not have a package
|
||||
return
|
||||
|
||||
assert not spec.virtual, "cannot update an index using a virtual spec"
|
||||
msg = "cannot update an index passing the virtual spec '{}'".format(spec.name)
|
||||
assert not self.repository.is_virtual_safe(spec.name), msg
|
||||
|
||||
pkg_provided = spec.package_class.provided
|
||||
pkg_provided = self.repository.get_pkg_class(spec.name).provided
|
||||
for provided_spec, provider_specs in six.iteritems(pkg_provided):
|
||||
for provider_spec in provider_specs:
|
||||
# TODO: fix this comment.
|
||||
|
@ -262,12 +262,12 @@ def remove_provider(self, pkg_name):
|
|||
|
||||
def copy(self):
|
||||
"""Return a deep copy of this index."""
|
||||
clone = ProviderIndex()
|
||||
clone = ProviderIndex(repository=self.repository)
|
||||
clone.providers = self._transform(lambda vpkg, pset: (vpkg, set((p.copy() for p in pset))))
|
||||
return clone
|
||||
|
||||
@staticmethod
|
||||
def from_json(stream):
|
||||
def from_json(stream, repository):
|
||||
"""Construct a provider index from its JSON representation.
|
||||
|
||||
Args:
|
||||
|
@ -281,7 +281,7 @@ def from_json(stream):
|
|||
if "provider_index" not in data:
|
||||
raise ProviderIndexError("YAML ProviderIndex does not start with 'provider_index'")
|
||||
|
||||
index = ProviderIndex()
|
||||
index = ProviderIndex(repository=repository)
|
||||
providers = data["provider_index"]["providers"]
|
||||
index.providers = _transform(
|
||||
providers,
|
||||
|
|
|
@ -12,13 +12,16 @@
|
|||
import itertools
|
||||
import os
|
||||
import os.path
|
||||
import random
|
||||
import re
|
||||
import shutil
|
||||
import stat
|
||||
import string
|
||||
import sys
|
||||
import tempfile
|
||||
import traceback
|
||||
import types
|
||||
import uuid
|
||||
from typing import Dict # novm
|
||||
|
||||
import ruamel.yaml as yaml
|
||||
|
@ -37,6 +40,7 @@
|
|||
import spack.provider_index
|
||||
import spack.spec
|
||||
import spack.tag
|
||||
import spack.util.file_cache
|
||||
import spack.util.naming as nm
|
||||
import spack.util.path
|
||||
from spack.util.executable import which
|
||||
|
@ -576,6 +580,10 @@ def __len__(self):
|
|||
class Indexer(object):
|
||||
"""Adaptor for indexes that need to be generated when repos are updated."""
|
||||
|
||||
def __init__(self, repository):
|
||||
self.repository = repository
|
||||
self.index = None
|
||||
|
||||
def create(self):
|
||||
self.index = self._create()
|
||||
|
||||
|
@ -616,10 +624,10 @@ class TagIndexer(Indexer):
|
|||
"""Lifecycle methods for a TagIndex on a Repo."""
|
||||
|
||||
def _create(self):
|
||||
return spack.tag.TagIndex()
|
||||
return spack.tag.TagIndex(self.repository)
|
||||
|
||||
def read(self, stream):
|
||||
self.index = spack.tag.TagIndex.from_json(stream)
|
||||
self.index = spack.tag.TagIndex.from_json(stream, self.repository)
|
||||
|
||||
def update(self, pkg_fullname):
|
||||
self.index.update_package(pkg_fullname)
|
||||
|
@ -632,14 +640,17 @@ class ProviderIndexer(Indexer):
|
|||
"""Lifecycle methods for virtual package providers."""
|
||||
|
||||
def _create(self):
|
||||
return spack.provider_index.ProviderIndex()
|
||||
return spack.provider_index.ProviderIndex(repository=self.repository)
|
||||
|
||||
def read(self, stream):
|
||||
self.index = spack.provider_index.ProviderIndex.from_json(stream)
|
||||
self.index = spack.provider_index.ProviderIndex.from_json(stream, self.repository)
|
||||
|
||||
def update(self, pkg_fullname):
|
||||
name = pkg_fullname.split(".")[-1]
|
||||
if spack.repo.path.is_virtual(name, use_index=False):
|
||||
is_virtual = (
|
||||
not self.repository.exists(name) or self.repository.get_pkg_class(name).virtual
|
||||
)
|
||||
if is_virtual:
|
||||
return
|
||||
self.index.remove_provider(pkg_fullname)
|
||||
self.index.update(pkg_fullname)
|
||||
|
@ -652,7 +663,7 @@ class PatchIndexer(Indexer):
|
|||
"""Lifecycle methods for patch cache."""
|
||||
|
||||
def _create(self):
|
||||
return spack.patch.PatchCache()
|
||||
return spack.patch.PatchCache(repository=self.repository)
|
||||
|
||||
def needs_update(self):
|
||||
# TODO: patches can change under a package and we should handle
|
||||
|
@ -662,7 +673,7 @@ def needs_update(self):
|
|||
return False
|
||||
|
||||
def read(self, stream):
|
||||
self.index = spack.patch.PatchCache.from_json(stream)
|
||||
self.index = spack.patch.PatchCache.from_json(stream, repository=self.repository)
|
||||
|
||||
def write(self, stream):
|
||||
self.index.to_json(stream)
|
||||
|
@ -687,7 +698,7 @@ class RepoIndex(object):
|
|||
|
||||
"""
|
||||
|
||||
def __init__(self, package_checker, namespace):
|
||||
def __init__(self, package_checker, namespace, cache):
|
||||
self.checker = package_checker
|
||||
self.packages_path = self.checker.packages_path
|
||||
if sys.platform == "win32":
|
||||
|
@ -696,6 +707,7 @@ def __init__(self, package_checker, namespace):
|
|||
|
||||
self.indexers = {}
|
||||
self.indexes = {}
|
||||
self.cache = cache
|
||||
|
||||
def add_indexer(self, name, indexer):
|
||||
"""Add an indexer to the repo index.
|
||||
|
@ -740,24 +752,23 @@ def _build_index(self, name, indexer):
|
|||
cache_filename = "{0}/{1}-index.json".format(name, self.namespace)
|
||||
|
||||
# Compute which packages needs to be updated in the cache
|
||||
misc_cache = spack.caches.misc_cache
|
||||
index_mtime = misc_cache.mtime(cache_filename)
|
||||
index_mtime = self.cache.mtime(cache_filename)
|
||||
needs_update = self.checker.modified_since(index_mtime)
|
||||
|
||||
index_existed = misc_cache.init_entry(cache_filename)
|
||||
index_existed = self.cache.init_entry(cache_filename)
|
||||
if index_existed and not needs_update:
|
||||
# If the index exists and doesn't need an update, read it
|
||||
with misc_cache.read_transaction(cache_filename) as f:
|
||||
with self.cache.read_transaction(cache_filename) as f:
|
||||
indexer.read(f)
|
||||
|
||||
else:
|
||||
# Otherwise update it and rewrite the cache file
|
||||
with misc_cache.write_transaction(cache_filename) as (old, new):
|
||||
with self.cache.write_transaction(cache_filename) as (old, new):
|
||||
indexer.read(old) if old else indexer.create()
|
||||
|
||||
# Compute which packages needs to be updated **again** in case someone updated them
|
||||
# while we waited for the lock
|
||||
new_index_mtime = misc_cache.mtime(cache_filename)
|
||||
new_index_mtime = self.cache.mtime(cache_filename)
|
||||
if new_index_mtime != index_mtime:
|
||||
needs_update = self.checker.modified_since(new_index_mtime)
|
||||
|
||||
|
@ -781,7 +792,8 @@ class RepoPath(object):
|
|||
repos (list): list Repo objects or paths to put in this RepoPath
|
||||
"""
|
||||
|
||||
def __init__(self, *repos):
|
||||
def __init__(self, *repos, **kwargs):
|
||||
cache = kwargs.get("cache", spack.caches.misc_cache)
|
||||
self.repos = []
|
||||
self.by_namespace = nm.NamespaceTrie()
|
||||
|
||||
|
@ -793,7 +805,7 @@ def __init__(self, *repos):
|
|||
for repo in repos:
|
||||
try:
|
||||
if isinstance(repo, six.string_types):
|
||||
repo = Repo(repo)
|
||||
repo = Repo(repo, cache=cache)
|
||||
self.put_last(repo)
|
||||
except RepoError as e:
|
||||
tty.warn(
|
||||
|
@ -884,7 +896,7 @@ def all_package_classes(self):
|
|||
def provider_index(self):
|
||||
"""Merged ProviderIndex from all Repos in the RepoPath."""
|
||||
if self._provider_index is None:
|
||||
self._provider_index = spack.provider_index.ProviderIndex()
|
||||
self._provider_index = spack.provider_index.ProviderIndex(repository=self)
|
||||
for repo in reversed(self.repos):
|
||||
self._provider_index.merge(repo.provider_index)
|
||||
|
||||
|
@ -894,7 +906,7 @@ def provider_index(self):
|
|||
def tag_index(self):
|
||||
"""Merged TagIndex from all Repos in the RepoPath."""
|
||||
if self._tag_index is None:
|
||||
self._tag_index = spack.tag.TagIndex()
|
||||
self._tag_index = spack.tag.TagIndex(repository=self)
|
||||
for repo in reversed(self.repos):
|
||||
self._tag_index.merge(repo.tag_index)
|
||||
|
||||
|
@ -904,7 +916,7 @@ def tag_index(self):
|
|||
def patch_index(self):
|
||||
"""Merged PatchIndex from all Repos in the RepoPath."""
|
||||
if self._patch_index is None:
|
||||
self._patch_index = spack.patch.PatchCache()
|
||||
self._patch_index = spack.patch.PatchCache(repository=self)
|
||||
for repo in reversed(self.repos):
|
||||
self._patch_index.update(repo.patch_index)
|
||||
|
||||
|
@ -933,7 +945,6 @@ def repo_for_pkg(self, spec):
|
|||
"""Given a spec, get the repository for its package."""
|
||||
# We don't @_autospec this function b/c it's called very frequently
|
||||
# and we want to avoid parsing str's into Specs unnecessarily.
|
||||
namespace = None
|
||||
if isinstance(spec, spack.spec.Spec):
|
||||
namespace = spec.namespace
|
||||
name = spec.name
|
||||
|
@ -946,7 +957,7 @@ def repo_for_pkg(self, spec):
|
|||
if namespace:
|
||||
fullspace = python_package_for_repo(namespace)
|
||||
if fullspace not in self.by_namespace:
|
||||
raise UnknownNamespaceError(namespace)
|
||||
raise UnknownNamespaceError(namespace, name=name)
|
||||
return self.by_namespace[fullspace]
|
||||
|
||||
# If there's no namespace, search in the RepoPath.
|
||||
|
@@ -991,20 +1002,34 @@ def exists(self, pkg_name):
"""
return any(repo.exists(pkg_name) for repo in self.repos)

def is_virtual(self, pkg_name, use_index=True):
"""True if the package with this name is virtual, False otherwise.

Set `use_index` False when calling from a code block that could
be run during the computation of the provider index."""
def _have_name(self, pkg_name):
have_name = pkg_name is not None
if have_name and not isinstance(pkg_name, str):
raise ValueError("is_virtual(): expected package name, got %s" % type(pkg_name))
if use_index:
return have_name

def is_virtual(self, pkg_name):
"""Return True if the package with this name is virtual, False otherwise.

This function use the provider index. If calling from a code block that
is used to construct the provider index use the ``is_virtual_safe`` function.

Args:
pkg_name (str): name of the package we want to check
"""
have_name = self._have_name(pkg_name)
return have_name and pkg_name in self.provider_index
else:
return have_name and (
not self.exists(pkg_name) or self.get_pkg_class(pkg_name).virtual
)

def is_virtual_safe(self, pkg_name):
"""Return True if the package with this name is virtual, False otherwise.

This function doesn't use the provider index.

Args:
pkg_name (str): name of the package we want to check
"""
have_name = self._have_name(pkg_name)
return have_name and (not self.exists(pkg_name) or self.get_pkg_class(pkg_name).virtual)

def __contains__(self, pkg_name):
return self.exists(pkg_name)
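The distinction between the two predicates only matters while the provider index itself is being rebuilt. Roughly, as a hedged sketch (using the real virtual package "mpi" as the example):

```python
import spack.repo

# Normal callers go through the cached provider index.
assert spack.repo.path.is_virtual("mpi")

# Code that runs while that index is being computed (e.g. ProviderIndexer.update)
# must not consult it, so it inspects the package class directly instead.
assert spack.repo.path.is_virtual_safe("mpi")
```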
@ -1023,7 +1048,7 @@ class Repo(object):
|
|||
|
||||
"""
|
||||
|
||||
def __init__(self, root):
|
||||
def __init__(self, root, cache=None):
|
||||
"""Instantiate a package repository from a filesystem path.
|
||||
|
||||
Args:
|
||||
|
@ -1078,6 +1103,7 @@ def check(condition, msg):
|
|||
|
||||
# Indexes for this repository, computed lazily
|
||||
self._repo_index = None
|
||||
self._cache = cache or spack.caches.misc_cache
|
||||
|
||||
def real_name(self, import_name):
|
||||
"""Allow users to import Spack packages using Python identifiers.
|
||||
|
@ -1189,10 +1215,10 @@ def purge(self):
|
|||
def index(self):
|
||||
"""Construct the index for this repo lazily."""
|
||||
if self._repo_index is None:
|
||||
self._repo_index = RepoIndex(self._pkg_checker, self.namespace)
|
||||
self._repo_index.add_indexer("providers", ProviderIndexer())
|
||||
self._repo_index.add_indexer("tags", TagIndexer())
|
||||
self._repo_index.add_indexer("patches", PatchIndexer())
|
||||
self._repo_index = RepoIndex(self._pkg_checker, self.namespace, cache=self._cache)
|
||||
self._repo_index.add_indexer("providers", ProviderIndexer(self))
|
||||
self._repo_index.add_indexer("tags", TagIndexer(self))
|
||||
self._repo_index.add_indexer("patches", PatchIndexer(self))
|
||||
return self._repo_index
|
||||
|
||||
@property
|
||||
|
@ -1291,9 +1317,26 @@ def last_mtime(self):
|
|||
return self._pkg_checker.last_mtime()
|
||||
|
||||
def is_virtual(self, pkg_name):
|
||||
"""True if the package with this name is virtual, False otherwise."""
|
||||
"""Return True if the package with this name is virtual, False otherwise.
|
||||
|
||||
This function use the provider index. If calling from a code block that
|
||||
is used to construct the provider index use the ``is_virtual_safe`` function.
|
||||
|
||||
Args:
|
||||
pkg_name (str): name of the package we want to check
|
||||
"""
|
||||
return pkg_name in self.provider_index
|
||||
|
||||
def is_virtual_safe(self, pkg_name):
|
||||
"""Return True if the package with this name is virtual, False otherwise.
|
||||
|
||||
This function doesn't use the provider index.
|
||||
|
||||
Args:
|
||||
pkg_name (str): name of the package we want to check
|
||||
"""
|
||||
return not self.exists(pkg_name) or self.get_pkg_class(pkg_name).virtual
|
||||
|
||||
def get_pkg_class(self, pkg_name):
|
||||
"""Get the class for the package out of its module.
|
||||
|
||||
|
@@ -1392,9 +1435,19 @@ def create_or_construct(path, namespace=None):
return Repo(path)


def _path(repo_dirs=None):
def _path(configuration=None):
"""Get the singleton RepoPath instance for Spack."""
repo_dirs = repo_dirs or spack.config.get("repos")
configuration = configuration or spack.config.config
return create(configuration=configuration)


def create(configuration):
"""Create a RepoPath from a configuration object.

Args:
configuration (spack.config.Configuration): configuration object
"""
repo_dirs = configuration.get("repos")
if not repo_dirs:
raise NoRepoConfiguredError("Spack configuration contains no package repositories.")
return RepoPath(*repo_dirs)
@@ -1404,7 +1457,8 @@ def _path(repo_dirs=None):
path = llnl.util.lang.Singleton(_path)

# Add the finder to sys.meta_path
sys.meta_path.append(ReposFinder())
REPOS_FINDER = ReposFinder()
sys.meta_path.append(REPOS_FINDER)


def all_package_names(include_virtuals=False):
@@ -1413,36 +1467,67 @@ def all_package_names(include_virtuals=False):


@contextlib.contextmanager
def additional_repository(repository):
"""Adds temporarily a repository to the default one.

Args:
repository: repository to be added
"""
path.put_first(repository)
yield
path.remove(repository)


@contextlib.contextmanager
def use_repositories(*paths_and_repos):
def use_repositories(*paths_and_repos, **kwargs):
"""Use the repositories passed as arguments within the context manager.

Args:
*paths_and_repos: paths to the repositories to be used, or
already constructed Repo objects

override (bool): if True use only the repositories passed as input,
if False add them to the top of the list of current repositories.
Returns:
Corresponding RepoPath object
"""
global path
path, saved = RepoPath(*paths_and_repos), path
# TODO (Python 2.7): remove this kwargs on deprecation of Python 2.7 support
override = kwargs.get("override", True)
paths = [getattr(x, "root", x) for x in paths_and_repos]
scope_name = "use-repo-{}".format(uuid.uuid4())
repos_key = "repos:" if override else "repos"
spack.config.config.push_scope(
spack.config.InternalConfigScope(name=scope_name, data={repos_key: paths})
)
path, saved = create(configuration=spack.config.config), path
try:
yield path
finally:
spack.config.config.remove_scope(scope_name=scope_name)
path = saved


class MockRepositoryBuilder(object):
"""Build a mock repository in a directory"""

def __init__(self, root_directory, namespace=None):
namespace = namespace or "".join(random.choice(string.ascii_uppercase) for _ in range(10))
self.root, self.namespace = create_repo(str(root_directory), namespace)

def add_package(self, name, dependencies=None):
"""Create a mock package in the repository, using a Jinja2 template.

Args:
name (str): name of the new package
dependencies (list): list of ("dep_spec", "dep_type", "condition") tuples.
Both "dep_type" and "condition" can default to ``None`` in which case
``spack.dependency.default_deptype`` and ``spack.spec.Spec()`` are used.
"""
dependencies = dependencies or []
context = {"cls_name": spack.util.naming.mod_to_class(name), "dependencies": dependencies}
template = spack.tengine.make_environment().get_template("mock-repository/package.pyt")
text = template.render(context)
package_py = self.recipe_filename(name)
fs.mkdirp(os.path.dirname(package_py))
with open(package_py, "w") as f:
f.write(text)

def remove(self, name):
package_py = self.recipe_filename(name)
shutil.rmtree(os.path.dirname(package_py))

def recipe_filename(self, name):
return os.path.join(self.root, "packages", name, "package.py")


class RepoError(spack.error.SpackError):
"""Superclass for repository-related errors."""
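For reference, the builder's dependency tuples follow the ("dep_spec", "dep_type", "condition") convention documented in the docstring above. A small stand-alone sketch (the namespace and package names are illustrative, not from this commit):

```python
import tempfile

import spack.repo

root = tempfile.mkdtemp()
builder = spack.repo.MockRepositoryBuilder(root, namespace="myrepo")
builder.add_package("z")
# None falls back to spack.dependency.default_deptype and an unconditional Spec().
builder.add_package("y", dependencies=[("z", "build", None)])
print(builder.recipe_filename("y"))  # .../packages/y/package.py
builder.remove("z")                  # deletes packages/z/package.py from the mock repo
```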
@ -1471,7 +1556,7 @@ class UnknownPackageError(UnknownEntityError):
|
|||
"""Raised when we encounter a package spack doesn't have."""
|
||||
|
||||
def __init__(self, name, repo=None):
|
||||
msg = None
|
||||
msg = "Attempting to retrieve anonymous package."
|
||||
long_msg = None
|
||||
if name:
|
||||
if repo:
|
||||
|
@ -1488,8 +1573,6 @@ def __init__(self, name, repo=None):
|
|||
long_msg = long_msg.format(name)
|
||||
else:
|
||||
long_msg = "You may need to run 'spack clean -m'."
|
||||
else:
|
||||
msg = "Attempting to retrieve anonymous package."
|
||||
|
||||
super(UnknownPackageError, self).__init__(msg, long_msg)
|
||||
self.name = name
|
||||
|
@@ -1498,8 +1581,12 @@ def __init__(self, name, repo=None):
class UnknownNamespaceError(UnknownEntityError):
"""Raised when we encounter an unknown namespace"""

def __init__(self, namespace):
super(UnknownNamespaceError, self).__init__("Unknown namespace: %s" % namespace)
def __init__(self, namespace, name=None):
msg, long_msg = "Unknown namespace: {}".format(namespace), None
if name == "yaml":
long_msg = "Did you mean to specify a filename with './{}.{}'?"
long_msg = long_msg.format(namespace, name)
super(UnknownNamespaceError, self).__init__(msg, long_msg)


class FailedConstructorError(RepoError):
@@ -1548,16 +1548,7 @@ def package_class(self):

@property
def virtual(self):
"""Right now, a spec is virtual if no package exists with its name.

TODO: revisit this -- might need to use a separate namespace and
be more explicit about this.
Possible idea: just use conventin and make virtual deps all
caps, e.g., MPI vs mpi.
"""
# This method can be called while regenerating the provider index
# So we turn off using the index to detect virtuals
return spack.repo.path.is_virtual(self.name, use_index=False)
return spack.repo.path.is_virtual(self.name)

@property
def concrete(self):
@ -2627,7 +2618,9 @@ def _expand_virtual_packages(self, concretizer):
|
|||
a problem.
|
||||
"""
|
||||
# Make an index of stuff this spec already provides
|
||||
self_index = spack.provider_index.ProviderIndex(self.traverse(), restrict=True)
|
||||
self_index = spack.provider_index.ProviderIndex(
|
||||
repository=spack.repo.path, specs=self.traverse(), restrict=True
|
||||
)
|
||||
changed = False
|
||||
done = False
|
||||
|
||||
|
@ -3151,7 +3144,7 @@ def _find_provider(self, vdep, provider_index):
|
|||
Raise an exception if there is a conflicting virtual
|
||||
dependency already in this spec.
|
||||
"""
|
||||
assert vdep.virtual
|
||||
assert spack.repo.path.is_virtual_safe(vdep.name), vdep
|
||||
|
||||
# note that this defensively copies.
|
||||
providers = provider_index.providers_for(vdep)
|
||||
|
@ -3216,16 +3209,18 @@ def _merge_dependency(self, dependency, visited, spec_deps, provider_index, test
|
|||
|
||||
# If it's a virtual dependency, try to find an existing
|
||||
# provider in the spec, and merge that.
|
||||
if dep.virtual:
|
||||
if spack.repo.path.is_virtual_safe(dep.name):
|
||||
visited.add(dep.name)
|
||||
provider = self._find_provider(dep, provider_index)
|
||||
if provider:
|
||||
dep = provider
|
||||
else:
|
||||
index = spack.provider_index.ProviderIndex([dep], restrict=True)
|
||||
index = spack.provider_index.ProviderIndex(
|
||||
repository=spack.repo.path, specs=[dep], restrict=True
|
||||
)
|
||||
items = list(spec_deps.items())
|
||||
for name, vspec in items:
|
||||
if not vspec.virtual:
|
||||
if not spack.repo.path.is_virtual_safe(vspec.name):
|
||||
continue
|
||||
|
||||
if index.providers_for(vspec):
|
||||
|
@ -3375,7 +3370,7 @@ def normalize(self, force=False, tests=False, user_spec_deps=None):
|
|||
# Initialize index of virtual dependency providers if
|
||||
# concretize didn't pass us one already
|
||||
provider_index = spack.provider_index.ProviderIndex(
|
||||
[s for s in all_spec_deps.values()], restrict=True
|
||||
repository=spack.repo.path, specs=[s for s in all_spec_deps.values()], restrict=True
|
||||
)
|
||||
|
||||
# traverse the package DAG and fill out dependencies according
|
||||
|
@ -3753,8 +3748,12 @@ def satisfies_dependencies(self, other, strict=False):
|
|||
return False
|
||||
|
||||
# For virtual dependencies, we need to dig a little deeper.
|
||||
self_index = spack.provider_index.ProviderIndex(self.traverse(), restrict=True)
|
||||
other_index = spack.provider_index.ProviderIndex(other.traverse(), restrict=True)
|
||||
self_index = spack.provider_index.ProviderIndex(
|
||||
repository=spack.repo.path, specs=self.traverse(), restrict=True
|
||||
)
|
||||
other_index = spack.provider_index.ProviderIndex(
|
||||
repository=spack.repo.path, specs=other.traverse(), restrict=True
|
||||
)
|
||||
|
||||
# This handles cases where there are already providers for both vpkgs
|
||||
if not self_index.satisfies(other_index):
|
||||
|
|
|
@ -102,8 +102,8 @@ def __init__(self):
|
|||
|
||||
def restore(self):
|
||||
if _serialize:
|
||||
spack.repo.path = spack.repo._path(self.repo_dirs)
|
||||
spack.config.config = self.config
|
||||
spack.repo.path = spack.repo._path(self.config)
|
||||
spack.platforms.host = self.platform
|
||||
|
||||
new_store = spack.store.Store.deserialize(self.store_token)
|
||||
|
|
|
@ -50,8 +50,9 @@ def packages_with_tags(tags, installed, skip_empty):
|
|||
class TagIndex(Mapping):
|
||||
"""Maps tags to list of packages."""
|
||||
|
||||
def __init__(self):
|
||||
def __init__(self, repository):
|
||||
self._tag_dict = collections.defaultdict(list)
|
||||
self.repository = repository
|
||||
|
||||
@property
|
||||
def tags(self):
|
||||
|
@ -61,7 +62,7 @@ def to_json(self, stream):
|
|||
sjson.dump({"tags": self._tag_dict}, stream)
|
||||
|
||||
@staticmethod
|
||||
def from_json(stream):
|
||||
def from_json(stream, repository):
|
||||
d = sjson.load(stream)
|
||||
|
||||
if not isinstance(d, dict):
|
||||
|
@ -70,7 +71,7 @@ def from_json(stream):
|
|||
if "tags" not in d:
|
||||
raise TagIndexError("TagIndex data does not start with 'tags'")
|
||||
|
||||
r = TagIndex()
|
||||
r = TagIndex(repository=repository)
|
||||
|
||||
for tag, packages in d["tags"].items():
|
||||
r[tag].extend(packages)
|
||||
|
@ -88,7 +89,7 @@ def __len__(self):
|
|||
|
||||
def copy(self):
|
||||
"""Return a deep copy of this index."""
|
||||
clone = TagIndex()
|
||||
clone = TagIndex(repository=self.repository)
|
||||
clone._tag_dict = copy.deepcopy(self._tag_dict)
|
||||
return clone
|
||||
|
||||
|
@ -117,9 +118,8 @@ def update_package(self, pkg_name):
|
|||
|
||||
Args:
|
||||
pkg_name (str): name of the package to be removed from the index
|
||||
|
||||
"""
|
||||
pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
|
||||
pkg_cls = self.repository.get_pkg_class(pkg_name)
|
||||
|
||||
# Remove the package from the list of packages, if present
|
||||
for pkg_list in self._tag_dict.values():
|
||||
|
|
|
@ -97,12 +97,12 @@ def config_directory(tmpdir_factory):
|
|||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def default_config(tmpdir_factory, config_directory, monkeypatch, install_mockery_mutable_config):
|
||||
def default_config(tmpdir, config_directory, monkeypatch, install_mockery_mutable_config):
|
||||
# This fixture depends on install_mockery_mutable_config to ensure
|
||||
# there is a clear order of initialization. The substitution of the
|
||||
# config scopes here is done on top of the substitution that comes with
|
||||
# install_mockery_mutable_config
|
||||
mutable_dir = tmpdir_factory.mktemp("mutable_config").join("tmp")
|
||||
mutable_dir = tmpdir.mkdir("mutable_config").join("tmp")
|
||||
config_directory.copy(mutable_dir)
|
||||
|
||||
cfg = spack.config.Configuration(
|
||||
|
@ -113,7 +113,7 @@ def default_config(tmpdir_factory, config_directory, monkeypatch, install_mocker
|
|||
)
|
||||
|
||||
spack.config.config, old_config = cfg, spack.config.config
|
||||
|
||||
spack.config.config.set("repos", [spack.paths.mock_packages_path])
|
||||
# This is essential, otherwise the cache will create weird side effects
|
||||
# that will compromise subsequent tests if compilers.yaml is modified
|
||||
monkeypatch.setattr(spack.compilers, "_cache_config_file", [])
|
||||
|
|
|
@ -32,7 +32,6 @@
|
|||
from spack.schema.gitlab_ci import schema as gitlab_ci_schema
|
||||
from spack.spec import CompilerSpec, Spec
|
||||
from spack.util.executable import which
|
||||
from spack.util.mock_package import MockPackageMultiRepo
|
||||
from spack.util.pattern import Bunch
|
||||
|
||||
ci_cmd = spack.main.SpackCommand("ci")
|
||||
|
@ -92,7 +91,7 @@ def mock_git_repo(tmpdir):
|
|||
yield repo_path
|
||||
|
||||
|
||||
def test_specs_staging(config):
|
||||
def test_specs_staging(config, tmpdir):
|
||||
"""Make sure we achieve the best possible staging for the following
|
||||
spec DAG::
|
||||
|
||||
|
@ -108,20 +107,17 @@ def test_specs_staging(config):
|
|||
and then 'd', 'b', and 'a' to be put in the next three stages, respectively.
|
||||
|
||||
"""
|
||||
default = ("build", "link")
|
||||
builder = repo.MockRepositoryBuilder(tmpdir)
|
||||
builder.add_package("g")
|
||||
builder.add_package("f")
|
||||
builder.add_package("e")
|
||||
builder.add_package("d", dependencies=[("f", None, None), ("g", None, None)])
|
||||
builder.add_package("c")
|
||||
builder.add_package("b", dependencies=[("d", None, None), ("e", None, None)])
|
||||
builder.add_package("a", dependencies=[("b", None, None), ("c", None, None)])
|
||||
|
||||
mock_repo = MockPackageMultiRepo()
|
||||
g = mock_repo.add_package("g", [], [])
|
||||
f = mock_repo.add_package("f", [], [])
|
||||
e = mock_repo.add_package("e", [], [])
|
||||
d = mock_repo.add_package("d", [f, g], [default, default])
|
||||
c = mock_repo.add_package("c", [], [])
|
||||
b = mock_repo.add_package("b", [d, e], [default, default])
|
||||
mock_repo.add_package("a", [b, c], [default, default])
|
||||
|
||||
with repo.use_repositories(mock_repo):
|
||||
spec_a = Spec("a")
|
||||
spec_a.concretize()
|
||||
with repo.use_repositories(builder.root):
|
||||
spec_a = Spec("a").concretized()
|
||||
|
||||
spec_a_label = ci._spec_deps_key(spec_a)
|
||||
spec_b_label = ci._spec_deps_key(spec_a["b"])
|
||||
|
|
|
@ -27,7 +27,6 @@
|
|||
from spack.spec import Spec
|
||||
from spack.stage import stage_prefix
|
||||
from spack.util.executable import Executable
|
||||
from spack.util.mock_package import MockPackageMultiRepo
|
||||
from spack.util.path import substitute_path_variables
|
||||
from spack.util.web import FetchError
|
||||
from spack.version import Version
|
||||
|
@ -440,7 +439,12 @@ def test_environment_status(capsys, tmpdir):
|
|||
|
||||
|
||||
def test_env_status_broken_view(
|
||||
mutable_mock_env_path, mock_archive, mock_fetch, mock_packages, install_mockery, tmpdir
|
||||
mutable_mock_env_path,
|
||||
mock_archive,
|
||||
mock_fetch,
|
||||
mock_custom_repository,
|
||||
install_mockery,
|
||||
tmpdir,
|
||||
):
|
||||
env_dir = str(tmpdir)
|
||||
with ev.Environment(env_dir):
|
||||
|
@ -448,7 +452,7 @@ def test_env_status_broken_view(
|
|||
|
||||
# switch to a new repo that doesn't include the installed package
|
||||
# test that Spack detects the missing package and warns the user
|
||||
with spack.repo.use_repositories(MockPackageMultiRepo()):
|
||||
with spack.repo.use_repositories(mock_custom_repository):
|
||||
with ev.Environment(env_dir):
|
||||
output = env("status")
|
||||
assert "includes out of date packages or repos" in output
|
||||
|
@ -460,15 +464,14 @@ def test_env_status_broken_view(
|
|||
|
||||
|
||||
def test_env_activate_broken_view(
|
||||
mutable_mock_env_path, mock_archive, mock_fetch, mock_packages, install_mockery
|
||||
mutable_mock_env_path, mock_archive, mock_fetch, mock_custom_repository, install_mockery
|
||||
):
|
||||
with ev.create("test"):
|
||||
install("trivial-install-test-package")
|
||||
|
||||
# switch to a new repo that doesn't include the installed package
|
||||
# test that Spack detects the missing package and fails gracefully
|
||||
new_repo = MockPackageMultiRepo()
|
||||
with spack.repo.use_repositories(new_repo):
|
||||
with spack.repo.use_repositories(mock_custom_repository):
|
||||
with pytest.raises(SpackCommandError):
|
||||
env("activate", "--sh", "test")
|
||||
|
||||
|
@ -1074,25 +1077,17 @@ def test_uninstall_removes_from_env(mock_stage, mock_fetch, install_mockery):
|
|||
|
||||
|
||||
@pytest.mark.usefixtures("config")
|
||||
def test_indirect_build_dep():
|
||||
def test_indirect_build_dep(tmpdir):
|
||||
"""Simple case of X->Y->Z where Y is a build/link dep and Z is a
|
||||
build-only dep. Make sure this concrete DAG is preserved when writing the
|
||||
environment out and reading it back.
|
||||
"""
|
||||
default = ("build", "link")
|
||||
build_only = ("build",)
|
||||
builder = spack.repo.MockRepositoryBuilder(tmpdir)
|
||||
builder.add_package("z")
|
||||
builder.add_package("y", dependencies=[("z", "build", None)])
|
||||
builder.add_package("x", dependencies=[("y", None, None)])
|
||||
|
||||
mock_repo = MockPackageMultiRepo()
|
||||
z = mock_repo.add_package("z", [], [])
|
||||
y = mock_repo.add_package("y", [z], [build_only])
|
||||
mock_repo.add_package("x", [y], [default])
|
||||
|
||||
def noop(*args):
|
||||
pass
|
||||
|
||||
setattr(mock_repo, "dump_provenance", noop)
|
||||
|
||||
with spack.repo.use_repositories(mock_repo):
|
||||
with spack.repo.use_repositories(builder.root):
|
||||
x_spec = Spec("x")
|
||||
x_concretized = x_spec.concretized()
|
||||
|
||||
|
@ -1110,7 +1105,7 @@ def noop(*args):
|
|||
|
||||
|
||||
@pytest.mark.usefixtures("config")
|
||||
def test_store_different_build_deps():
|
||||
def test_store_different_build_deps(tmpdir):
|
||||
r"""Ensure that an environment can store two instances of a build-only
|
||||
dependency::
|
||||
|
||||
|
@ -1121,20 +1116,12 @@ def test_store_different_build_deps():
|
|||
z1
|
||||
|
||||
"""
|
||||
default = ("build", "link")
|
||||
build_only = ("build",)
|
||||
builder = spack.repo.MockRepositoryBuilder(tmpdir)
|
||||
builder.add_package("z")
|
||||
builder.add_package("y", dependencies=[("z", "build", None)])
|
||||
builder.add_package("x", dependencies=[("y", None, None), ("z", "build", None)])
|
||||
|
||||
mock_repo = MockPackageMultiRepo()
|
||||
z = mock_repo.add_package("z", [], [])
|
||||
y = mock_repo.add_package("y", [z], [build_only])
|
||||
mock_repo.add_package("x", [y, z], [default, build_only])
|
||||
|
||||
def noop(*args):
|
||||
pass
|
||||
|
||||
setattr(mock_repo, "dump_provenance", noop)
|
||||
|
||||
with spack.repo.use_repositories(mock_repo):
|
||||
with spack.repo.use_repositories(builder.root):
|
||||
y_spec = Spec("y ^z@3")
|
||||
y_concretized = y_spec.concretized()
|
||||
|
||||
|
|
|
@ -81,7 +81,7 @@ def mock_pkg_git_repo(tmpdir_factory):
|
|||
git("rm", "-rf", "pkg-c")
|
||||
git("-c", "commit.gpgsign=false", "commit", "-m", "change pkg-b, remove pkg-c, add pkg-d")
|
||||
|
||||
with spack.repo.use_repositories(mock_repo):
|
||||
with spack.repo.use_repositories(str(repo_path)):
|
||||
yield mock_repo_packages
|
||||
|
||||
|
||||
|
|
|
@ -3,7 +3,6 @@
|
|||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
import jinja2
|
||||
|
@ -177,6 +176,8 @@ class Changing(Package):
|
|||
{% endif %}
|
||||
"""
|
||||
|
||||
with spack.repo.use_repositories(str(repo_dir), override=False) as repository:
|
||||
|
||||
class _ChangingPackage(object):
|
||||
default_context = [
|
||||
("delete_version", True),
|
||||
|
@ -187,14 +188,13 @@ class _ChangingPackage(object):
|
|||
def __init__(self, repo_directory):
|
||||
self.repo_dir = repo_directory
|
||||
self.repo = spack.repo.Repo(str(repo_directory))
|
||||
mutable_mock_repo.put_first(self.repo)
|
||||
|
||||
def change(self, changes=None):
|
||||
changes = changes or {}
|
||||
context = dict(self.default_context)
|
||||
context.update(changes)
|
||||
# Remove the repo object and delete Python modules
|
||||
mutable_mock_repo.remove(self.repo)
|
||||
repository.remove(self.repo)
|
||||
# TODO: this mocks a change in the recipe that should happen in a
|
||||
# TODO: different process space. Leaving this comment as a hint
|
||||
# TODO: in case tests using this fixture start failing.
|
||||
|
@ -210,37 +210,13 @@ def change(self, changes=None):
|
|||
|
||||
# Re-add the repository
|
||||
self.repo = spack.repo.Repo(str(self.repo_dir))
|
||||
mutable_mock_repo.put_first(self.repo)
|
||||
repository.put_first(self.repo)
|
||||
|
||||
_changing_pkg = _ChangingPackage(repo_dir)
|
||||
_changing_pkg.change({"delete_version": False, "delete_variant": False, "add_variant": False})
|
||||
|
||||
return _changing_pkg
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def additional_repo_with_c(tmpdir_factory, mutable_mock_repo):
|
||||
"""Add a repository with a simple package"""
|
||||
repo_dir = tmpdir_factory.mktemp("myrepo")
|
||||
repo_dir.join("repo.yaml").write(
|
||||
"""
|
||||
repo:
|
||||
namespace: myrepo
|
||||
""",
|
||||
ensure=True,
|
||||
_changing_pkg.change(
|
||||
{"delete_version": False, "delete_variant": False, "add_variant": False}
|
||||
)
|
||||
packages_dir = repo_dir.ensure("packages", dir=True)
|
||||
package_py = """
|
||||
class C(Package):
|
||||
homepage = "http://www.example.com"
|
||||
url = "http://www.example.com/root-1.0.tar.gz"
|
||||
|
||||
version(1.0, sha256='abcde')
|
||||
"""
|
||||
packages_dir.join("c", "package.py").write(package_py, ensure=True)
|
||||
repo = spack.repo.Repo(str(repo_dir))
|
||||
mutable_mock_repo.put_first(repo)
|
||||
return repo
|
||||
yield _changing_pkg
|
||||
|
||||
|
||||
# This must use the mutable_config fixture because the test
|
||||
|
@ -1558,36 +1534,31 @@ def test_installed_version_is_selected_only_for_reuse(
|
|||
assert not new_root["changing"].satisfies("@1.0")
|
||||
|
||||
@pytest.mark.regression("28259")
|
||||
def test_reuse_with_unknown_namespace_dont_raise(
|
||||
self, additional_repo_with_c, mutable_mock_repo
|
||||
):
|
||||
def test_reuse_with_unknown_namespace_dont_raise(self, mock_custom_repository):
|
||||
with spack.repo.use_repositories(mock_custom_repository, override=False):
|
||||
s = Spec("c").concretized()
|
||||
assert s.namespace == "myrepo"
|
||||
assert s.namespace != "builtin.mock"
|
||||
s.package.do_install(fake=True, explicit=True)
|
||||
|
||||
# TODO: To mock repo removal we need to recreate the RepoPath
|
||||
mutable_mock_repo.remove(additional_repo_with_c)
|
||||
spack.repo.path = spack.repo.RepoPath(*spack.repo.path.repos)
|
||||
|
||||
with spack.config.override("concretizer:reuse", True):
|
||||
s = Spec("c").concretized()
|
||||
assert s.namespace == "builtin.mock"
|
||||
|
||||
@pytest.mark.regression("28259")
|
||||
def test_reuse_with_unknown_package_dont_raise(
|
||||
self, additional_repo_with_c, mutable_mock_repo, monkeypatch
|
||||
):
|
||||
def test_reuse_with_unknown_package_dont_raise(self, tmpdir, monkeypatch):
|
||||
builder = spack.repo.MockRepositoryBuilder(tmpdir, namespace="myrepo")
|
||||
builder.add_package("c")
|
||||
with spack.repo.use_repositories(builder.root, override=False):
|
||||
s = Spec("c").concretized()
|
||||
assert s.namespace == "myrepo"
|
||||
s.package.do_install(fake=True, explicit=True)
|
||||
|
||||
# Here we delete the package.py instead of removing the repo and we
|
||||
# make it such that "c" doesn't exist in myrepo
|
||||
del sys.modules["spack.pkg.myrepo.c"]
|
||||
c_dir = os.path.join(additional_repo_with_c.root, "packages", "c")
|
||||
shutil.rmtree(c_dir)
|
||||
monkeypatch.setattr(additional_repo_with_c, "exists", lambda x: False)
|
||||
|
||||
del sys.modules["spack.pkg.myrepo"]
|
||||
builder.remove("c")
|
||||
with spack.repo.use_repositories(builder.root, override=False) as repos:
|
||||
# TODO (INJECT CONFIGURATION): unclear why the cache needs to be invalidated explicitly
|
||||
repos.repos[0]._pkg_checker.invalidate()
|
||||
with spack.config.override("concretizer:reuse", True):
|
||||
s = Spec("c").concretized()
|
||||
assert s.namespace == "builtin.mock"
|
||||
|
|
|
@ -55,6 +55,16 @@
|
|||
is_windows = sys.platform == "win32"
|
||||
|
||||
|
||||
def ensure_configuration_fixture_run_before(request):
|
||||
"""Ensure that fixture mutating the configuration run before the one where
|
||||
the function is called.
|
||||
"""
|
||||
if "config" in request.fixturenames:
|
||||
request.getfixturevalue("config")
|
||||
if "mutable_config" in request.fixturenames:
|
||||
request.getfixturevalue("mutable_config")
|
||||
|
||||
|
||||
#
|
||||
# Return list of shas for latest two git commits in local spack repo
|
||||
#
|
||||
|
@ -536,20 +546,30 @@ def mock_pkg_install(monkeypatch):
|
|||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def mock_packages(mock_repo_path, mock_pkg_install):
|
||||
def mock_packages(mock_repo_path, mock_pkg_install, request):
|
||||
"""Use the 'builtin.mock' repository instead of 'builtin'"""
|
||||
ensure_configuration_fixture_run_before(request)
|
||||
with spack.repo.use_repositories(mock_repo_path) as mock_repo:
|
||||
yield mock_repo
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def mutable_mock_repo(mock_repo_path):
|
||||
def mutable_mock_repo(mock_repo_path, request):
|
||||
"""Function-scoped mock packages, for tests that need to modify them."""
|
||||
ensure_configuration_fixture_run_before(request)
|
||||
mock_repo = spack.repo.Repo(spack.paths.mock_packages_path)
|
||||
with spack.repo.use_repositories(mock_repo) as mock_repo_path:
|
||||
yield mock_repo_path
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def mock_custom_repository(tmpdir, mutable_mock_repo):
|
||||
"""Create a custom repository with a single package "c" and return its path."""
|
||||
builder = spack.repo.MockRepositoryBuilder(tmpdir.mkdir("myrepo"))
|
||||
builder.add_package("c")
|
||||
return builder.root
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def linux_os():
|
||||
"""Returns a named tuple with attributes 'name' and 'version'
|
||||
|
|
|
@ -2,11 +2,7 @@
|
|||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
"""
|
||||
These tests check the database is functioning properly,
|
||||
both in memory and in its file
|
||||
"""
|
||||
"""Check the database is functioning properly, both in memory and in its file."""
|
||||
import datetime
|
||||
import functools
|
||||
import json
|
||||
|
@ -36,7 +32,6 @@
|
|||
import spack.store
|
||||
from spack.schema.database_index import schema
|
||||
from spack.util.executable import Executable
|
||||
from spack.util.mock_package import MockPackageMultiRepo
|
||||
|
||||
is_windows = sys.platform == "win32"
|
||||
|
||||
|
@ -44,8 +39,8 @@
|
|||
|
||||
|
||||
@pytest.fixture()
|
||||
def upstream_and_downstream_db(tmpdir_factory, gen_mock_layout):
|
||||
mock_db_root = str(tmpdir_factory.mktemp("mock_db_root"))
|
||||
def upstream_and_downstream_db(tmpdir, gen_mock_layout):
|
||||
mock_db_root = str(tmpdir.mkdir("mock_db_root"))
|
||||
upstream_write_db = spack.database.Database(mock_db_root)
|
||||
upstream_db = spack.database.Database(mock_db_root, is_upstream=True)
|
||||
# Generate initial DB file to avoid reindex
|
||||
|
@ -53,7 +48,7 @@ def upstream_and_downstream_db(tmpdir_factory, gen_mock_layout):
|
|||
upstream_write_db._write_to_file(db_file)
|
||||
upstream_layout = gen_mock_layout("/a/")
|
||||
|
||||
downstream_db_root = str(tmpdir_factory.mktemp("mock_downstream_db_root"))
|
||||
downstream_db_root = str(tmpdir.mkdir("mock_downstream_db_root"))
|
||||
downstream_db = spack.database.Database(downstream_db_root, upstream_dbs=[upstream_db])
|
||||
with open(downstream_db._index_path, "w") as db_file:
|
||||
downstream_db._write_to_file(db_file)
|
||||
|
@ -63,7 +58,9 @@ def upstream_and_downstream_db(tmpdir_factory, gen_mock_layout):
|
|||
|
||||
|
||||
@pytest.mark.skipif(sys.platform == "win32", reason="Upstreams currently unsupported on Windows")
|
||||
def test_spec_installed_upstream(upstream_and_downstream_db, config, monkeypatch):
|
||||
def test_spec_installed_upstream(
|
||||
upstream_and_downstream_db, mock_custom_repository, config, monkeypatch
|
||||
):
|
||||
"""Test whether Spec.installed_upstream() works."""
|
||||
(
|
||||
upstream_write_db,
|
||||
|
@ -74,11 +71,8 @@ def test_spec_installed_upstream(upstream_and_downstream_db, config, monkeypatch
|
|||
) = upstream_and_downstream_db
|
||||
|
||||
# a known installed spec should say that it's installed
|
||||
mock_repo = MockPackageMultiRepo()
|
||||
mock_repo.add_package("x", [], [])
|
||||
|
||||
with spack.repo.use_repositories(mock_repo):
|
||||
spec = spack.spec.Spec("x").concretized()
|
||||
with spack.repo.use_repositories(mock_custom_repository):
|
||||
spec = spack.spec.Spec("c").concretized()
|
||||
assert not spec.installed
|
||||
assert not spec.installed_upstream
|
||||
|
||||
|
@ -98,7 +92,7 @@ def test_spec_installed_upstream(upstream_and_downstream_db, config, monkeypatch
|
|||
|
||||
@pytest.mark.skipif(sys.platform == "win32", reason="Upstreams currently unsupported on Windows")
|
||||
@pytest.mark.usefixtures("config")
|
||||
def test_installed_upstream(upstream_and_downstream_db):
|
||||
def test_installed_upstream(upstream_and_downstream_db, tmpdir):
|
||||
(
|
||||
upstream_write_db,
|
||||
upstream_db,
|
||||
|
@ -107,17 +101,14 @@ def test_installed_upstream(upstream_and_downstream_db):
|
|||
downstream_layout,
|
||||
) = upstream_and_downstream_db
|
||||
|
||||
default = ("build", "link")
|
||||
mock_repo = MockPackageMultiRepo()
|
||||
x = mock_repo.add_package("x", [], [])
|
||||
z = mock_repo.add_package("z", [], [])
|
||||
y = mock_repo.add_package("y", [z], [default])
|
||||
mock_repo.add_package("w", [x, y], [default, default])
|
||||
|
||||
with spack.repo.use_repositories(mock_repo):
|
||||
spec = spack.spec.Spec("w")
|
||||
spec.concretize()
|
||||
builder = spack.repo.MockRepositoryBuilder(tmpdir.mkdir("mock.repo"))
|
||||
builder.add_package("x")
|
||||
builder.add_package("z")
|
||||
builder.add_package("y", dependencies=[("z", None, None)])
|
||||
builder.add_package("w", dependencies=[("x", None, None), ("y", None, None)])
|
||||
|
||||
with spack.repo.use_repositories(builder.root):
|
||||
spec = spack.spec.Spec("w").concretized()
|
||||
for dep in spec.traverse(root=False):
|
||||
upstream_write_db.add(dep, upstream_layout)
|
||||
upstream_db._read()
|
||||
|
@@ -126,10 +117,9 @@ def test_installed_upstream(upstream_and_downstream_db):
             record = downstream_db.get_by_hash(dep.dag_hash())
             assert record is not None
             with pytest.raises(spack.database.ForbiddenLockError):
-                record = upstream_db.get_by_hash(dep.dag_hash())
+                upstream_db.get_by_hash(dep.dag_hash())

-        new_spec = spack.spec.Spec("w")
-        new_spec.concretize()
+        new_spec = spack.spec.Spec("w").concretized()
         downstream_db.add(new_spec, downstream_layout)
         for dep in new_spec.traverse(root=False):
             upstream, record = downstream_db.query_by_spec_hash(dep.dag_hash())

@@ -145,7 +135,7 @@ def test_installed_upstream(upstream_and_downstream_db):

 @pytest.mark.skipif(sys.platform == "win32", reason="Upstreams currently unsupported on Windows")
 @pytest.mark.usefixtures("config")
-def test_removed_upstream_dep(upstream_and_downstream_db):
+def test_removed_upstream_dep(upstream_and_downstream_db, tmpdir):
     (
         upstream_write_db,
         upstream_db,

@@ -154,20 +144,17 @@ def test_removed_upstream_dep(upstream_and_downstream_db):
         downstream_layout,
     ) = upstream_and_downstream_db

-    default = ("build", "link")
-    mock_repo = MockPackageMultiRepo()
-    z = mock_repo.add_package("z", [], [])
-    mock_repo.add_package("y", [z], [default])
+    builder = spack.repo.MockRepositoryBuilder(tmpdir.mkdir("mock.repo"))
+    builder.add_package("z")
+    builder.add_package("y", dependencies=[("z", None, None)])

-    with spack.repo.use_repositories(mock_repo):
-        spec = spack.spec.Spec("y")
-        spec.concretize()
+    with spack.repo.use_repositories(builder):
+        spec = spack.spec.Spec("y").concretized()

         upstream_write_db.add(spec["z"], upstream_layout)
         upstream_db._read()

-        new_spec = spack.spec.Spec("y")
-        new_spec.concretize()
+        new_spec = spack.spec.Spec("y").concretized()
         downstream_db.add(new_spec, downstream_layout)

         upstream_write_db.remove(new_spec["z"])

@@ -181,7 +168,7 @@ def test_removed_upstream_dep(upstream_and_downstream_db):

 @pytest.mark.skipif(sys.platform == "win32", reason="Upstreams currently unsupported on Windows")
 @pytest.mark.usefixtures("config")
-def test_add_to_upstream_after_downstream(upstream_and_downstream_db):
+def test_add_to_upstream_after_downstream(upstream_and_downstream_db, tmpdir):
     """An upstream DB can add a package after it is installed in the downstream
     DB. When a package is recorded as installed in both, the results should
     refer to the downstream DB.

@@ -194,15 +181,13 @@ def test_add_to_upstream_after_downstream(upstream_and_downstream_db):
         downstream_layout,
     ) = upstream_and_downstream_db

-    mock_repo = MockPackageMultiRepo()
-    mock_repo.add_package("x", [], [])
+    builder = spack.repo.MockRepositoryBuilder(tmpdir.mkdir("mock.repo"))
+    builder.add_package("x")

-    with spack.repo.use_repositories(mock_repo):
-        spec = spack.spec.Spec("x")
-        spec.concretize()
+    with spack.repo.use_repositories(builder.root):
+        spec = spack.spec.Spec("x").concretized()

         downstream_db.add(spec, downstream_layout)

         upstream_write_db.add(spec, upstream_layout)
         upstream_db._read()

@@ -224,12 +209,12 @@ def test_add_to_upstream_after_downstream(upstream_and_downstream_db):

 @pytest.mark.skipif(sys.platform == "win32", reason="Upstreams currently unsupported on Windows")
 @pytest.mark.usefixtures("config", "temporary_store")
-def test_cannot_write_upstream(tmpdir_factory, gen_mock_layout):
-    roots = [str(tmpdir_factory.mktemp(x)) for x in ["a", "b"]]
+def test_cannot_write_upstream(tmpdir, gen_mock_layout):
+    roots = [str(tmpdir.mkdir(x)) for x in ["a", "b"]]
     layouts = [gen_mock_layout(x) for x in ["/ra/", "/rb/"]]

-    mock_repo = MockPackageMultiRepo()
-    mock_repo.add_package("x", [], [])
+    builder = spack.repo.MockRepositoryBuilder(tmpdir.mkdir("mock.repo"))
+    builder.add_package("x")

     # Instantiate the database that will be used as the upstream DB and make
     # sure it has an index file

@@ -239,7 +224,7 @@ def test_cannot_write_upstream(tmpdir_factory, gen_mock_layout):

     upstream_dbs = spack.store._construct_upstream_dbs_from_install_roots([roots[1]], _test=True)

-    with spack.repo.use_repositories(mock_repo):
+    with spack.repo.use_repositories(builder.root):
         spec = spack.spec.Spec("x")
         spec.concretize()

@@ -249,19 +234,17 @@ def test_cannot_write_upstream(tmpdir_factory, gen_mock_layout):

 @pytest.mark.skipif(sys.platform == "win32", reason="Upstreams currently unsupported on Windows")
 @pytest.mark.usefixtures("config", "temporary_store")
-def test_recursive_upstream_dbs(tmpdir_factory, gen_mock_layout):
-    roots = [str(tmpdir_factory.mktemp(x)) for x in ["a", "b", "c"]]
+def test_recursive_upstream_dbs(tmpdir, gen_mock_layout):
+    roots = [str(tmpdir.mkdir(x)) for x in ["a", "b", "c"]]
     layouts = [gen_mock_layout(x) for x in ["/ra/", "/rb/", "/rc/"]]

-    default = ("build", "link")
-    mock_repo = MockPackageMultiRepo()
-    z = mock_repo.add_package("z", [], [])
-    y = mock_repo.add_package("y", [z], [default])
-    mock_repo.add_package("x", [y], [default])
+    builder = spack.repo.MockRepositoryBuilder(tmpdir.mkdir("mock.repo"))
+    builder.add_package("z")
+    builder.add_package("y", dependencies=[("z", None, None)])
+    builder.add_package("x", dependencies=[("y", None, None)])

-    with spack.repo.use_repositories(mock_repo):
-        spec = spack.spec.Spec("x")
-        spec.concretize()
+    with spack.repo.use_repositories(builder.root):
+        spec = spack.spec.Spec("x").concretized()
         db_c = spack.database.Database(roots[2])
         db_c.add(spec["z"], layouts[2])
@@ -721,11 +704,11 @@ def fail_while_writing():
     assert database.query("cmake", installed=any) == []


-def test_115_reindex_with_packages_not_in_repo(mutable_database):
+def test_115_reindex_with_packages_not_in_repo(mutable_database, tmpdir):
     # Dont add any package definitions to this repository, the idea is that
     # packages should not have to be defined in the repository once they
     # are installed
-    with spack.repo.use_repositories(MockPackageMultiRepo()):
+    with spack.repo.use_repositories(spack.repo.MockRepositoryBuilder(tmpdir).root):
         spack.store.store.reindex()
         _check_db_sanity(mutable_database)

@@ -1063,11 +1046,11 @@ def test_consistency_of_dependents_upon_remove(mutable_database):


 @pytest.mark.regression("30187")
-def test_query_installed_when_package_unknown(database):
+def test_query_installed_when_package_unknown(database, tmpdir):
     """Test that we can query the installation status of a spec
     when we don't know its package.py
     """
-    with spack.repo.use_repositories(MockPackageMultiRepo()):
+    with spack.repo.use_repositories(spack.repo.MockRepositoryBuilder(tmpdir).root):
         specs = database.query("mpileaks")
         for s in specs:
             # Assert that we can query the installation methods even though we
@@ -178,7 +178,7 @@ def test_handle_unknown_package(temporary_store, config, mock_packages):
         layout.create_install_directory(spec)
         installed_specs[spec] = layout.path_for_spec(spec)

-    with spack.repo.use_repositories(mock_db):
+    with spack.repo.use_repositories(spack.paths.mock_packages_path):
         # Now check that even without the package files, we know
         # enough to read a spec from the spec file.
         for spec, path in installed_specs.items():
@@ -26,19 +26,19 @@


 def test_provider_index_round_trip(mock_packages):
-    p = ProviderIndex(spack.repo.all_package_names())
+    p = ProviderIndex(specs=spack.repo.all_package_names(), repository=spack.repo.path)

     ostream = StringIO()
     p.to_json(ostream)

     istream = StringIO(ostream.getvalue())
-    q = ProviderIndex.from_json(istream)
+    q = ProviderIndex.from_json(istream, repository=spack.repo.path)

     assert p == q


 def test_providers_for_simple(mock_packages):
-    p = ProviderIndex(spack.repo.all_package_names())
+    p = ProviderIndex(specs=spack.repo.all_package_names(), repository=spack.repo.path)

     blas_providers = p.providers_for("blas")
     assert Spec("netlib-blas") in blas_providers

@@ -51,7 +51,7 @@ def test_providers_for_simple(mock_packages):


 def test_mpi_providers(mock_packages):
-    p = ProviderIndex(spack.repo.all_package_names())
+    p = ProviderIndex(specs=spack.repo.all_package_names(), repository=spack.repo.path)

     mpi_2_providers = p.providers_for("mpi@2")
     assert Spec("mpich2") in mpi_2_providers

@@ -64,12 +64,12 @@ def test_mpi_providers(mock_packages):


 def test_equal(mock_packages):
-    p = ProviderIndex(spack.repo.all_package_names())
-    q = ProviderIndex(spack.repo.all_package_names())
+    p = ProviderIndex(specs=spack.repo.all_package_names(), repository=spack.repo.path)
+    q = ProviderIndex(specs=spack.repo.all_package_names(), repository=spack.repo.path)
     assert p == q


 def test_copy(mock_packages):
-    p = ProviderIndex(spack.repo.all_package_names())
+    p = ProviderIndex(specs=spack.repo.all_package_names(), repository=spack.repo.path)
     q = p.copy()
     assert p == q
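The ProviderIndex changes above all make the same move: the index no longer reaches for the global repository on its own; the caller passes it in explicitly. A small sketch of the new call pattern, using only the keyword arguments visible in these tests (the helper name is illustrative):

    from io import StringIO

    import spack.repo
    from spack.provider_index import ProviderIndex


    def round_trip_provider_index():
        # Build the index for an explicit repository instead of the implicit global one.
        p = ProviderIndex(specs=spack.repo.all_package_names(), repository=spack.repo.path)

        buffer = StringIO()
        p.to_json(buffer)

        # Deserialization also needs to be told which repository to resolve against.
        q = ProviderIndex.from_json(StringIO(buffer.getvalue()), repository=spack.repo.path)
        assert p == q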
@@ -141,3 +141,16 @@ def test_get_all_mock_packages(mock_packages):
     """Get the mock packages once each too."""
     for name in mock_packages.all_package_names():
         mock_packages.get_pkg_class(name)
+
+
+def test_repo_path_handles_package_removal(tmpdir, mock_packages):
+    builder = spack.repo.MockRepositoryBuilder(tmpdir, namespace="removal")
+    builder.add_package("c")
+    with spack.repo.use_repositories(builder.root, override=False) as repos:
+        r = repos.repo_for_pkg("c")
+        assert r.namespace == "removal"
+
+    builder.remove("c")
+    with spack.repo.use_repositories(builder.root, override=False) as repos:
+        r = repos.repo_for_pkg("c")
+        assert r.namespace == "builtin.mock"
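The new test above also exercises the override flag: with override=False the extra repository is layered in front of the already-configured ones instead of replacing them, which is why lookups resolve to the "removal" namespace while the package exists and fall back to builtin.mock once it is removed. A hedged sketch of that usage (the repository path and the "zlib" lookup are placeholders, not taken from the commit):

    import spack.repo

    # Keep the currently configured repositories and put an extra one in front of them.
    with spack.repo.use_repositories("/path/to/extra/repo", override=False) as repos:
        # repos is the active repository path object; packages resolve against the
        # extra repo first and then fall back to whatever was already configured.
        print(repos.repo_for_pkg("zlib").namespace)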
@@ -9,10 +9,10 @@

 import spack.error
 import spack.package_base
+import spack.repo
 import spack.util.hash as hashutil
 from spack.dependency import Dependency, all_deptypes, canonical_deptype
 from spack.spec import Spec
-from spack.util.mock_package import MockPackageMultiRepo


 def check_links(spec_to_check):

@@ -55,7 +55,7 @@ def _mock(pkg_name, spec, deptypes=all_deptypes):

 @pytest.mark.usefixtures("config")
-def test_test_deptype():
+def test_test_deptype(tmpdir):
     """Ensure that test-only dependencies are only included for specified
     packages in the following spec DAG::

@@ -67,19 +67,14 @@ def test_test_deptype():

     w->y deptypes are (link, build), w->x and y->z deptypes are (test)
     """
-    default = ("build", "link")
-    test_only = ("test",)
-
-    mock_repo = MockPackageMultiRepo()
-    x = mock_repo.add_package("x", [], [])
-    z = mock_repo.add_package("z", [], [])
-    y = mock_repo.add_package("y", [z], [test_only])
-    w = mock_repo.add_package("w", [x, y], [test_only, default])
-
-    with spack.repo.use_repositories(mock_repo):
-        spec = Spec("w")
-        spec.concretize(tests=(w.name,))
+    builder = spack.repo.MockRepositoryBuilder(tmpdir)
+    builder.add_package("x")
+    builder.add_package("z")
+    builder.add_package("y", dependencies=[("z", "test", None)])
+    builder.add_package("w", dependencies=[("x", "test", None), ("y", None, None)])
+
+    with spack.repo.use_repositories(builder.root):
+        spec = Spec("w").concretized(tests=("w",))
         assert "x" in spec
         assert "z" not in spec
@@ -138,25 +133,21 @@ def _mock_installed(self):

 @pytest.mark.usefixtures("config")
-def test_specify_preinstalled_dep():
+def test_specify_preinstalled_dep(tmpdir, monkeypatch):
     """Specify the use of a preinstalled package during concretization with a
     transitive dependency that is only supplied by the preinstalled package.
     """
-    default = ("build", "link")
-
-    mock_repo = MockPackageMultiRepo()
-    c = mock_repo.add_package("c", [], [])
-    b = mock_repo.add_package("b", [c], [default])
-    mock_repo.add_package("a", [b], [default])
-
-    with spack.repo.use_repositories(mock_repo):
-        b_spec = Spec("b")
-        b_spec.concretize()
-        for spec in b_spec.traverse():
-            setattr(spec.package, "installed", True)
+    builder = spack.repo.MockRepositoryBuilder(tmpdir)
+    builder.add_package("c")
+    builder.add_package("b", dependencies=[("c", None, None)])
+    builder.add_package("a", dependencies=[("b", None, None)])
+
+    with spack.repo.use_repositories(builder.root):
+        b_spec = Spec("b").concretized()
+        monkeypatch.setattr(Spec, "installed", property(lambda x: x.name != "a"))

         a_spec = Spec("a")
-        a_spec._add_dependency(b_spec, default)
+        a_spec._add_dependency(b_spec, ("build", "link"))
         a_spec.concretize()

         assert set(x.name for x in a_spec.traverse()) == set(["a", "b", "c"])
@@ -167,30 +158,19 @@ def test_specify_preinstalled_dep():
     "spec_str,expr_str,expected",
     [("x ^y@2", "y@2", True), ("x@1", "y", False), ("x", "y@3", True)],
 )
-def test_conditional_dep_with_user_constraints(spec_str, expr_str, expected):
+def test_conditional_dep_with_user_constraints(tmpdir, spec_str, expr_str, expected):
     """This sets up packages X->Y such that X depends on Y conditionally. It
     then constructs a Spec with X but with no constraints on X, so that the
     initial normalization pass cannot determine whether the constraints are
     met to add the dependency; this checks whether a user-specified constraint
     on Y is applied properly.
     """
-    # FIXME: We need to tweak optimization rules to make this test
-    # FIXME: not prefer a DAG with fewer nodes wrt more recent
-    # FIXME: versions of the package
-    if spack.config.get("config:concretizer") == "clingo":
-        pytest.xfail("Clingo optimization rules prefer to trim a node")
-
-    default = ("build", "link")
-
-    mock_repo = MockPackageMultiRepo()
-    y = mock_repo.add_package("y", [], [])
-    x_on_y_conditions = {y.name: {"x@2:": "y"}}
-    mock_repo.add_package("x", [y], [default], conditions=x_on_y_conditions)
-
-    with spack.repo.use_repositories(mock_repo):
-        spec = Spec(spec_str)
-        spec.concretize()
+    builder = spack.repo.MockRepositoryBuilder(tmpdir)
+    builder.add_package("y")
+    builder.add_package("x", dependencies=[("y", None, "x@2:")])
+
+    with spack.repo.use_repositories(builder.root):
+        spec = Spec(spec_str).concretized()
         result = expr_str in spec
         assert result is expected, "{0} in {1}".format(expr_str, spec)
@@ -535,6 +535,7 @@ def test_parse_yaml_simple(self, mock_packages, tmpdir):
     @pytest.mark.usefixtures("config")
     def test_parse_filename_missing_slash_as_spec(self, mock_packages, tmpdir):
         """Ensure that libelf.yaml parses as a spec, NOT a file."""
+        # TODO: This test is brittle, as it should cover also the JSON case now.
         s = Spec("libelf")
         s.concretize()

@@ -559,7 +560,7 @@ def test_parse_filename_missing_slash_as_spec(self, mock_packages, tmpdir):

         # check that if we concretize this spec, we get a good error
         # message that mentions we might've meant a file.
-        with pytest.raises(spack.repo.UnknownPackageError) as exc_info:
+        with pytest.raises(spack.repo.UnknownEntityError) as exc_info:
             spec.concretize()
         assert exc_info.value.long_message
         assert (
@@ -21,13 +21,12 @@

 import spack.hash_types as ht
 import spack.paths
+import spack.repo
 import spack.spec
 import spack.util.spack_json as sjson
 import spack.util.spack_yaml as syaml
 import spack.version
-from spack import repo
 from spack.spec import Spec, save_dependency_specfiles
-from spack.util.mock_package import MockPackageMultiRepo
 from spack.util.spack_yaml import SpackYAMLError, syaml_dict

@@ -345,20 +344,17 @@ def check_specs_equal(original_spec, spec_yaml_path):
 def test_save_dependency_spec_jsons_subset(tmpdir, config):
     output_path = str(tmpdir.mkdir("spec_jsons"))

-    default = ("build", "link")
-
-    mock_repo = MockPackageMultiRepo()
-    g = mock_repo.add_package("g", [], [])
-    f = mock_repo.add_package("f", [], [])
-    e = mock_repo.add_package("e", [], [])
-    d = mock_repo.add_package("d", [f, g], [default, default])
-    c = mock_repo.add_package("c", [], [])
-    b = mock_repo.add_package("b", [d, e], [default, default])
-    mock_repo.add_package("a", [b, c], [default, default])
-
-    with repo.use_repositories(mock_repo):
-        spec_a = Spec("a")
-        spec_a.concretize()
+    builder = spack.repo.MockRepositoryBuilder(tmpdir.mkdir("mock-repo"))
+    builder.add_package("g")
+    builder.add_package("f")
+    builder.add_package("e")
+    builder.add_package("d", dependencies=[("f", None, None), ("g", None, None)])
+    builder.add_package("c")
+    builder.add_package("b", dependencies=[("d", None, None), ("e", None, None)])
+    builder.add_package("a", dependencies=[("b", None, None), ("c", None, None)])
+
+    with spack.repo.use_repositories(builder.root):
+        spec_a = Spec("a").concretized()
         b_spec = spec_a["b"]
         c_spec = spec_a["c"]
         spec_a_json = spec_a.to_json()
@@ -40,7 +40,7 @@


 def test_tag_copy(mock_packages):
-    index = spack.tag.TagIndex.from_json(StringIO(tags_json))
+    index = spack.tag.TagIndex.from_json(StringIO(tags_json), repository=mock_packages)
     new_index = index.copy()

     assert index.tags == new_index.tags

@@ -104,21 +104,21 @@ def test_tag_index_round_trip(mock_packages):
     mock_index.to_json(ostream)

     istream = StringIO(ostream.getvalue())
-    new_index = spack.tag.TagIndex.from_json(istream)
+    new_index = spack.tag.TagIndex.from_json(istream, repository=mock_packages)

     assert mock_index == new_index


-def test_tag_equal():
-    first_index = spack.tag.TagIndex.from_json(StringIO(tags_json))
-    second_index = spack.tag.TagIndex.from_json(StringIO(tags_json))
+def test_tag_equal(mock_packages):
+    first_index = spack.tag.TagIndex.from_json(StringIO(tags_json), repository=mock_packages)
+    second_index = spack.tag.TagIndex.from_json(StringIO(tags_json), repository=mock_packages)

     assert first_index == second_index


-def test_tag_merge():
-    first_index = spack.tag.TagIndex.from_json(StringIO(tags_json))
-    second_index = spack.tag.TagIndex.from_json(StringIO(more_tags_json))
+def test_tag_merge(mock_packages):
+    first_index = spack.tag.TagIndex.from_json(StringIO(tags_json), repository=mock_packages)
+    second_index = spack.tag.TagIndex.from_json(StringIO(more_tags_json), repository=mock_packages)

     assert first_index != second_index

@@ -136,24 +136,23 @@ def test_tag_merge():
     assert tag_keys == all_tags


-def test_tag_not_dict():
+def test_tag_not_dict(mock_packages):
     list_json = "[]"
     with pytest.raises(spack.tag.TagIndexError) as e:
-        spack.tag.TagIndex.from_json(StringIO(list_json))
+        spack.tag.TagIndex.from_json(StringIO(list_json), repository=mock_packages)
     assert "not a dict" in str(e)


-def test_tag_no_tags():
+def test_tag_no_tags(mock_packages):
     pkg_json = '{"packages": []}'
     with pytest.raises(spack.tag.TagIndexError) as e:
-        spack.tag.TagIndex.from_json(StringIO(pkg_json))
+        spack.tag.TagIndex.from_json(StringIO(pkg_json), repository=mock_packages)
     assert "does not start with" in str(e)


 def test_tag_update_package(mock_packages):
     mock_index = spack.repo.path.tag_index

-    index = spack.tag.TagIndex()
+    index = spack.tag.TagIndex(repository=mock_packages)
     for name in spack.repo.all_package_names():
         index.update_package(name)
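As with ProviderIndex, TagIndex is now bound to an explicit repository at construction and deserialization time rather than implicitly using the global one. A small sketch of rebuilding a tag index for the currently active repositories, using only the constructor and methods shown in the tests above (the helper name is illustrative):

    import spack.repo
    import spack.tag


    def rebuild_tag_index():
        # Bind the index to a repository up front instead of relying on spack.repo.path implicitly.
        index = spack.tag.TagIndex(repository=spack.repo.path)
        for name in spack.repo.all_package_names():
            index.update_package(name)
        return index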
@@ -18,12 +18,10 @@
 import spack.spec
 from spack.directory_layout import DirectoryLayout
 from spack.filesystem_view import YamlFilesystemView
-from spack.repo import RepoPath

 pytestmark = pytest.mark.skipif(
     sys.platform == "win32",
-    reason="Python activation not \
-                currently supported on Windows",
+    reason="Python activation not currently supported on Windows",
 )

@@ -60,9 +58,7 @@ def builtin_and_mock_packages():
     # precedence than the builtin repo, so we test builtin.perl against
     # builtin.mock.perl-extension.
     repo_dirs = [spack.paths.packages_path, spack.paths.mock_packages_path]
-    path = RepoPath(*repo_dirs)
-
-    with spack.repo.use_repositories(path):
+    with spack.repo.use_repositories(*repo_dirs):
         yield
@@ -1,38 +0,0 @@
-# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-import spack.repo
-from spack.util.mock_package import MockPackageMultiRepo
-
-
-def test_mock_package_possible_dependencies():
-    mock_repo = MockPackageMultiRepo()
-    e = mock_repo.add_package("e")
-    d = mock_repo.add_package("d", [e])
-    c = mock_repo.add_package("c", [d])
-    b = mock_repo.add_package("b", [d])
-    a = mock_repo.add_package("a", [b, c])
-
-    with spack.repo.use_repositories(mock_repo):
-        assert set(a.possible_dependencies()) == set(["a", "b", "c", "d", "e"])
-        assert set(b.possible_dependencies()) == set(["b", "d", "e"])
-        assert set(c.possible_dependencies()) == set(["c", "d", "e"])
-        assert set(d.possible_dependencies()) == set(["d", "e"])
-        assert set(e.possible_dependencies()) == set(["e"])
-
-        assert set(a.possible_dependencies(transitive=False)) == set(["a", "b", "c"])
-        assert set(b.possible_dependencies(transitive=False)) == set(["b", "d"])
-        assert set(c.possible_dependencies(transitive=False)) == set(["c", "d"])
-        assert set(d.possible_dependencies(transitive=False)) == set(["d", "e"])
-        assert set(e.possible_dependencies(transitive=False)) == set(["e"])
-
-
-def test_mock_repo_is_virtual():
-    mock_repo = MockPackageMultiRepo()
-
-    # current implementation is always false
-    assert mock_repo.is_virtual("foo") is False
-    assert mock_repo.is_virtual("bar") is False
-    assert mock_repo.is_virtual("baz") is False
@@ -1,177 +0,0 @@
-# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-"""Infrastructure used by tests for mocking packages and repos."""
-import collections
-
-import spack.provider_index
-import spack.util.naming
-from spack.dependency import Dependency
-from spack.spec import Spec
-from spack.version import Version
-
-__all__ = ["MockPackageMultiRepo"]
-
-
-class MockPackageBase(object):
-    """Internal base class for mocking ``spack.package_base.PackageBase``.
-
-    Use ``MockPackageMultiRepo.add_package()`` to create new instances.
-
-    """
-
-    virtual = False
-
-    def __init__(self, dependencies, dependency_types, conditions=None, versions=None):
-        """Instantiate a new MockPackageBase.
-
-        This is not for general use; it needs to be constructed by a
-        ``MockPackageMultiRepo``, as we need to know about *all* packages
-        to find possible depenencies.
-
-        """
-        self.spec = None
-
-    def __call__(self, *args, **kwargs):
-        return self
-
-    def provides(self, vname):
-        return vname in self.provided
-
-    @property
-    def virtuals_provided(self):
-        return [v.name for v, c in self.provided]
-
-    @classmethod
-    def possible_dependencies(cls, transitive=True, deptype="all", visited=None, virtuals=None):
-        visited = {} if visited is None else visited
-
-        for name, conditions in cls.dependencies.items():
-            # check whether this dependency could be of the type asked for
-            types = [dep.type for cond, dep in conditions.items()]
-            types = set.union(*types)
-            if not any(d in types for d in deptype):
-                continue
-
-            visited.setdefault(cls.name, set())
-            for dep_name in cls.dependencies:
-                if dep_name in visited:
-                    continue
-
-                visited.setdefault(dep_name, set())
-
-                if not transitive:
-                    continue
-
-                cls._repo.get(dep_name).possible_dependencies(transitive, deptype, visited, virtuals)
-
-        return visited
-
-    def content_hash(self):
-        # Unlike real packages, MockPackage doesn't have a corresponding
-        # package.py file; in that sense, the content_hash is always the same.
-        return self.__class__.__name__
-
-
-class MockPackageMultiRepo(object):
-    """Mock package repository, mimicking ``spack.repo.Repo``."""
-
-    def __init__(self):
-        self.spec_to_pkg = {}
-        self.namespace = "mock"  # repo namespace
-        self.full_namespace = "spack.pkg.mock"  # python import namespace
-
-    def get(self, spec):
-        if not isinstance(spec, spack.spec.Spec):
-            spec = Spec(spec)
-        if spec.name not in self.spec_to_pkg:
-            raise spack.repo.UnknownPackageError(spec.fullname)
-        return self.spec_to_pkg[spec.name]
-
-    def get_pkg_class(self, name):
-        namespace, _, name = name.rpartition(".")
-        if namespace and namespace != self.namespace:
-            raise spack.repo.InvalidNamespaceError("bad namespace: %s" % self.namespace)
-        return self.spec_to_pkg[name]
-
-    def exists(self, name):
-        return name in self.spec_to_pkg
-
-    def is_virtual(self, name, use_index=True):
-        return False
-
-    def repo_for_pkg(self, name):
-        Repo = collections.namedtuple("Repo", ["namespace"])
-        return Repo("mockrepo")
-
-    def __contains__(self, item):
-        return item in self.spec_to_pkg
-
-    def add_package(self, name, dependencies=None, dependency_types=None, conditions=None):
-        """Factory method for creating mock packages.
-
-        This creates a new subclass of ``MockPackageBase``, ensures that its
-        ``name`` and ``__name__`` properties are set up correctly, and
-        returns a new instance.
-
-        We use a factory function here because many functions and properties
-        of packages need to be class functions.
-
-        Args:
-            name (str): name of the new package
-            dependencies (list): list of mock packages to be dependencies
-                for this new package (optional; no deps if not provided)
-            dependency_type (list): list of deptypes for each dependency
-                (optional; will be default_deptype if not provided)
-            conditions (list): condition specs for each dependency (optional)
-
-        """
-        if not dependencies:
-            dependencies = []
-
-        if not dependency_types:
-            dependency_types = [spack.dependency.default_deptype] * len(dependencies)
-
-        assert len(dependencies) == len(dependency_types)
-
-        # new class for the mock package
-        class MockPackage(MockPackageBase):
-            pass
-
-        MockPackage.__name__ = spack.util.naming.mod_to_class(name)
-        MockPackage.name = name
-        MockPackage._repo = self
-
-        # set up dependencies
-        MockPackage.dependencies = collections.OrderedDict()
-        for dep, dtype in zip(dependencies, dependency_types):
-            d = Dependency(MockPackage, Spec(dep.name), type=dtype)
-            if not conditions or dep.name not in conditions:
-                MockPackage.dependencies[dep.name] = {Spec(name): d}
-            else:
-                dep_conditions = conditions[dep.name]
-                dep_conditions = dict(
-                    (Spec(x), Dependency(MockPackage, Spec(y), type=dtype))
-                    for x, y in dep_conditions.items()
-                )
-                MockPackage.dependencies[dep.name] = dep_conditions
-
-        # each package has some fake versions
-        versions = list(Version(x) for x in [1, 2, 3])
-        MockPackage.versions = dict((x, {"preferred": False}) for x in versions)
-
-        MockPackage.variants = {}
-        MockPackage.provided = {}
-        MockPackage.conflicts = {}
-        MockPackage.patches = {}
-
-        mock_package = MockPackage(dependencies, dependency_types, conditions, versions)
-        self.spec_to_pkg[name] = mock_package
-        self.spec_to_pkg["mockrepo." + name] = mock_package
-
-        return mock_package
-
-    @property
-    def provider_index(self):
-        return spack.provider_index.ProviderIndex()
@@ -1,15 +0,0 @@
-# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-
-class Concretizationroot(Package):
-    url = 'fake_url'
-
-    version('1.0')
-
-    {% for dep in specs %}
-    depends_on('{{ dep }}')
-    {% endfor %}
-
share/spack/templates/mock-repository/package.pyt (new file, 19 lines added)
@@ -0,0 +1,19 @@
+class {{ cls_name }}(Package):
+    homepage = "http://www.example.com"
+    url = "http://www.example.com/root-1.0.tar.gz"
+
+    version("3.0", sha256='abcde')
+    version("2.0", sha256='abcde')
+    version("1.0", sha256='abcde')
+
+    {% for dep_spec, dep_type, condition in dependencies %}
+    {% if dep_type and condition %}
+    depends_on("{{ dep_spec }}", type="{{ dep_type }}", when="{{ condition }}")
+    {% elif dep_type %}
+    depends_on("{{ dep_spec }}", type="{{ dep_type }}")
+    {% elif condition %}
+    depends_on("{{ dep_spec }}", when="{{ condition }}")
+    {% else %}
+    depends_on("{{ dep_spec }}")
+    {% endif %}
+    {% endfor %}
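This template is the on-disk replacement for the deleted coconcretization.pyt: MockRepositoryBuilder presumably renders one such package.py per add_package() call, with the (dep_spec, dep_type, condition) tuples matching the dependency tuples seen in the tests above. A hypothetical rendering that follows the make_environment()/get_template() pattern Spack uses for .pyt templates; the template lookup name and the exact context passed by the builder are assumptions based on this file, not a verified description of its implementation:

    import spack.tengine
    import spack.util.naming

    # Hypothetical use of the template above; the real MockRepositoryBuilder may differ.
    env = spack.tengine.make_environment()
    template = env.get_template("mock-repository/package.pyt")
    content = template.render(
        cls_name=spack.util.naming.mod_to_class("my-pkg"),
        dependencies=[("z", None, None), ("y", "test", "@2:")],
    )
    print(content)  # a rendered package.py for a package that depends on z and y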