Remove LazyReference from code (#38944)

A LazyReference object is a reference to an attribute of a
lazily evaluated singleton. Its only purpose is to let developers
use shorter names to refer to such an attribute.

This class does more harm than good, as it obfuscates the fact
that we are using the attribute of a global object. Also, it can easily
go out of sync with the singleton it refers to if, for instance, the
singleton is updated but the references are not.

This commit removes the LazyReference class entirely, and accesses
the attributes explicitly, passing through the global value to which
they are attached.
This commit is contained in:
Massimiliano Culpo 2023-07-19 11:08:51 +02:00 committed by GitHub
parent a99eaa9541
commit a7f2abf924
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
73 changed files with 377 additions and 426 deletions

View file

@ -843,27 +843,6 @@ def __repr__(self):
return repr(self.instance)
class LazyReference:
    """Lazily evaluated reference to part of a singleton.

    Wraps a zero-argument callable; every attribute access, item access,
    or string conversion re-evaluates the callable and forwards the
    operation to the object it returns.
    """

    def __init__(self, ref_function):
        self.ref_function = ref_function

    def _target(self):
        """Evaluate the reference and return the underlying object."""
        return self.ref_function()

    def __getattr__(self, name):
        # "ref_function" itself must never be forwarded: on an instance
        # whose __init__ has not run (e.g. during unpickling), looking it
        # up here would otherwise recurse forever.
        if name == "ref_function":
            raise AttributeError()
        return getattr(self._target(), name)

    def __getitem__(self, name):
        return self._target()[name]

    def __str__(self):
        return str(self._target())

    def __repr__(self):
        return repr(self._target())
def load_module_from_file(module_name, module_path):
"""Loads a python module from the path of the corresponding file.

View file

@ -718,7 +718,7 @@ def get_buildfile_manifest(spec):
# look for them to decide if text file needs to be relocated or not
prefixes = [d.prefix for d in spec.traverse(root=True, deptype="all") if not d.external]
prefixes.append(spack.hooks.sbang.sbang_install_path())
prefixes.append(str(spack.store.layout.root))
prefixes.append(str(spack.store.STORE.layout.root))
# Create a giant regex that matches all prefixes
regex = utf8_paths_to_single_binary_regex(prefixes)
@ -731,7 +731,7 @@ def get_buildfile_manifest(spec):
for rel_path in visitor.symlinks:
abs_path = os.path.join(root, rel_path)
link = os.readlink(abs_path)
if os.path.isabs(link) and link.startswith(spack.store.layout.root):
if os.path.isabs(link) and link.startswith(spack.store.STORE.layout.root):
data["link_to_relocate"].append(rel_path)
# Non-symlinks.
@ -779,9 +779,9 @@ def get_buildinfo_dict(spec):
return {
"sbang_install_path": spack.hooks.sbang.sbang_install_path(),
"buildpath": spack.store.layout.root,
"buildpath": spack.store.STORE.layout.root,
"spackprefix": spack.paths.prefix,
"relative_prefix": os.path.relpath(spec.prefix, spack.store.layout.root),
"relative_prefix": os.path.relpath(spec.prefix, spack.store.STORE.layout.root),
"relocate_textfiles": manifest["text_to_relocate"],
"relocate_binaries": manifest["binary_to_relocate"],
"relocate_links": manifest["link_to_relocate"],
@ -1262,7 +1262,7 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
# without concretizing with the current spack packages
# and preferences
spec_file = spack.store.layout.spec_file_path(spec)
spec_file = spack.store.STORE.layout.spec_file_path(spec)
specfile_name = tarball_name(spec, ".spec.json")
specfile_path = os.path.realpath(os.path.join(cache_prefix, specfile_name))
signed_specfile_path = "{0}.sig".format(specfile_path)
@ -1311,7 +1311,7 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
# Add original install prefix relative to layout root to spec.json.
# This will be used to determine if the directory layout has changed.
buildinfo = {}
buildinfo["relative_prefix"] = os.path.relpath(spec.prefix, spack.store.layout.root)
buildinfo["relative_prefix"] = os.path.relpath(spec.prefix, spack.store.STORE.layout.root)
spec_dict["buildinfo"] = buildinfo
with open(specfile_path, "w") as outfile:
@ -1369,7 +1369,7 @@ def specs_to_be_packaged(
packageable = lambda n: not n.external and n.installed
# Mass install check
with spack.store.db.read_transaction():
with spack.store.STORE.db.read_transaction():
return list(filter(packageable, nodes))
@ -1606,7 +1606,7 @@ def relocate_package(spec):
"""
workdir = str(spec.prefix)
buildinfo = read_buildinfo_file(workdir)
new_layout_root = str(spack.store.layout.root)
new_layout_root = str(spack.store.STORE.layout.root)
new_prefix = str(spec.prefix)
new_rel_prefix = str(os.path.relpath(new_prefix, new_layout_root))
new_spack_prefix = str(spack.paths.prefix)
@ -1854,7 +1854,7 @@ def extract_tarball(spec, download_result, unsigned=False, force=False):
tarfile_path, size, contents, "sha256", expected, local_checksum
)
new_relative_prefix = str(os.path.relpath(spec.prefix, spack.store.layout.root))
new_relative_prefix = str(os.path.relpath(spec.prefix, spack.store.STORE.layout.root))
# if the original relative prefix is in the spec file use it
buildinfo = spec_dict.get("buildinfo", {})
old_relative_prefix = buildinfo.get("relative_prefix", new_relative_prefix)
@ -1866,7 +1866,7 @@ def extract_tarball(spec, download_result, unsigned=False, force=False):
# The directory created is the base directory name of the old prefix.
# Moving the old prefix name to the new prefix location should preserve
# hard links and symbolic links.
extract_tmp = os.path.join(spack.store.layout.root, ".tmp")
extract_tmp = os.path.join(spack.store.STORE.layout.root, ".tmp")
mkdirp(extract_tmp)
extracted_dir = os.path.join(extract_tmp, old_relative_prefix.split(os.path.sep)[-1])
@ -1893,7 +1893,9 @@ def extract_tarball(spec, download_result, unsigned=False, force=False):
raise e
else:
manifest_file = os.path.join(
spec.prefix, spack.store.layout.metadata_dir, spack.store.layout.manifest_file_name
spec.prefix,
spack.store.STORE.layout.metadata_dir,
spack.store.STORE.layout.manifest_file_name,
)
if not os.path.exists(manifest_file):
spec_id = spec.format("{name}/{hash:7}")
@ -1952,7 +1954,7 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
extract_tarball(spec, download_result, unsigned, force)
spack.hooks.post_install(spec, False)
spack.store.db.add(spec, spack.store.layout)
spack.store.STORE.db.add(spec, spack.store.STORE.layout)
def install_single_spec(spec, unsigned=False, force=False):

View file

@ -50,7 +50,7 @@ def _try_import_from_store(
# We have to run as part of this python interpreter
query_spec += " ^" + spec_for_current_python()
installed_specs = spack.store.db.query(query_spec, installed=True)
installed_specs = spack.store.STORE.db.query(query_spec, installed=True)
for candidate_spec in installed_specs:
pkg = candidate_spec["python"].package
@ -183,7 +183,7 @@ def _executables_in_store(
executables_str = ", ".join(executables)
msg = "[BOOTSTRAP EXECUTABLES {0}] Try installed specs with query '{1}'"
tty.debug(msg.format(executables_str, query_spec))
installed_specs = spack.store.db.query(query_spec, installed=True)
installed_specs = spack.store.STORE.db.query(query_spec, installed=True)
if installed_specs:
for concrete_spec in installed_specs:
bin_dir = concrete_spec.prefix.bin

View file

@ -39,7 +39,7 @@ def check_paths(path_list, filetype, predicate):
check_paths(pkg.sanity_check_is_file, "file", os.path.isfile)
check_paths(pkg.sanity_check_is_dir, "directory", os.path.isdir)
ignore_file = llnl.util.lang.match_predicate(spack.store.layout.hidden_file_regexes)
ignore_file = llnl.util.lang.match_predicate(spack.store.STORE.layout.hidden_file_regexes)
if all(map(ignore_file, os.listdir(pkg.prefix))):
msg = "Install failed for {0}. Nothing was installed!"
raise spack.installer.InstallError(msg.format(pkg.name))

View file

@ -286,7 +286,7 @@ def get_external_python_for_prefix(self):
spack.spec.Spec: The external Spec for python most likely to be compatible with self.spec
"""
python_externals_installed = [
s for s in spack.store.db.query("python") if s.prefix == self.spec.external_path
s for s in spack.store.STORE.db.query("python") if s.prefix == self.spec.external_path
]
if python_externals_installed:
return python_externals_installed[0]

View file

@ -273,9 +273,9 @@ def disambiguate_spec_from_hashes(spec, hashes, local=False, installed=True, fir
See ``spack.database.Database._query`` for details.
"""
if local:
matching_specs = spack.store.db.query_local(spec, hashes=hashes, installed=installed)
matching_specs = spack.store.STORE.db.query_local(spec, hashes=hashes, installed=installed)
else:
matching_specs = spack.store.db.query(spec, hashes=hashes, installed=installed)
matching_specs = spack.store.STORE.db.query(spec, hashes=hashes, installed=installed)
if not matching_specs:
tty.die("Spec '%s' matches no installed packages." % spec)
@ -473,7 +473,7 @@ def format_list(specs):
out = ""
# getting lots of prefixes requires DB lookups. Ensure
# all spec.prefix calls are in one transaction.
with spack.store.db.read_transaction():
with spack.store.STORE.db.read_transaction():
for string, spec in formatted:
if not string:
# print newline from above

View file

@ -115,7 +115,7 @@ def clean(parser, args):
tty.msg("Removing all temporary build stages")
spack.stage.purge()
# Temp directory where buildcaches are extracted
extract_tmp = os.path.join(spack.store.layout.root, ".tmp")
extract_tmp = os.path.join(spack.store.STORE.layout.root, ".tmp")
if os.path.exists(extract_tmp):
tty.debug("Removing {0}".format(extract_tmp))
shutil.rmtree(extract_tmp)

View file

@ -82,12 +82,12 @@ def _specs(self, **kwargs):
# return everything for an empty query.
if not qspecs:
return spack.store.db.query(**kwargs)
return spack.store.STORE.db.query(**kwargs)
# Return only matching stuff otherwise.
specs = {}
for spec in qspecs:
for s in spack.store.db.query(spec, **kwargs):
for s in spack.store.STORE.db.query(spec, **kwargs):
# This is fast for already-concrete specs
specs[s.dag_hash()] = s

View file

@ -106,7 +106,7 @@ def emulate_env_utility(cmd_name, context, args):
visitor = AreDepsInstalledVisitor(context=context)
# Mass install check needs read transaction.
with spack.store.db.read_transaction():
with spack.store.STORE.db.read_transaction():
traverse.traverse_breadth_first_with_visitor([spec], traverse.CoverNodesVisitor(visitor))
if visitor.has_uninstalled_deps:

View file

@ -399,8 +399,8 @@ def config_prefer_upstream(args):
if scope is None:
scope = spack.config.default_modify_scope("packages")
all_specs = set(spack.store.db.query(installed=True))
local_specs = set(spack.store.db.query_local(installed=True))
all_specs = set(spack.store.STORE.db.query(installed=True))
local_specs = set(spack.store.STORE.db.query_local(installed=True))
pref_specs = local_specs if args.local else all_specs - local_specs
conflicting_variants = set()

View file

@ -60,16 +60,16 @@ def create_db_tarball(args):
tarball_name = "spack-db.%s.tar.gz" % _debug_tarball_suffix()
tarball_path = os.path.abspath(tarball_name)
base = os.path.basename(str(spack.store.root))
base = os.path.basename(str(spack.store.STORE.root))
transform_args = []
if "GNU" in tar("--version", output=str):
transform_args = ["--transform", "s/^%s/%s/" % (base, tarball_name)]
else:
transform_args = ["-s", "/^%s/%s/" % (base, tarball_name)]
wd = os.path.dirname(str(spack.store.root))
wd = os.path.dirname(str(spack.store.STORE.root))
with working_dir(wd):
files = [spack.store.db._index_path]
files = [spack.store.STORE.db._index_path]
files += glob("%s/*/*/*/.spack/spec.json" % base)
files += glob("%s/*/*/*/.spack/spec.yaml" % base)
files = [os.path.relpath(f) for f in files]

View file

@ -60,7 +60,7 @@ def dependencies(parser, args):
format_string = "{name}{@version}{%compiler}{/hash:7}"
if sys.stdout.isatty():
tty.msg("Dependencies of %s" % spec.format(format_string, color=True))
deps = spack.store.db.installed_relatives(
deps = spack.store.STORE.db.installed_relatives(
spec, "children", args.transitive, deptype=args.deptype
)
if deps:

View file

@ -96,7 +96,7 @@ def dependents(parser, args):
format_string = "{name}{@version}{%compiler}{/hash:7}"
if sys.stdout.isatty():
tty.msg("Dependents of %s" % spec.cformat(format_string))
deps = spack.store.db.installed_relatives(spec, "parents", args.transitive)
deps = spack.store.STORE.db.installed_relatives(spec, "parents", args.transitive)
if deps:
spack.cmd.display_specs(deps, long=True)
else:

View file

@ -130,7 +130,7 @@ def deprecate(parser, args):
already_deprecated = []
already_deprecated_for = []
for spec in all_deprecate:
deprecated_for = spack.store.db.deprecator(spec)
deprecated_for = spack.store.STORE.db.deprecator(spec)
if deprecated_for:
already_deprecated.append(spec)
already_deprecated_for.append(deprecated_for)

View file

@ -91,7 +91,7 @@ def extensions(parser, args):
if args.show in ("installed", "all"):
# List specs of installed extensions.
installed = [s.spec for s in spack.store.db.installed_extensions_for(spec)]
installed = [s.spec for s in spack.store.STORE.db.installed_extensions_for(spec)]
if args.show == "all":
print

View file

@ -20,7 +20,7 @@ def setup_parser(subparser):
def gc(parser, args):
specs = spack.store.db.unused_specs
specs = spack.store.STORE.db.unused_specs
# Restrict garbage collection to the active environment
# speculating over roots that are yet to be installed

View file

@ -63,7 +63,7 @@ def graph(parser, args):
if env:
specs = env.all_specs()
else:
specs = spack.store.db.query()
specs = spack.store.STORE.db.query()
else:
specs = spack.cmd.parse_specs(args.specs, concretize=not args.static)

View file

@ -266,7 +266,7 @@ def require_user_confirmation_for_overwrite(concrete_specs, args):
if args.yes_to_all:
return
installed = list(filter(lambda x: x, map(spack.store.db.query_one, concrete_specs)))
installed = list(filter(lambda x: x, map(spack.store.STORE.db.query_one, concrete_specs)))
display_args = {"long": True, "show_flags": True, "variants": True}
if installed:

View file

@ -101,7 +101,7 @@ def load(parser, args):
)
return 1
with spack.store.db.read_transaction():
with spack.store.STORE.db.read_transaction():
if "dependencies" in args.things_to_load:
include_roots = "package" in args.things_to_load
specs = [

View file

@ -71,7 +71,7 @@ def find_matching_specs(specs, allow_multiple_matches=False):
for spec in specs:
install_query = [InstallStatuses.INSTALLED]
matching = spack.store.db.query_local(spec, installed=install_query)
matching = spack.store.STORE.db.query_local(spec, installed=install_query)
# For each spec provided, make sure it refers to only one package.
# Fail and ask user to be unambiguous if it doesn't
if not allow_multiple_matches and len(matching) > 1:
@ -102,7 +102,7 @@ def do_mark(specs, explicit):
explicit (bool): whether to mark specs as explicitly installed
"""
for spec in specs:
spack.store.db.update_explicit(spec, explicit)
spack.store.STORE.db.update_explicit(spec, explicit)
def mark_specs(args, specs):

View file

@ -11,4 +11,4 @@
def reindex(parser, args):
spack.store.store.reindex()
spack.store.STORE.reindex()

View file

@ -100,7 +100,7 @@ def spec(parser, args):
# spec in the DAG. This avoids repeatedly querying the DB.
tree_context = lang.nullcontext
if args.install_status:
tree_context = spack.store.db.read_transaction
tree_context = spack.store.STORE.db.read_transaction
# Use command line specified specs, otherwise try to use environment specs.
if args.specs:

View file

@ -174,7 +174,7 @@ def test_run(args):
specs = spack.cmd.parse_specs(args.specs) if args.specs else [None]
specs_to_test = []
for spec in specs:
matching = spack.store.db.query_local(spec, hashes=hashes, explicit=explicit)
matching = spack.store.STORE.db.query_local(spec, hashes=hashes, explicit=explicit)
if spec and not matching:
tty.warn("No {0}installed packages match spec {1}".format(explicit_str, spec))
"""
@ -252,7 +252,7 @@ def has_test_and_tags(pkg_class):
env = ev.active_environment()
hashes = env.all_hashes() if env else None
specs = spack.store.db.query(hashes=hashes)
specs = spack.store.STORE.db.query(hashes=hashes)
specs = list(filter(lambda s: has_test_and_tags(s.package_class), specs))
spack.cmd.display_specs(specs, long=True)
@ -329,7 +329,7 @@ def _report_suite_results(test_suite, args, constraints):
qspecs = spack.cmd.parse_specs(constraints)
specs = {}
for spec in qspecs:
for s in spack.store.db.query(spec, installed=True):
for s in spack.store.STORE.db.query(spec, installed=True):
specs[s.dag_hash()] = s
specs = sorted(specs.values())
test_specs = dict((test_suite.test_pkg_id(s), s) for s in test_suite.specs if s in specs)

View file

@ -103,7 +103,7 @@ def find_matching_specs(
has_errors = False
for spec in specs:
install_query = [InstallStatuses.INSTALLED, InstallStatuses.DEPRECATED]
matching = spack.store.db.query_local(
matching = spack.store.STORE.db.query_local(
spec, hashes=hashes, installed=install_query, origin=origin
)
# For each spec provided, make sure it refers to only one package.
@ -139,7 +139,7 @@ def installed_dependents(specs: List[spack.spec.Spec]) -> List[spack.spec.Spec]:
# input; in that case we return an empty list.
def is_installed(spec):
record = spack.store.db.query_local_by_spec_hash(spec.dag_hash())
record = spack.store.STORE.db.query_local_by_spec_hash(spec.dag_hash())
return record and record.installed
specs = traverse.traverse_nodes(

View file

@ -71,7 +71,7 @@ def unload(parser, args):
for spec in spack.cmd.parse_specs(args.specs)
]
else:
specs = spack.store.db.query(hashes=hashes)
specs = spack.store.STORE.db.query(hashes=hashes)
if not args.shell:
specs_str = " ".join(args.specs) or "SPECS"

View file

@ -71,7 +71,7 @@ def verify(parser, args):
spec_args = spack.cmd.parse_specs(args.specs_or_files)
if args.all:
query = spack.store.db.query_local if local else spack.store.db.query
query = spack.store.STORE.db.query_local if local else spack.store.STORE.db.query
# construct spec list
if spec_args:

View file

@ -70,7 +70,7 @@ def squash(matching_specs):
return matching_in_view[0] if matching_in_view else matching_specs[0]
# make function always return a list to keep consistency between py2/3
return list(map(squash, map(spack.store.db.query, specs)))
return list(map(squash, map(spack.store.STORE.db.query, specs)))
def setup_parser(sp):
@ -200,7 +200,7 @@ def view(parser, args):
view = YamlFilesystemView(
path,
spack.store.layout,
spack.store.STORE.layout,
projections=ordered_projections,
ignore_conflicts=getattr(args, "ignore_conflicts", False),
link=link_fn,

View file

@ -194,7 +194,7 @@ def read(path, apply_updates):
spack.compilers.add_compilers_to_config(compilers, init_config=False)
if apply_updates:
for spec in specs.values():
spack.store.db.add(spec, directory_layout=None)
spack.store.STORE.db.add(spec, directory_layout=None)
class ManifestValidationError(spack.error.SpackError):

View file

@ -542,7 +542,7 @@ def mark_failed(self, spec: "spack.spec.Spec") -> lk.Lock:
containing the spec, in a subdirectory of the database to enable
persistence across overlapping but separate related build processes.
The failure lock file, ``spack.store.db.prefix_failures``, lives
The failure lock file, ``spack.store.STORE.db.prefix_failures``, lives
alongside the install DB. ``n`` is the sys.maxsize-bit prefix of the
associated DAG hash to make the likelihood of collision very low with
no cleanup required.
@ -620,7 +620,7 @@ def prefix_lock(self, spec: "spack.spec.Spec", timeout: Optional[float] = None)
Prefix lock is a byte range lock on the nth byte of a file.
The lock file is ``spack.store.db.prefix_lock`` -- the DB
The lock file is ``spack.store.STORE.db.prefix_lock`` -- the DB
tells us what to call it and it lives alongside the install DB.
n is the sys.maxsize-bit prefix of the DAG hash. This makes
@ -844,7 +844,7 @@ def check(cond, msg):
% (version, _DB_VERSION)
)
self.reindex(spack.store.layout)
self.reindex(spack.store.STORE.layout)
installs = dict(
(k, v.to_dict(include_fields=self._record_fields))
for k, v in self._data.items()

View file

@ -325,7 +325,7 @@ def path_for_spec(self, spec):
if spec.external:
return spec.external_path
if self.check_upstream:
upstream, record = spack.store.db.query_by_spec_hash(spec.dag_hash())
upstream, record = spack.store.STORE.db.query_by_spec_hash(spec.dag_hash())
if upstream:
raise SpackError(
"Internal error: attempted to call path_for_spec on"

View file

@ -153,7 +153,7 @@ def installed_specs():
"""
env = spack.environment.active_environment()
hashes = env.all_hashes() if env else None
return spack.store.db.query(hashes=hashes)
return spack.store.STORE.db.query(hashes=hashes)
def valid_env_name(name):
@ -421,7 +421,7 @@ def _is_dev_spec_and_has_changed(spec):
# Not installed -> nothing to compare against
return False
_, record = spack.store.db.query_by_spec_hash(spec.dag_hash())
_, record = spack.store.STORE.db.query_by_spec_hash(spec.dag_hash())
mtime = fs.last_modification_time_recursive(dev_path_var.value)
return mtime > record.installation_time
@ -582,7 +582,7 @@ def view(self, new=None):
raise SpackEnvironmentViewError(msg)
return SimpleFilesystemView(
root,
spack.store.layout,
spack.store.STORE.layout,
ignore_conflicts=True,
projections=self.projections,
link=self.link_type,
@ -622,7 +622,7 @@ def specs_for_view(self, concretized_root_specs):
specs = list(dedupe(concretized_root_specs, key=traverse.by_dag_hash))
# Filter selected, installed specs
with spack.store.db.read_transaction():
with spack.store.STORE.db.read_transaction():
specs = [s for s in specs if s in self and s.installed]
return specs
@ -1840,7 +1840,7 @@ def _partition_roots_by_install_status(self):
specs. This is done in a single read transaction per environment instead
of per spec."""
installed, uninstalled = [], []
with spack.store.db.read_transaction():
with spack.store.STORE.db.read_transaction():
for concretized_hash in self.concretized_order:
spec = self.specs_by_hash[concretized_hash]
if not spec.installed or (
@ -1885,9 +1885,9 @@ def install_specs(self, specs=None, **install_args):
# Already installed root specs should be marked explicitly installed in the
# database.
if specs_dropped:
with spack.store.db.write_transaction(): # do all in one transaction
with spack.store.STORE.db.write_transaction(): # do all in one transaction
for spec in specs_dropped:
spack.store.db.update_explicit(spec, True)
spack.store.STORE.db.update_explicit(spec, True)
if not specs_to_install:
tty.msg("All of the packages are already installed")
@ -1950,7 +1950,7 @@ def added_specs(self):
"""
# use a transaction to avoid overhead of repeated calls
# to `package.spec.installed`
with spack.store.db.read_transaction():
with spack.store.STORE.db.read_transaction():
concretized = dict(self.concretized_specs())
for spec in self.user_specs:
concrete = concretized.get(spec)

View file

@ -130,7 +130,7 @@ def activate(env, use_env_repo=False, add_view=True):
#
try:
if add_view and ev.default_view_name in env.views:
with spack.store.db.read_transaction():
with spack.store.STORE.db.read_transaction():
env.add_default_view_to_env(env_mods)
except (spack.repo.UnknownPackageError, spack.repo.UnknownNamespaceError) as e:
tty.error(e)
@ -165,7 +165,7 @@ def deactivate():
if ev.default_view_name in active.views:
try:
with spack.store.db.read_transaction():
with spack.store.STORE.db.read_transaction():
active.rm_default_view_from_env(env_mods)
except (spack.repo.UnknownPackageError, spack.repo.UnknownNamespaceError) as e:
tty.warn(e)

View file

@ -88,7 +88,7 @@ def view_copy(src: str, dst: str, view, spec: Optional[spack.spec.Spec] = None):
elif spack.relocate.is_binary(dst):
spack.relocate.relocate_text_bin(binaries=[dst], prefixes=prefix_to_projection)
else:
prefix_to_projection[spack.store.layout.root] = view._root
prefix_to_projection[spack.store.STORE.layout.root] = view._root
# This is vestigial code for the *old* location of sbang.
prefix_to_projection[
@ -379,7 +379,7 @@ def needs_file(spec, file):
# check if this spec owns a file of that name (through the
# manifest in the metadata dir, which we have in the view).
manifest_file = os.path.join(
self.get_path_meta_folder(spec), spack.store.layout.manifest_file_name
self.get_path_meta_folder(spec), spack.store.STORE.layout.manifest_file_name
)
try:
with open(manifest_file, "r") as f:
@ -506,14 +506,16 @@ def get_projection_for_spec(self, spec):
def get_all_specs(self):
md_dirs = []
for root, dirs, files in os.walk(self._root):
if spack.store.layout.metadata_dir in dirs:
md_dirs.append(os.path.join(root, spack.store.layout.metadata_dir))
if spack.store.STORE.layout.metadata_dir in dirs:
md_dirs.append(os.path.join(root, spack.store.STORE.layout.metadata_dir))
specs = []
for md_dir in md_dirs:
if os.path.exists(md_dir):
for name_dir in os.listdir(md_dir):
filename = os.path.join(md_dir, name_dir, spack.store.layout.spec_file_name)
filename = os.path.join(
md_dir, name_dir, spack.store.STORE.layout.spec_file_name
)
spec = get_spec_from_file(filename)
if spec:
specs.append(spec)
@ -531,18 +533,18 @@ def get_path_meta_folder(self, spec):
"Get path to meta folder for either spec or spec name."
return os.path.join(
self.get_projection_for_spec(spec),
spack.store.layout.metadata_dir,
spack.store.STORE.layout.metadata_dir,
getattr(spec, "name", spec),
)
def get_spec(self, spec):
dotspack = self.get_path_meta_folder(spec)
filename = os.path.join(dotspack, spack.store.layout.spec_file_name)
filename = os.path.join(dotspack, spack.store.STORE.layout.spec_file_name)
return get_spec_from_file(filename)
def link_meta_folder(self, spec):
src = spack.store.layout.metadata_path(spec)
src = spack.store.STORE.layout.metadata_path(spec)
tgt = self.get_path_meta_folder(spec)
tree = LinkTree(src)
@ -673,7 +675,7 @@ def add_specs(self, *specs, **kwargs):
# Ignore spack meta data folder.
def skip_list(file):
return os.path.basename(file) == spack.store.layout.metadata_dir
return os.path.basename(file) == spack.store.STORE.layout.metadata_dir
visitor = SourceMergeVisitor(ignore=skip_list)
@ -735,14 +737,18 @@ def _source_merge_visitor_to_merge_map(self, visitor: SourceMergeVisitor):
def relative_metadata_dir_for_spec(self, spec):
return os.path.join(
self.get_relative_projection_for_spec(spec), spack.store.layout.metadata_dir, spec.name
self.get_relative_projection_for_spec(spec),
spack.store.STORE.layout.metadata_dir,
spec.name,
)
def link_metadata(self, specs):
metadata_visitor = SourceMergeVisitor()
for spec in specs:
src_prefix = os.path.join(spec.package.view_source(), spack.store.layout.metadata_dir)
src_prefix = os.path.join(
spec.package.view_source(), spack.store.STORE.layout.metadata_dir
)
proj = self.relative_metadata_dir_for_spec(spec)
metadata_visitor.set_projection(proj)
visit_directory_tree(src_prefix, metadata_visitor)

View file

@ -41,7 +41,7 @@
def sbang_install_path():
"""Location sbang should be installed within Spack's ``install_tree``."""
sbang_root = str(spack.store.unpadded_root)
sbang_root = str(spack.store.STORE.unpadded_root)
install_path = os.path.join(sbang_root, "bin", "sbang")
path_length = len(install_path)
if path_length > system_shebang_limit:

View file

@ -261,7 +261,7 @@ def _do_fake_install(pkg: "spack.package_base.PackageBase") -> None:
# Install fake man page
fs.mkdirp(pkg.prefix.man.man1)
packages_dir = spack.store.layout.build_packages_path(pkg.spec)
packages_dir = spack.store.STORE.layout.build_packages_path(pkg.spec)
dump_packages(pkg.spec, packages_dir)
@ -430,9 +430,9 @@ def _process_external_package(pkg: "spack.package_base.PackageBase", explicit: b
# Check if the package was already registered in the DB.
# If this is the case, then only make explicit if required.
tty.debug("{0} already registered in DB".format(pre))
record = spack.store.db.get_record(spec)
record = spack.store.STORE.db.get_record(spec)
if explicit and not record.explicit:
spack.store.db.update_explicit(spec, explicit)
spack.store.STORE.db.update_explicit(spec, explicit)
except KeyError:
# If not, register it and generate the module file.
@ -443,7 +443,7 @@ def _process_external_package(pkg: "spack.package_base.PackageBase", explicit: b
# Add to the DB
tty.debug("{0} registering into DB".format(pre))
spack.store.db.add(spec, None, explicit=explicit)
spack.store.STORE.db.add(spec, None, explicit=explicit)
def _process_binary_cache_tarball(
@ -485,7 +485,7 @@ def _process_binary_cache_tarball(
)
pkg.installed_from_binary_cache = True
spack.store.db.add(pkg.spec, spack.store.layout, explicit=explicit)
spack.store.STORE.db.add(pkg.spec, spack.store.STORE.layout, explicit=explicit)
return True
@ -523,7 +523,7 @@ def clear_failures() -> None:
"""
Remove all failure tracking markers for the Spack instance.
"""
spack.store.db.clear_all_failures()
spack.store.STORE.db.clear_all_failures()
def combine_phase_logs(phase_log_files: List[str], log_path: str) -> None:
@ -566,7 +566,7 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:
if node is not spec:
# Locate the dependency package in the install tree and find
# its provenance information.
source = spack.store.layout.build_packages_path(node)
source = spack.store.STORE.layout.build_packages_path(node)
source_repo_root = os.path.join(source, node.namespace)
# If there's no provenance installed for the package, skip it.
@ -659,7 +659,7 @@ def log(pkg: "spack.package_base.PackageBase") -> None:
Args:
pkg: the package that was built and installed
"""
packages_dir = spack.store.layout.build_packages_path(pkg.spec)
packages_dir = spack.store.STORE.layout.build_packages_path(pkg.spec)
# Remove first if we're overwriting another build
try:
@ -681,7 +681,9 @@ def log(pkg: "spack.package_base.PackageBase") -> None:
# Finally, archive files that are specific to each package
with fs.working_dir(pkg.stage.path):
errors = io.StringIO()
target_dir = os.path.join(spack.store.layout.metadata_path(pkg.spec), "archived-files")
target_dir = os.path.join(
spack.store.STORE.layout.metadata_path(pkg.spec), "archived-files"
)
for glob_expr in pkg.builder.archive_files:
# Check that we are trying to copy things that are
@ -1153,7 +1155,7 @@ def __init__(self, installs: List[Tuple["spack.package_base.PackageBase", dict]]
self.installed: Set[str] = set()
# Data store layout
self.layout = spack.store.layout
self.layout = spack.store.STORE.layout
# Locks on specs being built, keyed on the package's unique id
self.locks: Dict[str, Tuple[str, Optional[lk.Lock]]] = {}
@ -1264,7 +1266,7 @@ def _check_db(
that's ``True`` iff the spec is considered installed
"""
try:
rec = spack.store.db.get_record(spec)
rec = spack.store.STORE.db.get_record(spec)
installed_in_db = rec.installed if rec else False
except KeyError:
# KeyError is raised if there is no matching spec in the database
@ -1285,7 +1287,7 @@ def _check_deps_status(self, request: BuildRequest) -> None:
dep_id = package_id(dep_pkg)
# Check for failure since a prefix lock is not required
if spack.store.db.prefix_failed(dep):
if spack.store.STORE.db.prefix_failed(dep):
action = "'spack install' the dependency"
msg = "{0} is marked as an install failure: {1}".format(dep_id, action)
raise InstallError(err.format(request.pkg_id, msg), pkg=dep_pkg)
@ -1349,7 +1351,7 @@ def _prepare_for_install(self, task: BuildTask) -> None:
if not installed_in_db:
# Ensure there is no other installed spec with the same prefix dir
if spack.store.db.is_occupied_install_prefix(task.pkg.spec.prefix):
if spack.store.STORE.db.is_occupied_install_prefix(task.pkg.spec.prefix):
raise InstallError(
"Install prefix collision for {0}".format(task.pkg_id),
long_msg="Prefix directory {0} already used by another "
@ -1381,7 +1383,7 @@ def _prepare_for_install(self, task: BuildTask) -> None:
# Only update the explicit entry once for the explicit package
if task.explicit:
spack.store.db.update_explicit(task.pkg.spec, True)
spack.store.STORE.db.update_explicit(task.pkg.spec, True)
def _cleanup_all_tasks(self) -> None:
"""Cleanup all build tasks to include releasing their locks."""
@ -1500,7 +1502,7 @@ def _ensure_locked(
if lock is None:
tty.debug(msg.format("Acquiring", desc, pkg_id, pretty_seconds(timeout or 0)))
op = "acquire"
lock = spack.store.db.prefix_lock(pkg.spec, timeout)
lock = spack.store.STORE.db.prefix_lock(pkg.spec, timeout)
if timeout != lock.default_timeout:
tty.warn(
"Expected prefix lock timeout {0}, not {1}".format(
@ -1625,12 +1627,12 @@ def _add_tasks(self, request: BuildRequest, all_deps):
# Clear any persistent failure markings _unless_ they are
# associated with another process in this parallel build
# of the spec.
spack.store.db.clear_failure(dep, force=False)
spack.store.STORE.db.clear_failure(dep, force=False)
install_package = request.install_args.get("install_package")
if install_package and request.pkg_id not in self.build_tasks:
# Be sure to clear any previous failure
spack.store.db.clear_failure(request.spec, force=True)
spack.store.STORE.db.clear_failure(request.spec, force=True)
# If not installing dependencies, then determine their
# installation status before proceeding
@ -1705,7 +1707,7 @@ def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
pkg.windows_establish_runtime_linkage()
# Note: PARENT of the build process adds the new package to
# the database, so that we don't need to re-read from file.
spack.store.db.add(pkg.spec, spack.store.layout, explicit=explicit)
spack.store.STORE.db.add(pkg.spec, spack.store.STORE.layout, explicit=explicit)
# If a compiler, ensure it is added to the configuration
if task.compiler:
@ -1848,7 +1850,7 @@ def _setup_install_dir(self, pkg: "spack.package_base.PackageBase") -> None:
if not os.path.exists(pkg.spec.prefix):
path = spack.util.path.debug_padded_filter(pkg.spec.prefix)
tty.debug("Creating the installation directory {0}".format(path))
spack.store.layout.create_install_directory(pkg.spec)
spack.store.STORE.layout.create_install_directory(pkg.spec)
else:
# Set the proper group for the prefix
group = prefs.get_package_group(pkg.spec)
@ -1864,10 +1866,10 @@ def _setup_install_dir(self, pkg: "spack.package_base.PackageBase") -> None:
os.chmod(pkg.spec.prefix, perms)
# Ensure the metadata path exists as well
fs.mkdirp(spack.store.layout.metadata_path(pkg.spec), mode=perms)
fs.mkdirp(spack.store.STORE.layout.metadata_path(pkg.spec), mode=perms)
# Always write host environment - we assume this can change
spack.store.layout.write_host_environment(pkg.spec)
spack.store.STORE.layout.write_host_environment(pkg.spec)
def _update_failed(
self, task: BuildTask, mark: bool = False, exc: Optional[BaseException] = None
@ -1886,7 +1888,7 @@ def _update_failed(
err = "" if exc is None else ": {0}".format(str(exc))
tty.debug("Flagging {0} as failed{1}".format(pkg_id, err))
if mark:
self.failed[pkg_id] = spack.store.db.mark_failed(task.pkg.spec)
self.failed[pkg_id] = spack.store.STORE.db.mark_failed(task.pkg.spec)
else:
self.failed[pkg_id] = None
task.status = STATUS_FAILED
@ -2072,7 +2074,7 @@ def install(self) -> None:
# Flag a failed spec. Do not need an (install) prefix lock since
# assume using a separate (failed) prefix lock file.
if pkg_id in self.failed or spack.store.db.prefix_failed(spec):
if pkg_id in self.failed or spack.store.STORE.db.prefix_failed(spec):
term_status.clear()
tty.warn("{0} failed to install".format(pkg_id))
self._update_failed(task)
@ -2168,9 +2170,9 @@ def install(self) -> None:
if action == InstallAction.INSTALL:
self._install_task(task, install_status)
elif action == InstallAction.OVERWRITE:
# spack.store.db is not really a Database object, but a small
# spack.store.STORE.db is not really a Database object, but a small
# wrapper -- silence mypy
OverwriteInstall(self, spack.store.db, task, install_status).install() # type: ignore[arg-type] # noqa: E501
OverwriteInstall(self, spack.store.STORE.db, task, install_status).install() # type: ignore[arg-type] # noqa: E501
self._update_installed(task)

View file

@ -847,7 +847,7 @@ def shell_set(var, value):
if "modules" in info:
generic_arch = archspec.cpu.host().family
module_spec = "environment-modules target={0}".format(generic_arch)
specs = spack.store.db.query(module_spec)
specs = spack.store.STORE.db.query(module_spec)
if specs:
shell_set("_sp_module_prefix", specs[-1].prefix)
else:

View file

@ -249,7 +249,7 @@ def generate_module_index(root, modules, overwrite=False):
def _generate_upstream_module_index():
module_indices = read_module_indices()
return UpstreamModuleIndex(spack.store.db, module_indices)
return UpstreamModuleIndex(spack.store.STORE.db, module_indices)
upstream_module_index = llnl.util.lang.Singleton(_generate_upstream_module_index)
@ -354,7 +354,7 @@ def get_module(module_type, spec, get_full_path, module_set_name="default", requ
try:
upstream = spec.installed_upstream
except spack.repo.UnknownPackageError:
upstream, record = spack.store.db.query_by_spec_hash(spec.dag_hash())
upstream, record = spack.store.STORE.db.query_by_spec_hash(spec.dag_hash())
if upstream:
module = spack.modules.common.upstream_module_index.upstream_module(spec, module_type)
if not module:

View file

@ -1081,7 +1081,7 @@ def env_mods_path(self):
@property
def metadata_dir(self):
"""Return the install metadata directory."""
return spack.store.layout.metadata_path(self.spec)
return spack.store.STORE.layout.metadata_path(self.spec)
@property
def install_env_path(self):
@ -1352,7 +1352,7 @@ def remove_prefix(self):
Removes the prefix for a package along with any empty parent
directories
"""
spack.store.layout.remove_install_directory(self.spec)
spack.store.STORE.layout.remove_install_directory(self.spec)
@property
def download_instr(self):
@ -2207,20 +2207,20 @@ def uninstall_by_spec(spec, force=False, deprecator=None):
if not os.path.isdir(spec.prefix):
# prefix may not exist, but DB may be inconsistent. Try to fix by
# removing, but omit hooks.
specs = spack.store.db.query(spec, installed=True)
specs = spack.store.STORE.db.query(spec, installed=True)
if specs:
if deprecator:
spack.store.db.deprecate(specs[0], deprecator)
spack.store.STORE.db.deprecate(specs[0], deprecator)
tty.debug("Deprecating stale DB entry for {0}".format(spec.short_spec))
else:
spack.store.db.remove(specs[0])
spack.store.STORE.db.remove(specs[0])
tty.debug("Removed stale DB entry for {0}".format(spec.short_spec))
return
else:
raise InstallError(str(spec) + " is not installed.")
if not force:
dependents = spack.store.db.installed_relatives(
dependents = spack.store.STORE.db.installed_relatives(
spec, direction="parents", transitive=True, deptype=("link", "run")
)
if dependents:
@ -2233,7 +2233,7 @@ def uninstall_by_spec(spec, force=False, deprecator=None):
pkg = None
# Pre-uninstall hook runs first.
with spack.store.db.prefix_write_lock(spec):
with spack.store.STORE.db.prefix_write_lock(spec):
if pkg is not None:
try:
spack.hooks.pre_uninstall(spec)
@ -2259,17 +2259,17 @@ def uninstall_by_spec(spec, force=False, deprecator=None):
tty.debug(msg.format(spec.short_spec))
# test if spec is already deprecated, not whether we want to
# deprecate it now
deprecated = bool(spack.store.db.deprecator(spec))
spack.store.layout.remove_install_directory(spec, deprecated)
deprecated = bool(spack.store.STORE.db.deprecator(spec))
spack.store.STORE.layout.remove_install_directory(spec, deprecated)
# Delete DB entry
if deprecator:
msg = "deprecating DB entry [{0}] in favor of [{1}]"
tty.debug(msg.format(spec.short_spec, deprecator.short_spec))
spack.store.db.deprecate(spec, deprecator)
spack.store.STORE.db.deprecate(spec, deprecator)
else:
msg = "Deleting DB entry [{0}]"
tty.debug(msg.format(spec.short_spec))
spack.store.db.remove(spec)
spack.store.STORE.db.remove(spec)
if pkg is not None:
try:
@ -2300,24 +2300,24 @@ def do_deprecate(self, deprecator, link_fn):
spec = self.spec
# Install deprecator if it isn't installed already
if not spack.store.db.query(deprecator):
if not spack.store.STORE.db.query(deprecator):
deprecator.package.do_install()
old_deprecator = spack.store.db.deprecator(spec)
old_deprecator = spack.store.STORE.db.deprecator(spec)
if old_deprecator:
# Find this specs yaml file from its old deprecation
self_yaml = spack.store.layout.deprecated_file_path(spec, old_deprecator)
self_yaml = spack.store.STORE.layout.deprecated_file_path(spec, old_deprecator)
else:
self_yaml = spack.store.layout.spec_file_path(spec)
self_yaml = spack.store.STORE.layout.spec_file_path(spec)
# copy spec metadata to "deprecated" dir of deprecator
depr_yaml = spack.store.layout.deprecated_file_path(spec, deprecator)
depr_yaml = spack.store.STORE.layout.deprecated_file_path(spec, deprecator)
fsys.mkdirp(os.path.dirname(depr_yaml))
shutil.copy2(self_yaml, depr_yaml)
# Any specs deprecated in favor of this spec are re-deprecated in
# favor of its new deprecator
for deprecated in spack.store.db.specs_deprecated_by(spec):
for deprecated in spack.store.STORE.db.specs_deprecated_by(spec):
deprecated.package.do_deprecate(deprecator, link_fn)
# Now that we've handled metadata, uninstall and replace with link
@ -2333,7 +2333,7 @@ def view(self):
Extensions added to this view will modify the installation prefix of
this package.
"""
return YamlFilesystemView(self.prefix, spack.store.layout)
return YamlFilesystemView(self.prefix, spack.store.STORE.layout)
def do_restage(self):
"""Reverts expanded/checked out source to a pristine state."""
@ -2460,7 +2460,7 @@ def flatten_dependencies(spec, flat_dir):
for dep in spec.traverse(root=False):
name = dep.name
dep_path = spack.store.layout.path_for_spec(dep)
dep_path = spack.store.STORE.layout.path_for_spec(dep)
dep_files = LinkTree(dep_path)
os.mkdir(flat_dir + "/" + name)

View file

@ -703,7 +703,7 @@ def fixup_macos_rpath(root, filename):
args = []
# Check dependencies for non-rpath entries
spack_root = spack.store.layout.root
spack_root = spack.store.STORE.layout.root
for name in deps:
if name.startswith(spack_root):
tty.debug("Spack-installed dependency for {0}: {1}".format(abspath, name))

View file

@ -80,8 +80,8 @@ def rewire_node(spec, explicit):
if "macho" in platform.binary_formats:
relocate.relocate_macho_binaries(
bins_to_relocate,
str(spack.store.layout.root),
str(spack.store.layout.root),
str(spack.store.STORE.layout.root),
str(spack.store.STORE.layout.root),
prefix_to_prefix,
False,
spec.build_spec.prefix,
@ -90,8 +90,8 @@ def rewire_node(spec, explicit):
if "elf" in platform.binary_formats:
relocate.relocate_elf_binaries(
bins_to_relocate,
str(spack.store.layout.root),
str(spack.store.layout.root),
str(spack.store.STORE.layout.root),
str(spack.store.STORE.layout.root),
prefix_to_prefix,
False,
spec.build_spec.prefix,
@ -114,9 +114,9 @@ def rewire_node(spec, explicit):
# (spliced) spec into spec.json, without this, Database.add would fail on
# the next line (because it checks the spec.json in the prefix against the
# spec being added to look for mismatches)
spack.store.layout.write_spec(spec, spack.store.layout.spec_file_path(spec))
spack.store.STORE.layout.write_spec(spec, spack.store.STORE.layout.spec_file_path(spec))
# add to database, not sure about explicit
spack.store.db.add(spec, spack.store.layout, explicit=explicit)
spack.store.STORE.db.add(spec, spack.store.STORE.layout, explicit=explicit)
# run post install hooks
spack.hooks.post_install(spec, explicit)

View file

@ -2733,11 +2733,11 @@ def _reusable_specs(self, specs):
reusable_specs = []
if self.reuse:
# Specs from the local Database
with spack.store.db.read_transaction():
with spack.store.STORE.db.read_transaction():
reusable_specs.extend(
[
s
for s in spack.store.db.query(installed=True)
for s in spack.store.STORE.db.query(installed=True)
if not s.satisfies("dev_path=*")
]
)

View file

@ -1780,7 +1780,7 @@ def installed(self):
try:
# If the spec is in the DB, check the installed
# attribute of the record
return spack.store.db.get_record(self).installed
return spack.store.STORE.db.get_record(self).installed
except KeyError:
# If the spec is not in the DB, the method
# above raises a Key error
@ -1796,7 +1796,7 @@ def installed_upstream(self):
if not self.concrete:
return False
upstream, _ = spack.store.db.query_by_spec_hash(self.dag_hash())
upstream, _ = spack.store.STORE.db.query_by_spec_hash(self.dag_hash())
return upstream
def traverse(self, **kwargs):
@ -1828,11 +1828,11 @@ def prefix(self):
raise spack.error.SpecError("Spec is not concrete: " + str(self))
if self._prefix is None:
upstream, record = spack.store.db.query_by_spec_hash(self.dag_hash())
upstream, record = spack.store.STORE.db.query_by_spec_hash(self.dag_hash())
if record and record.path:
self.prefix = record.path
else:
self.prefix = spack.store.layout.path_for_spec(self)
self.prefix = spack.store.STORE.layout.path_for_spec(self)
return self._prefix
@prefix.setter
@ -1933,7 +1933,7 @@ def _lookup_hash(self):
env_matches = active_env.get_by_hash(self.abstract_hash) or []
matches = [m for m in env_matches if m._satisfies(self)]
if not matches:
db_matches = spack.store.db.get_by_hash(self.abstract_hash) or []
db_matches = spack.store.STORE.db.get_by_hash(self.abstract_hash) or []
matches = [m for m in db_matches if m._satisfies(self)]
if not matches:
query = spack.binary_distribution.BinaryCacheQuery(True)
@ -2942,9 +2942,9 @@ def ensure_no_deprecated(root):
SpecDeprecatedError: if any deprecated spec is found
"""
deprecated = []
with spack.store.db.read_transaction():
with spack.store.STORE.db.read_transaction():
for x in root.traverse():
_, rec = spack.store.db.query_by_spec_hash(x.dag_hash())
_, rec = spack.store.STORE.db.query_by_spec_hash(x.dag_hash())
if rec and rec.deprecated_for:
deprecated.append(rec)
if deprecated:
@ -4377,7 +4377,7 @@ def write_attribute(spec, attribute, color):
write(morph(spec, spack.paths.spack_root))
return
elif attribute == "spack_install":
write(morph(spec, spack.store.layout.root))
write(morph(spec, spack.store.STORE.layout.root))
return
elif re.match(r"hash(:\d)?", attribute):
col = "#"
@ -4497,7 +4497,7 @@ def install_status(self):
if self.external:
return InstallStatus.external
upstream, record = spack.store.db.query_by_spec_hash(self.dag_hash())
upstream, record = spack.store.STORE.db.query_by_spec_hash(self.dag_hash())
if not record:
return InstallStatus.absent
elif upstream and record.installed:
@ -4512,7 +4512,7 @@ def _installed_explicitly(self):
if not self.concrete:
return None
try:
record = spack.store.db.get_record(self)
record = spack.store.STORE.db.get_record(self)
return record.explicit
except KeyError:
return None

View file

@ -217,78 +217,30 @@ def create(configuration: ConfigurationType) -> Store:
def _create_global() -> Store:
# Check that the user is not trying to install software into the store
# reserved by Spack to bootstrap its own dependencies, since this would
# lead to bizarre behaviors (e.g. cleaning the bootstrap area would wipe
# user installed software)
import spack.bootstrap
enable_bootstrap = spack.config.config.get("bootstrap:enable", True)
if enable_bootstrap and spack.bootstrap.store_path() == root:
msg = (
'please change the install tree root "{0}" in your '
"configuration [path reserved for Spack internal use]"
)
raise ValueError(msg.format(root))
return create(configuration=spack.config.config)
result = create(configuration=spack.config.config)
return result
#: Singleton store instance
store: Union[Store, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(_create_global)
def _store_root() -> str:
return store.root
def _store_unpadded_root() -> str:
return store.unpadded_root
def _store_db() -> spack.database.Database:
return store.db
def _store_layout() -> spack.directory_layout.DirectoryLayout:
return store.layout
# convenience accessors for parts of the singleton store
root: Union[llnl.util.lang.LazyReference, str] = llnl.util.lang.LazyReference(_store_root)
unpadded_root: Union[llnl.util.lang.LazyReference, str] = llnl.util.lang.LazyReference(
_store_unpadded_root
)
db: Union[llnl.util.lang.LazyReference, spack.database.Database] = llnl.util.lang.LazyReference(
_store_db
)
layout: Union[
llnl.util.lang.LazyReference, "spack.directory_layout.DirectoryLayout"
] = llnl.util.lang.LazyReference(_store_layout)
STORE: Union[Store, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(_create_global)
def reinitialize():
"""Restore globals to the same state they would have at start-up. Return a token
containing the state of the store before reinitialization.
"""
global store
global root, unpadded_root, db, layout
global STORE
token = store, root, unpadded_root, db, layout
store = llnl.util.lang.Singleton(_create_global)
root = llnl.util.lang.LazyReference(_store_root)
unpadded_root = llnl.util.lang.LazyReference(_store_unpadded_root)
db = llnl.util.lang.LazyReference(_store_db)
layout = llnl.util.lang.LazyReference(_store_layout)
token = STORE
STORE = llnl.util.lang.Singleton(_create_global)
return token
def restore(token):
"""Restore the environment from a token returned by reinitialize"""
global store
global root, unpadded_root, db, layout
store, root, unpadded_root, db, layout = token
global STORE
STORE = token
def _construct_upstream_dbs_from_install_roots(
@ -330,7 +282,7 @@ def find(
constraints: spec(s) to be matched against installed packages
multiple: if True multiple matches per constraint are admitted
query_fn (Callable): query function to get matching specs. By default,
``spack.store.db.query``
``spack.store.STORE.db.query``
**kwargs: keyword arguments forwarded to the query function
"""
if isinstance(constraints, str):
@ -338,7 +290,7 @@ def find(
matching_specs: List[spack.spec.Spec] = []
errors = []
query_fn = query_fn or spack.store.db.query
query_fn = query_fn or spack.store.STORE.db.query
for spec in constraints:
current_matches = query_fn(spec, **kwargs)
@ -388,7 +340,7 @@ def use_store(
Yields:
Store object associated with the context manager's store
"""
global store, db, layout, root, unpadded_root
global STORE
assert not isinstance(path, Store), "cannot pass a store anymore"
scope_name = "use-store-{}".format(uuid.uuid4())
@ -397,22 +349,18 @@ def use_store(
data.update(extra_data)
# Swap the store with the one just constructed and return it
_ = store.db
_ = STORE.db
spack.config.config.push_scope(
spack.config.InternalConfigScope(name=scope_name, data={"config": {"install_tree": data}})
)
temporary_store = create(configuration=spack.config.config)
original_store, store = store, temporary_store
db, layout = store.db, store.layout
root, unpadded_root = store.root, store.unpadded_root
original_store, STORE = STORE, temporary_store
try:
yield temporary_store
finally:
# Restore the original store
store = original_store
db, layout = original_store.db, original_store.layout
root, unpadded_root = original_store.root, original_store.unpadded_root
STORE = original_store
spack.config.config.remove_scope(scope_name=scope_name)

View file

@ -97,20 +97,14 @@ def __init__(self):
self.config = spack.config.config
self.platform = spack.platforms.host
self.test_patches = store_patches()
self.store = spack.store.store
self.store = spack.store.STORE
def restore(self):
if _SERIALIZE:
spack.config.config = self.config
spack.repo.path = spack.repo.create(self.config)
spack.platforms.host = self.platform
spack.store.store = self.store
spack.store.root = self.store.root
spack.store.unpadded_root = self.store.unpadded_root
spack.store.db = self.store.db
spack.store.layout = self.store.layout
spack.store.STORE = self.store
self.test_patches.restore()

View file

@ -148,15 +148,15 @@ def install_dir_default_layout(tmpdir):
scheme = os.path.join(
"${architecture}", "${compiler.name}-${compiler.version}", "${name}-${version}-${hash}"
)
real_store, real_layout = spack.store.store, spack.store.layout
real_store, real_layout = spack.store.STORE, spack.store.STORE.layout
opt_dir = tmpdir.join("opt")
spack.store.store = spack.store.Store(str(opt_dir))
spack.store.layout = DirectoryLayout(str(opt_dir), path_scheme=scheme)
spack.store.STORE = spack.store.Store(str(opt_dir))
spack.store.STORE.layout = DirectoryLayout(str(opt_dir), path_scheme=scheme)
try:
yield spack.store
finally:
spack.store.store = real_store
spack.store.layout = real_layout
spack.store.STORE = real_store
spack.store.STORE.layout = real_layout
@pytest.fixture(scope="function")
@ -165,15 +165,15 @@ def install_dir_non_default_layout(tmpdir):
scheme = os.path.join(
"${name}", "${version}", "${architecture}-${compiler.name}-${compiler.version}-${hash}"
)
real_store, real_layout = spack.store.store, spack.store.layout
real_store, real_layout = spack.store.STORE, spack.store.STORE.layout
opt_dir = tmpdir.join("opt")
spack.store.store = spack.store.Store(str(opt_dir))
spack.store.layout = DirectoryLayout(str(opt_dir), path_scheme=scheme)
spack.store.STORE = spack.store.Store(str(opt_dir))
spack.store.STORE.layout = DirectoryLayout(str(opt_dir), path_scheme=scheme)
try:
yield spack.store
finally:
spack.store.store = real_store
spack.store.layout = real_layout
spack.store.STORE = real_store
spack.store.STORE.layout = real_layout
args = ["file"]

View file

@ -25,11 +25,11 @@ def test_store_is_restored_correctly_after_bootstrap(mutable_config, tmpdir):
"""Tests that the store is correctly swapped during bootstrapping, and restored afterward."""
user_path = str(tmpdir.join("store"))
with spack.store.use_store(user_path):
assert spack.store.root == user_path
assert spack.store.STORE.root == user_path
assert spack.config.config.get("config:install_tree:root") == user_path
with spack.bootstrap.ensure_bootstrap_configuration():
assert spack.store.root == spack.bootstrap.config.store_path()
assert spack.store.root == user_path
assert spack.store.STORE.root == spack.bootstrap.config.store_path()
assert spack.store.STORE.root == user_path
assert spack.config.config.get("config:install_tree:root") == user_path
@ -42,7 +42,7 @@ def test_store_padding_length_is_zero_during_bootstrapping(mutable_config, tmpdi
with spack.store.use_store(user_path, extra_data={"padded_length": 512}):
assert spack.config.config.get("config:install_tree:padded_length") == 512
with spack.bootstrap.ensure_bootstrap_configuration():
assert spack.store.root == spack.bootstrap.config.store_path()
assert spack.store.STORE.root == spack.bootstrap.config.store_path()
assert spack.config.config.get("config:install_tree:padded_length") == 0
assert spack.config.config.get("config:install_tree:padded_length") == 512
@ -161,7 +161,7 @@ def test_bootstrap_custom_store_in_environment(mutable_config, tmpdir):
# Don't trigger evaluation here
with spack.bootstrap.ensure_bootstrap_configuration():
pass
assert str(spack.store.root) == install_root
assert str(spack.store.STORE.root) == install_root
def test_nested_use_of_context_manager(mutable_config):

View file

@ -623,7 +623,7 @@ def test_config_prefer_upstream(
downstream_db_root = str(tmpdir_factory.mktemp("mock_downstream_db_root"))
db_for_test = spack.database.Database(downstream_db_root, upstream_dbs=[prepared_db])
monkeypatch.setattr(spack.store, "db", db_for_test)
monkeypatch.setattr(spack.store.STORE, "db", db_for_test)
output = config("prefer-upstream")
scope = spack.config.default_modify_scope("packages")

View file

@ -54,7 +54,9 @@ def test_direct_installed_dependencies(mock_packages, database):
lines = [line for line in out.strip().split("\n") if not line.startswith("--")]
hashes = set([re.split(r"\s+", line)[0] for line in lines])
expected = set([spack.store.db.query_one(s).dag_hash(7) for s in ["mpich", "callpath^mpich"]])
expected = set(
[spack.store.STORE.db.query_one(s).dag_hash(7) for s in ["mpich", "callpath^mpich"]]
)
assert expected == hashes
@ -69,7 +71,7 @@ def test_transitive_installed_dependencies(mock_packages, database):
expected = set(
[
spack.store.db.query_one(s).dag_hash(7)
spack.store.STORE.db.query_one(s).dag_hash(7)
for s in ["zmpi", "callpath^zmpi", "fake", "dyninst", "libdwarf", "libelf"]
]
)

View file

@ -57,9 +57,11 @@ def test_immediate_installed_dependents(mock_packages, database):
lines = [li for li in out.strip().split("\n") if not li.startswith("--")]
hashes = set([re.split(r"\s+", li)[0] for li in lines])
expected = set([spack.store.db.query_one(s).dag_hash(7) for s in ["dyninst", "libdwarf"]])
expected = set(
[spack.store.STORE.db.query_one(s).dag_hash(7) for s in ["dyninst", "libdwarf"]]
)
libelf = spack.store.db.query_one("libelf")
libelf = spack.store.STORE.db.query_one("libelf")
expected = set([d.dag_hash(7) for d in libelf.dependents()])
assert expected == hashes
@ -75,7 +77,7 @@ def test_transitive_installed_dependents(mock_packages, database):
expected = set(
[
spack.store.db.query_one(s).dag_hash(7)
spack.store.STORE.db.query_one(s).dag_hash(7)
for s in ["zmpi", "callpath^zmpi", "mpileaks^zmpi"]
]
)

View file

@ -23,15 +23,15 @@ def test_deprecate(mock_packages, mock_archive, mock_fetch, install_mockery):
install("libelf@0.8.13")
install("libelf@0.8.10")
all_installed = spack.store.db.query()
all_installed = spack.store.STORE.db.query()
assert len(all_installed) == 2
deprecate("-y", "libelf@0.8.10", "libelf@0.8.13")
non_deprecated = spack.store.db.query()
all_available = spack.store.db.query(installed=any)
non_deprecated = spack.store.STORE.db.query()
all_available = spack.store.STORE.db.query(installed=any)
assert all_available == all_installed
assert non_deprecated == spack.store.db.query("libelf@0.8.13")
assert non_deprecated == spack.store.STORE.db.query("libelf@0.8.13")
def test_deprecate_fails_no_such_package(mock_packages, mock_archive, mock_fetch, install_mockery):
@ -53,13 +53,13 @@ def test_deprecate_install(mock_packages, mock_archive, mock_fetch, install_mock
that is not yet installed."""
install("libelf@0.8.10")
to_deprecate = spack.store.db.query()
to_deprecate = spack.store.STORE.db.query()
assert len(to_deprecate) == 1
deprecate("-y", "-i", "libelf@0.8.10", "libelf@0.8.13")
non_deprecated = spack.store.db.query()
deprecated = spack.store.db.query(installed=InstallStatuses.DEPRECATED)
non_deprecated = spack.store.STORE.db.query()
deprecated = spack.store.STORE.db.query(installed=InstallStatuses.DEPRECATED)
assert deprecated == to_deprecate
assert len(non_deprecated) == 1
assert non_deprecated[0].satisfies("libelf@0.8.13")
@ -73,13 +73,13 @@ def test_deprecate_deps(mock_packages, mock_archive, mock_fetch, install_mockery
new_spec = spack.spec.Spec("libdwarf@20130729^libelf@0.8.13").concretized()
old_spec = spack.spec.Spec("libdwarf@20130207^libelf@0.8.10").concretized()
all_installed = spack.store.db.query()
all_installed = spack.store.STORE.db.query()
deprecate("-y", "-d", "libdwarf@20130207", "libdwarf@20130729")
non_deprecated = spack.store.db.query()
all_available = spack.store.db.query(installed=any)
deprecated = spack.store.db.query(installed=InstallStatuses.DEPRECATED)
non_deprecated = spack.store.STORE.db.query()
all_available = spack.store.STORE.db.query(installed=any)
deprecated = spack.store.STORE.db.query(installed=InstallStatuses.DEPRECATED)
assert all_available == all_installed
assert sorted(all_available) == sorted(deprecated + non_deprecated)
@ -95,12 +95,12 @@ def test_uninstall_deprecated(mock_packages, mock_archive, mock_fetch, install_m
deprecate("-y", "libelf@0.8.10", "libelf@0.8.13")
non_deprecated = spack.store.db.query()
non_deprecated = spack.store.STORE.db.query()
uninstall("-y", "libelf@0.8.10")
assert spack.store.db.query() == spack.store.db.query(installed=any)
assert spack.store.db.query() == non_deprecated
assert spack.store.STORE.db.query() == spack.store.STORE.db.query(installed=any)
assert spack.store.STORE.db.query() == non_deprecated
def test_deprecate_already_deprecated(mock_packages, mock_archive, mock_fetch, install_mockery):
@ -113,17 +113,17 @@ def test_deprecate_already_deprecated(mock_packages, mock_archive, mock_fetch, i
deprecate("-y", "libelf@0.8.10", "libelf@0.8.12")
deprecator = spack.store.db.deprecator(deprecated_spec)
deprecator = spack.store.STORE.db.deprecator(deprecated_spec)
assert deprecator == spack.spec.Spec("libelf@0.8.12").concretized()
deprecate("-y", "libelf@0.8.10", "libelf@0.8.13")
non_deprecated = spack.store.db.query()
all_available = spack.store.db.query(installed=any)
non_deprecated = spack.store.STORE.db.query()
all_available = spack.store.STORE.db.query(installed=any)
assert len(non_deprecated) == 2
assert len(all_available) == 3
deprecator = spack.store.db.deprecator(deprecated_spec)
deprecator = spack.store.STORE.db.deprecator(deprecated_spec)
assert deprecator == spack.spec.Spec("libelf@0.8.13").concretized()
@ -140,19 +140,19 @@ def test_deprecate_deprecator(mock_packages, mock_archive, mock_fetch, install_m
deprecate("-y", "libelf@0.8.10", "libelf@0.8.12")
deprecator = spack.store.db.deprecator(first_deprecated_spec)
deprecator = spack.store.STORE.db.deprecator(first_deprecated_spec)
assert deprecator == second_deprecated_spec
deprecate("-y", "libelf@0.8.12", "libelf@0.8.13")
non_deprecated = spack.store.db.query()
all_available = spack.store.db.query(installed=any)
non_deprecated = spack.store.STORE.db.query()
all_available = spack.store.STORE.db.query(installed=any)
assert len(non_deprecated) == 1
assert len(all_available) == 3
first_deprecator = spack.store.db.deprecator(first_deprecated_spec)
first_deprecator = spack.store.STORE.db.deprecator(first_deprecated_spec)
assert first_deprecator == final_deprecator
second_deprecator = spack.store.db.deprecator(second_deprecated_spec)
second_deprecator = spack.store.STORE.db.deprecator(second_deprecated_spec)
assert second_deprecator == final_deprecator

View file

@ -72,7 +72,7 @@ def test_dev_build_until(tmpdir, mock_packages, install_mockery):
assert f.read() == spec.package.replacement_string
assert not os.path.exists(spec.prefix)
assert not spack.store.db.query(spec, installed=True)
assert not spack.store.STORE.db.query(spec, installed=True)
def test_dev_build_until_last_phase(tmpdir, mock_packages, install_mockery):
@ -91,7 +91,7 @@ def test_dev_build_until_last_phase(tmpdir, mock_packages, install_mockery):
assert f.read() == spec.package.replacement_string
assert os.path.exists(spec.prefix)
assert spack.store.db.query(spec, installed=True)
assert spack.store.STORE.db.query(spec, installed=True)
assert os.path.exists(str(tmpdir))

View file

@ -250,8 +250,8 @@ def test_env_roots_marked_explicit(install_mockery, mock_fetch):
install("dependent-install")
# Check one explicit, one implicit install
dependent = spack.store.db.query(explicit=True)
dependency = spack.store.db.query(explicit=False)
dependent = spack.store.STORE.db.query(explicit=True)
dependency = spack.store.STORE.db.query(explicit=False)
assert len(dependent) == 1
assert len(dependency) == 1
@ -262,7 +262,7 @@ def test_env_roots_marked_explicit(install_mockery, mock_fetch):
e.concretize()
e.install_all()
explicit = spack.store.db.query(explicit=True)
explicit = spack.store.STORE.db.query(explicit=True)
assert len(explicit) == 2
@ -362,10 +362,10 @@ def test_env_install_two_specs_same_dep(install_mockery, mock_fetch, tmpdir, cap
assert "depb: Executing phase:" in out
assert "a: Executing phase:" in out
depb = spack.store.db.query_one("depb", installed=True)
depb = spack.store.STORE.db.query_one("depb", installed=True)
assert depb, "Expected depb to be installed"
a = spack.store.db.query_one("a", installed=True)
a = spack.store.STORE.db.query_one("a", installed=True)
assert a, "Expected a to be installed"
@ -2802,11 +2802,11 @@ def test_custom_store_in_environment(mutable_config, tmpdir):
install_root
)
)
current_store_root = str(spack.store.root)
current_store_root = str(spack.store.STORE.root)
assert str(current_store_root) != install_root
with spack.environment.Environment(str(tmpdir)):
assert str(spack.store.root) == install_root
assert str(spack.store.root) == current_store_root
assert str(spack.store.STORE.root) == install_root
assert str(spack.store.STORE.root) == current_store_root
def test_activate_temp(monkeypatch, tmpdir):

View file

@ -275,7 +275,7 @@ def test_find_external_nonempty_default_manifest_dir(
monkeypatch.setenv("PATH", "")
monkeypatch.setattr(spack.cray_manifest, "default_path", str(directory_with_manifest))
external("find")
specs = spack.store.db.query("hwloc")
specs = spack.store.STORE.db.query("hwloc")
assert any(x.dag_hash() == "hwlocfakehashaaa" for x in specs)

View file

@ -352,7 +352,7 @@ def test_find_loaded(database, working_env):
assert output == ""
os.environ[uenv.spack_loaded_hashes_var] = ":".join(
[x.dag_hash() for x in spack.store.db.query()]
[x.dag_hash() for x in spack.store.STORE.db.query()]
)
output = find("--loaded")
expected = find()

View file

@ -216,7 +216,9 @@ def test_install_overwrite(mock_packages, mock_archive, mock_fetch, config, inst
# Ignore manifest and install times
manifest = os.path.join(
spec.prefix, spack.store.layout.metadata_dir, spack.store.layout.manifest_file_name
spec.prefix,
spack.store.STORE.layout.metadata_dir,
spack.store.STORE.layout.manifest_file_name,
)
ignores = [manifest, spec.package.times_log_path]
@ -291,7 +293,9 @@ def test_install_overwrite_multiple(
install("cmake")
ld_manifest = os.path.join(
libdwarf.prefix, spack.store.layout.metadata_dir, spack.store.layout.manifest_file_name
libdwarf.prefix,
spack.store.STORE.layout.metadata_dir,
spack.store.STORE.layout.manifest_file_name,
)
ld_ignores = [ld_manifest, libdwarf.package.times_log_path]
@ -300,7 +304,9 @@ def test_install_overwrite_multiple(
expected_libdwarf_md5 = fs.hash_directory(libdwarf.prefix, ignore=ld_ignores)
cm_manifest = os.path.join(
cmake.prefix, spack.store.layout.metadata_dir, spack.store.layout.manifest_file_name
cmake.prefix,
spack.store.STORE.layout.metadata_dir,
spack.store.STORE.layout.manifest_file_name,
)
cm_ignores = [cm_manifest, cmake.package.times_log_path]
@ -512,7 +518,7 @@ def test_extra_files_are_archived(
install("archive-files")
archive_dir = os.path.join(spack.store.layout.metadata_path(s), "archived-files")
archive_dir = os.path.join(spack.store.STORE.layout.metadata_path(s), "archived-files")
config_log = os.path.join(archive_dir, mock_archive.expanded_archive_basedir, "config.log")
assert os.path.exists(config_log)
@ -699,7 +705,7 @@ def test_cache_only_fails(tmpdir, mock_fetch, install_mockery, capfd):
assert "was not installed" in out
# Check that failure prefix locks are still cached
failure_lock_prefixes = ",".join(spack.store.db._prefix_failures.keys())
failure_lock_prefixes = ",".join(spack.store.STORE.db._prefix_failures.keys())
assert "libelf" in failure_lock_prefixes
assert "libdwarf" in failure_lock_prefixes

View file

@ -30,7 +30,7 @@ def test_mark_spec_required(mutable_database):
def test_mark_all_explicit(mutable_database):
mark("-e", "-a")
gc("-y")
all_specs = spack.store.layout.all_specs()
all_specs = spack.store.STORE.layout.all_specs()
assert len(all_specs) == 15
@ -38,7 +38,7 @@ def test_mark_all_explicit(mutable_database):
def test_mark_all_implicit(mutable_database):
mark("-i", "-a")
gc("-y")
all_specs = spack.store.layout.all_specs()
all_specs = spack.store.STORE.layout.all_specs()
assert len(all_specs) == 0
@ -47,7 +47,7 @@ def test_mark_one_explicit(mutable_database):
mark("-e", "libelf")
uninstall("-y", "-a", "mpileaks")
gc("-y")
all_specs = spack.store.layout.all_specs()
all_specs = spack.store.STORE.layout.all_specs()
assert len(all_specs) == 3
@ -55,7 +55,7 @@ def test_mark_one_explicit(mutable_database):
def test_mark_one_implicit(mutable_database):
mark("-i", "externaltest")
gc("-y")
all_specs = spack.store.layout.all_specs()
all_specs = spack.store.STORE.layout.all_specs()
assert len(all_specs) == 14
@ -64,5 +64,5 @@ def test_mark_all_implicit_then_explicit(mutable_database):
mark("-i", "-a")
mark("-e", "-a")
gc("-y")
all_specs = spack.store.layout.all_specs()
all_specs = spack.store.STORE.layout.all_specs()
assert len(all_specs) == 15

View file

@ -21,23 +21,23 @@ def test_reindex_basic(mock_packages, mock_archive, mock_fetch, install_mockery)
install("libelf@0.8.13")
install("libelf@0.8.12")
all_installed = spack.store.db.query()
all_installed = spack.store.STORE.db.query()
reindex()
assert spack.store.db.query() == all_installed
assert spack.store.STORE.db.query() == all_installed
def test_reindex_db_deleted(mock_packages, mock_archive, mock_fetch, install_mockery):
install("libelf@0.8.13")
install("libelf@0.8.12")
all_installed = spack.store.db.query()
all_installed = spack.store.STORE.db.query()
os.remove(spack.store.db._index_path)
os.remove(spack.store.STORE.db._index_path)
reindex()
assert spack.store.db.query() == all_installed
assert spack.store.STORE.db.query() == all_installed
def test_reindex_with_deprecated_packages(
@ -48,11 +48,11 @@ def test_reindex_with_deprecated_packages(
deprecate("-y", "libelf@0.8.12", "libelf@0.8.13")
all_installed = spack.store.db.query(installed=any)
non_deprecated = spack.store.db.query(installed=True)
all_installed = spack.store.STORE.db.query(installed=any)
non_deprecated = spack.store.STORE.db.query(installed=True)
os.remove(spack.store.db._index_path)
os.remove(spack.store.STORE.db._index_path)
reindex()
assert spack.store.db.query(installed=any) == all_installed
assert spack.store.db.query(installed=True) == non_deprecated
assert spack.store.STORE.db.query(installed=any) == all_installed
assert spack.store.STORE.db.query(installed=True) == non_deprecated

View file

@ -49,7 +49,7 @@ def test_spec_concretizer_args(mutable_config, mutable_database):
uninstall("-y", "mpileaks^mpich2")
# get the hash of mpileaks^zmpi
mpileaks_zmpi = spack.store.db.query_one("mpileaks^zmpi")
mpileaks_zmpi = spack.store.STORE.db.query_one("mpileaks^zmpi")
h = mpileaks_zmpi.dag_hash()[:7]
output = spec("--fresh", "-l", "mpileaks")

View file

@ -46,7 +46,7 @@ def test_correct_installed_dependents(mutable_database):
# Test whether we return the right dependents.
# Take callpath from the database
callpath = spack.store.db.query_local("callpath")[0]
callpath = spack.store.STORE.db.query_local("callpath")[0]
# Ensure it still has dependents and dependencies
dependents = callpath.dependents(deptype=("run", "link"))
@ -78,7 +78,7 @@ def test_recursive_uninstall(mutable_database):
"""Test recursive uninstall."""
uninstall("-y", "-a", "--dependents", "callpath")
all_specs = spack.store.layout.all_specs()
all_specs = spack.store.STORE.layout.all_specs()
assert len(all_specs) == 9
# query specs with multiple configurations
mpileaks_specs = [s for s in all_specs if s.satisfies("mpileaks")]
@ -98,7 +98,7 @@ def test_uninstall_spec_with_multiple_roots(
):
uninstall("-y", "-a", "--dependents", constraint)
all_specs = spack.store.layout.all_specs()
all_specs = spack.store.STORE.layout.all_specs()
assert len(all_specs) == expected_number_of_specs
@ -109,7 +109,7 @@ def test_force_uninstall_spec_with_ref_count_not_zero(
):
uninstall("-f", "-y", constraint)
all_specs = spack.store.layout.all_specs()
all_specs = spack.store.STORE.layout.all_specs()
assert len(all_specs) == expected_number_of_specs
@ -117,41 +117,41 @@ def test_force_uninstall_spec_with_ref_count_not_zero(
def test_force_uninstall_and_reinstall_by_hash(mutable_database):
"""Test forced uninstall and reinstall of old specs."""
# this is the spec to be removed
callpath_spec = spack.store.db.query_one("callpath ^mpich")
callpath_spec = spack.store.STORE.db.query_one("callpath ^mpich")
dag_hash = callpath_spec.dag_hash()
# ensure can look up by hash and that it's a dependent of mpileaks
def validate_callpath_spec(installed):
assert installed is True or installed is False
specs = spack.store.db.get_by_hash(dag_hash, installed=installed)
specs = spack.store.STORE.db.get_by_hash(dag_hash, installed=installed)
assert len(specs) == 1 and specs[0] == callpath_spec
specs = spack.store.db.get_by_hash(dag_hash[:7], installed=installed)
specs = spack.store.STORE.db.get_by_hash(dag_hash[:7], installed=installed)
assert len(specs) == 1 and specs[0] == callpath_spec
specs = spack.store.db.get_by_hash(dag_hash, installed=any)
specs = spack.store.STORE.db.get_by_hash(dag_hash, installed=any)
assert len(specs) == 1 and specs[0] == callpath_spec
specs = spack.store.db.get_by_hash(dag_hash[:7], installed=any)
specs = spack.store.STORE.db.get_by_hash(dag_hash[:7], installed=any)
assert len(specs) == 1 and specs[0] == callpath_spec
specs = spack.store.db.get_by_hash(dag_hash, installed=not installed)
specs = spack.store.STORE.db.get_by_hash(dag_hash, installed=not installed)
assert specs is None
specs = spack.store.db.get_by_hash(dag_hash[:7], installed=not installed)
specs = spack.store.STORE.db.get_by_hash(dag_hash[:7], installed=not installed)
assert specs is None
mpileaks_spec = spack.store.db.query_one("mpileaks ^mpich")
mpileaks_spec = spack.store.STORE.db.query_one("mpileaks ^mpich")
assert callpath_spec in mpileaks_spec
spec = spack.store.db.query_one("callpath ^mpich", installed=installed)
spec = spack.store.STORE.db.query_one("callpath ^mpich", installed=installed)
assert spec == callpath_spec
spec = spack.store.db.query_one("callpath ^mpich", installed=any)
spec = spack.store.STORE.db.query_one("callpath ^mpich", installed=any)
assert spec == callpath_spec
spec = spack.store.db.query_one("callpath ^mpich", installed=not installed)
spec = spack.store.STORE.db.query_one("callpath ^mpich", installed=not installed)
assert spec is None
validate_callpath_spec(True)
@ -164,7 +164,7 @@ def validate_callpath_spec(installed):
# BUT, make sure that the removed callpath spec is not in queries
def db_specs():
all_specs = spack.store.layout.all_specs()
all_specs = spack.store.STORE.layout.all_specs()
return (
all_specs,
[s for s in all_specs if s.satisfies("mpileaks")],

View file

@ -22,7 +22,7 @@ def test_single_file_verify_cmd(tmpdir):
# Test the verify command interface to verifying a single file.
filedir = os.path.join(str(tmpdir), "a", "b", "c", "d")
filepath = os.path.join(filedir, "file")
metadir = os.path.join(str(tmpdir), spack.store.layout.metadata_dir)
metadir = os.path.join(str(tmpdir), spack.store.STORE.layout.metadata_dir)
fs.mkdirp(filedir)
fs.mkdirp(metadir)
@ -32,7 +32,7 @@ def test_single_file_verify_cmd(tmpdir):
data = spack.verify.create_manifest_entry(filepath)
manifest_file = os.path.join(metadir, spack.store.layout.manifest_file_name)
manifest_file = os.path.join(metadir, spack.store.STORE.layout.manifest_file_name)
with open(manifest_file, "w") as f:
sjson.dump({filepath: data}, f)

View file

@ -1222,7 +1222,7 @@ def mock_fn(*args, **kwargs):
return [first_spec]
if mock_db:
monkeypatch.setattr(spack.store.db, "query", mock_fn)
monkeypatch.setattr(spack.store.STORE.db, "query", mock_fn)
else:
monkeypatch.setattr(spack.binary_distribution, "update_cache_and_get_specs", mock_fn)
@ -1275,7 +1275,7 @@ def test_reuse_with_flags(self, mutable_database, mutable_config):
spack.config.set("concretizer:reuse", True)
spec = Spec("a cflags=-g cxxflags=-g").concretized()
spack.store.db.add(spec, None)
spack.store.STORE.db.add(spec, None)
testspec = Spec("a cflags=-g")
testspec.concretize()
@ -2013,7 +2013,7 @@ def test_external_python_extension_find_dependency_from_installed(self, monkeypa
# install python external
python = Spec("python").concretized()
monkeypatch.setattr(spack.store.db, "query", lambda x: [python])
monkeypatch.setattr(spack.store.STORE.db, "query", lambda x: [python])
# ensure that we can't be faking this by getting it from config
external_conf.pop("python")

View file

@ -959,7 +959,7 @@ def install_mockery(temporary_store, mutable_config, mock_packages):
# Also wipe out any cached prefix failure locks (associated with
# the session-scoped mock archive).
for pkg_id in list(temporary_store.db._prefix_failures.keys()):
lock = spack.store.db._prefix_failures.pop(pkg_id, None)
lock = spack.store.STORE.db._prefix_failures.pop(pkg_id, None)
if lock:
try:
lock.release_write()
@ -1946,4 +1946,4 @@ def nullify_globals(request, monkeypatch):
monkeypatch.setattr(spack.config, "config", None)
monkeypatch.setattr(spack.caches, "misc_cache", None)
monkeypatch.setattr(spack.repo, "path", None)
monkeypatch.setattr(spack.store, "store", None)
monkeypatch.setattr(spack.store, "STORE", None)

View file

@ -338,7 +338,7 @@ def test_read_cray_manifest(tmpdir, mutable_config, mock_packages, mutable_datab
with open(test_db_fname, "w") as db_file:
json.dump(create_manifest_content(), db_file)
cray_manifest.read(test_db_fname, True)
query_specs = spack.store.db.query("openmpi")
query_specs = spack.store.STORE.db.query("openmpi")
assert any(x.dag_hash() == "openmpifakehasha" for x in query_specs)
concretized_specs = spack.cmd.parse_specs(

View file

@ -77,7 +77,7 @@ def test_spec_installed_upstream(
upstream_write_db.add(spec, upstream_layout)
upstream_db._read()
monkeypatch.setattr(spack.store, "db", downstream_db)
monkeypatch.setattr(spack.store.STORE, "db", downstream_db)
assert spec.installed
assert spec.installed_upstream
assert spec.copy().installed
@ -195,11 +195,11 @@ def test_add_to_upstream_after_downstream(upstream_and_downstream_db, tmpdir):
assert len(qresults) == 1
(queried_spec,) = qresults
try:
orig_db = spack.store.db
spack.store.db = downstream_db
orig_db = spack.store.STORE.db
spack.store.STORE.db = downstream_db
assert queried_spec.prefix == downstream_layout.path_for_spec(spec)
finally:
spack.store.db = orig_db
spack.store.STORE.db = orig_db
@pytest.mark.usefixtures("config", "temporary_store")
@ -294,16 +294,16 @@ def _print_ref_counts():
recs = []
def add_rec(spec):
cspecs = spack.store.db.query(spec, installed=any)
cspecs = spack.store.STORE.db.query(spec, installed=any)
if not cspecs:
recs.append("[ %-7s ] %-20s-" % ("", spec))
else:
key = cspecs[0].dag_hash()
rec = spack.store.db.get_record(cspecs[0])
rec = spack.store.STORE.db.get_record(cspecs[0])
recs.append("[ %-7s ] %-20s%d" % (key[:7], spec, rec.ref_count))
with spack.store.db.read_transaction():
with spack.store.STORE.db.read_transaction():
add_rec("mpileaks ^mpich")
add_rec("callpath ^mpich")
add_rec("mpich")
@ -326,7 +326,7 @@ def add_rec(spec):
def _check_merkleiness():
"""Ensure the spack database is a valid merkle graph."""
all_specs = spack.store.db.query(installed=any)
all_specs = spack.store.STORE.db.query(installed=any)
seen = {}
for spec in all_specs:
@ -340,7 +340,7 @@ def _check_merkleiness():
def _check_db_sanity(database):
"""Utility function to check db against install layout."""
pkg_in_layout = sorted(spack.store.layout.all_specs())
pkg_in_layout = sorted(spack.store.STORE.layout.all_specs())
actual = sorted(database.query())
externals = sorted([x for x in actual if x.external])
@ -376,7 +376,7 @@ def _check_remove_and_add_package(database, spec):
assert concrete_spec not in remaining
# add it back and make sure everything is ok.
database.add(concrete_spec, spack.store.layout)
database.add(concrete_spec, spack.store.STORE.layout)
installed = database.query()
assert concrete_spec in installed
assert installed == original
@ -392,7 +392,7 @@ def _mock_install(spec):
def _mock_remove(spec):
specs = spack.store.db.query(spec)
specs = spack.store.STORE.db.query(spec)
assert len(specs) == 1
spec = specs[0]
spec.package.do_uninstall(spec)
@ -454,7 +454,7 @@ def test_005_db_exists(database):
def test_010_all_install_sanity(database):
"""Ensure that the install layout reflects what we think it does."""
all_specs = spack.store.layout.all_specs()
all_specs = spack.store.STORE.layout.all_specs()
assert len(all_specs) == 15
# Query specs with multiple configurations
@ -483,12 +483,12 @@ def test_010_all_install_sanity(database):
def test_015_write_and_read(mutable_database):
# write and read DB
with spack.store.db.write_transaction():
specs = spack.store.db.query()
recs = [spack.store.db.get_record(s) for s in specs]
with spack.store.STORE.db.write_transaction():
specs = spack.store.STORE.db.query()
recs = [spack.store.STORE.db.get_record(s) for s in specs]
for spec, rec in zip(specs, recs):
new_rec = spack.store.db.get_record(spec)
new_rec = spack.store.STORE.db.get_record(spec)
assert new_rec.ref_count == rec.ref_count
assert new_rec.spec == rec.spec
assert new_rec.path == rec.path
@ -498,12 +498,12 @@ def test_015_write_and_read(mutable_database):
def test_017_write_and_read_without_uuid(mutable_database, monkeypatch):
monkeypatch.setattr(spack.database, "_use_uuid", False)
# write and read DB
with spack.store.db.write_transaction():
specs = spack.store.db.query()
recs = [spack.store.db.get_record(s) for s in specs]
with spack.store.STORE.db.write_transaction():
specs = spack.store.STORE.db.query()
recs = [spack.store.STORE.db.get_record(s) for s in specs]
for spec, rec in zip(specs, recs):
new_rec = spack.store.db.get_record(spec)
new_rec = spack.store.STORE.db.get_record(spec)
assert new_rec.ref_count == rec.ref_count
assert new_rec.spec == rec.spec
assert new_rec.path == rec.path
@ -517,7 +517,7 @@ def test_020_db_sanity(database):
def test_025_reindex(mutable_database):
"""Make sure reindex works and ref counts are valid."""
spack.store.store.reindex()
spack.store.STORE.reindex()
_check_db_sanity(mutable_database)
@ -527,7 +527,7 @@ def test_026_reindex_after_deprecate(mutable_database):
zmpi = mutable_database.query_one("zmpi")
mutable_database.deprecate(mpich, zmpi)
spack.store.store.reindex()
spack.store.STORE.reindex()
_check_db_sanity(mutable_database)
@ -538,8 +538,8 @@ class ReadModify:
def __call__(self):
# check that other process can read DB
_check_db_sanity(spack.store.db)
with spack.store.db.write_transaction():
_check_db_sanity(spack.store.STORE.db)
with spack.store.STORE.db.write_transaction():
_mock_remove("mpileaks ^zmpi")
@ -571,7 +571,7 @@ def test_041_ref_counts_deprecate(mutable_database):
def test_050_basic_query(database):
"""Ensure querying database is consistent with what is installed."""
# query everything
total_specs = len(spack.store.db.query())
total_specs = len(spack.store.STORE.db.query())
assert total_specs == 17
# query specs with multiple configurations
@ -626,7 +626,7 @@ def test_080_root_ref_counts(mutable_database):
assert mutable_database.get_record("mpich").ref_count == 1
# Put the spec back
mutable_database.add(rec.spec, spack.store.layout)
mutable_database.add(rec.spec, spack.store.STORE.layout)
# record is present again
assert len(mutable_database.query("mpileaks ^mpich", installed=any)) == 1
@ -702,7 +702,7 @@ def test_115_reindex_with_packages_not_in_repo(mutable_database, tmpdir):
# packages should not have to be defined in the repository once they
# are installed
with spack.repo.use_repositories(spack.repo.MockRepositoryBuilder(tmpdir).root):
spack.store.store.reindex()
spack.store.STORE.reindex()
_check_db_sanity(mutable_database)
@ -740,7 +740,7 @@ def test_regression_issue_8036(mutable_database, usr_folder_exists):
@pytest.mark.regression("11118")
def test_old_external_entries_prefix(mutable_database):
with open(spack.store.db._index_path, "r") as f:
with open(spack.store.STORE.db._index_path, "r") as f:
db_obj = json.loads(f.read())
jsonschema.validate(db_obj, schema)
@ -750,13 +750,13 @@ def test_old_external_entries_prefix(mutable_database):
db_obj["database"]["installs"][s.dag_hash()]["path"] = "None"
with open(spack.store.db._index_path, "w") as f:
with open(spack.store.STORE.db._index_path, "w") as f:
f.write(json.dumps(db_obj))
if _use_uuid:
with open(spack.store.db._verifier_path, "w") as f:
with open(spack.store.STORE.db._verifier_path, "w") as f:
f.write(str(uuid.uuid4()))
record = spack.store.db.get_record(s)
record = spack.store.STORE.db.get_record(s)
assert record.path is None
assert record.spec._prefix is None
@ -779,7 +779,7 @@ def test_query_unused_specs(mutable_database):
s.concretize()
s.package.do_install(fake=True, explicit=True)
unused = spack.store.db.unused_specs
unused = spack.store.STORE.db.unused_specs
assert len(unused) == 1
assert unused[0].name == "cmake"
@ -792,7 +792,7 @@ def test_query_spec_with_conditional_dependency(mutable_database):
s.concretize()
s.package.do_install(fake=True, explicit=True)
results = spack.store.db.query_local("hdf5 ^mpich")
results = spack.store.STORE.db.query_local("hdf5 ^mpich")
assert not results
@ -800,7 +800,7 @@ def test_query_spec_with_conditional_dependency(mutable_database):
def test_query_spec_with_non_conditional_virtual_dependency(database):
# Ensure the same issue doesn't come up for virtual
# dependency that are not conditional on variants
results = spack.store.db.query_local("mpileaks ^mpich")
results = spack.store.STORE.db.query_local("mpileaks ^mpich")
assert len(results) == 1
@ -808,7 +808,7 @@ def test_failed_spec_path_error(database):
"""Ensure spec not concrete check is covered."""
s = spack.spec.Spec("a")
with pytest.raises(ValueError, match="Concrete spec required"):
spack.store.db._failed_spec_path(s)
spack.store.STORE.db._failed_spec_path(s)
@pytest.mark.db
@ -822,7 +822,7 @@ def _is(db, spec):
monkeypatch.setattr(spack.database.Database, "prefix_failure_locked", _is)
s = spack.spec.Spec("a")
spack.store.db.clear_failure(s)
spack.store.STORE.db.clear_failure(s)
out = capfd.readouterr()[0]
assert "Retaining failure marking" in out
@ -840,7 +840,7 @@ def _is(db, spec):
monkeypatch.setattr(spack.database.Database, "prefix_failure_marked", _is)
s = default_mock_concretization("a")
spack.store.db.clear_failure(s, force=True)
spack.store.STORE.db.clear_failure(s, force=True)
out = capfd.readouterr()[1]
assert "Removing failure marking despite lock" in out
assert "Unable to remove failure marking" in out
@ -858,14 +858,14 @@ def _raise_exc(lock):
with tmpdir.as_cwd():
s = default_mock_concretization("a")
spack.store.db.mark_failed(s)
spack.store.STORE.db.mark_failed(s)
out = str(capsys.readouterr()[1])
assert "Unable to mark a as failed" in out
# Clean up the failure mark to ensure it does not interfere with other
# tests using the same spec.
del spack.store.db._prefix_failures[s.prefix]
del spack.store.STORE.db._prefix_failures[s.prefix]
@pytest.mark.db
@ -878,19 +878,19 @@ def _is(db, spec):
s = default_mock_concretization("a")
# Confirm the spec is not already marked as failed
assert not spack.store.db.prefix_failed(s)
assert not spack.store.STORE.db.prefix_failed(s)
# Check that a failure entry is sufficient
spack.store.db._prefix_failures[s.prefix] = None
assert spack.store.db.prefix_failed(s)
spack.store.STORE.db._prefix_failures[s.prefix] = None
assert spack.store.STORE.db.prefix_failed(s)
# Remove the entry and check again
del spack.store.db._prefix_failures[s.prefix]
assert not spack.store.db.prefix_failed(s)
del spack.store.STORE.db._prefix_failures[s.prefix]
assert not spack.store.STORE.db.prefix_failed(s)
# Now pretend that the prefix failure is locked
monkeypatch.setattr(spack.database.Database, "prefix_failure_locked", _is)
assert spack.store.db.prefix_failed(s)
assert spack.store.STORE.db.prefix_failed(s)
def test_prefix_read_lock_error(default_mock_concretization, mutable_database, monkeypatch):
@ -905,7 +905,7 @@ def _raise(db, spec):
monkeypatch.setattr(lk.Lock, "acquire_read", _raise)
with pytest.raises(Exception):
with spack.store.db.prefix_read_lock(s):
with spack.store.STORE.db.prefix_read_lock(s):
assert False
@ -921,7 +921,7 @@ def _raise(db, spec):
monkeypatch.setattr(lk.Lock, "acquire_write", _raise)
with pytest.raises(Exception):
with spack.store.db.prefix_write_lock(s):
with spack.store.STORE.db.prefix_write_lock(s):
assert False
@ -969,7 +969,7 @@ def test_reindex_removed_prefix_is_not_installed(mutable_database, mock_store, c
shutil.rmtree(prefix)
# Reindex should pick up libelf as a dependency of libdwarf
spack.store.store.reindex()
spack.store.STORE.reindex()
# Reindexing should warn about libelf not being found on the filesystem
err = capfd.readouterr()[1]
@ -982,7 +982,7 @@ def test_reindex_removed_prefix_is_not_installed(mutable_database, mock_store, c
def test_reindex_when_all_prefixes_are_removed(mutable_database, mock_store):
# Remove all non-external installations from the filesystem
for spec in spack.store.db.query_local():
for spec in spack.store.STORE.db.query_local():
if not spec.external:
assert spec.prefix.startswith(str(mock_store))
shutil.rmtree(spec.prefix)
@ -992,7 +992,7 @@ def test_reindex_when_all_prefixes_are_removed(mutable_database, mock_store):
assert num > 0
# Reindex uses the current index to repopulate itself
spack.store.store.reindex()
spack.store.STORE.reindex()
# Make sure all explicit specs are still there, but are now uninstalled.
specs = mutable_database.query_local(installed=False, explicit=True)

View file

@ -159,7 +159,7 @@ def test_partial_install_delete_prefix_and_stage(install_mockery, mock_fetch, wo
s.package.remove_prefix = rm_prefix_checker.remove_prefix
# must clear failure markings for the package before re-installing it
spack.store.db.clear_failure(s, True)
spack.store.STORE.db.clear_failure(s, True)
s.package.set_install_succeed()
s.package.stage = MockStage(s.package.stage)
@ -354,7 +354,7 @@ def test_partial_install_keep_prefix(install_mockery, mock_fetch, monkeypatch, w
assert os.path.exists(s.package.prefix)
# must clear failure markings for the package before re-installing it
spack.store.db.clear_failure(s, True)
spack.store.STORE.db.clear_failure(s, True)
s.package.set_install_succeed()
s.package.stage = MockStage(s.package.stage)

View file

@ -557,7 +557,7 @@ def test_dump_packages_deps_ok(install_mockery, tmpdir, mock_packages):
def test_dump_packages_deps_errs(install_mockery, tmpdir, monkeypatch, capsys):
"""Test error paths for dump_packages with dependencies."""
orig_bpp = spack.store.layout.build_packages_path
orig_bpp = spack.store.STORE.layout.build_packages_path
orig_dirname = spack.repo.Repo.dirname_for_package_name
repo_err_msg = "Mock dirname_for_package_name"
@ -576,7 +576,7 @@ def _repoerr(repo, name):
# Now mock the creation of the required directory structure to cover
# the try-except block
monkeypatch.setattr(spack.store.layout, "build_packages_path", bpp_path)
monkeypatch.setattr(spack.store.STORE.layout, "build_packages_path", bpp_path)
spec = spack.spec.Spec("simple-inheritance").concretized()
path = str(tmpdir)
@ -601,29 +601,29 @@ def test_clear_failures_success(install_mockery):
# Set up a test prefix failure lock
lock = lk.Lock(
spack.store.db.prefix_fail_path, start=1, length=1, default_timeout=1e-9, desc="test"
spack.store.STORE.db.prefix_fail_path, start=1, length=1, default_timeout=1e-9, desc="test"
)
try:
lock.acquire_write()
except lk.LockTimeoutError:
tty.warn("Failed to write lock the test install failure")
spack.store.db._prefix_failures["test"] = lock
spack.store.STORE.db._prefix_failures["test"] = lock
# Set up a fake failure mark (or file)
fs.touch(os.path.join(spack.store.db._failure_dir, "test"))
fs.touch(os.path.join(spack.store.STORE.db._failure_dir, "test"))
# Now clear failure tracking
inst.clear_failures()
# Ensure there are no cached failure locks or failure marks
assert len(spack.store.db._prefix_failures) == 0
assert len(os.listdir(spack.store.db._failure_dir)) == 0
assert len(spack.store.STORE.db._prefix_failures) == 0
assert len(os.listdir(spack.store.STORE.db._failure_dir)) == 0
# Ensure the core directory and failure lock file still exist
assert os.path.isdir(spack.store.db._failure_dir)
assert os.path.isdir(spack.store.STORE.db._failure_dir)
# Locks on windows are a no-op
if sys.platform != "win32":
assert os.path.isfile(spack.store.db.prefix_fail_path)
assert os.path.isfile(spack.store.STORE.db.prefix_fail_path)
def test_clear_failures_errs(install_mockery, monkeypatch, capsys):
@ -635,7 +635,7 @@ def _raise_except(path):
raise OSError(err_msg)
# Set up a fake failure mark (or file)
fs.touch(os.path.join(spack.store.db._failure_dir, "test"))
fs.touch(os.path.join(spack.store.STORE.db._failure_dir, "test"))
monkeypatch.setattr(os, "remove", _raise_except)
@ -932,7 +932,7 @@ def _chgrp(path, group, follow_symlinks=True):
spec = installer.build_requests[0].pkg.spec
fs.touchp(spec.prefix)
metadatadir = spack.store.layout.metadata_path(spec)
metadatadir = spack.store.STORE.layout.metadata_path(spec)
# Regex matching with Windows style paths typically fails
# so we skip the match check here
if sys.platform == "win32":
@ -1388,7 +1388,7 @@ def test_single_external_implicit_install(install_mockery, explicit_args, is_exp
s = spack.spec.Spec(pkg).concretized()
s.external_path = "/usr"
create_installer([(s, explicit_args)]).install()
assert spack.store.db.get_record(pkg).explicit == is_explicit
assert spack.store.STORE.db.get_record(pkg).explicit == is_explicit
@pytest.mark.parametrize("run_tests", [True, False])

View file

@ -38,7 +38,7 @@ def test_rewire_db(mock_fetch, install_mockery, transitive):
assert os.path.exists(spliced_spec.prefix)
# test that it made it into the database
rec = spack.store.db.get_record(spliced_spec)
rec = spack.store.STORE.db.get_record(spliced_spec)
installed_in_db = rec.installed if rec else False
assert installed_in_db
@ -68,7 +68,7 @@ def test_rewire_bin(mock_fetch, install_mockery, transitive):
assert os.path.exists(spliced_spec.prefix)
# test that it made it into the database
rec = spack.store.db.get_record(spliced_spec)
rec = spack.store.STORE.db.get_record(spliced_spec)
installed_in_db = rec.installed if rec else False
assert installed_in_db
@ -93,25 +93,31 @@ def test_rewire_writes_new_metadata(mock_fetch, install_mockery):
# test install manifests
for node in spliced_spec.traverse(root=True):
spack.store.layout.ensure_installed(node)
spack.store.STORE.layout.ensure_installed(node)
manifest_file_path = os.path.join(
node.prefix, spack.store.layout.metadata_dir, spack.store.layout.manifest_file_name
node.prefix,
spack.store.STORE.layout.metadata_dir,
spack.store.STORE.layout.manifest_file_name,
)
assert os.path.exists(manifest_file_path)
orig_node = spec[node.name]
orig_manifest_file_path = os.path.join(
orig_node.prefix,
spack.store.layout.metadata_dir,
spack.store.layout.manifest_file_name,
spack.store.STORE.layout.metadata_dir,
spack.store.STORE.layout.manifest_file_name,
)
assert os.path.exists(orig_manifest_file_path)
assert not filecmp.cmp(orig_manifest_file_path, manifest_file_path, shallow=False)
specfile_path = os.path.join(
node.prefix, spack.store.layout.metadata_dir, spack.store.layout.spec_file_name
node.prefix,
spack.store.STORE.layout.metadata_dir,
spack.store.STORE.layout.spec_file_name,
)
assert os.path.exists(specfile_path)
orig_specfile_path = os.path.join(
orig_node.prefix, spack.store.layout.metadata_dir, spack.store.layout.spec_file_name
orig_node.prefix,
spack.store.STORE.layout.metadata_dir,
spack.store.STORE.layout.spec_file_name,
)
assert os.path.exists(orig_specfile_path)
assert not filecmp.cmp(orig_specfile_path, specfile_path, shallow=False)
@ -128,7 +134,7 @@ def test_uninstall_rewired_spec(mock_fetch, install_mockery, transitive):
spliced_spec = spec.splice(dep, transitive=transitive)
spack.rewiring.rewire(spliced_spec)
spliced_spec.package.do_uninstall()
assert len(spack.store.db.query(spliced_spec)) == 0
assert len(spack.store.STORE.db.query(spliced_spec)) == 0
assert not os.path.exists(spliced_spec.prefix)

View file

@ -53,13 +53,13 @@
php_line_patched = "<?php #!/this/" + ("x" * too_long) + "/is/php\n"
php_line_patched2 = "?>\n"
sbang_line = "#!/bin/sh %s/bin/sbang\n" % spack.store.store.unpadded_root
sbang_line = "#!/bin/sh %s/bin/sbang\n" % spack.store.STORE.unpadded_root
last_line = "last!\n"
@pytest.fixture # type: ignore[no-redef]
def sbang_line():
yield "#!/bin/sh %s/bin/sbang\n" % spack.store.layout.root
yield "#!/bin/sh %s/bin/sbang\n" % spack.store.STORE.layout.root
class ScriptDirectory:
@ -309,7 +309,7 @@ def configure_user_perms():
def check_sbang_installation(group=False):
sbang_path = sbang.sbang_install_path()
sbang_bin_dir = os.path.dirname(sbang_path)
assert sbang_path.startswith(spack.store.store.unpadded_root)
assert sbang_path.startswith(spack.store.STORE.unpadded_root)
assert os.path.exists(sbang_path)
assert fs.is_exe(sbang_path)
@ -333,7 +333,7 @@ def run_test_install_sbang(group):
sbang_path = sbang.sbang_install_path()
sbang_bin_dir = os.path.dirname(sbang_path)
assert sbang_path.startswith(spack.store.store.unpadded_root)
assert sbang_path.startswith(spack.store.STORE.unpadded_root)
assert not os.path.exists(sbang_bin_dir)
sbang.install_sbang()

View file

@ -670,7 +670,7 @@ def test_spec_formatting(self, default_mock_concretization):
other_segments = [
("{spack_root}", spack.paths.spack_root),
("{spack_install}", spack.store.layout.root),
("{spack_install}", spack.store.STORE.layout.root),
]
def depify(depname, fmt_str, sigil):

View file

@ -730,8 +730,8 @@ def test_ambiguous_hash(mutable_database, default_mock_concretization, config):
x2 = x1.copy()
x1._hash = "xyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy"
x2._hash = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
mutable_database.add(x1, spack.store.layout)
mutable_database.add(x2, spack.store.layout)
mutable_database.add(x1, spack.store.STORE.layout)
mutable_database.add(x2, spack.store.STORE.layout)
# ambiguity in first hash character
with pytest.raises(spack.spec.AmbiguousHashError):

View file

@ -174,7 +174,9 @@ def test_check_prefix_manifest(tmpdir):
assert results.errors[malware] == ["added"]
manifest_file = os.path.join(
spec.prefix, spack.store.layout.metadata_dir, spack.store.layout.manifest_file_name
spec.prefix,
spack.store.STORE.layout.metadata_dir,
spack.store.STORE.layout.manifest_file_name,
)
with open(manifest_file, "w") as f:
f.write("{This) string is not proper json")
@ -189,7 +191,7 @@ def test_single_file_verification(tmpdir):
# to which it belongs
filedir = os.path.join(str(tmpdir), "a", "b", "c", "d")
filepath = os.path.join(filedir, "file")
metadir = os.path.join(str(tmpdir), spack.store.layout.metadata_dir)
metadir = os.path.join(str(tmpdir), spack.store.STORE.layout.metadata_dir)
fs.mkdirp(filedir)
fs.mkdirp(metadir)
@ -199,7 +201,7 @@ def test_single_file_verification(tmpdir):
data = spack.verify.create_manifest_entry(filepath)
manifest_file = os.path.join(metadir, spack.store.layout.manifest_file_name)
manifest_file = os.path.join(metadir, spack.store.STORE.layout.manifest_file_name)
with open(manifest_file, "w") as f:
sjson.dump({filepath: data}, f)

View file

@ -50,7 +50,9 @@ def create_manifest_entry(path: str) -> Dict[str, Any]:
def write_manifest(spec):
manifest_file = os.path.join(
spec.prefix, spack.store.layout.metadata_dir, spack.store.layout.manifest_file_name
spec.prefix,
spack.store.STORE.layout.metadata_dir,
spack.store.STORE.layout.manifest_file_name,
)
if not os.path.exists(manifest_file):
@ -107,14 +109,14 @@ def check_file_manifest(filename):
dirname = os.path.dirname(filename)
results = VerificationResults()
while spack.store.layout.metadata_dir not in os.listdir(dirname):
while spack.store.STORE.layout.metadata_dir not in os.listdir(dirname):
if dirname == os.path.sep:
results.add_error(filename, "not owned by any package")
return results
dirname = os.path.dirname(dirname)
manifest_file = os.path.join(
dirname, spack.store.layout.metadata_dir, spack.store.layout.manifest_file_name
dirname, spack.store.STORE.layout.metadata_dir, spack.store.STORE.layout.manifest_file_name
)
if not os.path.exists(manifest_file):
@ -140,7 +142,7 @@ def check_spec_manifest(spec):
results = VerificationResults()
manifest_file = os.path.join(
prefix, spack.store.layout.metadata_dir, spack.store.layout.manifest_file_name
prefix, spack.store.STORE.layout.metadata_dir, spack.store.STORE.layout.manifest_file_name
)
if not os.path.exists(manifest_file):

View file

@ -30,7 +30,7 @@ def install(self, spec, prefix):
{1}
""".format(
spack.store.unpadded_root, prefix.bin
spack.store.STORE.unpadded_root, prefix.bin
)
with open("%s/sbang-style-1.sh" % self.prefix.bin, "w") as f:
f.write(sbang_style_1)