Style: black 23, skip magic trailing comma (#35351)
* Style: black 23, skip magic trailing commas
* isort should use same line length as black
* Fix unused import
* Update version of black used in CI
* Update new packages
This commit is contained in:
parent b935809948
commit 603569e321

510 changed files with 864 additions and 3856 deletions
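Nearly all of the 864 additions and 3856 deletions below are mechanical: with the magic trailing comma no longer honored, black 23 collapses any call, list, or dict that fits within the configured line length back onto a single line and drops the trailing comma. The same commit also removes redundant parentheses around tuple targets in for loops. Both patterns, shown here with lines lifted from hunks later in this diff:

# Before (black 22, magic trailing comma honored, so the list stays exploded):
latex_documents = [
    ("index", "Spack.tex", "Spack Documentation", "Todd Gamblin", "manual"),
]
# After (black 23 with the magic trailing comma skipped, the list fits on one line):
latex_documents = [("index", "Spack.tex", "Spack Documentation", "Todd Gamblin", "manual")]

# Second pattern, redundant parentheses around loop targets:
#   before: for (input, output) in specs:
#   after:  for input, output in specs: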
2  .github/workflows/valid-style.yml (vendored)
@@ -44,7 +44,7 @@ jobs:
cache: 'pip'
- name: Install Python packages
run: |
python3 -m pip install --upgrade pip six setuptools types-six black==22.12.0 mypy isort clingo flake8
python3 -m pip install --upgrade pip six setuptools types-six black==23.1.0 mypy isort clingo flake8
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.
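The hunk above only pins the formatter version used by CI. The behavior change itself would be switched on in pyproject.toml (presumably skip-magic-trailing-comma under [tool.black], plus an isort line length matching black's, per the commit message); that file's hunk is not among those shown on this page. The same release constraint reappears further down in the bootstrap root spec, py-black@:23.1.0, so spack style bootstraps a matching black, and rerunning spack's style command with its fix option regenerates the bulk of the reformatting that follows.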
@@ -89,6 +89,7 @@
# Enable todo items
todo_include_todos = True
#
# Disable duplicate cross-reference warnings.
#

@@ -353,9 +354,7 @@ class SpackStyle(DefaultStyle):
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
("index", "Spack.tex", "Spack Documentation", "Todd Gamblin", "manual"),
]
latex_documents = [("index", "Spack.tex", "Spack Documentation", "Todd Gamblin", "manual")]
# The name of an image file (relative to this directory) to place at the top of
# the title page.

@@ -402,7 +401,7 @@ class SpackStyle(DefaultStyle):
"Spack",
"One line description of project.",
"Miscellaneous",
),
)
]
# Documents to append as an appendix to all manuals.

@@ -418,6 +417,4 @@ class SpackStyle(DefaultStyle):
# -- Extension configuration -------------------------------------------------
# sphinx.ext.intersphinx
intersphinx_mapping = {
"python": ("https://docs.python.org/3", None),
}
intersphinx_mapping = {"python": ("https://docs.python.org/3", None)}
@@ -268,7 +268,6 @@ def groupid_to_group(x):
regex = re.escape(regex)
filenames = path_to_os_path(*filenames)
for filename in filenames:
msg = 'FILTER FILE: {0} [replacing "{1}"]'
tty.debug(msg.format(filename, regex))

@@ -1220,7 +1219,6 @@ def traverse_tree(
# target is relative to the link, then that may not resolve properly
# relative to our cwd - see resolve_link_target_relative_to_the_link
if os.path.isdir(source_child) and (follow_links or not os.path.islink(source_child)):
# When follow_nonexisting isn't set, don't descend into dirs
# in source that do not exist in dest
if follow_nonexisting or os.path.exists(dest_child):

@@ -1662,7 +1660,6 @@ def find(root, files, recursive=True):
@system_path_filter
def _find_recursive(root, search_files):
# The variable here is **on purpose** a defaultdict. The idea is that
# we want to poke the filesystem as little as possible, but still maintain
# stability in the order of the answer. Thus we are recording each library
@@ -198,7 +198,7 @@ def _memoized_function(*args, **kwargs):
except TypeError as e:
# TypeError is raised when indexing into a dict if the key is unhashable.
raise UnhashableArguments(
"args + kwargs '{}' was not hashable for function '{}'".format(key, func.__name__),
"args + kwargs '{}' was not hashable for function '{}'".format(key, func.__name__)
) from e
return _memoized_function

@@ -237,6 +237,7 @@ def decorator_with_or_without_args(decorator):
@decorator
"""
# See https://stackoverflow.com/questions/653368 for more on this
@functools.wraps(decorator)
def new_dec(*args, **kwargs):

@@ -990,8 +991,7 @@ def enum(**kwargs):
def stable_partition(
input_iterable: Iterable,
predicate_fn: Callable[[Any], bool],
input_iterable: Iterable, predicate_fn: Callable[[Any], bool]
) -> Tuple[List[Any], List[Any]]:
"""Partition the input iterable according to a custom predicate.

@@ -1104,11 +1104,7 @@ def __enter__(self):
def __exit__(self, exc_type, exc_value, tb):
if exc_value is not None:
self._handler._receive_forwarded(
self._context,
exc_value,
traceback.format_tb(tb),
)
self._handler._receive_forwarded(self._context, exc_value, traceback.format_tb(tb))
# Suppress any exception from being re-raised:
# https://docs.python.org/3/reference/datamodel.html#object.__exit__.
@@ -108,7 +108,6 @@ class SuppressOutput:
"""Class for disabling output in a scope using 'with' keyword"""
def __init__(self, msg_enabled=True, warn_enabled=True, error_enabled=True):
self._msg_enabled_initial = _msg_enabled
self._warn_enabled_initial = _warn_enabled
self._error_enabled_initial = _error_enabled
@@ -161,10 +161,7 @@ def _is_background(self):
def _get_canon_echo_flags(self):
"""Get current termios canonical and echo settings."""
cfg = termios.tcgetattr(self.stream)
return (
bool(cfg[3] & termios.ICANON),
bool(cfg[3] & termios.ECHO),
)
return (bool(cfg[3] & termios.ICANON), bool(cfg[3] & termios.ECHO))
def _enable_keyboard_input(self):
"""Disable canonical input and echoing on ``self.stream``."""
@@ -77,10 +77,7 @@ def __init__(self, pid, controller_fd, timeout=1, sleep_time=1e-1, debug=False):
def get_canon_echo_attrs(self):
"""Get echo and canon attributes of the terminal of controller_fd."""
cfg = termios.tcgetattr(self.controller_fd)
return (
bool(cfg[3] & termios.ICANON),
bool(cfg[3] & termios.ECHO),
)
return (bool(cfg[3] & termios.ICANON), bool(cfg[3] & termios.ECHO))
def horizontal_line(self, name):
"""Labled horizontal line for debugging."""

@@ -92,11 +89,7 @@ def status(self):
if self.debug:
canon, echo = self.get_canon_echo_attrs()
sys.stderr.write(
"canon: %s, echo: %s\n"
% (
"on" if canon else "off",
"on" if echo else "off",
)
"canon: %s, echo: %s\n" % ("on" if canon else "off", "on" if echo else "off")
)
sys.stderr.write("input: %s\n" % self.input_on())
sys.stderr.write("bg: %s\n" % self.background())
@@ -321,8 +321,7 @@ def _check_patch_urls(pkgs, error_cls):
errors.append(
error_cls(
"patch URL in package {0} must end with {1}".format(
pkg_cls.name,
full_index_arg,
pkg_cls.name, full_index_arg
),
[patch.url],
)
@ -210,10 +210,7 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
|
|||
break
|
||||
else:
|
||||
self._mirrors_for_spec[dag_hash].append(
|
||||
{
|
||||
"mirror_url": mirror_url,
|
||||
"spec": indexed_spec,
|
||||
}
|
||||
{"mirror_url": mirror_url, "spec": indexed_spec}
|
||||
)
|
||||
finally:
|
||||
shutil.rmtree(tmpdir)
|
||||
|
@ -296,10 +293,7 @@ def update_spec(self, spec, found_list):
|
|||
break
|
||||
else:
|
||||
current_list.append(
|
||||
{
|
||||
"mirror_url": new_entry["mirror_url"],
|
||||
"spec": new_entry["spec"],
|
||||
}
|
||||
{"mirror_url": new_entry["mirror_url"], "spec": new_entry["spec"]}
|
||||
)
|
||||
|
||||
def update(self, with_cooldown=False):
|
||||
|
@ -367,8 +361,7 @@ def update(self, with_cooldown=False):
|
|||
# May need to fetch the index and update the local caches
|
||||
try:
|
||||
needs_regen = self._fetch_and_cache_index(
|
||||
cached_mirror_url,
|
||||
cache_entry=cache_entry,
|
||||
cached_mirror_url, cache_entry=cache_entry
|
||||
)
|
||||
self._last_fetch_times[cached_mirror_url] = (now, True)
|
||||
all_methods_failed = False
|
||||
|
@ -2006,12 +1999,7 @@ def try_direct_fetch(spec, mirrors=None):
|
|||
fetched_spec = Spec.from_json(specfile_contents)
|
||||
fetched_spec._mark_concrete()
|
||||
|
||||
found_specs.append(
|
||||
{
|
||||
"mirror_url": mirror.fetch_url,
|
||||
"spec": fetched_spec,
|
||||
}
|
||||
)
|
||||
found_specs.append({"mirror_url": mirror.fetch_url, "spec": fetched_spec})
|
||||
|
||||
return found_specs
|
||||
|
||||
|
@ -2313,11 +2301,7 @@ def download_single_spec(concrete_spec, destination, mirror_url=None):
|
|||
local_tarball_path = os.path.join(destination, tarball_dir_name)
|
||||
|
||||
files_to_fetch = [
|
||||
{
|
||||
"url": [tarball_path_name],
|
||||
"path": local_tarball_path,
|
||||
"required": True,
|
||||
},
|
||||
{"url": [tarball_path_name], "path": local_tarball_path, "required": True},
|
||||
{
|
||||
"url": [
|
||||
tarball_name(concrete_spec, ".spec.json.sig"),
|
||||
|
@ -2438,12 +2422,7 @@ def conditional_fetch(self):
|
|||
response.headers.get("Etag", None) or response.headers.get("etag", None)
|
||||
)
|
||||
|
||||
return FetchIndexResult(
|
||||
etag=etag,
|
||||
hash=computed_hash,
|
||||
data=result,
|
||||
fresh=False,
|
||||
)
|
||||
return FetchIndexResult(etag=etag, hash=computed_hash, data=result, fresh=False)
|
||||
|
||||
|
||||
class EtagIndexFetcher:
|
||||
|
|
|
@ -5,11 +5,7 @@
|
|||
"""Function and classes needed to bootstrap Spack itself."""
|
||||
|
||||
from .config import ensure_bootstrap_configuration, is_bootstrapping
|
||||
from .core import (
|
||||
all_core_root_specs,
|
||||
ensure_core_dependencies,
|
||||
ensure_patchelf_in_path_or_raise,
|
||||
)
|
||||
from .core import all_core_root_specs, ensure_core_dependencies, ensure_patchelf_in_path_or_raise
|
||||
from .environment import BootstrapEnvironment, ensure_environment_dependencies
|
||||
from .status import status_message
|
||||
|
||||
|
|
|
@ -59,10 +59,7 @@ def _try_import_from_store(module, query_spec, query_info=None):
|
|||
# to be picked up and used, possibly depending on something in the store, first
|
||||
# allows the bootstrap version to work when an incompatible version is in
|
||||
# sys.path
|
||||
orders = [
|
||||
module_paths + sys.path,
|
||||
sys.path + module_paths,
|
||||
]
|
||||
orders = [module_paths + sys.path, sys.path + module_paths]
|
||||
for path in orders:
|
||||
sys.path = path
|
||||
try:
|
||||
|
|
|
@ -53,12 +53,7 @@
|
|||
import spack.util.url
|
||||
import spack.version
|
||||
|
||||
from ._common import (
|
||||
_executables_in_store,
|
||||
_python_import,
|
||||
_root_spec,
|
||||
_try_import_from_store,
|
||||
)
|
||||
from ._common import _executables_in_store, _python_import, _root_spec, _try_import_from_store
|
||||
from .config import spack_python_interpreter, spec_for_current_python
|
||||
|
||||
#: Name of the file containing metadata about the bootstrapping source
|
||||
|
|
|
@@ -171,7 +171,7 @@ def mypy_root_spec():
def black_root_spec():
"""Return the root spec used to bootstrap black"""
return _root_spec("py-black@:22.12.0")
return _root_spec("py-black@:23.1.0")
def flake8_root_spec():
|
||||
|
|
|
@ -1016,7 +1016,6 @@ def get_cmake_prefix_path(pkg):
|
|||
def _setup_pkg_and_run(
|
||||
serialized_pkg, function, kwargs, child_pipe, input_multiprocess_fd, jsfd1, jsfd2
|
||||
):
|
||||
|
||||
context = kwargs.get("context", "build")
|
||||
|
||||
try:
|
||||
|
|
|
@ -110,11 +110,7 @@ class AutotoolsBuilder(BaseBuilder):
|
|||
phases = ("autoreconf", "configure", "build", "install")
|
||||
|
||||
#: Names associated with package methods in the old build-system format
|
||||
legacy_methods = (
|
||||
"configure_args",
|
||||
"check",
|
||||
"installcheck",
|
||||
)
|
||||
legacy_methods = ("configure_args", "check", "installcheck")
|
||||
|
||||
#: Names associated with package attributes in the old build-system format
|
||||
legacy_attributes = (
|
||||
|
|
|
@ -31,7 +31,6 @@ def cmake_cache_option(name, boolean_value, comment=""):
|
|||
|
||||
|
||||
class CachedCMakeBuilder(CMakeBuilder):
|
||||
|
||||
#: Phases of a Cached CMake package
|
||||
#: Note: the initconfig phase is used for developer builds as a final phase to stop on
|
||||
phases: Tuple[str, ...] = ("initconfig", "cmake", "build", "install")
|
||||
|
|
|
@ -252,10 +252,7 @@ def std_args(pkg, generator=None):
|
|||
|
||||
if platform.mac_ver()[0]:
|
||||
args.extend(
|
||||
[
|
||||
define("CMAKE_FIND_FRAMEWORK", "LAST"),
|
||||
define("CMAKE_FIND_APPBUNDLE", "LAST"),
|
||||
]
|
||||
[define("CMAKE_FIND_FRAMEWORK", "LAST"), define("CMAKE_FIND_APPBUNDLE", "LAST")]
|
||||
)
|
||||
|
||||
# Set up CMake rpath
|
||||
|
|
|
@ -38,10 +38,7 @@ class GenericBuilder(BaseBuilder):
|
|||
legacy_methods: Tuple[str, ...] = ()
|
||||
|
||||
#: Names associated with package attributes in the old build-system format
|
||||
legacy_attributes: Tuple[str, ...] = (
|
||||
"archive_files",
|
||||
"install_time_test_callbacks",
|
||||
)
|
||||
legacy_attributes: Tuple[str, ...] = ("archive_files", "install_time_test_callbacks")
|
||||
|
||||
#: Callback names for post-install phase tests
|
||||
install_time_test_callbacks = []
|
||||
|
|
|
@ -857,10 +857,7 @@ def scalapack_libs(self):
|
|||
raise_lib_error("Cannot find a BLACS library for the given MPI.")
|
||||
|
||||
int_suff = "_" + self.intel64_int_suffix
|
||||
scalapack_libnames = [
|
||||
"libmkl_scalapack" + int_suff,
|
||||
blacs_lib + int_suff,
|
||||
]
|
||||
scalapack_libnames = ["libmkl_scalapack" + int_suff, blacs_lib + int_suff]
|
||||
sca_libs = find_libraries(
|
||||
scalapack_libnames, root=self.component_lib_dir("mkl"), shared=("+shared" in self.spec)
|
||||
)
|
||||
|
@ -1161,9 +1158,7 @@ def _determine_license_type(self):
|
|||
#
|
||||
# Ideally, we just tell the installer to look around on the system.
|
||||
# Thankfully, we neither need to care nor emulate where it looks:
|
||||
license_type = {
|
||||
"ACTIVATION_TYPE": "exist_lic",
|
||||
}
|
||||
license_type = {"ACTIVATION_TYPE": "exist_lic"}
|
||||
|
||||
# However (and only), if the spack-internal Intel license file has been
|
||||
# populated beyond its templated explanatory comments, proffer it to
|
||||
|
|
|
@ -68,10 +68,7 @@ def unpack(self, pkg, spec, prefix):
|
|||
|
||||
@staticmethod
|
||||
def _generate_tree_line(name, prefix):
|
||||
return """{{ name = "{name}", root = "{prefix}" }};""".format(
|
||||
name=name,
|
||||
prefix=prefix,
|
||||
)
|
||||
return """{{ name = "{name}", root = "{prefix}" }};""".format(name=name, prefix=prefix)
|
||||
|
||||
def generate_luarocks_config(self, pkg, spec, prefix):
|
||||
spec = self.pkg.spec
|
||||
|
|
|
@ -37,11 +37,7 @@ class IntelOneApiPackage(Package):
|
|||
conflicts(c, msg="This package in only available for x86_64 and Linux")
|
||||
|
||||
# Add variant to toggle environment modifications from vars.sh
|
||||
variant(
|
||||
"envmods",
|
||||
default=True,
|
||||
description="Toggles environment modifications",
|
||||
)
|
||||
variant("envmods", default=True, description="Toggles environment modifications")
|
||||
|
||||
@staticmethod
|
||||
def update_description(cls):
|
||||
|
|
|
@ -61,10 +61,7 @@ def import_modules(self):
|
|||
list: list of strings of module names
|
||||
"""
|
||||
modules = []
|
||||
root = os.path.join(
|
||||
self.prefix,
|
||||
self.spec["python"].package.platlib,
|
||||
)
|
||||
root = os.path.join(self.prefix, self.spec["python"].package.platlib)
|
||||
|
||||
# Some Python libraries are packages: collections of modules
|
||||
# distributed in directories containing __init__.py files
|
||||
|
|
|
@ -42,9 +42,7 @@
|
|||
from spack.reporters import CDash, CDashConfiguration
|
||||
from spack.reporters.cdash import build_stamp as cdash_build_stamp
|
||||
|
||||
JOB_RETRY_CONDITIONS = [
|
||||
"always",
|
||||
]
|
||||
JOB_RETRY_CONDITIONS = ["always"]
|
||||
|
||||
TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror"
|
||||
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
|
||||
|
@ -129,10 +127,7 @@ def _remove_reserved_tags(tags):
|
|||
|
||||
|
||||
def _get_spec_string(spec):
|
||||
format_elements = [
|
||||
"{name}{@version}",
|
||||
"{%compiler}",
|
||||
]
|
||||
format_elements = ["{name}{@version}", "{%compiler}"]
|
||||
|
||||
if spec.architecture:
|
||||
format_elements.append(" {arch=architecture}")
|
||||
|
@ -328,12 +323,7 @@ def _compute_spec_deps(spec_list, check_index_only=False, mirrors_to_check=None)
|
|||
dependencies = []
|
||||
|
||||
def append_dep(s, d):
|
||||
dependencies.append(
|
||||
{
|
||||
"spec": s,
|
||||
"depends": d,
|
||||
}
|
||||
)
|
||||
dependencies.append({"spec": s, "depends": d})
|
||||
|
||||
for spec in spec_list:
|
||||
for s in spec.traverse(deptype=all):
|
||||
|
@ -346,10 +336,7 @@ def append_dep(s, d):
|
|||
)
|
||||
|
||||
skey = _spec_deps_key(s)
|
||||
spec_labels[skey] = {
|
||||
"spec": s,
|
||||
"needs_rebuild": not up_to_date_mirrors,
|
||||
}
|
||||
spec_labels[skey] = {"spec": s, "needs_rebuild": not up_to_date_mirrors}
|
||||
|
||||
for d in s.dependencies(deptype=all):
|
||||
dkey = _spec_deps_key(d)
|
||||
|
@ -368,10 +355,7 @@ def append_dep(s, d):
|
|||
}
|
||||
)
|
||||
|
||||
deps_json_obj = {
|
||||
"specs": specs,
|
||||
"dependencies": dependencies,
|
||||
}
|
||||
deps_json_obj = {"specs": specs, "dependencies": dependencies}
|
||||
|
||||
return deps_json_obj
|
||||
|
||||
|
@ -410,14 +394,7 @@ def _copy_attributes(attrs_list, src_dict, dest_dict):
|
|||
|
||||
def _find_matching_config(spec, gitlab_ci):
|
||||
runner_attributes = {}
|
||||
overridable_attrs = [
|
||||
"image",
|
||||
"tags",
|
||||
"variables",
|
||||
"before_script",
|
||||
"script",
|
||||
"after_script",
|
||||
]
|
||||
overridable_attrs = ["image", "tags", "variables", "before_script", "script", "after_script"]
|
||||
|
||||
_copy_attributes(overridable_attrs, gitlab_ci, runner_attributes)
|
||||
|
||||
|
@ -685,28 +662,14 @@ def generate_gitlab_ci_yaml(
|
|||
except AttributeError:
|
||||
phase_name = phase
|
||||
strip_compilers = False
|
||||
phases.append(
|
||||
{
|
||||
"name": phase_name,
|
||||
"strip-compilers": strip_compilers,
|
||||
}
|
||||
)
|
||||
phases.append({"name": phase_name, "strip-compilers": strip_compilers})
|
||||
|
||||
for bs in env.spec_lists[phase_name]:
|
||||
bootstrap_specs.append(
|
||||
{
|
||||
"spec": bs,
|
||||
"phase-name": phase_name,
|
||||
"strip-compilers": strip_compilers,
|
||||
}
|
||||
{"spec": bs, "phase-name": phase_name, "strip-compilers": strip_compilers}
|
||||
)
|
||||
|
||||
phases.append(
|
||||
{
|
||||
"name": "specs",
|
||||
"strip-compilers": False,
|
||||
}
|
||||
)
|
||||
phases.append({"name": "specs", "strip-compilers": False})
|
||||
|
||||
# If a remote mirror override (alternate buildcache destination) was
|
||||
# specified, add it here in case it has already built hashes we might
|
||||
|
@ -1109,15 +1072,9 @@ def generate_gitlab_ci_yaml(
|
|||
"variables": variables,
|
||||
"script": job_script,
|
||||
"tags": tags,
|
||||
"artifacts": {
|
||||
"paths": artifact_paths,
|
||||
"when": "always",
|
||||
},
|
||||
"artifacts": {"paths": artifact_paths, "when": "always"},
|
||||
"needs": sorted(job_dependencies, key=lambda d: d["job"]),
|
||||
"retry": {
|
||||
"max": 2,
|
||||
"when": JOB_RETRY_CONDITIONS,
|
||||
},
|
||||
"retry": {"max": 2, "when": JOB_RETRY_CONDITIONS},
|
||||
"interruptible": True,
|
||||
}
|
||||
|
||||
|
@ -1135,10 +1092,7 @@ def generate_gitlab_ci_yaml(
|
|||
if image_name:
|
||||
job_object["image"] = image_name
|
||||
if image_entry is not None:
|
||||
job_object["image"] = {
|
||||
"name": image_name,
|
||||
"entrypoint": image_entry,
|
||||
}
|
||||
job_object["image"] = {"name": image_name, "entrypoint": image_entry}
|
||||
|
||||
output_object[job_name] = job_object
|
||||
job_id += 1
|
||||
|
@ -1181,11 +1135,7 @@ def generate_gitlab_ci_yaml(
|
|||
|
||||
service_job_retries = {
|
||||
"max": 2,
|
||||
"when": [
|
||||
"runner_system_failure",
|
||||
"stuck_or_timeout_failure",
|
||||
"script_failure",
|
||||
],
|
||||
"when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"],
|
||||
}
|
||||
|
||||
if job_id > 0:
|
||||
|
@ -1357,9 +1307,7 @@ def generate_gitlab_ci_yaml(
|
|||
_copy_attributes(default_attrs, service_job_config, noop_job)
|
||||
|
||||
if "script" not in noop_job:
|
||||
noop_job["script"] = [
|
||||
'echo "All specs already up to date, nothing to rebuild."',
|
||||
]
|
||||
noop_job["script"] = ['echo "All specs already up to date, nothing to rebuild."']
|
||||
|
||||
noop_job["retry"] = service_job_retries
|
||||
|
||||
|
@ -1620,9 +1568,7 @@ def download_and_extract_artifacts(url, work_dir):
|
|||
"""
|
||||
tty.msg("Fetching artifacts from: {0}\n".format(url))
|
||||
|
||||
headers = {
|
||||
"Content-Type": "application/zip",
|
||||
}
|
||||
headers = {"Content-Type": "application/zip"}
|
||||
|
||||
token = os.environ.get("GITLAB_PRIVATE_TOKEN", None)
|
||||
if token:
|
||||
|
@ -2081,10 +2027,7 @@ def write_broken_spec(url, pkg_name, stack_name, job_url, pipeline_url, spec_dic
|
|||
with open(file_path, "w") as fd:
|
||||
fd.write(syaml.dump(broken_spec_details))
|
||||
web_util.push_to_url(
|
||||
file_path,
|
||||
url,
|
||||
keep_original=False,
|
||||
extra_args={"ContentType": "text/plain"},
|
||||
file_path, url, keep_original=False, extra_args={"ContentType": "text/plain"}
|
||||
)
|
||||
except Exception as err:
|
||||
# If there is an S3 error (e.g., access denied or connection
|
||||
|
@ -2162,14 +2105,7 @@ def run_standalone_tests(**kwargs):
|
|||
tty.error("Reproduction directory is required for stand-alone tests")
|
||||
return
|
||||
|
||||
test_args = [
|
||||
"spack",
|
||||
"--color=always",
|
||||
"--backtrace",
|
||||
"--verbose",
|
||||
"test",
|
||||
"run",
|
||||
]
|
||||
test_args = ["spack", "--color=always", "--backtrace", "--verbose", "test", "run"]
|
||||
if fail_fast:
|
||||
test_args.append("--fail-fast")
|
||||
|
||||
|
@ -2319,19 +2255,9 @@ def populate_buildgroup(self, job_names):
|
|||
|
||||
opener = build_opener(HTTPHandler)
|
||||
|
||||
parent_group_id = self.create_buildgroup(
|
||||
opener,
|
||||
headers,
|
||||
url,
|
||||
self.build_group,
|
||||
"Daily",
|
||||
)
|
||||
parent_group_id = self.create_buildgroup(opener, headers, url, self.build_group, "Daily")
|
||||
group_id = self.create_buildgroup(
|
||||
opener,
|
||||
headers,
|
||||
url,
|
||||
"Latest {0}".format(self.build_group),
|
||||
"Latest",
|
||||
opener, headers, url, "Latest {0}".format(self.build_group), "Latest"
|
||||
)
|
||||
|
||||
if not parent_group_id or not group_id:
|
||||
|
@ -2341,13 +2267,9 @@ def populate_buildgroup(self, job_names):
|
|||
|
||||
data = {
|
||||
"dynamiclist": [
|
||||
{
|
||||
"match": name,
|
||||
"parentgroupid": parent_group_id,
|
||||
"site": self.site,
|
||||
}
|
||||
{"match": name, "parentgroupid": parent_group_id, "site": self.site}
|
||||
for name in job_names
|
||||
],
|
||||
]
|
||||
}
|
||||
|
||||
enc_data = json.dumps(data).encode("utf-8")
|
||||
|
|
|
@ -43,7 +43,6 @@ def matches(obj, proto):
|
|||
return all((key in obj and matches(obj[key], val)) for key, val in proto.items())
|
||||
|
||||
if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
|
||||
|
||||
if not (isinstance(proto, collections.abc.Sequence) and not isinstance(proto, str)):
|
||||
return False
|
||||
|
||||
|
|
|
@ -161,9 +161,7 @@ class _UnquotedFlags(object):
|
|||
"""
|
||||
|
||||
flags_arg_pattern = re.compile(
|
||||
r'^({0})=([^\'"].*)$'.format(
|
||||
"|".join(spack.spec.FlagMap.valid_compiler_flags()),
|
||||
)
|
||||
r'^({0})=([^\'"].*)$'.format("|".join(spack.spec.FlagMap.valid_compiler_flags()))
|
||||
)
|
||||
|
||||
def __init__(self, all_unquoted_flag_pairs: List[Tuple[Match[str], str]]):
|
||||
|
@ -227,7 +225,6 @@ def parse_specs(args, **kwargs):
|
|||
return specs
|
||||
|
||||
except spack.error.SpecError as e:
|
||||
|
||||
msg = e.message
|
||||
if e.long_message:
|
||||
msg += e.long_message
|
||||
|
|
|
@ -53,7 +53,6 @@ def packages(parser, args):
|
|||
|
||||
|
||||
def packages_https(parser, args):
|
||||
|
||||
# Since packages takes a long time, --all is required without name
|
||||
if not args.check_all and not args.name:
|
||||
tty.die("Please specify one or more packages to audit, or --all.")
|
||||
|
|
|
@ -103,9 +103,7 @@ def setup_parser(subparser):
|
|||
help="Regenerate buildcache index after building package(s)",
|
||||
)
|
||||
create.add_argument(
|
||||
"--spec-file",
|
||||
default=None,
|
||||
help="Create buildcache entry for spec from json or yaml file",
|
||||
"--spec-file", default=None, help="Create buildcache entry for spec from json or yaml file"
|
||||
)
|
||||
create.add_argument(
|
||||
"--only",
|
||||
|
|
|
@ -20,9 +20,7 @@ def setup_parser(subparser):
|
|||
help="name of the list to remove specs from",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--match-spec",
|
||||
dest="match_spec",
|
||||
help="if name is ambiguous, supply a spec to match",
|
||||
"--match-spec", dest="match_spec", help="if name is ambiguous, supply a spec to match"
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-a",
|
||||
|
|
|
@ -548,13 +548,7 @@ def ci_rebuild(args):
|
|||
|
||||
commands = [
|
||||
# apparently there's a race when spack bootstraps? do it up front once
|
||||
[
|
||||
SPACK_COMMAND,
|
||||
"-e",
|
||||
env.path,
|
||||
"bootstrap",
|
||||
"now",
|
||||
],
|
||||
[SPACK_COMMAND, "-e", env.path, "bootstrap", "now"],
|
||||
[
|
||||
SPACK_COMMAND,
|
||||
"-e",
|
||||
|
|
|
@ -13,11 +13,7 @@
|
|||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.argparsewriter import (
|
||||
ArgparseCompletionWriter,
|
||||
ArgparseRstWriter,
|
||||
ArgparseWriter,
|
||||
)
|
||||
from llnl.util.argparsewriter import ArgparseCompletionWriter, ArgparseRstWriter, ArgparseWriter
|
||||
from llnl.util.tty.colify import colify
|
||||
|
||||
import spack.cmd
|
||||
|
@ -42,7 +38,7 @@
|
|||
"format": "bash",
|
||||
"header": os.path.join(spack.paths.share_path, "bash", "spack-completion.in"),
|
||||
"update": os.path.join(spack.paths.share_path, "spack-completion.bash"),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -408,13 +408,7 @@ def config_prefer_upstream(args):
|
|||
pkgs = {}
|
||||
for spec in pref_specs:
|
||||
# Collect all the upstream compilers and versions for this package.
|
||||
pkg = pkgs.get(
|
||||
spec.name,
|
||||
{
|
||||
"version": [],
|
||||
"compiler": [],
|
||||
},
|
||||
)
|
||||
pkg = pkgs.get(spec.name, {"version": [], "compiler": []})
|
||||
pkgs[spec.name] = pkg
|
||||
|
||||
# We have no existing variant if this is our first added version.
|
||||
|
|
|
@ -16,19 +16,10 @@
|
|||
import spack.stage
|
||||
import spack.util.web
|
||||
from spack.spec import Spec
|
||||
from spack.url import (
|
||||
UndetectableNameError,
|
||||
UndetectableVersionError,
|
||||
parse_name,
|
||||
parse_version,
|
||||
)
|
||||
from spack.url import UndetectableNameError, UndetectableVersionError, parse_name, parse_version
|
||||
from spack.util.editor import editor
|
||||
from spack.util.executable import ProcessError, which
|
||||
from spack.util.naming import (
|
||||
mod_to_class,
|
||||
simplify_name,
|
||||
valid_fully_qualified_module_name,
|
||||
)
|
||||
from spack.util.naming import mod_to_class, simplify_name, valid_fully_qualified_module_name
|
||||
|
||||
description = "create a new package file"
|
||||
section = "packaging"
|
||||
|
|
|
@ -96,8 +96,5 @@ def report(args):
|
|||
|
||||
|
||||
def debug(parser, args):
|
||||
action = {
|
||||
"create-db-tarball": create_db_tarball,
|
||||
"report": report,
|
||||
}
|
||||
action = {"create-db-tarball": create_db_tarball, "report": report}
|
||||
action[args.debug_command](args)
|
||||
|
|
|
@ -33,12 +33,7 @@
|
|||
level = "long"
|
||||
|
||||
# Arguments for display_specs when we find ambiguity
|
||||
display_args = {
|
||||
"long": True,
|
||||
"show_flags": True,
|
||||
"variants": True,
|
||||
"indent": 4,
|
||||
}
|
||||
display_args = {"long": True, "show_flags": True, "variants": True, "indent": 4}
|
||||
|
||||
|
||||
def setup_parser(sp):
|
||||
|
|
|
@ -80,22 +80,12 @@ def compare_specs(a, b, to_string=False, color=None):
|
|||
# specs and to descend into dependency hashes so we include all facts.
|
||||
a_facts = set(
|
||||
shift(func)
|
||||
for func in setup.spec_clauses(
|
||||
a,
|
||||
body=True,
|
||||
expand_hashes=True,
|
||||
concrete_build_deps=True,
|
||||
)
|
||||
for func in setup.spec_clauses(a, body=True, expand_hashes=True, concrete_build_deps=True)
|
||||
if func.name == "attr"
|
||||
)
|
||||
b_facts = set(
|
||||
shift(func)
|
||||
for func in setup.spec_clauses(
|
||||
b,
|
||||
body=True,
|
||||
expand_hashes=True,
|
||||
concrete_build_deps=True,
|
||||
)
|
||||
for func in setup.spec_clauses(b, body=True, expand_hashes=True, concrete_build_deps=True)
|
||||
if func.name == "attr"
|
||||
)
|
||||
|
||||
|
|
|
@ -148,8 +148,7 @@ def env_activate(args):
|
|||
|
||||
if not args.shell:
|
||||
spack.cmd.common.shell_init_instructions(
|
||||
"spack env activate",
|
||||
" eval `spack env activate {sh_arg} [...]`",
|
||||
"spack env activate", " eval `spack env activate {sh_arg} [...]`"
|
||||
)
|
||||
return 1
|
||||
|
||||
|
@ -238,8 +237,7 @@ def env_deactivate_setup_parser(subparser):
|
|||
def env_deactivate(args):
|
||||
if not args.shell:
|
||||
spack.cmd.common.shell_init_instructions(
|
||||
"spack env deactivate",
|
||||
" eval `spack env deactivate {sh_arg}`",
|
||||
"spack env deactivate", " eval `spack env deactivate {sh_arg}`"
|
||||
)
|
||||
return 1
|
||||
|
||||
|
|
|
@ -38,11 +38,7 @@ def setup_parser(subparser):
|
|||
default=False,
|
||||
help="packages with detected externals won't be built with Spack",
|
||||
)
|
||||
find_parser.add_argument(
|
||||
"--exclude",
|
||||
action="append",
|
||||
help="packages to exclude from search",
|
||||
)
|
||||
find_parser.add_argument("--exclude", action="append", help="packages to exclude from search")
|
||||
find_parser.add_argument(
|
||||
"-p",
|
||||
"--path",
|
||||
|
@ -187,7 +183,6 @@ def external_read_cray_manifest(args):
|
|||
def _collect_and_consume_cray_manifest_files(
|
||||
manifest_file=None, manifest_directory=None, dry_run=False, fail_on_error=False
|
||||
):
|
||||
|
||||
manifest_files = []
|
||||
if manifest_file:
|
||||
manifest_files.append(manifest_file)
|
||||
|
|
|
@ -25,10 +25,7 @@ def setup_parser(subparser):
|
|||
help="fetch only missing (not yet installed) dependencies",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-D",
|
||||
"--dependencies",
|
||||
action="store_true",
|
||||
help="also fetch all dependencies",
|
||||
"-D", "--dependencies", action="store_true", help="also fetch all dependencies"
|
||||
)
|
||||
arguments.add_common_arguments(subparser, ["specs"])
|
||||
subparser.epilog = (
|
||||
|
|
|
@ -9,13 +9,7 @@
|
|||
import spack.config
|
||||
import spack.environment as ev
|
||||
import spack.store
|
||||
from spack.graph import (
|
||||
DAGWithDependencyTypes,
|
||||
SimpleDAG,
|
||||
graph_ascii,
|
||||
graph_dot,
|
||||
static_graph_dot,
|
||||
)
|
||||
from spack.graph import DAGWithDependencyTypes, SimpleDAG, graph_ascii, graph_dot, static_graph_dot
|
||||
|
||||
description = "generate graphs of package dependency relationships"
|
||||
section = "basic"
|
||||
|
|
|
@ -87,9 +87,7 @@
|
|||
"""
|
||||
|
||||
|
||||
guides = {
|
||||
"spec": spec_guide,
|
||||
}
|
||||
guides = {"spec": spec_guide}
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
|
|
|
@ -496,9 +496,7 @@ def reporter_factory(specs):
|
|||
return None
|
||||
|
||||
context_manager = spack.report.build_context_manager(
|
||||
reporter=args.reporter(),
|
||||
filename=report_filename(args, specs=specs),
|
||||
specs=specs,
|
||||
reporter=args.reporter(), filename=report_filename(args, specs=specs), specs=specs
|
||||
)
|
||||
return context_manager
|
||||
|
||||
|
|
|
@ -58,10 +58,7 @@
|
|||
|
||||
#: licensed files that can have LGPL language in them
|
||||
#: so far, just this command -- so it can find LGPL things elsewhere
|
||||
lgpl_exceptions = [
|
||||
r"lib/spack/spack/cmd/license.py",
|
||||
r"lib/spack/spack/test/cmd/license.py",
|
||||
]
|
||||
lgpl_exceptions = [r"lib/spack/spack/cmd/license.py", r"lib/spack/spack/test/cmd/license.py"]
|
||||
|
||||
|
||||
def _all_spack_files(root=spack.paths.prefix):
|
||||
|
@ -129,7 +126,6 @@ def error_messages(self):
|
|||
|
||||
|
||||
def _check_license(lines, path):
|
||||
|
||||
found = []
|
||||
|
||||
for line in lines:
|
||||
|
|
|
@ -98,8 +98,7 @@ def load(parser, args):
|
|||
if not args.shell:
|
||||
specs_str = " ".join(args.constraint) or "SPECS"
|
||||
spack.cmd.common.shell_init_instructions(
|
||||
"spack load",
|
||||
" eval `spack load {sh_arg} %s`" % specs_str,
|
||||
"spack load", " eval `spack load {sh_arg} %s`" % specs_str
|
||||
)
|
||||
return 1
|
||||
|
||||
|
|
|
@ -27,12 +27,7 @@
|
|||
"""
|
||||
|
||||
# Arguments for display_specs when we find ambiguity
|
||||
display_args = {
|
||||
"long": True,
|
||||
"show_flags": False,
|
||||
"variants": False,
|
||||
"indent": 4,
|
||||
}
|
||||
display_args = {"long": True, "show_flags": False, "variants": False, "indent": 4}
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
|
|
|
@ -445,9 +445,7 @@ def mirror_create(args):
|
|||
|
||||
mirror_specs = concrete_specs_from_user(args)
|
||||
create_mirror_for_individual_specs(
|
||||
mirror_specs,
|
||||
path=path,
|
||||
skip_unstable_versions=args.skip_unstable_versions,
|
||||
mirror_specs, path=path, skip_unstable_versions=args.skip_unstable_versions
|
||||
)
|
||||
|
||||
|
||||
|
@ -467,9 +465,7 @@ def create_mirror_for_all_specs(path, skip_unstable_versions, selection_fn):
|
|||
def create_mirror_for_all_specs_inside_environment(path, skip_unstable_versions, selection_fn):
|
||||
mirror_specs = concrete_specs_from_environment(selection_fn=selection_fn)
|
||||
create_mirror_for_individual_specs(
|
||||
mirror_specs,
|
||||
path=path,
|
||||
skip_unstable_versions=skip_unstable_versions,
|
||||
mirror_specs, path=path, skip_unstable_versions=skip_unstable_versions
|
||||
)
|
||||
|
||||
|
||||
|
|
|
@ -180,10 +180,7 @@ def loads(module_type, specs, args, out=None):
|
|||
for spec in specs
|
||||
)
|
||||
|
||||
module_commands = {
|
||||
"tcl": "module load ",
|
||||
"lmod": "module load ",
|
||||
}
|
||||
module_commands = {"tcl": "module load ", "lmod": "module load "}
|
||||
|
||||
d = {"command": "" if not args.shell else module_commands[module_type], "prefix": args.prefix}
|
||||
|
||||
|
@ -368,18 +365,14 @@ def refresh(module_type, specs, args):
|
|||
|
||||
|
||||
def modules_cmd(parser, args, module_type, callbacks=callbacks):
|
||||
|
||||
# Qualifiers to be used when querying the db for specs
|
||||
constraint_qualifiers = {
|
||||
"refresh": {"installed": True, "known": True},
|
||||
}
|
||||
constraint_qualifiers = {"refresh": {"installed": True, "known": True}}
|
||||
query_args = constraint_qualifiers.get(args.subparser_name, {})
|
||||
|
||||
# Get the specs that match the query from the DB
|
||||
specs = args.specs(**query_args)
|
||||
|
||||
try:
|
||||
|
||||
callbacks[args.subparser_name](module_type, specs, args)
|
||||
|
||||
except MultipleSpecsMatch:
|
||||
|
|
|
@ -182,11 +182,7 @@ def solve(parser, args):
|
|||
# set up solver parameters
|
||||
# Note: reuse and other concretizer prefs are passed as configuration
|
||||
result = solver.solve(
|
||||
specs,
|
||||
out=output,
|
||||
timers=args.timers,
|
||||
stats=args.stats,
|
||||
setup_only=setup_only,
|
||||
specs, out=output, timers=args.timers, stats=args.stats, setup_only=setup_only
|
||||
)
|
||||
if not setup_only:
|
||||
_process_result(result, show, required_format, kwargs)
|
||||
|
|
|
@ -110,7 +110,7 @@ def spec(parser, args):
|
|||
else:
|
||||
tty.die("spack spec requires at least one spec or an active environment")
|
||||
|
||||
for (input, output) in specs:
|
||||
for input, output in specs:
|
||||
# With -y, just print YAML to output.
|
||||
if args.format:
|
||||
if args.format == "yaml":
|
||||
|
|
|
@ -30,20 +30,13 @@ def grouper(iterable, n, fillvalue=None):
|
|||
|
||||
|
||||
#: List of directories to exclude from checks -- relative to spack root
|
||||
exclude_directories = [
|
||||
os.path.relpath(spack.paths.external_path, spack.paths.prefix),
|
||||
]
|
||||
exclude_directories = [os.path.relpath(spack.paths.external_path, spack.paths.prefix)]
|
||||
|
||||
#: Order in which tools should be run. flake8 is last so that it can
|
||||
#: double-check the results of other tools (if, e.g., --fix was provided)
|
||||
#: The list maps an executable name to a method to ensure the tool is
|
||||
#: bootstrapped or present in the environment.
|
||||
tool_names = [
|
||||
"isort",
|
||||
"black",
|
||||
"flake8",
|
||||
"mypy",
|
||||
]
|
||||
tool_names = ["isort", "black", "flake8", "mypy"]
|
||||
|
||||
#: tools we run in spack style
|
||||
tools = {}
|
||||
|
@ -52,7 +45,7 @@ def grouper(iterable, n, fillvalue=None):
|
|||
mypy_ignores = [
|
||||
# same as `disable_error_code = "annotation-unchecked"` in pyproject.toml, which
|
||||
# doesn't exist in mypy 0.971 for Python 3.6
|
||||
"[annotation-unchecked]",
|
||||
"[annotation-unchecked]"
|
||||
]
|
||||
|
||||
|
||||
|
@ -150,10 +143,7 @@ def setup_parser(subparser):
|
|||
help="branch to compare against to determine changed files (default: develop)",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-a",
|
||||
"--all",
|
||||
action="store_true",
|
||||
help="check all files, not just changed files",
|
||||
"-a", "--all", action="store_true", help="check all files, not just changed files"
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-r",
|
||||
|
@ -178,10 +168,7 @@ def setup_parser(subparser):
|
|||
help="format automatically if possible (e.g., with isort, black)",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--root",
|
||||
action="store",
|
||||
default=None,
|
||||
help="style check a different spack instance",
|
||||
"--root", action="store", default=None, help="style check a different spack instance"
|
||||
)
|
||||
|
||||
tool_group = subparser.add_mutually_exclusive_group()
|
||||
|
@ -211,6 +198,7 @@ def rewrite_and_print_output(
|
|||
output, args, re_obj=re.compile(r"^(.+):([0-9]+):"), replacement=r"{0}:{1}:"
|
||||
):
|
||||
"""rewrite ouput with <file>:<line>: format to respect path args"""
|
||||
|
||||
# print results relative to current working directory
|
||||
def translate(match):
|
||||
return replacement.format(cwd_relative(match.group(1), args), *list(match.groups()[1:]))
|
||||
|
@ -281,24 +269,10 @@ def run_mypy(mypy_cmd, file_list, args):
|
|||
os.path.join(spack.paths.prefix, "pyproject.toml"),
|
||||
"--show-error-codes",
|
||||
]
|
||||
mypy_arg_sets = [
|
||||
common_mypy_args
|
||||
+ [
|
||||
"--package",
|
||||
"spack",
|
||||
"--package",
|
||||
"llnl",
|
||||
]
|
||||
]
|
||||
mypy_arg_sets = [common_mypy_args + ["--package", "spack", "--package", "llnl"]]
|
||||
if "SPACK_MYPY_CHECK_PACKAGES" in os.environ:
|
||||
mypy_arg_sets.append(
|
||||
common_mypy_args
|
||||
+ [
|
||||
"--package",
|
||||
"packages",
|
||||
"--disable-error-code",
|
||||
"no-redef",
|
||||
]
|
||||
common_mypy_args + ["--package", "packages", "--disable-error-code", "no-redef"]
|
||||
)
|
||||
|
||||
returncode = 0
|
||||
|
|
|
@ -33,9 +33,7 @@ def setup_parser(subparser):
|
|||
|
||||
# Run
|
||||
run_parser = sp.add_parser(
|
||||
"run",
|
||||
description=test_run.__doc__,
|
||||
help=spack.cmd.first_line(test_run.__doc__),
|
||||
"run", description=test_run.__doc__, help=spack.cmd.first_line(test_run.__doc__)
|
||||
)
|
||||
|
||||
alias_help_msg = "Provide an alias for this test-suite"
|
||||
|
@ -80,9 +78,7 @@ def setup_parser(subparser):
|
|||
|
||||
# List
|
||||
list_parser = sp.add_parser(
|
||||
"list",
|
||||
description=test_list.__doc__,
|
||||
help=spack.cmd.first_line(test_list.__doc__),
|
||||
"list", description=test_list.__doc__, help=spack.cmd.first_line(test_list.__doc__)
|
||||
)
|
||||
list_parser.add_argument(
|
||||
"-a",
|
||||
|
@ -96,9 +92,7 @@ def setup_parser(subparser):
|
|||
|
||||
# Find
|
||||
find_parser = sp.add_parser(
|
||||
"find",
|
||||
description=test_find.__doc__,
|
||||
help=spack.cmd.first_line(test_find.__doc__),
|
||||
"find", description=test_find.__doc__, help=spack.cmd.first_line(test_find.__doc__)
|
||||
)
|
||||
find_parser.add_argument(
|
||||
"filter",
|
||||
|
@ -108,9 +102,7 @@ def setup_parser(subparser):
|
|||
|
||||
# Status
|
||||
status_parser = sp.add_parser(
|
||||
"status",
|
||||
description=test_status.__doc__,
|
||||
help=spack.cmd.first_line(test_status.__doc__),
|
||||
"status", description=test_status.__doc__, help=spack.cmd.first_line(test_status.__doc__)
|
||||
)
|
||||
status_parser.add_argument(
|
||||
"names", nargs=argparse.REMAINDER, help="Test suites for which to print status"
|
||||
|
@ -147,9 +139,7 @@ def setup_parser(subparser):
|
|||
|
||||
# Remove
|
||||
remove_parser = sp.add_parser(
|
||||
"remove",
|
||||
description=test_remove.__doc__,
|
||||
help=spack.cmd.first_line(test_remove.__doc__),
|
||||
"remove", description=test_remove.__doc__, help=spack.cmd.first_line(test_remove.__doc__)
|
||||
)
|
||||
arguments.add_common_arguments(remove_parser, ["yes_to_all"])
|
||||
remove_parser.add_argument(
|
||||
|
@ -189,11 +179,7 @@ def test_run(args):
|
|||
specs = spack.cmd.parse_specs(args.specs) if args.specs else [None]
|
||||
specs_to_test = []
|
||||
for spec in specs:
|
||||
matching = spack.store.db.query_local(
|
||||
spec,
|
||||
hashes=hashes,
|
||||
explicit=explicit,
|
||||
)
|
||||
matching = spack.store.db.query_local(spec, hashes=hashes, explicit=explicit)
|
||||
if spec and not matching:
|
||||
tty.warn("No {0}installed packages match spec {1}".format(explicit_str, spec))
|
||||
"""
|
||||
|
|
|
@ -31,12 +31,7 @@
|
|||
"""
|
||||
|
||||
# Arguments for display_specs when we find ambiguity
|
||||
display_args = {
|
||||
"long": True,
|
||||
"show_flags": False,
|
||||
"variants": False,
|
||||
"indent": 4,
|
||||
}
|
||||
display_args = {"long": True, "show_flags": False, "variants": False, "indent": 4}
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
|
@ -236,12 +231,7 @@ def do_uninstall(specs, force=False):
|
|||
hashes_to_remove = set(s.dag_hash() for s in specs)
|
||||
|
||||
for s in traverse.traverse_nodes(
|
||||
specs,
|
||||
order="topo",
|
||||
direction="children",
|
||||
root=True,
|
||||
cover="nodes",
|
||||
deptype="all",
|
||||
specs, order="topo", direction="children", root=True, cover="nodes", deptype="all"
|
||||
):
|
||||
if s.dag_hash() in hashes_to_remove:
|
||||
spack.package_base.PackageBase.uninstall_by_spec(s, force=force)
|
||||
|
|
|
@ -77,8 +77,7 @@ def unload(parser, args):
|
|||
specs_str = " ".join(args.specs) or "SPECS"
|
||||
|
||||
spack.cmd.common.shell_init_instructions(
|
||||
"spack unload",
|
||||
" eval `spack unload {sh_arg}` %s" % specs_str,
|
||||
"spack unload", " eval `spack unload {sh_arg}` %s" % specs_str
|
||||
)
|
||||
return 1
|
||||
|
||||
|
|
|
@ -106,12 +106,7 @@ def setup_parser(subparser):
|
|||
|
||||
|
||||
def url(parser, args):
|
||||
action = {
|
||||
"parse": url_parse,
|
||||
"list": url_list,
|
||||
"summary": url_summary,
|
||||
"stats": url_stats,
|
||||
}
|
||||
action = {"parse": url_parse, "list": url_list, "summary": url_summary, "stats": url_stats}
|
||||
|
||||
action[args.subcommand](args)
|
||||
|
||||
|
|
|
@ -619,11 +619,9 @@ def _default(search_paths):
|
|||
command_arguments = []
|
||||
files_to_be_tested = fs.files_in(*search_paths)
|
||||
for compiler_name in spack.compilers.supported_compilers():
|
||||
|
||||
compiler_cls = class_for_compiler_name(compiler_name)
|
||||
|
||||
for language in ("cc", "cxx", "f77", "fc"):
|
||||
|
||||
# Select only the files matching a regexp
|
||||
for (file, full_path), regexp in itertools.product(
|
||||
files_to_be_tested, compiler_cls.search_regexps(language)
|
||||
|
|
|
@ -154,10 +154,7 @@ def setup_custom_environment(self, pkg, env):
|
|||
),
|
||||
)
|
||||
|
||||
real_dirs = [
|
||||
"Toolchains/XcodeDefault.xctoolchain/usr/bin",
|
||||
"usr/bin",
|
||||
]
|
||||
real_dirs = ["Toolchains/XcodeDefault.xctoolchain/usr/bin", "usr/bin"]
|
||||
|
||||
bins = ["c++", "c89", "c99", "cc", "clang", "clang++", "cpp"]
|
||||
|
||||
|
|
|
@ -793,7 +793,7 @@ def _config():
|
|||
configuration_paths = [
|
||||
# Default configuration scope is the lowest-level scope. These are
|
||||
# versioned with Spack and can be overridden by systems, sites or users
|
||||
configuration_defaults_path,
|
||||
configuration_defaults_path
|
||||
]
|
||||
|
||||
disable_local_config = "SPACK_DISABLE_LOCAL_CONFIG" in os.environ
|
||||
|
@ -801,15 +801,11 @@ def _config():
|
|||
# System configuration is per machine.
|
||||
# This is disabled if user asks for no local configuration.
|
||||
if not disable_local_config:
|
||||
configuration_paths.append(
|
||||
("system", spack.paths.system_config_path),
|
||||
)
|
||||
configuration_paths.append(("system", spack.paths.system_config_path))
|
||||
|
||||
# Site configuration is per spack instance, for sites or projects
|
||||
# No site-level configs should be checked into spack by default.
|
||||
configuration_paths.append(
|
||||
("site", os.path.join(spack.paths.etc_path)),
|
||||
)
|
||||
configuration_paths.append(("site", os.path.join(spack.paths.etc_path)))
|
||||
|
||||
# User configuration can override both spack defaults and site config
|
||||
# This is disabled if user asks for no local configuration.
|
||||
|
|
|
@ -18,10 +18,7 @@
|
|||
#: packages here.
|
||||
default_path = "/opt/cray/pe/cpe-descriptive-manifest/"
|
||||
|
||||
compiler_name_translation = {
|
||||
"nvidia": "nvhpc",
|
||||
"rocm": "rocmcc",
|
||||
}
|
||||
compiler_name_translation = {"nvidia": "nvhpc", "rocm": "rocmcc"}
|
||||
|
||||
|
||||
def translated_compiler_name(manifest_compiler_name):
|
||||
|
|
|
@ -46,10 +46,7 @@
|
|||
import spack.store
|
||||
import spack.util.lock as lk
|
||||
import spack.util.spack_json as sjson
|
||||
from spack.directory_layout import (
|
||||
DirectoryLayoutError,
|
||||
InconsistentInstallDirectoryError,
|
||||
)
|
||||
from spack.directory_layout import DirectoryLayoutError, InconsistentInstallDirectoryError
|
||||
from spack.error import SpackError
|
||||
from spack.util.crypto import bit_length
|
||||
from spack.version import Version
|
||||
|
@ -108,10 +105,7 @@
|
|||
|
||||
|
||||
def reader(version):
|
||||
reader_cls = {
|
||||
Version("5"): spack.spec.SpecfileV1,
|
||||
Version("6"): spack.spec.SpecfileV3,
|
||||
}
|
||||
reader_cls = {Version("5"): spack.spec.SpecfileV1, Version("6"): spack.spec.SpecfileV3}
|
||||
return reader_cls[version]
|
||||
|
||||
|
||||
|
|
|
@ -377,7 +377,8 @@ def compute_windows_user_path_for_package(pkg):
|
|||
install location, return list of potential locations based
|
||||
on common heuristics. For more info on Windows user specific
|
||||
installs see:
|
||||
https://learn.microsoft.com/en-us/dotnet/api/system.environment.specialfolder?view=netframework-4.8"""
|
||||
https://learn.microsoft.com/en-us/dotnet/api/system.environment.specialfolder?view=netframework-4.8
|
||||
"""
|
||||
if not is_windows:
|
||||
return []
|
||||
|
||||
|
|
|
@ -45,11 +45,7 @@
|
|||
import spack.util.spack_json as sjson
|
||||
import spack.util.spack_yaml as syaml
|
||||
import spack.util.url
|
||||
from spack.filesystem_view import (
|
||||
SimpleFilesystemView,
|
||||
inverse_view_func_parser,
|
||||
view_func_parser,
|
||||
)
|
||||
from spack.filesystem_view import SimpleFilesystemView, inverse_view_func_parser, view_func_parser
|
||||
from spack.installer import PackageInstaller
|
||||
from spack.spec import Spec
|
||||
from spack.spec_list import InvalidSpecConstraintError, SpecList
|
||||
|
@ -304,12 +300,7 @@ def _write_yaml(data, str_or_file):
|
|||
def _eval_conditional(string):
|
||||
"""Evaluate conditional definitions using restricted variable scope."""
|
||||
valid_variables = spack.util.environment.get_host_environment()
|
||||
valid_variables.update(
|
||||
{
|
||||
"re": re,
|
||||
"env": os.environ,
|
||||
}
|
||||
)
|
||||
valid_variables.update({"re": re, "env": os.environ})
|
||||
return eval(string, valid_variables)
|
||||
|
||||
|
||||
|
@ -973,9 +964,7 @@ def included_config_scopes(self):
|
|||
config_path = os.path.join(config_path, basename)
|
||||
else:
|
||||
staged_path = spack.config.fetch_remote_configs(
|
||||
config_path,
|
||||
self.config_stage_dir,
|
||||
skip_existing=True,
|
||||
config_path, self.config_stage_dir, skip_existing=True
|
||||
)
|
||||
if not staged_path:
|
||||
raise SpackEnvironmentError(
|
||||
|
|
|
@ -35,13 +35,7 @@
|
|||
import llnl.util
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import (
|
||||
get_single_file,
|
||||
mkdirp,
|
||||
temp_cwd,
|
||||
temp_rename,
|
||||
working_dir,
|
||||
)
|
||||
from llnl.util.filesystem import get_single_file, mkdirp, temp_cwd, temp_rename, working_dir
|
||||
from llnl.util.symlink import symlink
|
||||
|
||||
import spack.config
|
||||
|
@ -1555,11 +1549,7 @@ def for_package_version(pkg, version):
|
|||
# performance hit for branches on older versions of git.
|
||||
# Branches cannot be cached, so we tell the fetcher not to cache tags/branches
|
||||
ref_type = "commit" if version.is_commit else "tag"
|
||||
kwargs = {
|
||||
"git": pkg.git,
|
||||
ref_type: version.ref,
|
||||
"no_cache": True,
|
||||
}
|
||||
kwargs = {"git": pkg.git, ref_type: version.ref, "no_cache": True}
|
||||
|
||||
kwargs["submodules"] = getattr(pkg, "submodules", False)
|
||||
|
||||
|
|
|
@ -37,7 +37,6 @@ class SharedLibrariesVisitor(BaseDirectoryVisitor):
|
|||
exception of an exclude list."""
|
||||
|
||||
def __init__(self, exclude_list):
|
||||
|
||||
# List of file and directory names to be excluded
|
||||
self.exclude_list = frozenset(exclude_list)
|
||||
|
||||
|
|
|
@ -212,8 +212,7 @@ def install_sbang():
|
|||
|
||||
# copy over the fresh copy of `sbang`
|
||||
sbang_tmp_path = os.path.join(
|
||||
os.path.dirname(sbang_path),
|
||||
".%s.tmp" % os.path.basename(sbang_path),
|
||||
os.path.dirname(sbang_path), ".%s.tmp" % os.path.basename(sbang_path)
|
||||
)
|
||||
shutil.copy(spack.paths.sbang_script, sbang_tmp_path)
|
||||
|
||||
|
|
|
@ -423,11 +423,7 @@ def _try_install_from_binary_cache(pkg, explicit, unsigned=False, timer=timer.NU
|
|||
matches = binary_distribution.get_mirrors_for_spec(pkg.spec, index_only=True)
|
||||
|
||||
return _process_binary_cache_tarball(
|
||||
pkg,
|
||||
explicit,
|
||||
unsigned,
|
||||
mirrors_for_spec=matches,
|
||||
timer=timer,
|
||||
pkg, explicit, unsigned, mirrors_for_spec=matches, timer=timer
|
||||
)
|
||||
|
||||
|
||||
|
@ -789,7 +785,7 @@ def _add_bootstrap_compilers(self, compiler, architecture, pkgs, request, all_de
|
|||
associated dependents
|
||||
"""
|
||||
packages = _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs)
|
||||
for (comp_pkg, is_compiler) in packages:
|
||||
for comp_pkg, is_compiler in packages:
|
||||
pkgid = package_id(comp_pkg)
|
||||
if pkgid not in self.build_tasks:
|
||||
self._add_init_task(comp_pkg, request, is_compiler, all_deps)
|
||||
|
@ -813,8 +809,7 @@ def _modify_existing_task(self, pkgid, attr, value):
|
|||
key, task = tup
|
||||
if task.pkg_id == pkgid:
|
||||
tty.debug(
|
||||
"Modifying task for {0} to treat it as a compiler".format(pkgid),
|
||||
level=2,
|
||||
"Modifying task for {0} to treat it as a compiler".format(pkgid), level=2
|
||||
)
|
||||
setattr(task, attr, value)
|
||||
self.build_pq[i] = (key, task)
|
||||
|
@ -1212,7 +1207,6 @@ def _add_tasks(self, request, all_deps):
|
|||
|
||||
install_package = request.install_args.get("install_package")
|
||||
if install_package and request.pkg_id not in self.build_tasks:
|
||||
|
||||
# Be sure to clear any previous failure
|
||||
spack.store.db.clear_failure(request.spec, force=True)
|
||||
|
||||
|
@ -1948,11 +1942,7 @@ def run(self):
|
|||
# Run post install hooks before build stage is removed.
|
||||
spack.hooks.post_install(self.pkg.spec)
|
||||
|
||||
_print_timer(
|
||||
pre=self.pre,
|
||||
pkg_id=self.pkg_id,
|
||||
timer=self.timer,
|
||||
)
|
||||
_print_timer(pre=self.pre, pkg_id=self.pkg_id, timer=self.timer)
|
||||
_print_installed_pkg(self.pkg.prefix)
|
||||
|
||||
# Send final status that install is successful
|
||||
|
|
|
@ -249,10 +249,7 @@ def root_path(name, module_set_name):
|
|||
Returns:
|
||||
root folder for module file installation
|
||||
"""
|
||||
defaults = {
|
||||
"lmod": "$spack/share/spack/lmod",
|
||||
"tcl": "$spack/share/spack/modules",
|
||||
}
|
||||
defaults = {"lmod": "$spack/share/spack/lmod", "tcl": "$spack/share/spack/modules"}
|
||||
# Root folders where the various module files should be written
|
||||
roots = spack.config.get("modules:%s:roots" % module_set_name, {})
|
||||
|
||||
|
|
|
@ -65,9 +65,7 @@
|
|||
from spack.version import GitVersion, Version, VersionBase
|
||||
|
||||
FLAG_HANDLER_RETURN_TYPE = Tuple[
|
||||
Optional[Iterable[str]],
|
||||
Optional[Iterable[str]],
|
||||
Optional[Iterable[str]],
|
||||
Optional[Iterable[str]], Optional[Iterable[str]], Optional[Iterable[str]]
|
||||
]
|
||||
FLAG_HANDLER_TYPE = Callable[[str, Iterable[str]], FLAG_HANDLER_RETURN_TYPE]
|
||||
|
||||
|
@ -1705,11 +1703,7 @@ def _has_make_target(self, target):
|
|||
"don't know how to make {0}. Stop",
|
||||
]
|
||||
|
||||
kwargs = {
|
||||
"fail_on_error": False,
|
||||
"output": os.devnull,
|
||||
"error": str,
|
||||
}
|
||||
kwargs = {"fail_on_error": False, "output": os.devnull, "error": str}
|
||||
|
||||
stderr = make("-n", target, **kwargs)
|
||||
|
||||
|
@ -2220,10 +2214,7 @@ def uninstall_by_spec(spec, force=False, deprecator=None):
|
|||
|
||||
if not force:
|
||||
dependents = spack.store.db.installed_relatives(
|
||||
spec,
|
||||
direction="parents",
|
||||
transitive=True,
|
||||
deptype=("link", "run"),
|
||||
spec, direction="parents", transitive=True, deptype=("link", "run")
|
||||
)
|
||||
if dependents:
|
||||
raise PackageStillNeededError(spec, dependents)
|
||||
|
@ -2236,7 +2227,6 @@ def uninstall_by_spec(spec, force=False, deprecator=None):
|
|||
|
||||
# Pre-uninstall hook runs first.
|
||||
with spack.store.db.prefix_write_lock(spec):
|
||||
|
||||
if pkg is not None:
|
||||
try:
|
||||
spack.hooks.pre_uninstall(spec)
|
||||
|
@ -2399,11 +2389,7 @@ def fetch_remote_versions(self, concurrency=128):
|
|||
|
||||
try:
|
||||
return spack.util.web.find_versions_of_archive(
|
||||
self.all_urls,
|
||||
self.list_url,
|
||||
self.list_depth,
|
||||
concurrency,
|
||||
reference_package=self,
|
||||
self.all_urls, self.list_url, self.list_depth, concurrency, reference_package=self
|
||||
)
|
||||
except spack.util.web.NoNetworkConnectionError as e:
|
||||
tty.die("Package.fetch_versions couldn't connect to:", e.url, e.message)
|
||||
|
|
|
@ -409,13 +409,7 @@ def needs_text_relocation(m_type, m_subtype):
|
|||
|
||||
|
||||
def relocate_macho_binaries(
|
||||
path_names,
|
||||
old_layout_root,
|
||||
new_layout_root,
|
||||
prefix_to_prefix,
|
||||
rel,
|
||||
old_prefix,
|
||||
new_prefix,
|
||||
path_names, old_layout_root, new_layout_root, prefix_to_prefix, rel, old_prefix, new_prefix
|
||||
):
|
||||
"""
|
||||
Use macholib python package to get the rpaths, depedent libraries
|
||||
|
@ -829,7 +823,7 @@ def fixup_macos_rpath(root, filename):
|
|||
|
||||
# Check for nonexistent rpaths (often added by spack linker overzealousness
|
||||
# with both lib/ and lib64/) and duplicate rpaths
|
||||
for (rpath, count) in rpaths.items():
|
||||
for rpath, count in rpaths.items():
|
||||
if rpath.startswith("@loader_path") or rpath.startswith("@executable_path"):
|
||||
# Allowable relative paths
|
||||
pass
|
||||
|
|
|
@ -1066,7 +1066,6 @@ def dump_provenance(self, spec, path):
|
|||
# Install patch files needed by the package.
|
||||
fs.mkdirp(path)
|
||||
for patch in itertools.chain.from_iterable(spec.package.patches.values()):
|
||||
|
||||
if patch.path:
|
||||
if os.path.exists(patch.path):
|
||||
fs.install(patch.path, path)
|
||||
|
|
|
@@ -113,7 +113,6 @@ def wrapper(instance, *args, **kwargs):

            start_time = time.time()
            try:

                value = wrapped_fn(instance, *args, **kwargs)
                package["stdout"] = self.fetch_log(pkg)
                package["installed_from_binary_cache"] = pkg.installed_from_binary_cache

@@ -234,9 +233,7 @@ def extract_package_from_signature(self, instance, *args, **kwargs):

@contextlib.contextmanager
def build_context_manager(
    reporter: spack.reporters.Reporter,
    filename: str,
    specs: List[spack.spec.Spec],
    reporter: spack.reporters.Reporter, filename: str, specs: List[spack.spec.Spec]
):
    """Decorate a package to generate a report after the installation function is executed.

@@ -47,8 +47,7 @@

CDashConfiguration = collections.namedtuple(
    "CDashConfiguration",
    ["upload_url", "packages", "build", "site", "buildstamp", "track"],
    "CDashConfiguration", ["upload_url", "packages", "build", "site", "buildstamp", "track"]
)

@@ -336,12 +335,7 @@ def test_skipped_report(self, directory_name, spec, reason=None):
        if reason:
            output += "\n{0}".format(reason)

        package = {
            "name": spec.name,
            "id": spec.dag_hash(),
            "result": "skipped",
            "stdout": output,
        }
        package = {"name": spec.name, "id": spec.dag_hash(), "result": "skipped", "stdout": output}
        self.test_report_for_package(directory_name, package, duration=0.0)

    def concretization_report(self, directory_name, msg):

@@ -10,11 +10,7 @@
import llnl.util.tty as tty

# The keys here represent the only recognized (ctest/cdash) status values
completed = {
    "failed": "Completed",
    "passed": "Completed",
    "notrun": "No tests to run",
}
completed = {"failed": "Completed", "passed": "Completed", "notrun": "No tests to run"}

log_regexp = re.compile(r"^==> \[([0-9:.\-]*)(?:, [0-9]*)?\] (.*)")
returns_regexp = re.compile(r"\[([0-9 ,]*)\]")

@@ -32,10 +32,7 @@
        },
        "binary_cache_checksum": {
            "type": "object",
            "properties": {
                "hash_algorithm": {"type": "string"},
                "hash": {"type": "string"},
            },
            "properties": {"hash_algorithm": {"type": "string"}, "hash": {"type": "string"}},
        },
        "buildcache_layout_version": {"type": "number"},
    },

@@ -22,7 +22,7 @@
            r"project": {"type": "string"},
            r"site": {"type": "string"},
        },
    },
}
}

@@ -21,7 +21,7 @@
        "flags": {
            "type": "object",
            "properties": {
                "keep_werror": {"type": "string", "enum": ["all", "specific", "none"]},
                "keep_werror": {"type": "string", "enum": ["all", "specific", "none"]}
            },
        },
        "shared_linking": {

@@ -54,12 +54,12 @@
                ),
            },
            {"type": "string"},  # deprecated
            ],
            ]
        },
        "install_hash_length": {"type": "integer", "minimum": 1},
        "install_path_scheme": {"type": "string"},  # deprecated
        "build_stage": {
            "oneOf": [{"type": "string"}, {"type": "array", "items": {"type": "string"}}],
            "oneOf": [{"type": "string"}, {"type": "array", "items": {"type": "string"}}]
        },
        "test_stage": {"type": "string"},
        "extensions": {"type": "array", "items": {"type": "string"}},

@@ -82,7 +82,7 @@
        "concretizer": {"type": "string", "enum": ["original", "clingo"]},
        "db_lock_timeout": {"type": "integer", "minimum": 1},
        "package_lock_timeout": {
            "anyOf": [{"type": "integer", "minimum": 1}, {"type": "null"}],
            "anyOf": [{"type": "integer", "minimum": 1}, {"type": "null"}]
        },
        "allow_sgid": {"type": "boolean"},
        "binary_index_root": {"type": "string"},

@@ -96,7 +96,7 @@
            "modules:[module set]:roots and is ignored",
            "error": False,
        },
    },
}
}

@@ -62,9 +62,7 @@
            "additionalProperties": False,
        },
        # Add labels to the image
        "labels": {
            "type": "object",
        },
        "labels": {"type": "object"},
        # Add a custom extra section at the bottom of a stage
        "extra_instructions": {
            "type": "object",

@@ -83,11 +81,7 @@
                "help": {"type": "string"},
            },
        },
        "docker": {
            "type": "object",
            "additionalProperties": False,
            "default": {},
        },
        "docker": {"type": "object", "additionalProperties": False, "default": {}},
    },
}

@@ -114,9 +114,7 @@
                "prefix": {"type": "string", "minLength": 1},
                "rpm": {"type": "string", "minLength": 1},
                "hash": {"type": "string", "minLength": 1},
                "parameters": {
                    "type": "object",
                },
                "parameters": {"type": "object"},
            },
        },
    },

@@ -32,27 +32,17 @@
            "type": "object",
            "properties": {
                "spec": spack.schema.spec.properties,
                "path": {
                    "oneOf": [
                        {"type": "string"},
                        {"type": "null"},
                    ],
                },
                "path": {"oneOf": [{"type": "string"}, {"type": "null"}]},
                "installed": {"type": "boolean"},
                "ref_count": {
                    "type": "integer",
                    "minimum": 0,
                },
                "ref_count": {"type": "integer", "minimum": 0},
                "explicit": {"type": "boolean"},
                "installation_time": {
                    "type": "number",
                },
            },
                "installation_time": {"type": "number"},
            },
        }
    },
    },
    "version": {"type": "string"},
    },
},
}
},
}

@@ -28,12 +28,7 @@
        "properties": {
            "matrix": {
                "type": "array",
                "items": {
                    "type": "array",
                    "items": {
                        "type": "string",
                    },
                },
                "items": {"type": "array", "items": {"type": "string"}},
            },
            "exclude": {"type": "array", "items": {"type": "string"}},
        },

@@ -61,11 +56,7 @@
            spack.schema.merged.properties,
            # extra environment schema properties
            {
                "include": {
                    "type": "array",
                    "default": [],
                    "items": {"type": "string"},
                },
                "include": {"type": "array", "default": [], "items": {"type": "string"}},
                "develop": {
                    "type": "object",
                    "default": {},

@@ -78,7 +69,7 @@
                "spec": {"type": "string"},
                "path": {"type": "string"},
            },
        },
        }
    },
},
"definitions": {

@@ -18,28 +18,16 @@
        "type": "object",
        "properties": {
            "name": {"type": "string"},
            "entrypoint": {
                "type": "array",
                "items": {
                    "type": "string",
            "entrypoint": {"type": "array", "items": {"type": "string"}},
                },
            },
        },
    },
    ],
    ]
}

runner_attributes_schema_items = {
    "image": image_schema,
    "tags": {"type": "array", "items": {"type": "string"}},
    "variables": {
        "type": "object",
        "patternProperties": {
            r"[\w\d\-_\.]+": {
                "type": "string",
            },
        },
    },
    "variables": {"type": "object", "patternProperties": {r"[\w\d\-_\.]+": {"type": "string"}}},
    "before_script": {"type": "array", "items": {"type": "string"}},
    "script": {"type": "array", "items": {"type": "string"}},
    "after_script": {"type": "array", "items": {"type": "string"}},

@@ -56,9 +44,7 @@
    "type": "object",
    "additionalProperties": False,
    "required": ["tags"],
    "properties": {
        "tags": {"type": "array", "items": {"type": "string"}},
    },
    "properties": {"tags": {"type": "array", "items": {"type": "string"}}},
}

@@ -69,24 +55,17 @@
            "type": "array",
            "items": {
                "anyOf": [
                    {
                        "type": "string",
                    },
                    {"type": "string"},
                    {
                        "type": "object",
                        "additionalProperties": False,
                        "required": ["name"],
                        "properties": {
                            "name": {
                                "type": "string",
                            },
                            "compiler-agnostic": {
                                "type": "boolean",
                                "default": False,
                            "name": {"type": "string"},
                            "compiler-agnostic": {"type": "boolean", "default": False},
                        },
                    },
                },
            ],
            ]
        },
    },
    "match_behavior": {"type": "string", "enum": ["first", "merge"], "default": "first"},

@@ -97,12 +76,7 @@
            "additionalProperties": False,
            "required": ["match"],
            "properties": {
                "match": {
                    "type": "array",
                    "items": {
                        "type": "string",
                    },
                },
                "match": {"type": "array", "items": {"type": "string"}},
                "remove-attributes": remove_attributes_schema,
                "runner-attributes": runner_selector_schema,
            },

@@ -112,12 +86,7 @@
        "signing-job-attributes": runner_selector_schema,
        "rebuild-index": {"type": "boolean"},
        "broken-specs-url": {"type": "string"},
        "broken-tests-packages": {
            "type": "array",
            "items": {
                "type": "string",
            },
        },
        "broken-tests-packages": {"type": "array", "items": {"type": "string"}},
    },
)

@@ -128,12 +97,7 @@
            "additionalProperties": False,
            "required": ["mappings"],
            "properties": union_dicts(
                core_shared_properties,
                {
                    "enable-artifacts-buildcache": {
                        "type": "boolean",
                    },
                },
                core_shared_properties, {"enable-artifacts-buildcache": {"type": "boolean"}}
            ),
        },
        {

@@ -141,21 +105,14 @@
            "additionalProperties": False,
            "required": ["mappings"],
            "properties": union_dicts(
                core_shared_properties,
                {
                    "temporary-storage-url-prefix": {
                        "type": "string",
                    },
                },
                core_shared_properties, {"temporary-storage-url-prefix": {"type": "string"}}
            ),
        },
    ]
}

#: Properties for inclusion in other schemas
properties = {
    "gitlab-ci": gitlab_ci_properties,
}
properties = {"gitlab-ci": gitlab_ci_properties}

#: Full schema with metadata
schema = {

@@ -27,9 +27,9 @@
            },
        },
        ]
        }
        },
    },
    },
}
}

@@ -110,10 +110,7 @@
            "arch_folder": {"type": "boolean"},
            "roots": {
                "type": "object",
                "properties": {
                    "tcl": {"type": "string"},
                    "lmod": {"type": "string"},
                },
                "properties": {"tcl": {"type": "string"}, "lmod": {"type": "string"}},
            },
            "enable": {
                "type": "array",

@@ -165,7 +162,7 @@
                # prefix-relative path to be inspected for existence
                r"^[\w-]*": array_of_strings
            },
        },
        }
    },
    "patternProperties": {
        valid_module_set_name: {

@@ -173,7 +170,7 @@
            "default": {},
            "additionalProperties": False,
            "properties": module_config_properties,
        },
        }
    },
}
}

@@ -61,25 +61,14 @@
                "default": [],
                "items": {"type": "string"},
            },  # compiler specs
            "buildable": {
                "type": "boolean",
                "default": True,
            },
            "buildable": {"type": "boolean", "default": True},
            "permissions": {
                "type": "object",
                "additionalProperties": False,
                "properties": {
                    "read": {
                        "type": "string",
                        "enum": ["user", "group", "world"],
                    },
                    "write": {
                        "type": "string",
                        "enum": ["user", "group", "world"],
                    },
                    "group": {
                        "type": "string",
                    },
                    "read": {"type": "string", "enum": ["user", "group", "world"]},
                    "write": {"type": "string", "enum": ["user", "group", "world"]},
                    "group": {"type": "string"},
                },
            },
            # If 'get_full_repo' is promoted to a Package-level

@@ -87,9 +76,7 @@
            "package_attributes": {
                "type": "object",
                "additionalProperties": False,
                "patternProperties": {
                    r"\w+": {},
                },
                "patternProperties": {r"\w+": {}},
            },
            "providers": {
                "type": "object",

@@ -100,14 +87,14 @@
                        "type": "array",
                        "default": [],
                        "items": {"type": "string"},
                    },
                    }
                },
            },
            "variants": {
                "oneOf": [
                    {"type": "string"},
                    {"type": "array", "items": {"type": "string"}},
                ],
                ]
            },
            "externals": {
                "type": "array",

@@ -124,9 +111,9 @@
                },
            },
        },
        }
    },
    },
},
}
}

@@ -12,12 +12,7 @@

#: Properties for inclusion in other schemas
properties = {
    "projections": {
        "type": "object",
        "patternProperties": {
            r"all|\w[\w-]*": {"type": "string"},
        },
    },
    "projections": {"type": "object", "patternProperties": {r"all|\w[\w-]*": {"type": "string"}}}
}

@@ -11,13 +11,7 @@

#: Properties for inclusion in other schemas
properties = {
    "repos": {
        "type": "array",
        "default": [],
        "items": {"type": "string"},
    },
}
properties = {"repos": {"type": "array", "default": [], "items": {"type": "string"}}}

#: Full schema with metadata

@@ -14,44 +14,26 @@

target = {
    "oneOf": [
        {
            "type": "string",
        },
        {"type": "string"},
        {
            "type": "object",
            "additionalProperties": False,
            "required": [
                "name",
                "vendor",
                "features",
                "generation",
                "parents",
            ],
            "required": ["name", "vendor", "features", "generation", "parents"],
            "properties": {
                "name": {"type": "string"},
                "vendor": {"type": "string"},
                "features": {
                    "type": "array",
                    "items": {"type": "string"},
                },
                "features": {"type": "array", "items": {"type": "string"}},
                "generation": {"type": "integer"},
                "parents": {
                    "type": "array",
                    "items": {"type": "string"},
                "parents": {"type": "array", "items": {"type": "string"}},
                },
            },
        },
    ],
    ]
}

arch = {
    "type": "object",
    "additionalProperties": False,
    "properties": {
        "platform": {},
        "platform_os": {},
        "target": target,
    },
    "properties": {"platform": {}, "platform_os": {}, "target": target},
}

dependencies = {

@@ -61,12 +43,9 @@
        "type": "object",
        "properties": {
            "hash": {"type": "string"},
            "type": {
                "type": "array",
                "items": {"type": "string"},
            },
        },
            "type": {"type": "array", "items": {"type": "string"}},
        },
    }
    },
}

@@ -90,13 +69,7 @@
            "items": {
                "type": "object",
                "additionalProperties": False,
                "required": [
                    "version",
                    "arch",
                    "compiler",
                    "namespace",
                    "parameters",
                ],
                "required": ["version", "arch", "compiler", "namespace", "parameters"],
                "properties": {
                    "name": {"type": "string"},
                    "hash": {"type": "string"},

@@ -104,12 +77,7 @@
                    # these hashes were used on some specs prior to 0.18
                    "full_hash": {"type": "string"},
                    "build_hash": {"type": "string"},
                    "version": {
                        "oneOf": [
                            {"type": "string"},
                            {"type": "number"},
                        ],
                    },
                    "version": {"oneOf": [{"type": "string"}, {"type": "number"}]},
                    "arch": arch,
                    "compiler": {
                        "type": "object",

@@ -119,12 +87,7 @@
                    "version": {"type": "string"},
                },
            },
            "develop": {
                "anyOf": [
                    {"type": "boolean"},
                    {"type": "string"},
                ],
            },
            "develop": {"anyOf": [{"type": "boolean"}, {"type": "string"}]},
            "namespace": {"type": "string"},
            "parameters": {
                "type": "object",

@@ -138,40 +101,16 @@
                ],
                "additionalProperties": True,
                "properties": {
                    "patches": {
                        "type": "array",
                        "items": {"type": "string"},
                    },
                    "cflags": {
                        "type": "array",
                        "items": {"type": "string"},
                    },
                    "cppflags": {
                        "type": "array",
                        "items": {"type": "string"},
                    },
                    "cxxflags": {
                        "type": "array",
                        "items": {"type": "string"},
                    },
                    "fflags": {
                        "type": "array",
                        "items": {"type": "string"},
                    },
                    "ldflags": {
                        "type": "array",
                        "items": {"type": "string"},
                    },
                    "ldlib": {
                        "type": "array",
                        "items": {"type": "string"},
                    "patches": {"type": "array", "items": {"type": "string"}},
                    "cflags": {"type": "array", "items": {"type": "string"}},
                    "cppflags": {"type": "array", "items": {"type": "string"}},
                    "cxxflags": {"type": "array", "items": {"type": "string"}},
                    "fflags": {"type": "array", "items": {"type": "string"}},
                    "ldflags": {"type": "array", "items": {"type": "string"}},
                    "ldlib": {"type": "array", "items": {"type": "string"}},
                    },
                },
            },
            "patches": {
                "type": "array",
                "items": {},
            },
            "patches": {"type": "array", "items": {}},
            "dependencies": dependencies,
            "build_spec": build_spec,
        },

@@ -112,8 +112,7 @@ def getter(node):

#: Enumeration like object to mark version provenance
version_provenance = collections.namedtuple(  # type: ignore
    "VersionProvenance",
    version_origin_fields,
    "VersionProvenance", version_origin_fields
)(**{name: i for i, name in enumerate(version_origin_fields)})

#: Named tuple to contain information on declared versions

@@ -1138,8 +1137,7 @@ def pkg_rules(self, pkg, tests):
        # virtual preferences
        self.virtual_preferences(
            pkg.name,
            lambda v, p, i: self.gen.fact(fn.pkg_provider_preference(pkg.name, v, p, i)),
            pkg.name, lambda v, p, i: self.gen.fact(fn.pkg_provider_preference(pkg.name, v, p, i))
        )

        self.package_requirement_rules(pkg)

@@ -1248,8 +1246,7 @@ def provider_defaults(self):
        )
        assert self.possible_virtuals is not None, msg
        self.virtual_preferences(
            "all",
            lambda v, p, i: self.gen.fact(fn.default_provider_preference(v, p, i)),
            "all", lambda v, p, i: self.gen.fact(fn.default_provider_preference(v, p, i))
        )

    def provider_requirements(self):

@@ -1427,12 +1424,7 @@ def spec_clauses(self, *args, **kwargs):
        return clauses

    def _spec_clauses(
        self,
        spec,
        body=False,
        transitive=True,
        expand_hashes=False,
        concrete_build_deps=False,
        self, spec, body=False, transitive=True, expand_hashes=False, concrete_build_deps=False
    ):
        """Return a list of clauses for a spec mandates are true.

@@ -2521,15 +2513,7 @@ def _reusable_specs(self):
                pass
        return reusable_specs

    def solve(
        self,
        specs,
        out=None,
        timers=False,
        stats=False,
        tests=False,
        setup_only=False,
    ):
    def solve(self, specs, out=None, timers=False, stats=False, tests=False, setup_only=False):
        """
        Arguments:
            specs (list): List of ``Spec`` objects to solve for.

@@ -2549,14 +2533,7 @@ def solve(
        result, _, _ = self.driver.solve(setup, specs, reuse=reusable_specs, output=output)
        return result

    def solve_in_rounds(
        self,
        specs,
        out=None,
        timers=False,
        stats=False,
        tests=False,
    ):
    def solve_in_rounds(self, specs, out=None, timers=False, stats=False, tests=False):
        """Solve for a stable model of specs in multiple rounds.

        This relaxes the assumption of solve that everything must be consistent and

@@ -229,6 +229,7 @@ def __init__(self, spec_or_platform_tuple=(None, None, None)):
        Otherwise information on platform, OS and target should be
        passed in either as a spec string or as a tuple.
        """

        # If the argument to __init__ is a spec string, parse it
        # and construct an ArchSpec
        def _string_or_none(s):

@@ -731,7 +732,6 @@ def __new__(cls, value, **kwargs):

class FlagMap(lang.HashableMap):

    __slots__ = ("spec",)

    def __init__(self, spec):

@@ -1221,7 +1221,6 @@ def copy(self, *args, **kwargs):

@lang.lazy_lexicographic_ordering(set_hash=False)
class Spec(object):

    #: Cache for spec's prefix, computed lazily in the corresponding property
    _prefix = None

@@ -1550,12 +1549,7 @@ def _add_dependency(self, spec: "Spec", *, deptypes: dp.DependencyArgument):
                "Cannot depend on incompatible specs '%s' and '%s'" % (dspec.spec, spec)
            )

    def add_dependency_edge(
        self,
        dependency_spec: "Spec",
        *,
        deptypes: dp.DependencyArgument,
    ):
    def add_dependency_edge(self, dependency_spec: "Spec", *, deptypes: dp.DependencyArgument):
        """Add a dependency edge to this spec.

        Args:

@@ -2241,7 +2235,6 @@ def spec_and_dependency_types(s):
        # Recurse on dependencies
        for s, s_dependencies in dep_like.items():

            if isinstance(s, str):
                dag_node, dependency_types = name_and_dependency_types(s)
            else:

@@ -2897,7 +2890,6 @@ def flat_dependencies(self, **kwargs):
        try:
            deptree = self.traverse(root=False)
            for spec in deptree:

                if spec.name not in flat_deps:
                    if copy:
                        spec = spec.copy(deps=False)

@@ -5245,7 +5237,6 @@ def __init__(self, spec, matches):
        match_fmt_custom = '{0}. "{1}" conflicts with "{2}" [{3}]\n'

        for idx, (s, c, w, msg) in enumerate(matches):

            if s not in visited:
                visited.add(s)
                long_message += "List of matching conflicts for spec:\n\n"

@@ -416,11 +416,7 @@ def test_spec_needs_rebuild(monkeypatch, tmpdir):
    assert rebuild


@pytest.mark.usefixtures(
    "install_mockery_mutable_config",
    "mock_packages",
    "mock_fetch",
)
@pytest.mark.usefixtures("install_mockery_mutable_config", "mock_packages", "mock_fetch")
def test_generate_index_missing(monkeypatch, tmpdir, mutable_config):
    """Ensure spack buildcache index only reports available packages"""

@@ -67,18 +67,12 @@ def test_raising_exception_if_bootstrap_disabled(mutable_config):


def test_raising_exception_module_importable():
    with pytest.raises(
        ImportError,
        match='cannot bootstrap the "asdf" Python module',
    ):
    with pytest.raises(ImportError, match='cannot bootstrap the "asdf" Python module'):
        spack.bootstrap.core.ensure_module_importable_or_raise("asdf")


def test_raising_exception_executables_in_path():
    with pytest.raises(
        RuntimeError,
        match="cannot bootstrap any of the asdf, fdsa executables",
    ):
    with pytest.raises(RuntimeError, match="cannot bootstrap any of the asdf, fdsa executables"):
        spack.bootstrap.core.ensure_executables_in_path_or_raise(["asdf", "fdsa"], "python")

@@ -20,7 +20,6 @@

def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmpdir):

    with tmpdir.as_cwd():
        spec = spack.spec.Spec("trivial-install-test-package").concretized()
        install(str(spec))

@@ -17,11 +17,7 @@
import spack.package_base
import spack.spec
import spack.util.spack_yaml as syaml
from spack.build_environment import (
    _static_to_shared_library,
    determine_number_of_jobs,
    dso_suffix,
)
from spack.build_environment import _static_to_shared_library, determine_number_of_jobs, dso_suffix
from spack.paths import build_env_path
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable

@@ -160,7 +156,6 @@ def test_static_to_shared_library(build_environment):
@pytest.mark.regression("8345")
@pytest.mark.usefixtures("config", "mock_packages")
def test_cc_not_changed_by_modules(monkeypatch, working_env):

    s = spack.spec.Spec("cmake")
    s.concretize()
    pkg = s.package

@@ -63,32 +63,12 @@ def builder_test_repository():
        # Generate custom phases using a GenericBuilder
        (
            "custom-phases",
            [
                ("CONFIGURE_CALLED", "1"),
                ("INSTALL_CALLED", "1"),
                ("LAST_PHASE", "INSTALL"),
            ],
            [("CONFIGURE_CALLED", "1"), ("INSTALL_CALLED", "1"), ("LAST_PHASE", "INSTALL")],
        ),
        # Old-style package, with phase defined in base builder
        (
            "old-style-autotools@1.0",
            [
                ("AFTER_AUTORECONF_1_CALLED", "1"),
            ],
        ),
        (
            "old-style-autotools@2.0",
            [
                ("AFTER_AUTORECONF_2_CALLED", "1"),
            ],
        ),
        (
            "old-style-custom-phases",
            [
                ("AFTER_CONFIGURE_CALLED", "1"),
                ("TEST_VALUE", "0"),
            ],
        ),
        ("old-style-autotools@1.0", [("AFTER_AUTORECONF_1_CALLED", "1")]),
        ("old-style-autotools@2.0", [("AFTER_AUTORECONF_2_CALLED", "1")]),
        ("old-style-custom-phases", [("AFTER_CONFIGURE_CALLED", "1"), ("TEST_VALUE", "0")]),
    ],
)
@pytest.mark.usefixtures("builder_test_repository", "config")

@@ -514,7 +514,6 @@ def test_ccld_with_system_dirs(wrapper_environment):
        SPACK_RPATH_DIRS="xlib:ylib:zlib",
        SPACK_LINK_DIRS="xlib:ylib:zlib",
    ):

        sys_path_args = [
            "-I/usr/include",
            "-L/usr/local/lib",

@@ -551,7 +550,6 @@ def test_ccld_with_system_dirs_isystem(wrapper_environment):
        SPACK_RPATH_DIRS="xlib:ylib:zlib",
        SPACK_LINK_DIRS="xlib:ylib:zlib",
    ):

        sys_path_args = [
            "-isystem",
            "/usr/include",

@@ -717,15 +715,9 @@ def test_keep_and_replace(wrapper_environment):
    werror_specific = ["-Werror=meh"]
    werror = ["-Werror"]
    werror_all = werror_specific + werror
    with set_env(
        SPACK_COMPILER_FLAGS_KEEP="",
        SPACK_COMPILER_FLAGS_REPLACE="-Werror*|",
    ):
    with set_env(SPACK_COMPILER_FLAGS_KEEP="", SPACK_COMPILER_FLAGS_REPLACE="-Werror*|"):
        check_args_contents(cc, test_args + werror_all, ["-Wl,--end-group"], werror_all)
    with set_env(
        SPACK_COMPILER_FLAGS_KEEP="-Werror=*",
        SPACK_COMPILER_FLAGS_REPLACE="-Werror*|",
    ):
    with set_env(SPACK_COMPILER_FLAGS_KEEP="-Werror=*", SPACK_COMPILER_FLAGS_REPLACE="-Werror*|"):
        check_args_contents(cc, test_args + werror_all, werror_specific, werror)
    with set_env(
        SPACK_COMPILER_FLAGS_KEEP="-Werror=*",

@@ -84,7 +84,6 @@ def getcode(self):
        return self._resp_code

    def read(self, length=None):

        if len(self._content) <= 0:
            return None

@@ -103,11 +102,7 @@ def read(self, length=None):


def test_download_and_extract_artifacts(tmpdir, monkeypatch, working_env):
    os.environ.update(
        {
            "GITLAB_PRIVATE_TOKEN": "faketoken",
        }
    )
    os.environ.update({"GITLAB_PRIVATE_TOKEN": "faketoken"})

    url = "https://www.nosuchurlexists.itsfake/artifacts.zip"
    working_dir = os.path.join(tmpdir.strpath, "repro")

@@ -234,24 +229,14 @@ def __call__(self, *args, **kwargs):
    assert "Unable to merge {0}".format(c1) in err


@pytest.mark.parametrize(
    "obj, proto",
    [
        ({}, []),
    ],
)
@pytest.mark.parametrize("obj, proto", [({}, [])])
def test_ci_opt_argument_checking(obj, proto):
    """Check that matches() and subkeys() return False when `proto` is not a dict."""
    assert not ci_opt.matches(obj, proto)
    assert not ci_opt.subkeys(obj, proto)


@pytest.mark.parametrize(
    "yaml",
    [
        {"extends": 1},
    ],
)
@pytest.mark.parametrize("yaml", [{"extends": 1}])
def test_ci_opt_add_extends_non_sequence(yaml):
    """Check that add_extends() exits if 'extends' is not a sequence."""
    yaml_copy = yaml.copy()

@@ -263,10 +248,7 @@ def test_ci_workarounds():
    fake_root_spec = "x" * 544
    fake_spack_ref = "x" * 40

    common_variables = {
        "SPACK_COMPILER_ACTION": "NONE",
        "SPACK_IS_PR_PIPELINE": "False",
    }
    common_variables = {"SPACK_COMPILER_ACTION": "NONE", "SPACK_IS_PR_PIPELINE": "False"}

    common_before_script = [
        'git clone "https://github.com/spack/spack"',

@@ -307,7 +289,6 @@ def make_build_job(name, deps, stage, use_artifact_buildcache, optimize, use_dep
        return {name: result}

    def make_rebuild_index_job(use_artifact_buildcache, optimize, use_dependencies):

        result = {
            "stage": "stage-rebuild-index",
            "script": "spack buildcache update-index --mirror-url s3://mirror",

@@ -35,7 +35,6 @@ def test_audit_configs(mutable_config, mock_packages):


def test_audit_packages_https(mutable_config, mock_packages):

    # Without providing --all should fail
    audit("packages-https", fail_on_error=False)
    # The mock configuration has duplicate definitions of some compilers

@@ -23,14 +23,7 @@ def test_error_when_multiple_specs_are_given():
    assert "only takes one spec" in output


@pytest.mark.parametrize(
    "args",
    [
        ("--", "/bin/bash", "-c", "echo test"),
        ("--",),
        (),
    ],
)
@pytest.mark.parametrize("args", [("--", "/bin/bash", "-c", "echo test"), ("--",), ()])
@pytest.mark.usefixtures("config", "mock_packages", "working_env")
def test_build_env_requires_a_spec(args):
    output = build_env(*args, fail_on_error=False)

@@ -258,12 +258,7 @@ def _validate_needs_graph(yaml_contents, needs_graph, artifacts):


def test_ci_generate_bootstrap_gcc(
    tmpdir,
    working_env,
    mutable_mock_env_path,
    install_mockery,
    mock_packages,
    ci_base_environment,
    tmpdir, working_env, mutable_mock_env_path, install_mockery, mock_packages, ci_base_environment
):
    """Test that we can bootstrap a compiler and use it as the
    compiler for a spec in the environment"""

@@ -300,21 +295,10 @@ def test_ci_generate_bootstrap_gcc(
    needs_graph = {
        "(bootstrap) conflict": [],
        "(bootstrap) gcc": [
            "(bootstrap) conflict",
        ],
        "(specs) libelf": [
            "(bootstrap) gcc",
        ],
        "(specs) libdwarf": [
            "(bootstrap) gcc",
            "(specs) libelf",
        ],
        "(specs) dyninst": [
            "(bootstrap) gcc",
            "(specs) libelf",
            "(specs) libdwarf",
        ],
        "(bootstrap) gcc": ["(bootstrap) conflict"],
        "(specs) libelf": ["(bootstrap) gcc"],
        "(specs) libdwarf": ["(bootstrap) gcc", "(specs) libelf"],
        "(specs) dyninst": ["(bootstrap) gcc", "(specs) libelf", "(specs) libdwarf"],
    }

    with tmpdir.as_cwd():

@@ -331,12 +315,7 @@ def test_ci_generate_bootstrap_gcc(


def test_ci_generate_bootstrap_artifacts_buildcache(
    tmpdir,
    working_env,
    mutable_mock_env_path,
    install_mockery,
    mock_packages,
    ci_base_environment,
    tmpdir, working_env, mutable_mock_env_path, install_mockery, mock_packages, ci_base_environment
):
    """Test that we can bootstrap a compiler when artifacts buildcache
    is turned on"""

@@ -373,18 +352,9 @@ def test_ci_generate_bootstrap_artifacts_buildcache(
    needs_graph = {
        "(bootstrap) conflict": [],
        "(bootstrap) gcc": [
            "(bootstrap) conflict",
        ],
        "(specs) libelf": [
            "(bootstrap) gcc",
            "(bootstrap) conflict",
        ],
        "(specs) libdwarf": [
            "(bootstrap) gcc",
            "(bootstrap) conflict",
            "(specs) libelf",
        ],
        "(bootstrap) gcc": ["(bootstrap) conflict"],
        "(specs) libelf": ["(bootstrap) gcc", "(bootstrap) conflict"],
        "(specs) libdwarf": ["(bootstrap) gcc", "(bootstrap) conflict", "(specs) libelf"],
        "(specs) dyninst": [
            "(bootstrap) gcc",
            "(bootstrap) conflict",

@@ -447,11 +417,7 @@ def test_ci_generate_with_cdash_token(
    mock_binary_index,
):
    """Make sure we it doesn't break if we configure cdash"""
    os.environ.update(
        {
            "SPACK_CDASH_AUTH_TOKEN": "notreallyatokenbutshouldnotmatter",
        }
    )
    os.environ.update({"SPACK_CDASH_AUTH_TOKEN": "notreallyatokenbutshouldnotmatter"})
    filename = str(tmpdir.join("spack.yaml"))
    with open(filename, "w") as f:
        f.write(

@@ -598,12 +564,7 @@ def test_ci_generate_with_custom_scripts(


def test_ci_generate_pkg_with_deps(
    tmpdir,
    working_env,
    mutable_mock_env_path,
    install_mockery,
    mock_packages,
    ci_base_environment,
    tmpdir, working_env, mutable_mock_env_path, install_mockery, mock_packages, ci_base_environment
):
    """Test pipeline generation for a package w/ dependencies"""
    filename = str(tmpdir.join("spack.yaml"))

@@ -670,10 +631,7 @@ def test_ci_generate_for_pr_pipeline(
    rebuilding the mirror index, even if that job is specifically
    configured"""
    os.environ.update(
        {
            "SPACK_PIPELINE_TYPE": "spack_pull_request",
            "SPACK_PR_BRANCH": "fake-test-branch",
        }
        {"SPACK_PIPELINE_TYPE": "spack_pull_request", "SPACK_PR_BRANCH": "fake-test-branch"}
    )
    filename = str(tmpdir.join("spack.yaml"))
    with open(filename, "w") as f:

@@ -928,7 +886,6 @@ def test_ci_rebuild_mock_success(
    monkeypatch,
    broken_tests,
):

    pkg_name = "archive-files"
    rebuild_env = create_rebuild_env(tmpdir, pkg_name, broken_tests)

@@ -1129,11 +1086,7 @@ def test_ci_generate_mirror_override(
    """Ensure that protected pipelines using --buildcache-destination do not
    skip building specs that are not in the override mirror when they are
    found in the main mirror."""
    os.environ.update(
        {
            "SPACK_PIPELINE_TYPE": "spack_protected_branch",
        }
    )
    os.environ.update({"SPACK_PIPELINE_TYPE": "spack_protected_branch"})

    working_dir = tmpdir.join("working_dir")

@@ -1727,12 +1680,7 @@ def fake_get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False
        if spec.name == "gcc":
            return []
        else:
            return [
                {
                    "spec": spec,
                    "mirror_url": mirror_url,
                }
            ]
            return [{"spec": spec, "mirror_url": mirror_url}]

    with tmpdir.as_cwd():
        env_cmd("create", "test", "./spack.yaml")

@@ -1766,12 +1714,7 @@ def fake_get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False
        # not otherwise need to be rebuilt (thanks to DAG pruning), they
        # both end up in the generated pipeline because the compiler they
        # depend on is bootstrapped, and *does* need to be rebuilt.
        needs_graph = {
            "(bootstrap) gcc": [],
            "(specs) b": [
                "(bootstrap) gcc",
            ],
        }
        needs_graph = {"(bootstrap) gcc": [], "(specs) b": ["(bootstrap) gcc"]}

        _validate_needs_graph(new_yaml_contents, needs_graph, False)

@@ -1788,11 +1731,7 @@ def test_ci_generate_prune_untouched(
):
    """Test pipeline generation with pruning works to eliminate
    specs that were not affected by a change"""
    os.environ.update(
        {
            "SPACK_PRUNE_UNTOUCHED": "TRUE",  # enables pruning of untouched specs
        }
    )
    os.environ.update({"SPACK_PRUNE_UNTOUCHED": "TRUE"})  # enables pruning of untouched specs
    mirror_url = "https://my.fake.mirror"
    filename = str(tmpdir.join("spack.yaml"))
    with open(filename, "w") as f:

@@ -2216,14 +2155,7 @@ def fake_download_and_extract_artifacts(url, work_dir):


@pytest.mark.parametrize(
    "subcmd",
    [
        (""),
        ("generate"),
        ("rebuild-index"),
        ("rebuild"),
        ("reproduce-build"),
    ],
    "subcmd", [(""), ("generate"), ("rebuild-index"), ("rebuild"), ("reproduce-build")]
)
def test_ci_help(subcmd, capsys):
    """Make sure `spack ci` --help describes the (sub)command help."""

@@ -22,7 +22,6 @@

@pytest.fixture()
def mock_calls_for_clean(monkeypatch):

    counts = {}

    class Counter(object):

@@ -61,7 +60,6 @@ def __call__(self, *args, **kwargs):
    ],
)
def test_function_calls(command_line, effects, mock_calls_for_clean):

    # Call the command with the supplied command line
    clean(command_line)

@@ -195,7 +195,7 @@ def test_update_completion_arg(tmpdir, monkeypatch):
            "format": "bash",
            "header": str(mock_infile),
            "update": str(mock_bashfile),
        },
        }
    }

    # make a mock completion file missing the --update-completion argument

@@ -271,10 +271,7 @@ def test_compiler_find_path_order(no_compilers_yaml, working_env, clangdir):
    shutil.copy("gfortran-8", "first_in_path/gfortran-8")

    # the first_in_path folder should be searched first
    os.environ["PATH"] = "{0}:{1}".format(
        str(clangdir.join("first_in_path")),
        str(clangdir),
    )
    os.environ["PATH"] = "{0}:{1}".format(str(clangdir.join("first_in_path")), str(clangdir))

    compiler("find", "--scope=site")