Style: black 23, skip magic trailing comma (#35351)

* Style: black 23, skip magic trailing commas

* isort should use same line length as black

* Fix unused import

* Update version of black used in CI

* Update new packages

* Update new packages
Adam J. Stewart 2023-02-16 22:06:12 -07:00 committed by GitHub
parent b935809948
commit 603569e321
510 changed files with 864 additions and 3856 deletions
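
The bulk of this diff is mechanical: black's "magic trailing comma" rule keeps any collection that ends in a trailing comma exploded one element per line, and this commit turns that rule off. As a minimal sketch (assuming the usual knobs — black's `-C` / `--skip-magic-trailing-comma` flag or a `skip-magic-trailing-comma = true` entry under `[tool.black]` in pyproject.toml, plus a matching isort `line_length`; the pyproject.toml hunk itself is not part of this excerpt), the reformatting looks like:

    # Before: the trailing comma after "installcheck" forces black to keep
    # the tuple exploded, one element per line.
    legacy_methods = (
        "configure_args",
        "check",
        "installcheck",
    )

    # After: with the magic trailing comma skipped, black collapses the
    # tuple onto a single line whenever it fits within the line length.
    legacy_methods = ("configure_args", "check", "installcheck")

Hence the lopsided stats above: hundreds of multi-line literals and call sites collapse to single lines in the hunks below.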

@@ -44,7 +44,7 @@ jobs:
           cache: 'pip'
       - name: Install Python packages
         run: |
-          python3 -m pip install --upgrade pip six setuptools types-six black==22.12.0 mypy isort clingo flake8
+          python3 -m pip install --upgrade pip six setuptools types-six black==23.1.0 mypy isort clingo flake8
       - name: Setup git configuration
         run: |
           # Need this for the git tests to succeed.

@@ -89,6 +89,7 @@
 # Enable todo items
 todo_include_todos = True

+
 #
 # Disable duplicate cross-reference warnings.
 #
@@ -353,9 +354,7 @@ class SpackStyle(DefaultStyle):

 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title, author, documentclass [howto/manual]).
-latex_documents = [
-    ("index", "Spack.tex", "Spack Documentation", "Todd Gamblin", "manual"),
-]
+latex_documents = [("index", "Spack.tex", "Spack Documentation", "Todd Gamblin", "manual")]

 # The name of an image file (relative to this directory) to place at the top of
 # the title page.
@@ -402,7 +401,7 @@ class SpackStyle(DefaultStyle):
         "Spack",
         "One line description of project.",
         "Miscellaneous",
-    ),
+    )
 ]

 # Documents to append as an appendix to all manuals.
@@ -418,6 +417,4 @@ class SpackStyle(DefaultStyle):
 # -- Extension configuration -------------------------------------------------

 # sphinx.ext.intersphinx
-intersphinx_mapping = {
-    "python": ("https://docs.python.org/3", None),
-}
+intersphinx_mapping = {"python": ("https://docs.python.org/3", None)}

@@ -268,7 +268,6 @@ def groupid_to_group(x):
     regex = re.escape(regex)
     filenames = path_to_os_path(*filenames)
     for filename in filenames:
-
         msg = 'FILTER FILE: {0} [replacing "{1}"]'
         tty.debug(msg.format(filename, regex))
@@ -1220,7 +1219,6 @@ def traverse_tree(
         # target is relative to the link, then that may not resolve properly
         # relative to our cwd - see resolve_link_target_relative_to_the_link
         if os.path.isdir(source_child) and (follow_links or not os.path.islink(source_child)):
-
             # When follow_nonexisting isn't set, don't descend into dirs
             # in source that do not exist in dest
             if follow_nonexisting or os.path.exists(dest_child):
@@ -1662,7 +1660,6 @@ def find(root, files, recursive=True):

 @system_path_filter
 def _find_recursive(root, search_files):
-
     # The variable here is **on purpose** a defaultdict. The idea is that
     # we want to poke the filesystem as little as possible, but still maintain
     # stability in the order of the answer. Thus we are recording each library

@@ -198,7 +198,7 @@ def _memoized_function(*args, **kwargs):
         except TypeError as e:
             # TypeError is raised when indexing into a dict if the key is unhashable.
             raise UnhashableArguments(
-                "args + kwargs '{}' was not hashable for function '{}'".format(key, func.__name__),
+                "args + kwargs '{}' was not hashable for function '{}'".format(key, func.__name__)
             ) from e

     return _memoized_function
@@ -237,6 +237,7 @@ def decorator_with_or_without_args(decorator):
         @decorator
     """

     # See https://stackoverflow.com/questions/653368 for more on this
+
     @functools.wraps(decorator)
     def new_dec(*args, **kwargs):
@@ -990,8 +991,7 @@ def enum(**kwargs):

 def stable_partition(
-    input_iterable: Iterable,
-    predicate_fn: Callable[[Any], bool],
+    input_iterable: Iterable, predicate_fn: Callable[[Any], bool]
 ) -> Tuple[List[Any], List[Any]]:
     """Partition the input iterable according to a custom predicate.
@@ -1104,11 +1104,7 @@ def __enter__(self):

     def __exit__(self, exc_type, exc_value, tb):
         if exc_value is not None:
-            self._handler._receive_forwarded(
-                self._context,
-                exc_value,
-                traceback.format_tb(tb),
-            )
+            self._handler._receive_forwarded(self._context, exc_value, traceback.format_tb(tb))

         # Suppress any exception from being re-raised:
         # https://docs.python.org/3/reference/datamodel.html#object.__exit__.

@@ -108,7 +108,6 @@ class SuppressOutput:
     """Class for disabling output in a scope using 'with' keyword"""

     def __init__(self, msg_enabled=True, warn_enabled=True, error_enabled=True):
-
         self._msg_enabled_initial = _msg_enabled
         self._warn_enabled_initial = _warn_enabled
         self._error_enabled_initial = _error_enabled

@@ -161,10 +161,7 @@ def _is_background(self):
     def _get_canon_echo_flags(self):
         """Get current termios canonical and echo settings."""
         cfg = termios.tcgetattr(self.stream)
-        return (
-            bool(cfg[3] & termios.ICANON),
-            bool(cfg[3] & termios.ECHO),
-        )
+        return (bool(cfg[3] & termios.ICANON), bool(cfg[3] & termios.ECHO))

     def _enable_keyboard_input(self):
         """Disable canonical input and echoing on ``self.stream``."""

@@ -77,10 +77,7 @@ def __init__(self, pid, controller_fd, timeout=1, sleep_time=1e-1, debug=False):
     def get_canon_echo_attrs(self):
         """Get echo and canon attributes of the terminal of controller_fd."""
         cfg = termios.tcgetattr(self.controller_fd)
-        return (
-            bool(cfg[3] & termios.ICANON),
-            bool(cfg[3] & termios.ECHO),
-        )
+        return (bool(cfg[3] & termios.ICANON), bool(cfg[3] & termios.ECHO))

     def horizontal_line(self, name):
         """Labled horizontal line for debugging."""
@@ -92,11 +89,7 @@ def status(self):
         if self.debug:
             canon, echo = self.get_canon_echo_attrs()
             sys.stderr.write(
-                "canon: %s, echo: %s\n"
-                % (
-                    "on" if canon else "off",
-                    "on" if echo else "off",
-                )
+                "canon: %s, echo: %s\n" % ("on" if canon else "off", "on" if echo else "off")
             )
             sys.stderr.write("input: %s\n" % self.input_on())
             sys.stderr.write("bg: %s\n" % self.background())

@@ -321,8 +321,7 @@ def _check_patch_urls(pkgs, error_cls):
                 errors.append(
                     error_cls(
                         "patch URL in package {0} must end with {1}".format(
-                            pkg_cls.name,
-                            full_index_arg,
+                            pkg_cls.name, full_index_arg
                         ),
                         [patch.url],
                     )

@@ -210,10 +210,7 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
                         break
                 else:
                     self._mirrors_for_spec[dag_hash].append(
-                        {
-                            "mirror_url": mirror_url,
-                            "spec": indexed_spec,
-                        }
+                        {"mirror_url": mirror_url, "spec": indexed_spec}
                     )
         finally:
             shutil.rmtree(tmpdir)
@@ -296,10 +293,7 @@ def update_spec(self, spec, found_list):
                 break
         else:
             current_list.append(
-                {
-                    "mirror_url": new_entry["mirror_url"],
-                    "spec": new_entry["spec"],
-                }
+                {"mirror_url": new_entry["mirror_url"], "spec": new_entry["spec"]}
             )

     def update(self, with_cooldown=False):
@@ -367,8 +361,7 @@ def update(self, with_cooldown=False):
                 # May need to fetch the index and update the local caches
                 try:
                     needs_regen = self._fetch_and_cache_index(
-                        cached_mirror_url,
-                        cache_entry=cache_entry,
+                        cached_mirror_url, cache_entry=cache_entry
                     )
                     self._last_fetch_times[cached_mirror_url] = (now, True)
                     all_methods_failed = False
@@ -2006,12 +1999,7 @@ def try_direct_fetch(spec, mirrors=None):
             fetched_spec = Spec.from_json(specfile_contents)
         fetched_spec._mark_concrete()

-        found_specs.append(
-            {
-                "mirror_url": mirror.fetch_url,
-                "spec": fetched_spec,
-            }
-        )
+        found_specs.append({"mirror_url": mirror.fetch_url, "spec": fetched_spec})

     return found_specs
@@ -2313,11 +2301,7 @@ def download_single_spec(concrete_spec, destination, mirror_url=None):
     local_tarball_path = os.path.join(destination, tarball_dir_name)

     files_to_fetch = [
-        {
-            "url": [tarball_path_name],
-            "path": local_tarball_path,
-            "required": True,
-        },
+        {"url": [tarball_path_name], "path": local_tarball_path, "required": True},
         {
             "url": [
                 tarball_name(concrete_spec, ".spec.json.sig"),
@@ -2438,12 +2422,7 @@ def conditional_fetch(self):
             response.headers.get("Etag", None) or response.headers.get("etag", None)
         )

-        return FetchIndexResult(
-            etag=etag,
-            hash=computed_hash,
-            data=result,
-            fresh=False,
-        )
+        return FetchIndexResult(etag=etag, hash=computed_hash, data=result, fresh=False)


 class EtagIndexFetcher:

@@ -5,11 +5,7 @@
 """Function and classes needed to bootstrap Spack itself."""

 from .config import ensure_bootstrap_configuration, is_bootstrapping
-from .core import (
-    all_core_root_specs,
-    ensure_core_dependencies,
-    ensure_patchelf_in_path_or_raise,
-)
+from .core import all_core_root_specs, ensure_core_dependencies, ensure_patchelf_in_path_or_raise
 from .environment import BootstrapEnvironment, ensure_environment_dependencies
 from .status import status_message

@@ -59,10 +59,7 @@ def _try_import_from_store(module, query_spec, query_info=None):
     # to be picked up and used, possibly depending on something in the store, first
     # allows the bootstrap version to work when an incompatible version is in
     # sys.path
-    orders = [
-        module_paths + sys.path,
-        sys.path + module_paths,
-    ]
+    orders = [module_paths + sys.path, sys.path + module_paths]
     for path in orders:
         sys.path = path
         try:

@@ -53,12 +53,7 @@
 import spack.util.url
 import spack.version

-from ._common import (
-    _executables_in_store,
-    _python_import,
-    _root_spec,
-    _try_import_from_store,
-)
+from ._common import _executables_in_store, _python_import, _root_spec, _try_import_from_store
 from .config import spack_python_interpreter, spec_for_current_python

 #: Name of the file containing metadata about the bootstrapping source

@@ -171,7 +171,7 @@ def mypy_root_spec():

 def black_root_spec():
     """Return the root spec used to bootstrap black"""
-    return _root_spec("py-black@:22.12.0")
+    return _root_spec("py-black@:23.1.0")


 def flake8_root_spec():

@@ -1016,7 +1016,6 @@ def get_cmake_prefix_path(pkg):
 def _setup_pkg_and_run(
     serialized_pkg, function, kwargs, child_pipe, input_multiprocess_fd, jsfd1, jsfd2
 ):
-
     context = kwargs.get("context", "build")

     try:

@@ -110,11 +110,7 @@ class AutotoolsBuilder(BaseBuilder):
     phases = ("autoreconf", "configure", "build", "install")

     #: Names associated with package methods in the old build-system format
-    legacy_methods = (
-        "configure_args",
-        "check",
-        "installcheck",
-    )
+    legacy_methods = ("configure_args", "check", "installcheck")

     #: Names associated with package attributes in the old build-system format
     legacy_attributes = (

@@ -31,7 +31,6 @@ def cmake_cache_option(name, boolean_value, comment=""):
 class CachedCMakeBuilder(CMakeBuilder):
-
     #: Phases of a Cached CMake package
     #: Note: the initconfig phase is used for developer builds as a final phase to stop on
     phases: Tuple[str, ...] = ("initconfig", "cmake", "build", "install")

@@ -252,10 +252,7 @@ def std_args(pkg, generator=None):
         if platform.mac_ver()[0]:
             args.extend(
-                [
-                    define("CMAKE_FIND_FRAMEWORK", "LAST"),
-                    define("CMAKE_FIND_APPBUNDLE", "LAST"),
-                ]
+                [define("CMAKE_FIND_FRAMEWORK", "LAST"), define("CMAKE_FIND_APPBUNDLE", "LAST")]
             )

         # Set up CMake rpath

@@ -38,10 +38,7 @@ class GenericBuilder(BaseBuilder):
     legacy_methods: Tuple[str, ...] = ()

     #: Names associated with package attributes in the old build-system format
-    legacy_attributes: Tuple[str, ...] = (
-        "archive_files",
-        "install_time_test_callbacks",
-    )
+    legacy_attributes: Tuple[str, ...] = ("archive_files", "install_time_test_callbacks")

     #: Callback names for post-install phase tests
     install_time_test_callbacks = []

@@ -857,10 +857,7 @@ def scalapack_libs(self):
             raise_lib_error("Cannot find a BLACS library for the given MPI.")

         int_suff = "_" + self.intel64_int_suffix
-        scalapack_libnames = [
-            "libmkl_scalapack" + int_suff,
-            blacs_lib + int_suff,
-        ]
+        scalapack_libnames = ["libmkl_scalapack" + int_suff, blacs_lib + int_suff]
         sca_libs = find_libraries(
             scalapack_libnames, root=self.component_lib_dir("mkl"), shared=("+shared" in self.spec)
         )
@@ -1161,9 +1158,7 @@ def _determine_license_type(self):
         #
         # Ideally, we just tell the installer to look around on the system.
         # Thankfully, we neither need to care nor emulate where it looks:
-        license_type = {
-            "ACTIVATION_TYPE": "exist_lic",
-        }
+        license_type = {"ACTIVATION_TYPE": "exist_lic"}

         # However (and only), if the spack-internal Intel license file has been
         # populated beyond its templated explanatory comments, proffer it to

@@ -68,10 +68,7 @@ def unpack(self, pkg, spec, prefix):
     @staticmethod
     def _generate_tree_line(name, prefix):
-        return """{{ name = "{name}", root = "{prefix}" }};""".format(
-            name=name,
-            prefix=prefix,
-        )
+        return """{{ name = "{name}", root = "{prefix}" }};""".format(name=name, prefix=prefix)

     def generate_luarocks_config(self, pkg, spec, prefix):
         spec = self.pkg.spec

@@ -37,11 +37,7 @@ class IntelOneApiPackage(Package):
         conflicts(c, msg="This package in only available for x86_64 and Linux")

     # Add variant to toggle environment modifications from vars.sh
-    variant(
-        "envmods",
-        default=True,
-        description="Toggles environment modifications",
-    )
+    variant("envmods", default=True, description="Toggles environment modifications")

     @staticmethod
     def update_description(cls):

@@ -61,10 +61,7 @@ def import_modules(self):
             list: list of strings of module names
         """
         modules = []
-        root = os.path.join(
-            self.prefix,
-            self.spec["python"].package.platlib,
-        )
+        root = os.path.join(self.prefix, self.spec["python"].package.platlib)

         # Some Python libraries are packages: collections of modules
         # distributed in directories containing __init__.py files

@@ -42,9 +42,7 @@
 from spack.reporters import CDash, CDashConfiguration
 from spack.reporters.cdash import build_stamp as cdash_build_stamp

-JOB_RETRY_CONDITIONS = [
-    "always",
-]
+JOB_RETRY_CONDITIONS = ["always"]

 TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror"
 SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
@@ -129,10 +127,7 @@ def _remove_reserved_tags(tags):

 def _get_spec_string(spec):
-    format_elements = [
-        "{name}{@version}",
-        "{%compiler}",
-    ]
+    format_elements = ["{name}{@version}", "{%compiler}"]

     if spec.architecture:
         format_elements.append(" {arch=architecture}")
@@ -328,12 +323,7 @@ def _compute_spec_deps(spec_list, check_index_only=False, mirrors_to_check=None)
     dependencies = []

     def append_dep(s, d):
-        dependencies.append(
-            {
-                "spec": s,
-                "depends": d,
-            }
-        )
+        dependencies.append({"spec": s, "depends": d})

     for spec in spec_list:
         for s in spec.traverse(deptype=all):
@@ -346,10 +336,7 @@ def append_dep(s, d):
             )

             skey = _spec_deps_key(s)
-            spec_labels[skey] = {
-                "spec": s,
-                "needs_rebuild": not up_to_date_mirrors,
-            }
+            spec_labels[skey] = {"spec": s, "needs_rebuild": not up_to_date_mirrors}

             for d in s.dependencies(deptype=all):
                 dkey = _spec_deps_key(d)
@@ -368,10 +355,7 @@ def append_dep(s, d):
         }
     )

-    deps_json_obj = {
-        "specs": specs,
-        "dependencies": dependencies,
-    }
+    deps_json_obj = {"specs": specs, "dependencies": dependencies}

     return deps_json_obj
@@ -410,14 +394,7 @@ def _copy_attributes(attrs_list, src_dict, dest_dict):

 def _find_matching_config(spec, gitlab_ci):
     runner_attributes = {}
-    overridable_attrs = [
-        "image",
-        "tags",
-        "variables",
-        "before_script",
-        "script",
-        "after_script",
-    ]
+    overridable_attrs = ["image", "tags", "variables", "before_script", "script", "after_script"]

     _copy_attributes(overridable_attrs, gitlab_ci, runner_attributes)
@@ -685,28 +662,14 @@ def generate_gitlab_ci_yaml(
             except AttributeError:
                 phase_name = phase
                 strip_compilers = False
-            phases.append(
-                {
-                    "name": phase_name,
-                    "strip-compilers": strip_compilers,
-                }
-            )
+            phases.append({"name": phase_name, "strip-compilers": strip_compilers})

             for bs in env.spec_lists[phase_name]:
                 bootstrap_specs.append(
-                    {
-                        "spec": bs,
-                        "phase-name": phase_name,
-                        "strip-compilers": strip_compilers,
-                    }
+                    {"spec": bs, "phase-name": phase_name, "strip-compilers": strip_compilers}
                 )

-    phases.append(
-        {
-            "name": "specs",
-            "strip-compilers": False,
-        }
-    )
+    phases.append({"name": "specs", "strip-compilers": False})

     # If a remote mirror override (alternate buildcache destination) was
     # specified, add it here in case it has already built hashes we might
@@ -1109,15 +1072,9 @@ def generate_gitlab_ci_yaml(
                 "variables": variables,
                 "script": job_script,
                 "tags": tags,
-                "artifacts": {
-                    "paths": artifact_paths,
-                    "when": "always",
-                },
+                "artifacts": {"paths": artifact_paths, "when": "always"},
                 "needs": sorted(job_dependencies, key=lambda d: d["job"]),
-                "retry": {
-                    "max": 2,
-                    "when": JOB_RETRY_CONDITIONS,
-                },
+                "retry": {"max": 2, "when": JOB_RETRY_CONDITIONS},
                 "interruptible": True,
             }
@@ -1135,10 +1092,7 @@ def generate_gitlab_ci_yaml(
             if image_name:
                 job_object["image"] = image_name
                 if image_entry is not None:
-                    job_object["image"] = {
-                        "name": image_name,
-                        "entrypoint": image_entry,
-                    }
+                    job_object["image"] = {"name": image_name, "entrypoint": image_entry}

             output_object[job_name] = job_object
             job_id += 1
@@ -1181,11 +1135,7 @@ def generate_gitlab_ci_yaml(
     service_job_retries = {
         "max": 2,
-        "when": [
-            "runner_system_failure",
-            "stuck_or_timeout_failure",
-            "script_failure",
-        ],
+        "when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"],
     }

     if job_id > 0:
@@ -1357,9 +1307,7 @@ def generate_gitlab_ci_yaml(
         _copy_attributes(default_attrs, service_job_config, noop_job)

     if "script" not in noop_job:
-        noop_job["script"] = [
-            'echo "All specs already up to date, nothing to rebuild."',
-        ]
+        noop_job["script"] = ['echo "All specs already up to date, nothing to rebuild."']

     noop_job["retry"] = service_job_retries
@@ -1620,9 +1568,7 @@ def download_and_extract_artifacts(url, work_dir):
     """
     tty.msg("Fetching artifacts from: {0}\n".format(url))

-    headers = {
-        "Content-Type": "application/zip",
-    }
+    headers = {"Content-Type": "application/zip"}

     token = os.environ.get("GITLAB_PRIVATE_TOKEN", None)
     if token:
@@ -2081,10 +2027,7 @@ def write_broken_spec(url, pkg_name, stack_name, job_url, pipeline_url, spec_dic
         with open(file_path, "w") as fd:
             fd.write(syaml.dump(broken_spec_details))
         web_util.push_to_url(
-            file_path,
-            url,
-            keep_original=False,
-            extra_args={"ContentType": "text/plain"},
+            file_path, url, keep_original=False, extra_args={"ContentType": "text/plain"}
         )
     except Exception as err:
         # If there is an S3 error (e.g., access denied or connection
@@ -2162,14 +2105,7 @@ def run_standalone_tests(**kwargs):
         tty.error("Reproduction directory is required for stand-alone tests")
         return

-    test_args = [
-        "spack",
-        "--color=always",
-        "--backtrace",
-        "--verbose",
-        "test",
-        "run",
-    ]
+    test_args = ["spack", "--color=always", "--backtrace", "--verbose", "test", "run"]
     if fail_fast:
         test_args.append("--fail-fast")
@@ -2319,19 +2255,9 @@ def populate_buildgroup(self, job_names):
         opener = build_opener(HTTPHandler)

-        parent_group_id = self.create_buildgroup(
-            opener,
-            headers,
-            url,
-            self.build_group,
-            "Daily",
-        )
+        parent_group_id = self.create_buildgroup(opener, headers, url, self.build_group, "Daily")
         group_id = self.create_buildgroup(
-            opener,
-            headers,
-            url,
-            "Latest {0}".format(self.build_group),
-            "Latest",
+            opener, headers, url, "Latest {0}".format(self.build_group), "Latest"
         )

         if not parent_group_id or not group_id:
@@ -2341,13 +2267,9 @@ def populate_buildgroup(self, job_names):
         data = {
             "dynamiclist": [
-                {
-                    "match": name,
-                    "parentgroupid": parent_group_id,
-                    "site": self.site,
-                }
+                {"match": name, "parentgroupid": parent_group_id, "site": self.site}
                 for name in job_names
-            ],
+            ]
         }

         enc_data = json.dumps(data).encode("utf-8")

@@ -43,7 +43,6 @@ def matches(obj, proto):
         return all((key in obj and matches(obj[key], val)) for key, val in proto.items())

     if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
-
         if not (isinstance(proto, collections.abc.Sequence) and not isinstance(proto, str)):
             return False

@@ -161,9 +161,7 @@ class _UnquotedFlags(object):
     """

     flags_arg_pattern = re.compile(
-        r'^({0})=([^\'"].*)$'.format(
-            "|".join(spack.spec.FlagMap.valid_compiler_flags()),
-        )
+        r'^({0})=([^\'"].*)$'.format("|".join(spack.spec.FlagMap.valid_compiler_flags()))
     )

     def __init__(self, all_unquoted_flag_pairs: List[Tuple[Match[str], str]]):
@@ -227,7 +225,6 @@ def parse_specs(args, **kwargs):
         return specs

     except spack.error.SpecError as e:
-
         msg = e.message
         if e.long_message:
             msg += e.long_message

@@ -53,7 +53,6 @@ def packages(parser, args):

 def packages_https(parser, args):
-
     # Since packages takes a long time, --all is required without name
     if not args.check_all and not args.name:
         tty.die("Please specify one or more packages to audit, or --all.")

@@ -103,9 +103,7 @@ def setup_parser(subparser):
         help="Regenerate buildcache index after building package(s)",
     )
     create.add_argument(
-        "--spec-file",
-        default=None,
-        help="Create buildcache entry for spec from json or yaml file",
+        "--spec-file", default=None, help="Create buildcache entry for spec from json or yaml file"
     )
     create.add_argument(
         "--only",

@@ -20,9 +20,7 @@ def setup_parser(subparser):
         help="name of the list to remove specs from",
     )
     subparser.add_argument(
-        "--match-spec",
-        dest="match_spec",
-        help="if name is ambiguous, supply a spec to match",
+        "--match-spec", dest="match_spec", help="if name is ambiguous, supply a spec to match"
     )
     subparser.add_argument(
         "-a",

@@ -548,13 +548,7 @@ def ci_rebuild(args):
     commands = [
         # apparently there's a race when spack bootstraps? do it up front once
-        [
-            SPACK_COMMAND,
-            "-e",
-            env.path,
-            "bootstrap",
-            "now",
-        ],
+        [SPACK_COMMAND, "-e", env.path, "bootstrap", "now"],
         [
             SPACK_COMMAND,
             "-e",

@@ -13,11 +13,7 @@
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
-from llnl.util.argparsewriter import (
-    ArgparseCompletionWriter,
-    ArgparseRstWriter,
-    ArgparseWriter,
-)
+from llnl.util.argparsewriter import ArgparseCompletionWriter, ArgparseRstWriter, ArgparseWriter
 from llnl.util.tty.colify import colify

 import spack.cmd
@@ -42,7 +38,7 @@
         "format": "bash",
         "header": os.path.join(spack.paths.share_path, "bash", "spack-completion.in"),
         "update": os.path.join(spack.paths.share_path, "spack-completion.bash"),
-    },
+    }
 }

@@ -408,13 +408,7 @@ def config_prefer_upstream(args):
     pkgs = {}
     for spec in pref_specs:
         # Collect all the upstream compilers and versions for this package.
-        pkg = pkgs.get(
-            spec.name,
-            {
-                "version": [],
-                "compiler": [],
-            },
-        )
+        pkg = pkgs.get(spec.name, {"version": [], "compiler": []})
         pkgs[spec.name] = pkg

         # We have no existing variant if this is our first added version.

@@ -16,19 +16,10 @@
 import spack.stage
 import spack.util.web
 from spack.spec import Spec
-from spack.url import (
-    UndetectableNameError,
-    UndetectableVersionError,
-    parse_name,
-    parse_version,
-)
+from spack.url import UndetectableNameError, UndetectableVersionError, parse_name, parse_version
 from spack.util.editor import editor
 from spack.util.executable import ProcessError, which
-from spack.util.naming import (
-    mod_to_class,
-    simplify_name,
-    valid_fully_qualified_module_name,
-)
+from spack.util.naming import mod_to_class, simplify_name, valid_fully_qualified_module_name

 description = "create a new package file"
 section = "packaging"

@@ -96,8 +96,5 @@ def report(args):

 def debug(parser, args):
-    action = {
-        "create-db-tarball": create_db_tarball,
-        "report": report,
-    }
+    action = {"create-db-tarball": create_db_tarball, "report": report}
     action[args.debug_command](args)

@@ -33,12 +33,7 @@
 level = "long"

 # Arguments for display_specs when we find ambiguity
-display_args = {
-    "long": True,
-    "show_flags": True,
-    "variants": True,
-    "indent": 4,
-}
+display_args = {"long": True, "show_flags": True, "variants": True, "indent": 4}


 def setup_parser(sp):

@@ -80,22 +80,12 @@ def compare_specs(a, b, to_string=False, color=None):
     # specs and to descend into dependency hashes so we include all facts.
     a_facts = set(
         shift(func)
-        for func in setup.spec_clauses(
-            a,
-            body=True,
-            expand_hashes=True,
-            concrete_build_deps=True,
-        )
+        for func in setup.spec_clauses(a, body=True, expand_hashes=True, concrete_build_deps=True)
         if func.name == "attr"
     )
     b_facts = set(
         shift(func)
-        for func in setup.spec_clauses(
-            b,
-            body=True,
-            expand_hashes=True,
-            concrete_build_deps=True,
-        )
+        for func in setup.spec_clauses(b, body=True, expand_hashes=True, concrete_build_deps=True)
         if func.name == "attr"
     )

@@ -148,8 +148,7 @@ def env_activate(args):
     if not args.shell:
         spack.cmd.common.shell_init_instructions(
-            "spack env activate",
-            " eval `spack env activate {sh_arg} [...]`",
+            "spack env activate", " eval `spack env activate {sh_arg} [...]`"
         )
         return 1
@@ -238,8 +237,7 @@ def env_deactivate_setup_parser(subparser):
 def env_deactivate(args):
     if not args.shell:
         spack.cmd.common.shell_init_instructions(
-            "spack env deactivate",
-            " eval `spack env deactivate {sh_arg}`",
+            "spack env deactivate", " eval `spack env deactivate {sh_arg}`"
         )
         return 1

@@ -38,11 +38,7 @@ def setup_parser(subparser):
         default=False,
         help="packages with detected externals won't be built with Spack",
     )
-    find_parser.add_argument(
-        "--exclude",
-        action="append",
-        help="packages to exclude from search",
-    )
+    find_parser.add_argument("--exclude", action="append", help="packages to exclude from search")
     find_parser.add_argument(
         "-p",
         "--path",
@@ -187,7 +183,6 @@ def external_read_cray_manifest(args):
 def _collect_and_consume_cray_manifest_files(
     manifest_file=None, manifest_directory=None, dry_run=False, fail_on_error=False
 ):
-
     manifest_files = []
     if manifest_file:
         manifest_files.append(manifest_file)

@@ -25,10 +25,7 @@ def setup_parser(subparser):
         help="fetch only missing (not yet installed) dependencies",
     )
     subparser.add_argument(
-        "-D",
-        "--dependencies",
-        action="store_true",
-        help="also fetch all dependencies",
+        "-D", "--dependencies", action="store_true", help="also fetch all dependencies"
     )
     arguments.add_common_arguments(subparser, ["specs"])
     subparser.epilog = (

@@ -9,13 +9,7 @@
 import spack.config
 import spack.environment as ev
 import spack.store
-from spack.graph import (
-    DAGWithDependencyTypes,
-    SimpleDAG,
-    graph_ascii,
-    graph_dot,
-    static_graph_dot,
-)
+from spack.graph import DAGWithDependencyTypes, SimpleDAG, graph_ascii, graph_dot, static_graph_dot

 description = "generate graphs of package dependency relationships"
 section = "basic"

@@ -87,9 +87,7 @@
 """

-guides = {
-    "spec": spec_guide,
-}
+guides = {"spec": spec_guide}


 def setup_parser(subparser):

@@ -496,9 +496,7 @@ def reporter_factory(specs):
         return None

     context_manager = spack.report.build_context_manager(
-        reporter=args.reporter(),
-        filename=report_filename(args, specs=specs),
-        specs=specs,
+        reporter=args.reporter(), filename=report_filename(args, specs=specs), specs=specs
     )
     return context_manager

@@ -58,10 +58,7 @@
 #: licensed files that can have LGPL language in them
 #: so far, just this command -- so it can find LGPL things elsewhere
-lgpl_exceptions = [
-    r"lib/spack/spack/cmd/license.py",
-    r"lib/spack/spack/test/cmd/license.py",
-]
+lgpl_exceptions = [r"lib/spack/spack/cmd/license.py", r"lib/spack/spack/test/cmd/license.py"]


 def _all_spack_files(root=spack.paths.prefix):
@@ -129,7 +126,6 @@ def error_messages(self):

 def _check_license(lines, path):
-
     found = []
     for line in lines:

@@ -98,8 +98,7 @@ def load(parser, args):
     if not args.shell:
         specs_str = " ".join(args.constraint) or "SPECS"
         spack.cmd.common.shell_init_instructions(
-            "spack load",
-            " eval `spack load {sh_arg} %s`" % specs_str,
+            "spack load", " eval `spack load {sh_arg} %s`" % specs_str
         )
         return 1

@@ -27,12 +27,7 @@
 """

 # Arguments for display_specs when we find ambiguity
-display_args = {
-    "long": True,
-    "show_flags": False,
-    "variants": False,
-    "indent": 4,
-}
+display_args = {"long": True, "show_flags": False, "variants": False, "indent": 4}


 def setup_parser(subparser):

@@ -445,9 +445,7 @@ def mirror_create(args):
     mirror_specs = concrete_specs_from_user(args)
     create_mirror_for_individual_specs(
-        mirror_specs,
-        path=path,
-        skip_unstable_versions=args.skip_unstable_versions,
+        mirror_specs, path=path, skip_unstable_versions=args.skip_unstable_versions
     )
@@ -467,9 +465,7 @@ def create_mirror_for_all_specs(path, skip_unstable_versions, selection_fn):
 def create_mirror_for_all_specs_inside_environment(path, skip_unstable_versions, selection_fn):
     mirror_specs = concrete_specs_from_environment(selection_fn=selection_fn)
     create_mirror_for_individual_specs(
-        mirror_specs,
-        path=path,
-        skip_unstable_versions=skip_unstable_versions,
+        mirror_specs, path=path, skip_unstable_versions=skip_unstable_versions
     )

@@ -180,10 +180,7 @@ def loads(module_type, specs, args, out=None):
         for spec in specs
     )

-    module_commands = {
-        "tcl": "module load ",
-        "lmod": "module load ",
-    }
+    module_commands = {"tcl": "module load ", "lmod": "module load "}

     d = {"command": "" if not args.shell else module_commands[module_type], "prefix": args.prefix}
@@ -368,18 +365,14 @@ def refresh(module_type, specs, args):
 def modules_cmd(parser, args, module_type, callbacks=callbacks):
-
     # Qualifiers to be used when querying the db for specs
-    constraint_qualifiers = {
-        "refresh": {"installed": True, "known": True},
-    }
+    constraint_qualifiers = {"refresh": {"installed": True, "known": True}}
     query_args = constraint_qualifiers.get(args.subparser_name, {})

     # Get the specs that match the query from the DB
     specs = args.specs(**query_args)

     try:
-
         callbacks[args.subparser_name](module_type, specs, args)

     except MultipleSpecsMatch:

@@ -182,11 +182,7 @@ def solve(parser, args):
     # set up solver parameters
     # Note: reuse and other concretizer prefs are passed as configuration
     result = solver.solve(
-        specs,
-        out=output,
-        timers=args.timers,
-        stats=args.stats,
-        setup_only=setup_only,
+        specs, out=output, timers=args.timers, stats=args.stats, setup_only=setup_only
     )
     if not setup_only:
         _process_result(result, show, required_format, kwargs)

@@ -110,7 +110,7 @@ def spec(parser, args):
     else:
         tty.die("spack spec requires at least one spec or an active environment")

-    for (input, output) in specs:
+    for input, output in specs:
         # With -y, just print YAML to output.
         if args.format:
             if args.format == "yaml":

@@ -30,20 +30,13 @@ def grouper(iterable, n, fillvalue=None):

 #: List of directories to exclude from checks -- relative to spack root
-exclude_directories = [
-    os.path.relpath(spack.paths.external_path, spack.paths.prefix),
-]
+exclude_directories = [os.path.relpath(spack.paths.external_path, spack.paths.prefix)]

 #: Order in which tools should be run. flake8 is last so that it can
 #: double-check the results of other tools (if, e.g., --fix was provided)
 #: The list maps an executable name to a method to ensure the tool is
 #: bootstrapped or present in the environment.
-tool_names = [
-    "isort",
-    "black",
-    "flake8",
-    "mypy",
-]
+tool_names = ["isort", "black", "flake8", "mypy"]

 #: tools we run in spack style
 tools = {}
@@ -52,7 +45,7 @@ def grouper(iterable, n, fillvalue=None):
 mypy_ignores = [
     # same as `disable_error_code = "annotation-unchecked"` in pyproject.toml, which
     # doesn't exist in mypy 0.971 for Python 3.6
-    "[annotation-unchecked]",
+    "[annotation-unchecked]"
 ]
@@ -150,10 +143,7 @@ def setup_parser(subparser):
         help="branch to compare against to determine changed files (default: develop)",
     )
     subparser.add_argument(
-        "-a",
-        "--all",
-        action="store_true",
-        help="check all files, not just changed files",
+        "-a", "--all", action="store_true", help="check all files, not just changed files"
     )
     subparser.add_argument(
         "-r",
@@ -178,10 +168,7 @@ def setup_parser(subparser):
         help="format automatically if possible (e.g., with isort, black)",
     )
     subparser.add_argument(
-        "--root",
-        action="store",
-        default=None,
-        help="style check a different spack instance",
+        "--root", action="store", default=None, help="style check a different spack instance"
     )

     tool_group = subparser.add_mutually_exclusive_group()
@@ -211,6 +198,7 @@ def rewrite_and_print_output(
     output, args, re_obj=re.compile(r"^(.+):([0-9]+):"), replacement=r"{0}:{1}:"
 ):
     """rewrite ouput with <file>:<line>: format to respect path args"""
+
     # print results relative to current working directory
     def translate(match):
         return replacement.format(cwd_relative(match.group(1), args), *list(match.groups()[1:]))
@@ -281,24 +269,10 @@ def run_mypy(mypy_cmd, file_list, args):
         os.path.join(spack.paths.prefix, "pyproject.toml"),
         "--show-error-codes",
     ]
-    mypy_arg_sets = [
-        common_mypy_args
-        + [
-            "--package",
-            "spack",
-            "--package",
-            "llnl",
-        ]
-    ]
+    mypy_arg_sets = [common_mypy_args + ["--package", "spack", "--package", "llnl"]]
     if "SPACK_MYPY_CHECK_PACKAGES" in os.environ:
         mypy_arg_sets.append(
-            common_mypy_args
-            + [
-                "--package",
-                "packages",
-                "--disable-error-code",
-                "no-redef",
-            ]
+            common_mypy_args + ["--package", "packages", "--disable-error-code", "no-redef"]
         )

     returncode = 0

@@ -33,9 +33,7 @@ def setup_parser(subparser):
     # Run
     run_parser = sp.add_parser(
-        "run",
-        description=test_run.__doc__,
-        help=spack.cmd.first_line(test_run.__doc__),
+        "run", description=test_run.__doc__, help=spack.cmd.first_line(test_run.__doc__)
     )

     alias_help_msg = "Provide an alias for this test-suite"
@@ -80,9 +78,7 @@ def setup_parser(subparser):
     # List
     list_parser = sp.add_parser(
-        "list",
-        description=test_list.__doc__,
-        help=spack.cmd.first_line(test_list.__doc__),
+        "list", description=test_list.__doc__, help=spack.cmd.first_line(test_list.__doc__)
     )
     list_parser.add_argument(
         "-a",
@@ -96,9 +92,7 @@ def setup_parser(subparser):
     # Find
     find_parser = sp.add_parser(
-        "find",
-        description=test_find.__doc__,
-        help=spack.cmd.first_line(test_find.__doc__),
+        "find", description=test_find.__doc__, help=spack.cmd.first_line(test_find.__doc__)
     )
     find_parser.add_argument(
         "filter",
@@ -108,9 +102,7 @@ def setup_parser(subparser):
     # Status
     status_parser = sp.add_parser(
-        "status",
-        description=test_status.__doc__,
-        help=spack.cmd.first_line(test_status.__doc__),
+        "status", description=test_status.__doc__, help=spack.cmd.first_line(test_status.__doc__)
     )
     status_parser.add_argument(
         "names", nargs=argparse.REMAINDER, help="Test suites for which to print status"
@@ -147,9 +139,7 @@ def setup_parser(subparser):
     # Remove
     remove_parser = sp.add_parser(
-        "remove",
-        description=test_remove.__doc__,
-        help=spack.cmd.first_line(test_remove.__doc__),
+        "remove", description=test_remove.__doc__, help=spack.cmd.first_line(test_remove.__doc__)
     )
     arguments.add_common_arguments(remove_parser, ["yes_to_all"])
     remove_parser.add_argument(
@@ -189,11 +179,7 @@ def test_run(args):
     specs = spack.cmd.parse_specs(args.specs) if args.specs else [None]
     specs_to_test = []
     for spec in specs:
-        matching = spack.store.db.query_local(
-            spec,
-            hashes=hashes,
-            explicit=explicit,
-        )
+        matching = spack.store.db.query_local(spec, hashes=hashes, explicit=explicit)
         if spec and not matching:
             tty.warn("No {0}installed packages match spec {1}".format(explicit_str, spec))
             """

@@ -31,12 +31,7 @@
 """

 # Arguments for display_specs when we find ambiguity
-display_args = {
-    "long": True,
-    "show_flags": False,
-    "variants": False,
-    "indent": 4,
-}
+display_args = {"long": True, "show_flags": False, "variants": False, "indent": 4}


 def setup_parser(subparser):
@@ -236,12 +231,7 @@ def do_uninstall(specs, force=False):
     hashes_to_remove = set(s.dag_hash() for s in specs)

     for s in traverse.traverse_nodes(
-        specs,
-        order="topo",
-        direction="children",
-        root=True,
-        cover="nodes",
-        deptype="all",
+        specs, order="topo", direction="children", root=True, cover="nodes", deptype="all"
     ):
         if s.dag_hash() in hashes_to_remove:
             spack.package_base.PackageBase.uninstall_by_spec(s, force=force)

@@ -77,8 +77,7 @@ def unload(parser, args):
         specs_str = " ".join(args.specs) or "SPECS"
         spack.cmd.common.shell_init_instructions(
-            "spack unload",
-            " eval `spack unload {sh_arg}` %s" % specs_str,
+            "spack unload", " eval `spack unload {sh_arg}` %s" % specs_str
         )
         return 1

@@ -106,12 +106,7 @@ def setup_parser(subparser):

 def url(parser, args):
-    action = {
-        "parse": url_parse,
-        "list": url_list,
-        "summary": url_summary,
-        "stats": url_stats,
-    }
+    action = {"parse": url_parse, "list": url_list, "summary": url_summary, "stats": url_stats}
     action[args.subcommand](args)

@@ -619,11 +619,9 @@ def _default(search_paths):
     command_arguments = []
     files_to_be_tested = fs.files_in(*search_paths)
     for compiler_name in spack.compilers.supported_compilers():
-
         compiler_cls = class_for_compiler_name(compiler_name)

         for language in ("cc", "cxx", "f77", "fc"):
-
             # Select only the files matching a regexp
             for (file, full_path), regexp in itertools.product(
                 files_to_be_tested, compiler_cls.search_regexps(language)

@@ -154,10 +154,7 @@ def setup_custom_environment(self, pkg, env):
             ),
         )

-        real_dirs = [
-            "Toolchains/XcodeDefault.xctoolchain/usr/bin",
-            "usr/bin",
-        ]
+        real_dirs = ["Toolchains/XcodeDefault.xctoolchain/usr/bin", "usr/bin"]

         bins = ["c++", "c89", "c99", "cc", "clang", "clang++", "cpp"]

@@ -793,7 +793,7 @@ def _config():
     configuration_paths = [
         # Default configuration scope is the lowest-level scope. These are
         # versioned with Spack and can be overridden by systems, sites or users
-        configuration_defaults_path,
+        configuration_defaults_path
     ]

     disable_local_config = "SPACK_DISABLE_LOCAL_CONFIG" in os.environ
@@ -801,15 +801,11 @@ def _config():
     # System configuration is per machine.
     # This is disabled if user asks for no local configuration.
     if not disable_local_config:
-        configuration_paths.append(
-            ("system", spack.paths.system_config_path),
-        )
+        configuration_paths.append(("system", spack.paths.system_config_path))

     # Site configuration is per spack instance, for sites or projects
     # No site-level configs should be checked into spack by default.
-    configuration_paths.append(
-        ("site", os.path.join(spack.paths.etc_path)),
-    )
+    configuration_paths.append(("site", os.path.join(spack.paths.etc_path)))

     # User configuration can override both spack defaults and site config
     # This is disabled if user asks for no local configuration.

@@ -18,10 +18,7 @@
 #: packages here.
 default_path = "/opt/cray/pe/cpe-descriptive-manifest/"

-compiler_name_translation = {
-    "nvidia": "nvhpc",
-    "rocm": "rocmcc",
-}
+compiler_name_translation = {"nvidia": "nvhpc", "rocm": "rocmcc"}


 def translated_compiler_name(manifest_compiler_name):

@@ -46,10 +46,7 @@
 import spack.store
 import spack.util.lock as lk
 import spack.util.spack_json as sjson
-from spack.directory_layout import (
-    DirectoryLayoutError,
-    InconsistentInstallDirectoryError,
-)
+from spack.directory_layout import DirectoryLayoutError, InconsistentInstallDirectoryError
 from spack.error import SpackError
 from spack.util.crypto import bit_length
 from spack.version import Version
@@ -108,10 +105,7 @@

 def reader(version):
-    reader_cls = {
-        Version("5"): spack.spec.SpecfileV1,
-        Version("6"): spack.spec.SpecfileV3,
-    }
+    reader_cls = {Version("5"): spack.spec.SpecfileV1, Version("6"): spack.spec.SpecfileV3}
     return reader_cls[version]

@@ -377,7 +377,8 @@ def compute_windows_user_path_for_package(pkg):
     install location, return list of potential locations based
     on common heuristics. For more info on Windows user specific
     installs see:
-    https://learn.microsoft.com/en-us/dotnet/api/system.environment.specialfolder?view=netframework-4.8"""
+    https://learn.microsoft.com/en-us/dotnet/api/system.environment.specialfolder?view=netframework-4.8
+    """
     if not is_windows:
         return []

@@ -45,11 +45,7 @@
 import spack.util.spack_json as sjson
 import spack.util.spack_yaml as syaml
 import spack.util.url
-from spack.filesystem_view import (
-    SimpleFilesystemView,
-    inverse_view_func_parser,
-    view_func_parser,
-)
+from spack.filesystem_view import SimpleFilesystemView, inverse_view_func_parser, view_func_parser
 from spack.installer import PackageInstaller
 from spack.spec import Spec
 from spack.spec_list import InvalidSpecConstraintError, SpecList
@@ -304,12 +300,7 @@ def _write_yaml(data, str_or_file):
 def _eval_conditional(string):
     """Evaluate conditional definitions using restricted variable scope."""
     valid_variables = spack.util.environment.get_host_environment()
-    valid_variables.update(
-        {
-            "re": re,
-            "env": os.environ,
-        }
-    )
+    valid_variables.update({"re": re, "env": os.environ})
     return eval(string, valid_variables)
@@ -973,9 +964,7 @@ def included_config_scopes(self):
                     config_path = os.path.join(config_path, basename)
                 else:
                     staged_path = spack.config.fetch_remote_configs(
-                        config_path,
-                        self.config_stage_dir,
-                        skip_existing=True,
+                        config_path, self.config_stage_dir, skip_existing=True
                     )
                     if not staged_path:
                         raise SpackEnvironmentError(


@@ -35,13 +35,7 @@
 import llnl.util
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
-from llnl.util.filesystem import (
-    get_single_file,
-    mkdirp,
-    temp_cwd,
-    temp_rename,
-    working_dir,
-)
+from llnl.util.filesystem import get_single_file, mkdirp, temp_cwd, temp_rename, working_dir
 from llnl.util.symlink import symlink

 import spack.config
@@ -1555,11 +1549,7 @@ def for_package_version(pkg, version):
         # performance hit for branches on older versions of git.
         # Branches cannot be cached, so we tell the fetcher not to cache tags/branches
         ref_type = "commit" if version.is_commit else "tag"
-        kwargs = {
-            "git": pkg.git,
-            ref_type: version.ref,
-            "no_cache": True,
-        }
+        kwargs = {"git": pkg.git, ref_type: version.ref, "no_cache": True}

         kwargs["submodules"] = getattr(pkg, "submodules", False)


@@ -37,7 +37,6 @@ class SharedLibrariesVisitor(BaseDirectoryVisitor):
     exception of an exclude list."""
-
     def __init__(self, exclude_list):
         # List of file and directory names to be excluded
        self.exclude_list = frozenset(exclude_list)


@@ -212,8 +212,7 @@ def install_sbang():
     # copy over the fresh copy of `sbang`
     sbang_tmp_path = os.path.join(
-        os.path.dirname(sbang_path),
-        ".%s.tmp" % os.path.basename(sbang_path),
+        os.path.dirname(sbang_path), ".%s.tmp" % os.path.basename(sbang_path)
     )
     shutil.copy(spack.paths.sbang_script, sbang_tmp_path)


@@ -423,11 +423,7 @@ def _try_install_from_binary_cache(pkg, explicit, unsigned=False, timer=timer.NU
     matches = binary_distribution.get_mirrors_for_spec(pkg.spec, index_only=True)

     return _process_binary_cache_tarball(
-        pkg,
-        explicit,
-        unsigned,
-        mirrors_for_spec=matches,
-        timer=timer,
+        pkg, explicit, unsigned, mirrors_for_spec=matches, timer=timer
     )
@@ -789,7 +785,7 @@ def _add_bootstrap_compilers(self, compiler, architecture, pkgs, request, all_de
         associated dependents
     """
     packages = _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs)
-    for (comp_pkg, is_compiler) in packages:
+    for comp_pkg, is_compiler in packages:
         pkgid = package_id(comp_pkg)
         if pkgid not in self.build_tasks:
             self._add_init_task(comp_pkg, request, is_compiler, all_deps)
@@ -813,8 +809,7 @@ def _modify_existing_task(self, pkgid, attr, value):
             key, task = tup
             if task.pkg_id == pkgid:
                 tty.debug(
-                    "Modifying task for {0} to treat it as a compiler".format(pkgid),
-                    level=2,
+                    "Modifying task for {0} to treat it as a compiler".format(pkgid), level=2
                 )
                 setattr(task, attr, value)
                 self.build_pq[i] = (key, task)
@@ -1212,7 +1207,6 @@ def _add_tasks(self, request, all_deps):
         install_package = request.install_args.get("install_package")
         if install_package and request.pkg_id not in self.build_tasks:
-
             # Be sure to clear any previous failure
             spack.store.db.clear_failure(request.spec, force=True)
@@ -1948,11 +1942,7 @@ def run(self):
         # Run post install hooks before build stage is removed.
         spack.hooks.post_install(self.pkg.spec)

-        _print_timer(
-            pre=self.pre,
-            pkg_id=self.pkg_id,
-            timer=self.timer,
-        )
+        _print_timer(pre=self.pre, pkg_id=self.pkg_id, timer=self.timer)
         _print_installed_pkg(self.pkg.prefix)

         # Send final status that install is successful
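Hunks like the `_add_tasks` one above, which delete a single blank line right after an `if`/`for`/`with`/`try` opener, come from black 23's updated stable style rather than from the trailing-comma option: empty lines at the start of a block are removed. A quick way to see it, assuming black 23.1 semantics (the version this commit targets):

import black

SRC = """\
if install_package and request.pkg_id not in self.build_tasks:

    # Be sure to clear any previous failure
    spack.store.db.clear_failure(request.spec, force=True)
"""

# Black 23 drops the empty line directly after the block opener; the rest
# of the snippet is left alone.
print(black.format_str(SRC, mode=black.Mode(line_length=99)))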


@@ -249,10 +249,7 @@ def root_path(name, module_set_name):
     Returns:
         root folder for module file installation
     """
-    defaults = {
-        "lmod": "$spack/share/spack/lmod",
-        "tcl": "$spack/share/spack/modules",
-    }
+    defaults = {"lmod": "$spack/share/spack/lmod", "tcl": "$spack/share/spack/modules"}
     # Root folders where the various module files should be written
     roots = spack.config.get("modules:%s:roots" % module_set_name, {})


@@ -65,9 +65,7 @@
 from spack.version import GitVersion, Version, VersionBase

 FLAG_HANDLER_RETURN_TYPE = Tuple[
-    Optional[Iterable[str]],
-    Optional[Iterable[str]],
-    Optional[Iterable[str]],
+    Optional[Iterable[str]], Optional[Iterable[str]], Optional[Iterable[str]]
 ]
 FLAG_HANDLER_TYPE = Callable[[str, Iterable[str]], FLAG_HANDLER_RETURN_TYPE]
@@ -1705,11 +1703,7 @@ def _has_make_target(self, target):
             "don't know how to make {0}. Stop",
         ]

-        kwargs = {
-            "fail_on_error": False,
-            "output": os.devnull,
-            "error": str,
-        }
+        kwargs = {"fail_on_error": False, "output": os.devnull, "error": str}

         stderr = make("-n", target, **kwargs)
@@ -2220,10 +2214,7 @@ def uninstall_by_spec(spec, force=False, deprecator=None):
         if not force:
             dependents = spack.store.db.installed_relatives(
-                spec,
-                direction="parents",
-                transitive=True,
-                deptype=("link", "run"),
+                spec, direction="parents", transitive=True, deptype=("link", "run")
             )
             if dependents:
                 raise PackageStillNeededError(spec, dependents)
@@ -2236,7 +2227,6 @@ def uninstall_by_spec(spec, force=False, deprecator=None):
     # Pre-uninstall hook runs first.
     with spack.store.db.prefix_write_lock(spec):
         if pkg is not None:
-
             try:
                 spack.hooks.pre_uninstall(spec)
@@ -2399,11 +2389,7 @@ def fetch_remote_versions(self, concurrency=128):
         try:
             return spack.util.web.find_versions_of_archive(
-                self.all_urls,
-                self.list_url,
-                self.list_depth,
-                concurrency,
-                reference_package=self,
+                self.all_urls, self.list_url, self.list_depth, concurrency, reference_package=self
             )
         except spack.util.web.NoNetworkConnectionError as e:
             tty.die("Package.fetch_versions couldn't connect to:", e.url, e.message)


@@ -409,13 +409,7 @@ def needs_text_relocation(m_type, m_subtype):
 def relocate_macho_binaries(
-    path_names,
-    old_layout_root,
-    new_layout_root,
-    prefix_to_prefix,
-    rel,
-    old_prefix,
-    new_prefix,
+    path_names, old_layout_root, new_layout_root, prefix_to_prefix, rel, old_prefix, new_prefix
 ):
     """
     Use macholib python package to get the rpaths, depedent libraries
@@ -829,7 +823,7 @@ def fixup_macos_rpath(root, filename):
     # Check for nonexistent rpaths (often added by spack linker overzealousness
     # with both lib/ and lib64/) and duplicate rpaths
-    for (rpath, count) in rpaths.items():
+    for rpath, count in rpaths.items():
         if rpath.startswith("@loader_path") or rpath.startswith("@executable_path"):
             # Allowable relative paths
             pass


@@ -1066,7 +1066,6 @@ def dump_provenance(self, spec, path):
         # Install patch files needed by the package.
         fs.mkdirp(path)
         for patch in itertools.chain.from_iterable(spec.package.patches.values()):
-
             if patch.path:
                 if os.path.exists(patch.path):
                     fs.install(patch.path, path)


@@ -113,7 +113,6 @@ def wrapper(instance, *args, **kwargs):
             start_time = time.time()
             try:
-
                 value = wrapped_fn(instance, *args, **kwargs)
                 package["stdout"] = self.fetch_log(pkg)
                 package["installed_from_binary_cache"] = pkg.installed_from_binary_cache
@@ -234,9 +233,7 @@ def extract_package_from_signature(self, instance, *args, **kwargs):
 @contextlib.contextmanager
 def build_context_manager(
-    reporter: spack.reporters.Reporter,
-    filename: str,
-    specs: List[spack.spec.Spec],
+    reporter: spack.reporters.Reporter, filename: str, specs: List[spack.spec.Spec]
 ):
     """Decorate a package to generate a report after the installation function is executed.


@@ -47,8 +47,7 @@
 CDashConfiguration = collections.namedtuple(
-    "CDashConfiguration",
-    ["upload_url", "packages", "build", "site", "buildstamp", "track"],
+    "CDashConfiguration", ["upload_url", "packages", "build", "site", "buildstamp", "track"]
 )
@@ -336,12 +335,7 @@ def test_skipped_report(self, directory_name, spec, reason=None):
         if reason:
             output += "\n{0}".format(reason)

-        package = {
-            "name": spec.name,
-            "id": spec.dag_hash(),
-            "result": "skipped",
-            "stdout": output,
-        }
+        package = {"name": spec.name, "id": spec.dag_hash(), "result": "skipped", "stdout": output}
         self.test_report_for_package(directory_name, package, duration=0.0)

     def concretization_report(self, directory_name, msg):


@@ -10,11 +10,7 @@
 import llnl.util.tty as tty

 # The keys here represent the only recognized (ctest/cdash) status values
-completed = {
-    "failed": "Completed",
-    "passed": "Completed",
-    "notrun": "No tests to run",
-}
+completed = {"failed": "Completed", "passed": "Completed", "notrun": "No tests to run"}

 log_regexp = re.compile(r"^==> \[([0-9:.\-]*)(?:, [0-9]*)?\] (.*)")
 returns_regexp = re.compile(r"\[([0-9 ,]*)\]")


@@ -32,10 +32,7 @@
         },
         "binary_cache_checksum": {
             "type": "object",
-            "properties": {
-                "hash_algorithm": {"type": "string"},
-                "hash": {"type": "string"},
-            },
+            "properties": {"hash_algorithm": {"type": "string"}, "hash": {"type": "string"}},
         },
         "buildcache_layout_version": {"type": "number"},
     },


@@ -22,7 +22,7 @@
             r"project": {"type": "string"},
             r"site": {"type": "string"},
         },
-    },
+    }
 }


@@ -21,7 +21,7 @@
         "flags": {
             "type": "object",
             "properties": {
-                "keep_werror": {"type": "string", "enum": ["all", "specific", "none"]},
+                "keep_werror": {"type": "string", "enum": ["all", "specific", "none"]}
             },
         },
         "shared_linking": {
@@ -54,12 +54,12 @@
                 ),
             },
             {"type": "string"},  # deprecated
-            ],
+            ]
         },
         "install_hash_length": {"type": "integer", "minimum": 1},
         "install_path_scheme": {"type": "string"},  # deprecated
         "build_stage": {
-            "oneOf": [{"type": "string"}, {"type": "array", "items": {"type": "string"}}],
+            "oneOf": [{"type": "string"}, {"type": "array", "items": {"type": "string"}}]
         },
         "test_stage": {"type": "string"},
         "extensions": {"type": "array", "items": {"type": "string"}},
@@ -82,7 +82,7 @@
         "concretizer": {"type": "string", "enum": ["original", "clingo"]},
         "db_lock_timeout": {"type": "integer", "minimum": 1},
         "package_lock_timeout": {
-            "anyOf": [{"type": "integer", "minimum": 1}, {"type": "null"}],
+            "anyOf": [{"type": "integer", "minimum": 1}, {"type": "null"}]
         },
         "allow_sgid": {"type": "boolean"},
         "binary_index_root": {"type": "string"},
@@ -96,7 +96,7 @@
                 "modules:[module set]:roots and is ignored",
                 "error": False,
             },
-        },
+        }
     }
 }


@@ -62,9 +62,7 @@
                 "additionalProperties": False,
             },
             # Add labels to the image
-            "labels": {
-                "type": "object",
-            },
+            "labels": {"type": "object"},
             # Add a custom extra section at the bottom of a stage
             "extra_instructions": {
                 "type": "object",
@@ -83,11 +81,7 @@
                 "help": {"type": "string"},
             },
         },
-        "docker": {
-            "type": "object",
-            "additionalProperties": False,
-            "default": {},
-        },
+        "docker": {"type": "object", "additionalProperties": False, "default": {}},
     },
 }


@@ -114,9 +114,7 @@
                 "prefix": {"type": "string", "minLength": 1},
                 "rpm": {"type": "string", "minLength": 1},
                 "hash": {"type": "string", "minLength": 1},
-                "parameters": {
-                    "type": "object",
-                },
+                "parameters": {"type": "object"},
             },
         },
     },


@@ -32,27 +32,17 @@
                 "type": "object",
                 "properties": {
                     "spec": spack.schema.spec.properties,
-                    "path": {
-                        "oneOf": [
-                            {"type": "string"},
-                            {"type": "null"},
-                        ],
-                    },
+                    "path": {"oneOf": [{"type": "string"}, {"type": "null"}]},
                     "installed": {"type": "boolean"},
-                    "ref_count": {
-                        "type": "integer",
-                        "minimum": 0,
-                    },
+                    "ref_count": {"type": "integer", "minimum": 0},
                     "explicit": {"type": "boolean"},
-                    "installation_time": {
-                        "type": "number",
-                    },
+                    "installation_time": {"type": "number"},
                 },
-            },
+            }
         },
         "version": {"type": "string"},
     },
-    },
+    }
 }


@@ -28,12 +28,7 @@
             "properties": {
                 "matrix": {
                     "type": "array",
-                    "items": {
-                        "type": "array",
-                        "items": {
-                            "type": "string",
-                        },
-                    },
+                    "items": {"type": "array", "items": {"type": "string"}},
                 },
                 "exclude": {"type": "array", "items": {"type": "string"}},
             },
@@ -61,11 +56,7 @@
                     spack.schema.merged.properties,
                     # extra environment schema properties
                     {
-                        "include": {
-                            "type": "array",
-                            "default": [],
-                            "items": {"type": "string"},
-                        },
+                        "include": {"type": "array", "default": [], "items": {"type": "string"}},
                         "develop": {
                             "type": "object",
                             "default": {},
@@ -78,7 +69,7 @@
                                     "spec": {"type": "string"},
                                     "path": {"type": "string"},
                                 },
-                            },
+                            }
                         },
                     },
                     "definitions": {


@@ -18,28 +18,16 @@
             "type": "object",
             "properties": {
                 "name": {"type": "string"},
-                "entrypoint": {
-                    "type": "array",
-                    "items": {
-                        "type": "string",
-                    },
-                },
+                "entrypoint": {"type": "array", "items": {"type": "string"}},
             },
         },
-    ],
+    ]
 }

 runner_attributes_schema_items = {
     "image": image_schema,
     "tags": {"type": "array", "items": {"type": "string"}},
-    "variables": {
-        "type": "object",
-        "patternProperties": {
-            r"[\w\d\-_\.]+": {
-                "type": "string",
-            },
-        },
-    },
+    "variables": {"type": "object", "patternProperties": {r"[\w\d\-_\.]+": {"type": "string"}}},
     "before_script": {"type": "array", "items": {"type": "string"}},
     "script": {"type": "array", "items": {"type": "string"}},
     "after_script": {"type": "array", "items": {"type": "string"}},
@@ -56,9 +44,7 @@
     "type": "object",
     "additionalProperties": False,
     "required": ["tags"],
-    "properties": {
-        "tags": {"type": "array", "items": {"type": "string"}},
-    },
+    "properties": {"tags": {"type": "array", "items": {"type": "string"}}},
 }
@@ -69,24 +55,17 @@
         "type": "array",
         "items": {
             "anyOf": [
-                {
-                    "type": "string",
-                },
+                {"type": "string"},
                 {
                     "type": "object",
                     "additionalProperties": False,
                     "required": ["name"],
                     "properties": {
-                        "name": {
-                            "type": "string",
-                        },
-                        "compiler-agnostic": {
-                            "type": "boolean",
-                            "default": False,
-                        },
+                        "name": {"type": "string"},
+                        "compiler-agnostic": {"type": "boolean", "default": False},
                     },
                 },
-            ],
+            ]
         },
     },
     "match_behavior": {"type": "string", "enum": ["first", "merge"], "default": "first"},
@@ -97,12 +76,7 @@
             "additionalProperties": False,
             "required": ["match"],
             "properties": {
-                "match": {
-                    "type": "array",
-                    "items": {
-                        "type": "string",
-                    },
-                },
+                "match": {"type": "array", "items": {"type": "string"}},
                 "remove-attributes": remove_attributes_schema,
                 "runner-attributes": runner_selector_schema,
             },
@@ -112,12 +86,7 @@
     "signing-job-attributes": runner_selector_schema,
     "rebuild-index": {"type": "boolean"},
     "broken-specs-url": {"type": "string"},
-    "broken-tests-packages": {
-        "type": "array",
-        "items": {
-            "type": "string",
-        },
-    },
+    "broken-tests-packages": {"type": "array", "items": {"type": "string"}},
 },
 )
@@ -128,12 +97,7 @@
             "additionalProperties": False,
             "required": ["mappings"],
             "properties": union_dicts(
-                core_shared_properties,
-                {
-                    "enable-artifacts-buildcache": {
-                        "type": "boolean",
-                    },
-                },
+                core_shared_properties, {"enable-artifacts-buildcache": {"type": "boolean"}}
             ),
         },
         {
@@ -141,21 +105,14 @@
             "additionalProperties": False,
             "required": ["mappings"],
             "properties": union_dicts(
-                core_shared_properties,
-                {
-                    "temporary-storage-url-prefix": {
-                        "type": "string",
-                    },
-                },
+                core_shared_properties, {"temporary-storage-url-prefix": {"type": "string"}}
             ),
         },
     ]
 }

 #: Properties for inclusion in other schemas
-properties = {
-    "gitlab-ci": gitlab_ci_properties,
-}
+properties = {"gitlab-ci": gitlab_ci_properties}

 #: Full schema with metadata
 schema = {


@@ -27,9 +27,9 @@
                 },
             },
         ]
-    },
+    }
     },
-    },
+    }
 }


@@ -110,10 +110,7 @@
         "arch_folder": {"type": "boolean"},
         "roots": {
             "type": "object",
-            "properties": {
-                "tcl": {"type": "string"},
-                "lmod": {"type": "string"},
-            },
+            "properties": {"tcl": {"type": "string"}, "lmod": {"type": "string"}},
         },
         "enable": {
             "type": "array",
@@ -165,7 +162,7 @@
                 # prefix-relative path to be inspected for existence
                 r"^[\w-]*": array_of_strings
             },
-        },
+        }
     },
     "patternProperties": {
         valid_module_set_name: {
@@ -173,7 +170,7 @@
             "default": {},
             "additionalProperties": False,
             "properties": module_config_properties,
-        },
+        }
     },
 }


@@ -61,25 +61,14 @@
                 "default": [],
                 "items": {"type": "string"},
             },  # compiler specs
-            "buildable": {
-                "type": "boolean",
-                "default": True,
-            },
+            "buildable": {"type": "boolean", "default": True},
             "permissions": {
                 "type": "object",
                 "additionalProperties": False,
                 "properties": {
-                    "read": {
-                        "type": "string",
-                        "enum": ["user", "group", "world"],
-                    },
-                    "write": {
-                        "type": "string",
-                        "enum": ["user", "group", "world"],
-                    },
-                    "group": {
-                        "type": "string",
-                    },
+                    "read": {"type": "string", "enum": ["user", "group", "world"]},
+                    "write": {"type": "string", "enum": ["user", "group", "world"]},
+                    "group": {"type": "string"},
                 },
             },
             # If 'get_full_repo' is promoted to a Package-level
@@ -87,9 +76,7 @@
             "package_attributes": {
                 "type": "object",
                 "additionalProperties": False,
-                "patternProperties": {
-                    r"\w+": {},
-                },
+                "patternProperties": {r"\w+": {}},
             },
             "providers": {
                 "type": "object",
@@ -100,14 +87,14 @@
                         "type": "array",
                         "default": [],
                         "items": {"type": "string"},
-                    },
+                    }
                 },
             },
             "variants": {
                 "oneOf": [
                     {"type": "string"},
                     {"type": "array", "items": {"type": "string"}},
-                ],
+                ]
             },
             "externals": {
                 "type": "array",
@@ -124,9 +111,9 @@
                     },
                 },
             },
-        },
+        }
     },
-    },
+    }
 }


@@ -12,12 +12,7 @@
 #: Properties for inclusion in other schemas
 properties = {
-    "projections": {
-        "type": "object",
-        "patternProperties": {
-            r"all|\w[\w-]*": {"type": "string"},
-        },
-    },
+    "projections": {"type": "object", "patternProperties": {r"all|\w[\w-]*": {"type": "string"}}}
 }


@@ -11,13 +11,7 @@
 #: Properties for inclusion in other schemas
-properties = {
-    "repos": {
-        "type": "array",
-        "default": [],
-        "items": {"type": "string"},
-    },
-}
+properties = {"repos": {"type": "array", "default": [], "items": {"type": "string"}}}

 #: Full schema with metadata


@@ -14,44 +14,26 @@
 target = {
     "oneOf": [
-        {
-            "type": "string",
-        },
+        {"type": "string"},
         {
             "type": "object",
             "additionalProperties": False,
-            "required": [
-                "name",
-                "vendor",
-                "features",
-                "generation",
-                "parents",
-            ],
+            "required": ["name", "vendor", "features", "generation", "parents"],
             "properties": {
                 "name": {"type": "string"},
                 "vendor": {"type": "string"},
-                "features": {
-                    "type": "array",
-                    "items": {"type": "string"},
-                },
+                "features": {"type": "array", "items": {"type": "string"}},
                 "generation": {"type": "integer"},
-                "parents": {
-                    "type": "array",
-                    "items": {"type": "string"},
-                },
+                "parents": {"type": "array", "items": {"type": "string"}},
             },
         },
-    ],
+    ]
 }

 arch = {
     "type": "object",
     "additionalProperties": False,
-    "properties": {
-        "platform": {},
-        "platform_os": {},
-        "target": target,
-    },
+    "properties": {"platform": {}, "platform_os": {}, "target": target},
 }

 dependencies = {
@@ -61,12 +43,9 @@
             "type": "object",
             "properties": {
                 "hash": {"type": "string"},
-                "type": {
-                    "type": "array",
-                    "items": {"type": "string"},
-                },
+                "type": {"type": "array", "items": {"type": "string"}},
             },
-        },
+        }
     },
 }
@@ -90,13 +69,7 @@
     "items": {
         "type": "object",
         "additionalProperties": False,
-        "required": [
-            "version",
-            "arch",
-            "compiler",
-            "namespace",
-            "parameters",
-        ],
+        "required": ["version", "arch", "compiler", "namespace", "parameters"],
         "properties": {
             "name": {"type": "string"},
             "hash": {"type": "string"},
@@ -104,12 +77,7 @@
             # these hashes were used on some specs prior to 0.18
             "full_hash": {"type": "string"},
             "build_hash": {"type": "string"},
-            "version": {
-                "oneOf": [
-                    {"type": "string"},
-                    {"type": "number"},
-                ],
-            },
+            "version": {"oneOf": [{"type": "string"}, {"type": "number"}]},
             "arch": arch,
             "compiler": {
                 "type": "object",
@@ -119,12 +87,7 @@
                     "version": {"type": "string"},
                 },
             },
-            "develop": {
-                "anyOf": [
-                    {"type": "boolean"},
-                    {"type": "string"},
-                ],
-            },
+            "develop": {"anyOf": [{"type": "boolean"}, {"type": "string"}]},
             "namespace": {"type": "string"},
             "parameters": {
                 "type": "object",
@@ -138,40 +101,16 @@
                 ],
                 "additionalProperties": True,
                 "properties": {
-                    "patches": {
-                        "type": "array",
-                        "items": {"type": "string"},
-                    },
-                    "cflags": {
-                        "type": "array",
-                        "items": {"type": "string"},
-                    },
-                    "cppflags": {
-                        "type": "array",
-                        "items": {"type": "string"},
-                    },
-                    "cxxflags": {
-                        "type": "array",
-                        "items": {"type": "string"},
-                    },
-                    "fflags": {
-                        "type": "array",
-                        "items": {"type": "string"},
-                    },
-                    "ldflags": {
-                        "type": "array",
-                        "items": {"type": "string"},
-                    },
-                    "ldlib": {
-                        "type": "array",
-                        "items": {"type": "string"},
-                    },
+                    "patches": {"type": "array", "items": {"type": "string"}},
+                    "cflags": {"type": "array", "items": {"type": "string"}},
+                    "cppflags": {"type": "array", "items": {"type": "string"}},
+                    "cxxflags": {"type": "array", "items": {"type": "string"}},
+                    "fflags": {"type": "array", "items": {"type": "string"}},
+                    "ldflags": {"type": "array", "items": {"type": "string"}},
+                    "ldlib": {"type": "array", "items": {"type": "string"}},
                 },
             },
-            "patches": {
-                "type": "array",
-                "items": {},
-            },
+            "patches": {"type": "array", "items": {}},
             "dependencies": dependencies,
             "build_spec": build_spec,
         },


@@ -112,8 +112,7 @@ def getter(node):
 #: Enumeration like object to mark version provenance
 version_provenance = collections.namedtuple(  # type: ignore
-    "VersionProvenance",
-    version_origin_fields,
+    "VersionProvenance", version_origin_fields
 )(**{name: i for i, name in enumerate(version_origin_fields)})

 #: Named tuple to contain information on declared versions
@@ -1138,8 +1137,7 @@ def pkg_rules(self, pkg, tests):
         # virtual preferences
         self.virtual_preferences(
-            pkg.name,
-            lambda v, p, i: self.gen.fact(fn.pkg_provider_preference(pkg.name, v, p, i)),
+            pkg.name, lambda v, p, i: self.gen.fact(fn.pkg_provider_preference(pkg.name, v, p, i))
         )

         self.package_requirement_rules(pkg)
@@ -1248,8 +1246,7 @@ def provider_defaults(self):
         )
         assert self.possible_virtuals is not None, msg
         self.virtual_preferences(
-            "all",
-            lambda v, p, i: self.gen.fact(fn.default_provider_preference(v, p, i)),
+            "all", lambda v, p, i: self.gen.fact(fn.default_provider_preference(v, p, i))
         )

     def provider_requirements(self):
@@ -1427,12 +1424,7 @@ def spec_clauses(self, *args, **kwargs):
         return clauses

     def _spec_clauses(
-        self,
-        spec,
-        body=False,
-        transitive=True,
-        expand_hashes=False,
-        concrete_build_deps=False,
+        self, spec, body=False, transitive=True, expand_hashes=False, concrete_build_deps=False
     ):
         """Return a list of clauses for a spec mandates are true.
@@ -2521,15 +2513,7 @@ def _reusable_specs(self):
                 pass
         return reusable_specs

-    def solve(
-        self,
-        specs,
-        out=None,
-        timers=False,
-        stats=False,
-        tests=False,
-        setup_only=False,
-    ):
+    def solve(self, specs, out=None, timers=False, stats=False, tests=False, setup_only=False):
         """
         Arguments:
             specs (list): List of ``Spec`` objects to solve for.
@@ -2549,14 +2533,7 @@ def solve(
         result, _, _ = self.driver.solve(setup, specs, reuse=reusable_specs, output=output)
         return result

-    def solve_in_rounds(
-        self,
-        specs,
-        out=None,
-        timers=False,
-        stats=False,
-        tests=False,
-    ):
+    def solve_in_rounds(self, specs, out=None, timers=False, stats=False, tests=False):
         """Solve for a stable model of specs in multiple rounds.

         This relaxes the assumption of solve that everything must be consistent and


@@ -229,6 +229,7 @@ def __init__(self, spec_or_platform_tuple=(None, None, None)):
         Otherwise information on platform, OS and target should be
         passed in either as a spec string or as a tuple.
         """
+
         # If the argument to __init__ is a spec string, parse it
         # and construct an ArchSpec
         def _string_or_none(s):
@@ -731,7 +732,6 @@ def __new__(cls, value, **kwargs):
 class FlagMap(lang.HashableMap):
-
     __slots__ = ("spec",)

     def __init__(self, spec):
@@ -1221,7 +1221,6 @@ def copy(self, *args, **kwargs):
 @lang.lazy_lexicographic_ordering(set_hash=False)
 class Spec(object):
-
     #: Cache for spec's prefix, computed lazily in the corresponding property
     _prefix = None
@@ -1550,12 +1549,7 @@ def _add_dependency(self, spec: "Spec", *, deptypes: dp.DependencyArgument):
                 "Cannot depend on incompatible specs '%s' and '%s'" % (dspec.spec, spec)
             )

-    def add_dependency_edge(
-        self,
-        dependency_spec: "Spec",
-        *,
-        deptypes: dp.DependencyArgument,
-    ):
+    def add_dependency_edge(self, dependency_spec: "Spec", *, deptypes: dp.DependencyArgument):
         """Add a dependency edge to this spec.

         Args:
@@ -2241,7 +2235,6 @@ def spec_and_dependency_types(s):
         # Recurse on dependencies
         for s, s_dependencies in dep_like.items():
-
             if isinstance(s, str):
                 dag_node, dependency_types = name_and_dependency_types(s)
             else:
@@ -2897,7 +2890,6 @@ def flat_dependencies(self, **kwargs):
         try:
             deptree = self.traverse(root=False)
             for spec in deptree:
-
                 if spec.name not in flat_deps:
                     if copy:
                         spec = spec.copy(deps=False)
@@ -5245,7 +5237,6 @@ def __init__(self, spec, matches):
         match_fmt_custom = '{0}. "{1}" conflicts with "{2}" [{3}]\n'
         for idx, (s, c, w, msg) in enumerate(matches):
-
             if s not in visited:
                 visited.add(s)
                 long_message += "List of matching conflicts for spec:\n\n"


@@ -416,11 +416,7 @@ def test_spec_needs_rebuild(monkeypatch, tmpdir):
     assert rebuild


-@pytest.mark.usefixtures(
-    "install_mockery_mutable_config",
-    "mock_packages",
-    "mock_fetch",
-)
+@pytest.mark.usefixtures("install_mockery_mutable_config", "mock_packages", "mock_fetch")
 def test_generate_index_missing(monkeypatch, tmpdir, mutable_config):
     """Ensure spack buildcache index only reports available packages"""


@@ -67,18 +67,12 @@ def test_raising_exception_if_bootstrap_disabled(mutable_config):
 def test_raising_exception_module_importable():
-    with pytest.raises(
-        ImportError,
-        match='cannot bootstrap the "asdf" Python module',
-    ):
+    with pytest.raises(ImportError, match='cannot bootstrap the "asdf" Python module'):
         spack.bootstrap.core.ensure_module_importable_or_raise("asdf")


 def test_raising_exception_executables_in_path():
-    with pytest.raises(
-        RuntimeError,
-        match="cannot bootstrap any of the asdf, fdsa executables",
-    ):
+    with pytest.raises(RuntimeError, match="cannot bootstrap any of the asdf, fdsa executables"):
         spack.bootstrap.core.ensure_executables_in_path_or_raise(["asdf", "fdsa"], "python")


@@ -20,7 +20,6 @@
 def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmpdir):
     with tmpdir.as_cwd():
-
         spec = spack.spec.Spec("trivial-install-test-package").concretized()
         install(str(spec))


@@ -17,11 +17,7 @@
 import spack.package_base
 import spack.spec
 import spack.util.spack_yaml as syaml
-from spack.build_environment import (
-    _static_to_shared_library,
-    determine_number_of_jobs,
-    dso_suffix,
-)
+from spack.build_environment import _static_to_shared_library, determine_number_of_jobs, dso_suffix
 from spack.paths import build_env_path
 from spack.util.environment import EnvironmentModifications
 from spack.util.executable import Executable
@@ -160,7 +156,6 @@ def test_static_to_shared_library(build_environment):
 @pytest.mark.regression("8345")
 @pytest.mark.usefixtures("config", "mock_packages")
 def test_cc_not_changed_by_modules(monkeypatch, working_env):
-
     s = spack.spec.Spec("cmake")
     s.concretize()
     pkg = s.package


@@ -63,32 +63,12 @@ def builder_test_repository():
         # Generate custom phases using a GenericBuilder
         (
             "custom-phases",
-            [
-                ("CONFIGURE_CALLED", "1"),
-                ("INSTALL_CALLED", "1"),
-                ("LAST_PHASE", "INSTALL"),
-            ],
+            [("CONFIGURE_CALLED", "1"), ("INSTALL_CALLED", "1"), ("LAST_PHASE", "INSTALL")],
         ),
         # Old-style package, with phase defined in base builder
-        (
-            "old-style-autotools@1.0",
-            [
-                ("AFTER_AUTORECONF_1_CALLED", "1"),
-            ],
-        ),
-        (
-            "old-style-autotools@2.0",
-            [
-                ("AFTER_AUTORECONF_2_CALLED", "1"),
-            ],
-        ),
-        (
-            "old-style-custom-phases",
-            [
-                ("AFTER_CONFIGURE_CALLED", "1"),
-                ("TEST_VALUE", "0"),
-            ],
-        ),
+        ("old-style-autotools@1.0", [("AFTER_AUTORECONF_1_CALLED", "1")]),
+        ("old-style-autotools@2.0", [("AFTER_AUTORECONF_2_CALLED", "1")]),
+        ("old-style-custom-phases", [("AFTER_CONFIGURE_CALLED", "1"), ("TEST_VALUE", "0")]),
     ],
 )
 @pytest.mark.usefixtures("builder_test_repository", "config")


@@ -514,7 +514,6 @@ def test_ccld_with_system_dirs(wrapper_environment):
         SPACK_RPATH_DIRS="xlib:ylib:zlib",
         SPACK_LINK_DIRS="xlib:ylib:zlib",
     ):
-
         sys_path_args = [
             "-I/usr/include",
             "-L/usr/local/lib",
@@ -551,7 +550,6 @@ def test_ccld_with_system_dirs_isystem(wrapper_environment):
         SPACK_RPATH_DIRS="xlib:ylib:zlib",
         SPACK_LINK_DIRS="xlib:ylib:zlib",
     ):
-
         sys_path_args = [
             "-isystem",
             "/usr/include",
@@ -717,15 +715,9 @@ def test_keep_and_replace(wrapper_environment):
     werror_specific = ["-Werror=meh"]
     werror = ["-Werror"]
     werror_all = werror_specific + werror
-    with set_env(
-        SPACK_COMPILER_FLAGS_KEEP="",
-        SPACK_COMPILER_FLAGS_REPLACE="-Werror*|",
-    ):
+    with set_env(SPACK_COMPILER_FLAGS_KEEP="", SPACK_COMPILER_FLAGS_REPLACE="-Werror*|"):
         check_args_contents(cc, test_args + werror_all, ["-Wl,--end-group"], werror_all)
-    with set_env(
-        SPACK_COMPILER_FLAGS_KEEP="-Werror=*",
-        SPACK_COMPILER_FLAGS_REPLACE="-Werror*|",
-    ):
+    with set_env(SPACK_COMPILER_FLAGS_KEEP="-Werror=*", SPACK_COMPILER_FLAGS_REPLACE="-Werror*|"):
         check_args_contents(cc, test_args + werror_all, werror_specific, werror)
     with set_env(
         SPACK_COMPILER_FLAGS_KEEP="-Werror=*",


@@ -84,7 +84,6 @@ def getcode(self):
         return self._resp_code

     def read(self, length=None):
-
         if len(self._content) <= 0:
             return None
@@ -103,11 +102,7 @@ def read(self, length=None):
 def test_download_and_extract_artifacts(tmpdir, monkeypatch, working_env):
-    os.environ.update(
-        {
-            "GITLAB_PRIVATE_TOKEN": "faketoken",
-        }
-    )
+    os.environ.update({"GITLAB_PRIVATE_TOKEN": "faketoken"})

     url = "https://www.nosuchurlexists.itsfake/artifacts.zip"
     working_dir = os.path.join(tmpdir.strpath, "repro")
@@ -234,24 +229,14 @@ def __call__(self, *args, **kwargs):
     assert "Unable to merge {0}".format(c1) in err


-@pytest.mark.parametrize(
-    "obj, proto",
-    [
-        ({}, []),
-    ],
-)
+@pytest.mark.parametrize("obj, proto", [({}, [])])
 def test_ci_opt_argument_checking(obj, proto):
     """Check that matches() and subkeys() return False when `proto` is not a dict."""
     assert not ci_opt.matches(obj, proto)
     assert not ci_opt.subkeys(obj, proto)


-@pytest.mark.parametrize(
-    "yaml",
-    [
-        {"extends": 1},
-    ],
-)
+@pytest.mark.parametrize("yaml", [{"extends": 1}])
 def test_ci_opt_add_extends_non_sequence(yaml):
     """Check that add_extends() exits if 'extends' is not a sequence."""
     yaml_copy = yaml.copy()
@@ -263,10 +248,7 @@ def test_ci_workarounds():
     fake_root_spec = "x" * 544
     fake_spack_ref = "x" * 40

-    common_variables = {
-        "SPACK_COMPILER_ACTION": "NONE",
-        "SPACK_IS_PR_PIPELINE": "False",
-    }
+    common_variables = {"SPACK_COMPILER_ACTION": "NONE", "SPACK_IS_PR_PIPELINE": "False"}

     common_before_script = [
         'git clone "https://github.com/spack/spack"',
@@ -307,7 +289,6 @@ def make_build_job(name, deps, stage, use_artifact_buildcache, optimize, use_dep
         return {name: result}

     def make_rebuild_index_job(use_artifact_buildcache, optimize, use_dependencies):
-
         result = {
             "stage": "stage-rebuild-index",
             "script": "spack buildcache update-index --mirror-url s3://mirror",


@@ -35,7 +35,6 @@ def test_audit_configs(mutable_config, mock_packages):
 def test_audit_packages_https(mutable_config, mock_packages):
-
     # Without providing --all should fail
     audit("packages-https", fail_on_error=False)

     # The mock configuration has duplicate definitions of some compilers


@@ -23,14 +23,7 @@ def test_error_when_multiple_specs_are_given():
     assert "only takes one spec" in output


-@pytest.mark.parametrize(
-    "args",
-    [
-        ("--", "/bin/bash", "-c", "echo test"),
-        ("--",),
-        (),
-    ],
-)
+@pytest.mark.parametrize("args", [("--", "/bin/bash", "-c", "echo test"), ("--",), ()])
 @pytest.mark.usefixtures("config", "mock_packages", "working_env")
 def test_build_env_requires_a_spec(args):
     output = build_env(*args, fail_on_error=False)


@@ -258,12 +258,7 @@ def _validate_needs_graph(yaml_contents, needs_graph, artifacts):
 def test_ci_generate_bootstrap_gcc(
-    tmpdir,
-    working_env,
-    mutable_mock_env_path,
-    install_mockery,
-    mock_packages,
-    ci_base_environment,
+    tmpdir, working_env, mutable_mock_env_path, install_mockery, mock_packages, ci_base_environment
 ):
     """Test that we can bootstrap a compiler and use it as the
     compiler for a spec in the environment"""
@@ -300,21 +295,10 @@ def test_ci_generate_bootstrap_gcc(
     needs_graph = {
         "(bootstrap) conflict": [],
-        "(bootstrap) gcc": [
-            "(bootstrap) conflict",
-        ],
-        "(specs) libelf": [
-            "(bootstrap) gcc",
-        ],
-        "(specs) libdwarf": [
-            "(bootstrap) gcc",
-            "(specs) libelf",
-        ],
-        "(specs) dyninst": [
-            "(bootstrap) gcc",
-            "(specs) libelf",
-            "(specs) libdwarf",
-        ],
+        "(bootstrap) gcc": ["(bootstrap) conflict"],
+        "(specs) libelf": ["(bootstrap) gcc"],
+        "(specs) libdwarf": ["(bootstrap) gcc", "(specs) libelf"],
+        "(specs) dyninst": ["(bootstrap) gcc", "(specs) libelf", "(specs) libdwarf"],
     }

     with tmpdir.as_cwd():
@@ -331,12 +315,7 @@ def test_ci_generate_bootstrap_gcc(
 def test_ci_generate_bootstrap_artifacts_buildcache(
-    tmpdir,
-    working_env,
-    mutable_mock_env_path,
-    install_mockery,
-    mock_packages,
-    ci_base_environment,
+    tmpdir, working_env, mutable_mock_env_path, install_mockery, mock_packages, ci_base_environment
 ):
     """Test that we can bootstrap a compiler when artifacts buildcache
     is turned on"""
@@ -373,18 +352,9 @@ def test_ci_generate_bootstrap_artifacts_buildcache(
     needs_graph = {
         "(bootstrap) conflict": [],
-        "(bootstrap) gcc": [
-            "(bootstrap) conflict",
-        ],
-        "(specs) libelf": [
-            "(bootstrap) gcc",
-            "(bootstrap) conflict",
-        ],
-        "(specs) libdwarf": [
-            "(bootstrap) gcc",
-            "(bootstrap) conflict",
-            "(specs) libelf",
-        ],
+        "(bootstrap) gcc": ["(bootstrap) conflict"],
+        "(specs) libelf": ["(bootstrap) gcc", "(bootstrap) conflict"],
+        "(specs) libdwarf": ["(bootstrap) gcc", "(bootstrap) conflict", "(specs) libelf"],
         "(specs) dyninst": [
             "(bootstrap) gcc",
             "(bootstrap) conflict",
@@ -447,11 +417,7 @@ def test_ci_generate_with_cdash_token(
     mock_binary_index,
 ):
     """Make sure we it doesn't break if we configure cdash"""
-    os.environ.update(
-        {
-            "SPACK_CDASH_AUTH_TOKEN": "notreallyatokenbutshouldnotmatter",
-        }
-    )
+    os.environ.update({"SPACK_CDASH_AUTH_TOKEN": "notreallyatokenbutshouldnotmatter"})
     filename = str(tmpdir.join("spack.yaml"))
     with open(filename, "w") as f:
         f.write(
@@ -598,12 +564,7 @@ def test_ci_generate_with_custom_scripts(
 def test_ci_generate_pkg_with_deps(
-    tmpdir,
-    working_env,
-    mutable_mock_env_path,
-    install_mockery,
-    mock_packages,
-    ci_base_environment,
+    tmpdir, working_env, mutable_mock_env_path, install_mockery, mock_packages, ci_base_environment
 ):
     """Test pipeline generation for a package w/ dependencies"""
     filename = str(tmpdir.join("spack.yaml"))
@@ -670,10 +631,7 @@ def test_ci_generate_for_pr_pipeline(
     rebuilding the mirror index, even if that job is specifically
     configured"""
     os.environ.update(
-        {
-            "SPACK_PIPELINE_TYPE": "spack_pull_request",
-            "SPACK_PR_BRANCH": "fake-test-branch",
-        }
+        {"SPACK_PIPELINE_TYPE": "spack_pull_request", "SPACK_PR_BRANCH": "fake-test-branch"}
     )
     filename = str(tmpdir.join("spack.yaml"))
     with open(filename, "w") as f:
@@ -928,7 +886,6 @@ def test_ci_rebuild_mock_success(
     monkeypatch,
     broken_tests,
 ):
-
     pkg_name = "archive-files"
     rebuild_env = create_rebuild_env(tmpdir, pkg_name, broken_tests)
@@ -1129,11 +1086,7 @@ def test_ci_generate_mirror_override(
     """Ensure that protected pipelines using --buildcache-destination do not
     skip building specs that are not in the override mirror when they are
     found in the main mirror."""
-    os.environ.update(
-        {
-            "SPACK_PIPELINE_TYPE": "spack_protected_branch",
-        }
-    )
+    os.environ.update({"SPACK_PIPELINE_TYPE": "spack_protected_branch"})

     working_dir = tmpdir.join("working_dir")
@@ -1727,12 +1680,7 @@ def fake_get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False
         if spec.name == "gcc":
             return []
         else:
-            return [
-                {
-                    "spec": spec,
-                    "mirror_url": mirror_url,
-                }
-            ]
+            return [{"spec": spec, "mirror_url": mirror_url}]

     with tmpdir.as_cwd():
         env_cmd("create", "test", "./spack.yaml")
@@ -1766,12 +1714,7 @@ def fake_get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False
             # not otherwise need to be rebuilt (thanks to DAG pruning), they
             # both end up in the generated pipeline because the compiler they
            # depend on is bootstrapped, and *does* need to be rebuilt.
-            needs_graph = {
-                "(bootstrap) gcc": [],
-                "(specs) b": [
-                    "(bootstrap) gcc",
-                ],
-            }
+            needs_graph = {"(bootstrap) gcc": [], "(specs) b": ["(bootstrap) gcc"]}
             _validate_needs_graph(new_yaml_contents, needs_graph, False)
@@ -1788,11 +1731,7 @@ def test_ci_generate_prune_untouched(
 ):
     """Test pipeline generation with pruning works to eliminate
     specs that were not affected by a change"""
-    os.environ.update(
-        {
-            "SPACK_PRUNE_UNTOUCHED": "TRUE",  # enables pruning of untouched specs
-        }
-    )
+    os.environ.update({"SPACK_PRUNE_UNTOUCHED": "TRUE"})  # enables pruning of untouched specs
     mirror_url = "https://my.fake.mirror"
     filename = str(tmpdir.join("spack.yaml"))
     with open(filename, "w") as f:
@@ -2216,14 +2155,7 @@ def fake_download_and_extract_artifacts(url, work_dir):
 @pytest.mark.parametrize(
-    "subcmd",
-    [
-        (""),
-        ("generate"),
-        ("rebuild-index"),
-        ("rebuild"),
-        ("reproduce-build"),
-    ],
+    "subcmd", [(""), ("generate"), ("rebuild-index"), ("rebuild"), ("reproduce-build")]
 )
 def test_ci_help(subcmd, capsys):
     """Make sure `spack ci` --help describes the (sub)command help."""


@@ -22,7 +22,6 @@
 @pytest.fixture()
 def mock_calls_for_clean(monkeypatch):
-
     counts = {}

     class Counter(object):
@@ -61,7 +60,6 @@ def __call__(self, *args, **kwargs):
     ],
 )
 def test_function_calls(command_line, effects, mock_calls_for_clean):
-
     # Call the command with the supplied command line
     clean(command_line)


@@ -195,7 +195,7 @@ def test_update_completion_arg(tmpdir, monkeypatch):
             "format": "bash",
             "header": str(mock_infile),
             "update": str(mock_bashfile),
-        },
+        }
     }

     # make a mock completion file missing the --update-completion argument


@@ -271,10 +271,7 @@ def test_compiler_find_path_order(no_compilers_yaml, working_env, clangdir):
         shutil.copy("gfortran-8", "first_in_path/gfortran-8")

     # the first_in_path folder should be searched first
-    os.environ["PATH"] = "{0}:{1}".format(
-        str(clangdir.join("first_in_path")),
-        str(clangdir),
-    )
+    os.environ["PATH"] = "{0}:{1}".format(str(clangdir.join("first_in_path")), str(clangdir))

     compiler("find", "--scope=site")

Some files were not shown because too many files have changed in this diff.