Apply black 2024 style to Spack (#42317)

Adam J. Stewart 2024-01-27 16:15:35 +01:00 committed by GitHub
parent 1865e228c4
commit 2b51980904
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
26 changed files with 85 additions and 107 deletions
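
Most of the hunks below are mechanical consequences of black 24.1's updated stable style: over-long assignments are now split on the right-hand side instead of inside the left-hand annotation or subscript, conditional expressions that span multiple lines are wrapped in their own parentheses, blank lines between a class statement and its docstring are dropped, and a trailing comma is kept on the last element of a collection even when an inline comment follows it. The sketch below illustrates the first two shapes; it is not code from this commit, and every name in it is made up:

    from typing import Optional


    def build_binary_cache_for_the_currently_active_environment() -> Optional[dict]:
        # Hypothetical helper whose long name makes the assignment below overflow
        # black's default 88-column limit.
        return {}


    # black 23.x would have split the annotated target itself; black 24.1 keeps the
    # target on one line and parenthesizes the right-hand side instead.
    GLOBAL_BINARY_BUILD_CACHE_FOR_THE_ACTIVE_ENVIRONMENT: Optional[dict] = (
        build_binary_cache_for_the_currently_active_environment()
    )


    def std_flag(supports_cxx14: bool, cxx14: str, cxx11: str) -> str:
        # When a conditional expression cannot fit on one line, black 24.1 wraps it
        # in parentheses (a short expression like this one would simply be collapsed).
        return (
            cxx14
            if supports_cxx14
            else cxx11
        )

The remaining hunks are the same transformations applied to Spack's own sources, tests, and package recipes.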

View file

@@ -1,4 +1,4 @@
-black==23.12.1
+black==24.1.0
 clingo==5.6.2
 flake8==7.0.0
 isort==5.13.2

View file

@@ -146,7 +146,7 @@ def mypy_root_spec() -> str:
 def black_root_spec() -> str:
     """Return the root spec used to bootstrap black"""
-    return _root_spec("py-black@:23.1.0")
+    return _root_spec("py-black@:24.1.0")
 
 
 def flake8_root_spec() -> str:

View file

@@ -218,7 +218,7 @@ def pset_components(self):
             "+inspector": " intel-inspector",
             "+itac": " intel-itac intel-ta intel-tc" " intel-trace-analyzer intel-trace-collector",
             # Trace Analyzer and Collector
-            "+vtune": " intel-vtune"
+            "+vtune": " intel-vtune",
             # VTune, ..-profiler since 2020, ..-amplifier before
         }.items():
             if variant in self.spec:

View file

@@ -35,9 +35,9 @@ def _misc_cache():
 
 
 #: Spack's cache for small data
-MISC_CACHE: Union[
-    spack.util.file_cache.FileCache, llnl.util.lang.Singleton
-] = llnl.util.lang.Singleton(_misc_cache)
+MISC_CACHE: Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton] = (
+    llnl.util.lang.Singleton(_misc_cache)
+)
 
 
 def fetch_cache_location():
@@ -91,6 +91,6 @@ def symlink(self, mirror_ref):
 
 
 #: Spack's local cache for downloaded source archives
-FETCH_CACHE: Union[
-    spack.fetch_strategy.FsCache, llnl.util.lang.Singleton
-] = llnl.util.lang.Singleton(_fetch_cache)
+FETCH_CACHE: Union[spack.fetch_strategy.FsCache, llnl.util.lang.Singleton] = (
+    llnl.util.lang.Singleton(_fetch_cache)
+)

View file

@@ -7,9 +7,7 @@
 get_job_name = lambda needs_entry: (
     needs_entry.get("job")
     if (isinstance(needs_entry, collections.abc.Mapping) and needs_entry.get("artifacts", True))
-    else needs_entry
-    if isinstance(needs_entry, str)
-    else None
+    else needs_entry if isinstance(needs_entry, str) else None
 )

View file

@@ -292,9 +292,11 @@ def head(n, span_id, title, anchor=None):
     out.write("<dd>\n")
     out.write(
         ", ".join(
-            d
-            if d not in pkg_names
-            else '<a class="reference internal" href="#%s">%s</a>' % (d, d)
+            (
+                d
+                if d not in pkg_names
+                else '<a class="reference internal" href="#%s">%s</a>' % (d, d)
+            )
             for d in deps
         )
     )

View file

@@ -826,7 +826,6 @@ def __init__(self, spec):
 class InsufficientArchitectureInfoError(spack.error.SpackError):
-
     """Raised when details on architecture cannot be collected from the
     system"""

View file

@@ -697,7 +697,6 @@ def __str__(self):
 @fetcher
 class GitFetchStrategy(VCSFetchStrategy):
-
     """
     Fetch strategy that gets source code from a git repository.
 
     Use like this in a package:
@@ -1089,7 +1088,6 @@ def __str__(self):
 @fetcher
 class SvnFetchStrategy(VCSFetchStrategy):
-
     """Fetch strategy that gets source code from a subversion repository.
 
     Use like this in a package:
@@ -1184,7 +1182,6 @@ def __str__(self):
 @fetcher
 class HgFetchStrategy(VCSFetchStrategy):
-
     """
     Fetch strategy that gets source code from a Mercurial repository.
 
     Use like this in a package:

View file

@@ -91,9 +91,9 @@ def view_copy(src: str, dst: str, view, spec: Optional[spack.spec.Spec] = None):
         prefix_to_projection[spack.store.STORE.layout.root] = view._root
 
         # This is vestigial code for the *old* location of sbang.
-        prefix_to_projection[
-            "#!/bin/bash {0}/bin/sbang".format(spack.paths.spack_root)
-        ] = sbang.sbang_shebang_line()
+        prefix_to_projection["#!/bin/bash {0}/bin/sbang".format(spack.paths.spack_root)] = (
+            sbang.sbang_shebang_line()
+        )
 
         spack.relocate.relocate_text(files=[dst], prefixes=prefix_to_projection)

View file

@@ -199,9 +199,11 @@ def get_stage_root():
 def _mirror_roots():
     mirrors = spack.config.get("mirrors")
     return [
-        sup.substitute_path_variables(root)
-        if root.endswith(os.sep)
-        else sup.substitute_path_variables(root) + os.sep
+        (
+            sup.substitute_path_variables(root)
+            if root.endswith(os.sep)
+            else sup.substitute_path_variables(root) + os.sep
+        )
         for root in mirrors.values()
     ]

View file

@@ -98,13 +98,9 @@ def test_url_list(mock_packages):
 def test_url_summary(mock_packages):
     """Test the URL summary command."""
     # test url_summary, the internal function that does the work
-    (
-        total_urls,
-        correct_names,
-        correct_versions,
-        name_count_dict,
-        version_count_dict,
-    ) = url_summary(None)
+    (total_urls, correct_names, correct_versions, name_count_dict, version_count_dict) = (
+        url_summary(None)
+    )
 
     assert 0 < correct_names <= sum(name_count_dict.values()) <= total_urls
     assert 0 < correct_versions <= sum(version_count_dict.values()) <= total_urls

View file

@@ -103,9 +103,9 @@ def hello_world_with_module_in_root(extension_creator):
     @contextlib.contextmanager
     def _hwwmir(extension_name=None):
-        with extension_creator(
-            extension_name
-        ) if extension_name else extension_creator() as extension:
+        with (
+            extension_creator(extension_name) if extension_name else extension_creator()
+        ) as extension:
             # Note that the namespace of the extension is derived from the
             # fixture.
             extension.add_command(

View file

@@ -422,7 +422,7 @@ def test_xl_version_detection(version_str, expected_version):
         ("pgi", "19.1"),
         ("pgi", "19.1a"),
         ("intel", "9.0.0"),
-        ("intel", "0.0.0-foobar")
+        ("intel", "0.0.0-foobar"),
         # ('oneapi', '2021.1'),
         # ('oneapi', '2021.1-foobar')
     ],

View file

@@ -60,13 +60,9 @@ def test_spec_installed_upstream(
     upstream_and_downstream_db, mock_custom_repository, config, monkeypatch
 ):
     """Test whether Spec.installed_upstream() works."""
-    (
-        upstream_write_db,
-        upstream_db,
-        upstream_layout,
-        downstream_db,
-        downstream_layout,
-    ) = upstream_and_downstream_db
+    (upstream_write_db, upstream_db, upstream_layout, downstream_db, downstream_layout) = (
+        upstream_and_downstream_db
+    )
 
     # a known installed spec should say that it's installed
     with spack.repo.use_repositories(mock_custom_repository):
@@ -90,13 +86,9 @@ def test_spec_installed_upstream(
 @pytest.mark.usefixtures("config")
 def test_installed_upstream(upstream_and_downstream_db, tmpdir):
-    (
-        upstream_write_db,
-        upstream_db,
-        upstream_layout,
-        downstream_db,
-        downstream_layout,
-    ) = upstream_and_downstream_db
+    (upstream_write_db, upstream_db, upstream_layout, downstream_db, downstream_layout) = (
+        upstream_and_downstream_db
+    )
 
     builder = spack.repo.MockRepositoryBuilder(tmpdir.mkdir("mock.repo"))
     builder.add_package("x")
@@ -132,13 +124,9 @@ def test_installed_upstream(upstream_and_downstream_db, tmpdir):
 @pytest.mark.usefixtures("config")
 def test_removed_upstream_dep(upstream_and_downstream_db, tmpdir):
-    (
-        upstream_write_db,
-        upstream_db,
-        upstream_layout,
-        downstream_db,
-        downstream_layout,
-    ) = upstream_and_downstream_db
+    (upstream_write_db, upstream_db, upstream_layout, downstream_db, downstream_layout) = (
+        upstream_and_downstream_db
+    )
 
     builder = spack.repo.MockRepositoryBuilder(tmpdir.mkdir("mock.repo"))
     builder.add_package("z")
@@ -168,13 +156,9 @@ def test_add_to_upstream_after_downstream(upstream_and_downstream_db, tmpdir):
     DB. When a package is recorded as installed in both, the results should
     refer to the downstream DB.
     """
-    (
-        upstream_write_db,
-        upstream_db,
-        upstream_layout,
-        downstream_db,
-        downstream_layout,
-    ) = upstream_and_downstream_db
+    (upstream_write_db, upstream_db, upstream_layout, downstream_db, downstream_layout) = (
+        upstream_and_downstream_db
+    )
 
     builder = spack.repo.MockRepositoryBuilder(tmpdir.mkdir("mock.repo"))
     builder.add_package("x")

View file

@@ -444,12 +444,9 @@ def test_composite_stage_with_noexpand_resource(
     @pytest.mark.disable_clean_stage_check
     def test_composite_stage_with_expand_resource(self, composite_stage_with_expanding_resource):
-        (
-            composite_stage,
-            root_stage,
-            resource_stage,
-            mock_resource,
-        ) = composite_stage_with_expanding_resource
+        (composite_stage, root_stage, resource_stage, mock_resource) = (
+            composite_stage_with_expanding_resource
+        )
 
         composite_stage.create()
         composite_stage.fetch()
@@ -474,12 +471,9 @@ def test_composite_stage_with_expand_resource_default_placement(
         directory.
         """
-        (
-            composite_stage,
-            root_stage,
-            resource_stage,
-            mock_resource,
-        ) = composite_stage_with_expanding_resource
+        (composite_stage, root_stage, resource_stage, mock_resource) = (
+            composite_stage_with_expanding_resource
+        )
 
         resource_stage.resource.placement = None

View file

@@ -50,8 +50,8 @@ def test_gzip_compressed_tarball_is_reproducible(tmpdir):
     # Expected mode for non-dirs is 644 if not executable, 755 if executable. Better to compute
     # that as we don't know the umask of the user running the test.
-    expected_mode = (
-        lambda name: 0o755 if Path(*name.split("/")).lstat().st_mode & 0o100 else 0o644
+    expected_mode = lambda name: (
+        0o755 if Path(*name.split("/")).lstat().st_mode & 0o100 else 0o644
     )
 
     # Verify the tarball contents

View file

@@ -144,9 +144,11 @@ def test_run_ctest(self):
             f"-DCMAKE_CXX_COMPILER={os.environ['CXX']}",
             self.define(
                 "Kokkos_ROOT",
-                self.spec["kokkos"].prefix
-                if "~trilinos" in self.spec
-                else self.spec["trilinos"].prefix,
+                (
+                    self.spec["kokkos"].prefix
+                    if "~trilinos" in self.spec
+                    else self.spec["trilinos"].prefix
+                ),
             ),
         ]
         cmake = which(self.spec["cmake"].prefix.bin.cmake)

View file

@@ -9,7 +9,6 @@
 class Bricks(CMakePackage):
-
     """Bricks is a data layout and code generation framework,
     enabling performance-portable stencil computations across
     a multitude of architectures."""

View file

@@ -169,9 +169,11 @@ def cmake_args(self):
             ]
         elif mkl_provider == "intel-mkl":
             args += [
-                self.define("DLAF_WITH_MKL", True)
-                if spec.version <= Version("0.3")
-                else self.define("DLAF_WITH_MKL_LEGACY", True),
+                (
+                    self.define("DLAF_WITH_MKL", True)
+                    if spec.version <= Version("0.3")
+                    else self.define("DLAF_WITH_MKL_LEGACY", True)
+                ),
                 self.define("MKL_LAPACK_TARGET", f"mkl::mkl_intel_32bit_{mkl_threads}_dyn"),
             ]

View file

@@ -9,7 +9,6 @@
 class Elbencho(MakefilePackage):
-
     """
     Elbencho storage benchmark
     """

View file

@@ -967,9 +967,9 @@ def cmake_args(self):
                 "openmp",
             ]
             runtimes.sort(
-                key=lambda x: runtimes_order.index(x)
-                if x in runtimes_order
-                else len(runtimes_order)
+                key=lambda x: (
+                    runtimes_order.index(x) if x in runtimes_order else len(runtimes_order)
+                )
             )
             cmake_args.extend(
                 [

View file

@@ -7,7 +7,6 @@
 class Mpip(AutotoolsPackage):
-
     """mpiP: Lightweight, Scalable MPI Profiling"""
 
     homepage = "https://software.llnl.gov/mpiP/"

View file

@@ -135,16 +135,18 @@ def write_makefile_inc(self):
                 [
                     "IMETIS = -I%s" % self.spec["parmetis"].prefix.include,
                     (
-                        "LMETIS = -L%s -l%s -L%s -l%s"
-                        % (
-                            self.spec["parmetis"].prefix.lib,
-                            "parmetis",
-                            self.spec["metis"].prefix.lib,
-                            "metis",
+                        (
+                            "LMETIS = -L%s -l%s -L%s -l%s"
+                            % (
+                                self.spec["parmetis"].prefix.lib,
+                                "parmetis",
+                                self.spec["metis"].prefix.lib,
+                                "metis",
+                            )
                         )
-                    )
-                    if not shared
-                    else "LMETIS =",
+                        if not shared
+                        else "LMETIS ="
+                    ),
                 ]
             )
@@ -153,9 +155,11 @@ def write_makefile_inc(self):
             makefile_conf.extend(
                 [
                     "IMETIS = -I%s" % self.spec["metis"].prefix.include,
-                    ("LMETIS = -L%s -l%s" % (self.spec["metis"].prefix.lib, "metis"))
-                    if not shared
-                    else "LMETIS =",
+                    (
+                        ("LMETIS = -L%s -l%s" % (self.spec["metis"].prefix.lib, "metis"))
+                        if not shared
+                        else "LMETIS ="
+                    ),
                 ]
             )

View file

@@ -58,7 +58,7 @@ class Npb(MakefilePackage):
         # ~4X size increase going from one class to the next
         "D",
         "E",
-        "F"  # large test problems
+        "F",  # large test problems
        # ~16X size increase from each of the previous classes
    )

View file

@@ -10,7 +10,6 @@
 class Openvdb(CMakePackage):
-
     """OpenVDB - a sparse volume data format."""
 
     homepage = "https://github.com/AcademySoftwareFoundation/openvdb"

View file

@@ -649,9 +649,11 @@ def define_enable(suffix, value=None):
         options.append(
             define(
                 "Trilinos_CXX11_FLAGS",
-                self.compiler.cxx14_flag
-                if spec.variants["cxxstd"].value == "14"
-                else self.compiler.cxx11_flag,
+                (
+                    self.compiler.cxx14_flag
+                    if spec.variants["cxxstd"].value == "14"
+                    else self.compiler.cxx11_flag
+                ),
             )
         )