black: clean up noqa comments from most of the code

Many `noqa` comments in the code are no longer necessary now that the column limit is 99
characters. Others can easily be eliminated, and still more can simply be made more
specific when they are not related to line length.

The only remaining E501 suppressions in the code are in the tests for `spack.util.path` and
the tests for `spack style`.
This commit is contained in:
Todd Gamblin 2022-07-23 20:19:29 -07:00
parent 3fa090f490
commit 156af2a60a
45 changed files with 178 additions and 111 deletions

View file

@ -8,7 +8,7 @@
def getpywin(): def getpywin():
try: try:
import win32con # noqa import win32con # noqa: F401
except ImportError: except ImportError:
subprocess.check_call( subprocess.check_call(
[sys.executable, "-m", "pip", "-q", "install", "--upgrade", "pip"]) [sys.executable, "-m", "pip", "-q", "install", "--upgrade", "pip"])

View file

@ -82,7 +82,7 @@ except ImportError:
raise raise
try: try:
os.remove(argparse_pyc) os.remove(argparse_pyc)
import argparse # noqa import argparse # noqa: F401
except Exception: except Exception:
msg = ('The file\n\n\t{0}\n\nis corrupted and cannot be deleted by Spack. ' msg = ('The file\n\n\t{0}\n\nis corrupted and cannot be deleted by Spack. '
'Either delete it manually or ask some administrator to ' 'Either delete it manually or ask some administrator to '
@ -91,7 +91,7 @@ except ImportError:
sys.exit(1) sys.exit(1)
import spack.main # noqa import spack.main # noqa: E402
# Once we've set up the system path, run the spack main method # Once we've set up the system path, run the spack main method
if __name__ == "__main__": if __name__ == "__main__":

View file

@ -577,7 +577,7 @@ def _default_generator(is_activated):
line = '--{0}-{1}'.format( line = '--{0}-{1}'.format(
activation_word, option_value activation_word, option_value
) )
if activation_value is not None and activation_value(option_value): # NOQA=ignore=E501 if activation_value is not None and activation_value(option_value):
line += '={0}'.format( line += '={0}'.format(
activation_value(option_value) activation_value(option_value)
) )

View file

@ -503,7 +503,7 @@ def __init__(self, name, *args, **kwargs):
# octave-octave-splines # octave-octave-splines
if not name.startswith('octave-'): if not name.startswith('octave-'):
# Make it more obvious that we are renaming the package # Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to octave-{0}".format(name)) # noqa tty.msg("Changing package name from {0} to octave-{0}".format(name))
name = 'octave-{0}'.format(name) name = 'octave-{0}'.format(name)
super(OctavePackageTemplate, self).__init__(name, *args, **kwargs) super(OctavePackageTemplate, self).__init__(name, *args, **kwargs)

View file

@ -60,11 +60,11 @@ def setup_parser(sp):
help='Concretize and install deprecator spec') help='Concretize and install deprecator spec')
install.add_argument('-I', '--no-install-deprecator', install.add_argument('-I', '--no-install-deprecator',
action='store_false', default=False, dest='install', action='store_false', default=False, dest='install',
help='Deprecator spec must already be installed (default)') # noqa 501 help='Deprecator spec must already be installed (default)')
sp.add_argument('-l', '--link-type', type=str, sp.add_argument('-l', '--link-type', type=str,
default='soft', choices=['soft', 'hard'], default='soft', choices=['soft', 'hard'],
help="Type of filesystem link to use for deprecation (default soft)") # noqa 501 help="Type of filesystem link to use for deprecation (default soft)")
sp.add_argument('specs', nargs=argparse.REMAINDER, sp.add_argument('specs', nargs=argparse.REMAINDER,
help="spec to deprecate and spec to use as deprecator") help="spec to deprecate and spec to use as deprecator")

View file

@ -93,7 +93,7 @@ def ipython_interpreter(args):
% (spack.spack_version, platform.python_version(), % (spack.spack_version, platform.python_version(),
platform.system(), platform.machine())) platform.system(), platform.machine()))
__name__ = "__main__" # noqa __name__ = "__main__" # noqa: F841
IPython.embed(module="__main__", header=header) IPython.embed(module="__main__", header=header)

View file

@ -32,7 +32,7 @@ class Arm(spack.compiler.Compiler):
# arm compilers. Output looks like this: # arm compilers. Output looks like this:
# #
# $ arm<c/f>lang --version # $ arm<c/f>lang --version
# Arm C/C++/Fortran Compiler version 19.0 (build number 73) (based on LLVM 7.0.2) # NOQA # Arm C/C++/Fortran Compiler version 19.0 (build number 73) (based on LLVM 7.0.2)
# Target: aarch64--linux-gnu # Target: aarch64--linux-gnu
# Thread model: posix # Thread model: posix
# InstalledDir: # InstalledDir:

View file

@ -304,5 +304,5 @@ def __call__(self):
# Import after function definition all the modules in this package, # Import after function definition all the modules in this package,
# so that registration of writers will happen automatically # so that registration of writers will happen automatically
import spack.container.writers.singularity # noqa import spack.container.writers.singularity # noqa: E402
import spack.container.writers.docker # noqa import spack.container.writers.docker # noqa: E402

View file

@ -965,7 +965,7 @@ def _construct_from_directory_layout(self, directory_layout, old_data):
'spec': entry.spec, 'spec': entry.spec,
'directory_layout': layout, 'directory_layout': layout,
'explicit': entry.explicit, 'explicit': entry.explicit,
'installation_time': entry.installation_time # noqa: E501 'installation_time': entry.installation_time
} }
self._add(**kwargs) self._add(**kwargs)
processed_specs.add(entry.spec) processed_specs.add(entry.spec)

View file

@ -784,7 +784,7 @@ def environment_modifications(self):
for x in env: for x in env:
# Ensure all the tokens are valid in this context # Ensure all the tokens are valid in this context
msg = 'some tokens cannot be expanded in an environment variable name' # noqa: E501 msg = 'some tokens cannot be expanded in an environment variable name'
_check_tokens_are_valid(x.name, message=msg) _check_tokens_are_valid(x.name, message=msg)
# Transform them # Transform them
x.name = spec.format(x.name, transform=transform) x.name = spec.format(x.name, transform=transform)

View file

@ -348,8 +348,8 @@ def __new__(cls, name, bases, attr_dict):
if 'phases' in attr_dict: if 'phases' in attr_dict:
# Turn the strings in 'phases' into InstallPhase instances # Turn the strings in 'phases' into InstallPhase instances
# and add them as private attributes # and add them as private attributes
_InstallPhase_phases = [PackageMeta.phase_fmt.format(x) for x in attr_dict['phases']] # NOQA: ignore=E501 _InstallPhase_phases = [PackageMeta.phase_fmt.format(x) for x in attr_dict['phases']]
for phase_name, callback_name in zip(_InstallPhase_phases, attr_dict['phases']): # NOQA: ignore=E501 for phase_name, callback_name in zip(_InstallPhase_phases, attr_dict['phases']):
attr_dict[phase_name] = InstallPhase(callback_name) attr_dict[phase_name] = InstallPhase(callback_name)
attr_dict['_InstallPhase_phases'] = _InstallPhase_phases attr_dict['_InstallPhase_phases'] = _InstallPhase_phases
@ -429,7 +429,7 @@ def _wrapper(instance, *args, **kwargs):
) )
if has_all_attributes: if has_all_attributes:
has_the_right_values = all( has_the_right_values = all(
[getattr(instance, key) == value for key, value in attr_dict.items()] # NOQA: ignore=E501 [getattr(instance, key) == value for key, value in attr_dict.items()]
) )
if has_the_right_values: if has_the_right_values:
func(instance, *args, **kwargs) func(instance, *args, **kwargs)

View file

@ -160,7 +160,7 @@ def spec_externals(spec):
one for each known external installation. one for each known external installation.
""" """
# break circular import. # break circular import.
from spack.util.module_cmd import path_from_modules # NOQA: ignore=F401 from spack.util.module_cmd import path_from_modules # noqa: F401
def _package(maybe_abstract_spec): def _package(maybe_abstract_spec):
pkg_cls = spack.repo.path.get_pkg_class(spec.name) pkg_cls = spack.repo.path.get_pkg_class(spec.name)

View file

@ -46,8 +46,7 @@ def __getattr__(self, key):
def _s3_open(url): def _s3_open(url):
parsed = url_util.parse(url) parsed = url_util.parse(url)
s3 = s3_util.create_s3_session(parsed, s3 = s3_util.create_s3_session(parsed, connection=s3_util.get_mirror_connection(parsed))
connection=s3_util.get_mirror_connection(parsed)) # noqa: E501
bucket = parsed.netloc bucket = parsed.netloc
key = parsed.path key = parsed.path

View file

@ -28,17 +28,17 @@
@pytest.mark.parametrize('version_str,expected_version', [ @pytest.mark.parametrize('version_str,expected_version', [
('Arm C/C++/Fortran Compiler version 19.0 (build number 73) (based on LLVM 7.0.2)\n' # NOQA ('Arm C/C++/Fortran Compiler version 19.0 (build number 73) (based on LLVM 7.0.2)\n'
'Target: aarch64--linux-gnu\n' 'Target: aarch64--linux-gnu\n'
'Thread model: posix\n' 'Thread model: posix\n'
'InstalledDir:\n' 'InstalledDir:\n'
'/opt/arm/arm-hpc-compiler-19.0_Generic-AArch64_RHEL-7_aarch64-linux/bin\n', # NOQA '/opt/arm/arm-hpc-compiler-19.0_Generic-AArch64_RHEL-7_aarch64-linux/bin\n',
'19.0.0.73'), '19.0.0.73'),
('Arm C/C++/Fortran Compiler version 19.3.1 (build number 75) (based on LLVM 7.0.2)\n' # NOQA ('Arm C/C++/Fortran Compiler version 19.3.1 (build number 75) (based on LLVM 7.0.2)\n'
'Target: aarch64--linux-gnu\n' 'Target: aarch64--linux-gnu\n'
'Thread model: posix\n' 'Thread model: posix\n'
'InstalledDir:\n' 'InstalledDir:\n'
'/opt/arm/arm-hpc-compiler-19.0_Generic-AArch64_RHEL-7_aarch64-linux/bin\n', # NOQA '/opt/arm/arm-hpc-compiler-19.0_Generic-AArch64_RHEL-7_aarch64-linux/bin\n',
'19.3.1.75') '19.3.1.75')
]) ])
def test_arm_version_detection(version_str, expected_version): def test_arm_version_detection(version_str, expected_version):
@ -62,7 +62,9 @@ def test_cce_version_detection(version_str, expected_version):
('Apple clang version 11.0.0 (clang-1100.0.33.8)\n' ('Apple clang version 11.0.0 (clang-1100.0.33.8)\n'
'Target: x86_64-apple-darwin18.7.0\n' 'Target: x86_64-apple-darwin18.7.0\n'
'Thread model: posix\n' 'Thread model: posix\n'
'InstalledDir: /Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin\n', # noqa 'InstalledDir: '
'/Applications/Xcode.app/Contents/Developer/Toolchains/'
'XcodeDefault.xctoolchain/usr/bin\n',
'11.0.0'), '11.0.0'),
('Apple LLVM version 7.0.2 (clang-700.1.81)\n' ('Apple LLVM version 7.0.2 (clang-700.1.81)\n'
'Target: x86_64-apple-darwin15.2.0\n' 'Target: x86_64-apple-darwin15.2.0\n'
@ -79,7 +81,7 @@ def test_apple_clang_version_detection(
@pytest.mark.regression('10191') @pytest.mark.regression('10191')
@pytest.mark.parametrize('version_str,expected_version', [ @pytest.mark.parametrize('version_str,expected_version', [
# LLVM Clang # LLVM Clang
('clang version 6.0.1-svn334776-1~exp1~20181018152737.116 (branches/release_60)\n' # noqa ('clang version 6.0.1-svn334776-1~exp1~20181018152737.116 (branches/release_60)\n'
'Target: x86_64-pc-linux-gnu\n' 'Target: x86_64-pc-linux-gnu\n'
'Thread model: posix\n' 'Thread model: posix\n'
'InstalledDir: /usr/bin\n', '6.0.1'), 'InstalledDir: /usr/bin\n', '6.0.1'),
@ -90,7 +92,7 @@ def test_apple_clang_version_detection(
'Target: x86_64-pc-linux-gnu\n' 'Target: x86_64-pc-linux-gnu\n'
'Thread model: posix\n' 'Thread model: posix\n'
'InstalledDir: /usr/bin\n', '8.0.0'), 'InstalledDir: /usr/bin\n', '8.0.0'),
('clang version 9.0.1-+201911131414230800840845a1eea-1~exp1~20191113231141.78\n' # noqa ('clang version 9.0.1-+201911131414230800840845a1eea-1~exp1~20191113231141.78\n'
'Target: x86_64-pc-linux-gnu\n' 'Target: x86_64-pc-linux-gnu\n'
'Thread model: posix\n' 'Thread model: posix\n'
'InstalledDir: /usr/bin\n', '9.0.1'), 'InstalledDir: /usr/bin\n', '9.0.1'),

View file

@ -59,8 +59,10 @@ def test_yaml_directory_layout_parameters(tmpdir, config):
# Test separation of architecture or namespace # Test separation of architecture or namespace
spec2 = Spec('libelf').concretized() spec2 = Spec('libelf').concretized()
arch_scheme = "{architecture.platform}/{architecture.target}/{architecture.os}/{name}/{version}/{hash:7}" # NOQA: ignore=E501 arch_scheme = (
ns_scheme = "${ARCHITECTURE}/${NAMESPACE}/${PACKAGE}-${VERSION}-${HASH:7}" # NOQA: ignore=E501 "{architecture.platform}/{architecture.target}/{architecture.os}/{name}/{version}/{hash:7}"
)
ns_scheme = "${ARCHITECTURE}/${NAMESPACE}/${PACKAGE}-${VERSION}-${HASH:7}"
arch_ns_scheme_projections = {'all': arch_scheme, arch_ns_scheme_projections = {'all': arch_scheme,
'python': ns_scheme} 'python': ns_scheme}
layout_arch_ns = DirectoryLayout( layout_arch_ns = DirectoryLayout(

View file

@ -61,14 +61,26 @@ def test_repr(self, library_list):
def test_joined_and_str(self, library_list): def test_joined_and_str(self, library_list):
s1 = library_list.joined() s1 = library_list.joined()
expected = '/dir1/liblapack.a /dir2/libpython3.6.dylib /dir1/libblas.a /dir3/libz.so libmpi.so.20.10.1' # noqa: E501 expected = " ".join([
"/dir1/liblapack.a",
"/dir2/libpython3.6.dylib",
"/dir1/libblas.a",
"/dir3/libz.so",
"libmpi.so.20.10.1",
])
assert s1 == expected assert s1 == expected
s2 = str(library_list) s2 = str(library_list)
assert s1 == s2 assert s1 == s2
s3 = library_list.joined(';') s3 = library_list.joined(';')
expected = '/dir1/liblapack.a;/dir2/libpython3.6.dylib;/dir1/libblas.a;/dir3/libz.so;libmpi.so.20.10.1' # noqa: E501 expected = ";".join([
"/dir1/liblapack.a",
"/dir2/libpython3.6.dylib",
"/dir1/libblas.a",
"/dir3/libz.so",
"libmpi.so.20.10.1",
])
assert s3 == expected assert s3 == expected
def test_flags(self, library_list): def test_flags(self, library_list):
@ -135,14 +147,26 @@ def test_repr(self, header_list):
def test_joined_and_str(self, header_list): def test_joined_and_str(self, header_list):
s1 = header_list.joined() s1 = header_list.joined()
expected = '/dir1/Python.h /dir2/date.time.h /dir1/pyconfig.hpp /dir3/core.hh pymem.cuh' # noqa: E501 expected = " ".join([
"/dir1/Python.h",
"/dir2/date.time.h",
"/dir1/pyconfig.hpp",
"/dir3/core.hh",
"pymem.cuh",
])
assert s1 == expected assert s1 == expected
s2 = str(header_list) s2 = str(header_list)
assert s1 == s2 assert s1 == s2
s3 = header_list.joined(';') s3 = header_list.joined(';')
expected = '/dir1/Python.h;/dir2/date.time.h;/dir1/pyconfig.hpp;/dir3/core.hh;pymem.cuh' # noqa: E501 expected = ";".join([
"/dir1/Python.h",
"/dir2/date.time.h",
"/dir1/pyconfig.hpp",
"/dir3/core.hh",
"pymem.cuh",
])
assert s3 == expected assert s3 == expected
def test_flags(self, header_list): def test_flags(self, header_list):

View file

@ -288,7 +288,7 @@ def successful_apply(*args, **kwargs):
assert not (set([ assert not (set([
'abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234', 'abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234',
'abcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcd.gz' # NOQA: ignore=E501 'abcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcd.gz',
]) - files_cached_in_mirror) ]) - files_cached_in_mirror)

View file

@ -123,7 +123,7 @@ def test_alter_environment(
content = modulefile_content('mpileaks platform=test target=x86_64') content = modulefile_content('mpileaks platform=test target=x86_64')
assert len( assert len(
[x for x in content if x.startswith('prepend_path("CMAKE_PREFIX_PATH"')] # NOQA: ignore=E501 [x for x in content if x.startswith('prepend_path("CMAKE_PREFIX_PATH"')]
) == 0 ) == 0
assert len([x for x in content if 'setenv("FOO", "foo")' in x]) == 1 assert len([x for x in content if 'setenv("FOO", "foo")' in x]) == 1
assert len([x for x in content if 'unsetenv("BAR")' in x]) == 1 assert len([x for x in content if 'unsetenv("BAR")' in x]) == 1
@ -133,7 +133,7 @@ def test_alter_environment(
) )
assert len( assert len(
[x for x in content if x.startswith('prepend-path("CMAKE_PREFIX_PATH"')] # NOQA: ignore=E501 [x for x in content if x.startswith('prepend-path("CMAKE_PREFIX_PATH"')]
) == 0 ) == 0
assert len([x for x in content if 'setenv("FOO", "foo")' in x]) == 0 assert len([x for x in content if 'setenv("FOO", "foo")' in x]) == 0
assert len([x for x in content if 'unsetenv("BAR")' in x]) == 0 assert len([x for x in content if 'unsetenv("BAR")' in x]) == 0

View file

@ -361,7 +361,7 @@ def test_extend_context(
assert 'puts stderr "sentence from package"' in content assert 'puts stderr "sentence from package"' in content
short_description = 'module-whatis "This package updates the context for TCL modulefiles."' # NOQA: ignore=E501 short_description = 'module-whatis "This package updates the context for TCL modulefiles."'
assert short_description in content assert short_description in content
@pytest.mark.regression('4400') @pytest.mark.regression('4400')

View file

@ -55,13 +55,13 @@ def test_package_class_names(self):
# Below tests target direct imports of spack packages from the # Below tests target direct imports of spack packages from the
# spack.pkg namespace # spack.pkg namespace
def test_import_package(self): def test_import_package(self):
import spack.pkg.builtin.mock.mpich # type: ignore[import] # noqa import spack.pkg.builtin.mock.mpich # type: ignore[import] # noqa: F401
def test_import_package_as(self): def test_import_package_as(self):
import spack.pkg.builtin.mock # noqa import spack.pkg.builtin.mock # noqa: F401
import spack.pkg.builtin.mock as m # noqa import spack.pkg.builtin.mock as m # noqa: F401
import spack.pkg.builtin.mock.mpich as mp # noqa import spack.pkg.builtin.mock.mpich as mp # noqa: F401
from spack.pkg.builtin import mock # noqa from spack.pkg.builtin import mock # noqa: F401
def test_inheritance_of_diretives(self): def test_inheritance_of_diretives(self):
pkg_cls = spack.repo.path.get_pkg_class('simple-inheritance') pkg_cls = spack.repo.path.get_pkg_class('simple-inheritance')
@ -99,21 +99,21 @@ def test_dependency_extensions(self):
assert deps == set(['extension1']) assert deps == set(['extension1'])
def test_import_class_from_package(self): def test_import_class_from_package(self):
from spack.pkg.builtin.mock.mpich import Mpich # noqa from spack.pkg.builtin.mock.mpich import Mpich # noqa: F401
def test_import_module_from_package(self): def test_import_module_from_package(self):
from spack.pkg.builtin.mock import mpich # noqa from spack.pkg.builtin.mock import mpich # noqa: F401
def test_import_namespace_container_modules(self): def test_import_namespace_container_modules(self):
import spack.pkg # noqa import spack.pkg # noqa: F401
import spack.pkg as p # noqa import spack.pkg as p # noqa: F401
import spack.pkg.builtin # noqa import spack.pkg.builtin # noqa: F401
import spack.pkg.builtin as b # noqa import spack.pkg.builtin as b # noqa: F401
import spack.pkg.builtin.mock # noqa import spack.pkg.builtin.mock # noqa: F401
import spack.pkg.builtin.mock as m # noqa import spack.pkg.builtin.mock as m # noqa: F401
from spack import pkg # noqa from spack import pkg # noqa: F401
from spack.pkg import builtin # noqa from spack.pkg import builtin # noqa: F401
from spack.pkg.builtin import mock # noqa from spack.pkg.builtin import mock # noqa: F401
@pytest.mark.regression('2737') @pytest.mark.regression('2737')

View file

@ -290,7 +290,7 @@ class FindFunctions(ast.NodeVisitor):
def __init__(self): def __init__(self):
self.nodes = [] self.nodes = []
def visit_FunctionDef(self, node): # noqa def visit_FunctionDef(self, node):
if node.name in ("to_node_dict", "to_dict", "to_dict_or_value"): if node.name in ("to_node_dict", "to_dict", "to_dict_or_value"):
self.nodes.append(node) self.nodes.append(node)
@ -306,10 +306,10 @@ def add_error(self, node):
% (self.filename, node.lineno, node.col_offset) % (self.filename, node.lineno, node.col_offset)
) )
def visit_Dict(self, node): # noqa def visit_Dict(self, node):
self.add_error(node) self.add_error(node)
def visit_Call(self, node): # noqa def visit_Call(self, node):
name = None name = None
if isinstance(node.func, ast.Name): if isinstance(node.func, ast.Name):
name = node.func.id name = node.func.id

View file

@ -270,7 +270,7 @@ def test_gather_s3_information(monkeypatch, capfd):
"access_pair": ("SPA", "CK"), "access_pair": ("SPA", "CK"),
"endpoint_url": "https://127.0.0.1:8888"} "endpoint_url": "https://127.0.0.1:8888"}
session_args, client_args = spack.util.s3.get_mirror_s3_connection_info(mock_connection_data) # noqa: E501 session_args, client_args = spack.util.s3.get_mirror_s3_connection_info(mock_connection_data)
# Session args are used to create the S3 Session object # Session args are used to create the S3 Session object
assert "aws_session_token" in session_args assert "aws_session_token" in session_args

View file

@ -576,7 +576,7 @@ def parse_version_offset(path):
# e.g. http://gitlab.cosma.dur.ac.uk/swift/swiftsim/repository/archive.tar.gz?ref=v0.3.0 # e.g. http://gitlab.cosma.dur.ac.uk/swift/swiftsim/repository/archive.tar.gz?ref=v0.3.0
# e.g. http://apps.fz-juelich.de/jsc/sionlib/download.php?version=1.7.1 # e.g. http://apps.fz-juelich.de/jsc/sionlib/download.php?version=1.7.1
# e.g. https://software.broadinstitute.org/gatk/download/auth?package=GATK-archive&version=3.8-1-0-gf15c1c3ef # e.g. https://software.broadinstitute.org/gatk/download/auth?package=GATK-archive&version=3.8-1-0-gf15c1c3ef
(r'[?&](?:sha|ref|version)=[a-zA-Z\d+-]*[_-]?v?(\d[\da-zA-Z._-]*)$', suffix), # noqa: E501 (r'[?&](?:sha|ref|version)=[a-zA-Z\d+-]*[_-]?v?(\d[\da-zA-Z._-]*)$', suffix),
# e.g. http://slepc.upv.es/download/download.php?filename=slepc-3.6.2.tar.gz # e.g. http://slepc.upv.es/download/download.php?filename=slepc-3.6.2.tar.gz
# e.g. http://laws-green.lanl.gov/projects/data/eos/get_file.php?package=eospac&filename=eospac_v6.4.0beta.1_r20171213193219.tgz # e.g. http://laws-green.lanl.gov/projects/data/eos/get_file.php?package=eospac&filename=eospac_v6.4.0beta.1_r20171213193219.tgz
@ -588,7 +588,7 @@ def parse_version_offset(path):
# github.com/repo/name/releases/download/vver/name # github.com/repo/name/releases/download/vver/name
# e.g. https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow # e.g. https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow
(r'github\.com/[^/]+/[^/]+/releases/download/[a-zA-Z+._-]*v?(\d[\da-zA-Z._-]*)/', path), # noqa: E501 (r'github\.com/[^/]+/[^/]+/releases/download/[a-zA-Z+._-]*v?(\d[\da-zA-Z._-]*)/', path),
# e.g. ftp://ftp.ncbi.nlm.nih.gov/blast/executables/legacy.NOTSUPPORTED/2.2.26/ncbi.tar.gz # e.g. ftp://ftp.ncbi.nlm.nih.gov/blast/executables/legacy.NOTSUPPORTED/2.2.26/ncbi.tar.gz
(r'(\d[\da-zA-Z._-]*)/[^/]+$', path), (r'(\d[\da-zA-Z._-]*)/[^/]+$', path),

View file

@ -25,7 +25,7 @@
def bz2_support(): def bz2_support():
try: try:
import bz2 # noqa import bz2 # noqa: F401
return True return True
except ImportError: except ImportError:
return False return False
@ -33,7 +33,7 @@ def bz2_support():
def gzip_support(): def gzip_support():
try: try:
import gzip # noqa import gzip # noqa: F401
return True return True
except ImportError: except ImportError:
return False return False
@ -41,7 +41,7 @@ def gzip_support():
def lzma_support(): def lzma_support():
try: try:
import lzma # noqa # novermin import lzma # noqa: F401 # novm
return True return True
except ImportError: except ImportError:
return False return False
@ -49,7 +49,7 @@ def lzma_support():
def tar_support(): def tar_support():
try: try:
import tarfile # noqa import tarfile # noqa: F401
return True return True
except ImportError: except ImportError:
return False return False

View file

@ -131,7 +131,7 @@ def write_transaction(self, key):
# TODO: the locking code. # TODO: the locking code.
class WriteContextManager(object): class WriteContextManager(object):
def __enter__(cm): # noqa def __enter__(cm):
cm.orig_filename = self.cache_path(key) cm.orig_filename = self.cache_path(key)
cm.orig_file = None cm.orig_file = None
if os.path.exists(cm.orig_filename): if os.path.exists(cm.orig_filename):
@ -142,7 +142,7 @@ def __enter__(cm): # noqa
return cm.orig_file, cm.tmp_file return cm.orig_file, cm.tmp_file
def __exit__(cm, type, value, traceback): # noqa def __exit__(cm, type, value, traceback):
if cm.orig_file: if cm.orig_file:
cm.orig_file.close() cm.orig_file.close()
cm.tmp_file.close() cm.tmp_file.close()

View file

@ -10,16 +10,21 @@
import llnl.util.lock import llnl.util.lock
# import some llnl.util.lock names as though they're part of spack.util.lock
from llnl.util.lock import LockError # noqa: F401
from llnl.util.lock import LockTimeoutError # noqa: F401
from llnl.util.lock import LockUpgradeError # noqa: F401
from llnl.util.lock import ReadTransaction # noqa: F401
from llnl.util.lock import WriteTransaction # noqa: F401
import spack.config import spack.config
import spack.error import spack.error
import spack.paths import spack.paths
from llnl.util.lock import * # noqa
is_windows = sys.platform == 'win32' is_windows = sys.platform == 'win32'
class Lock(llnl.util.lock.Lock): # type: ignore[no-redef] class Lock(llnl.util.lock.Lock):
"""Lock that can be disabled. """Lock that can be disabled.
This overrides the ``_lock()`` and ``_unlock()`` methods from This overrides the ``_lock()`` and ``_unlock()`` methods from

View file

@ -17,7 +17,7 @@ def get_mirror_connection(url, url_type="push"):
# Check to see if desired file starts with any of the mirror URLs # Check to see if desired file starts with any of the mirror URLs
rebuilt_path = url_util.format(url) rebuilt_path = url_util.format(url)
# Gather dict of push URLS point to the value of the whole mirror # Gather dict of push URLS point to the value of the whole mirror
mirror_dict = {x.push_url: x for x in spack.mirror.MirrorCollection().values()} # noqa: E501 mirror_dict = {x.push_url: x for x in spack.mirror.MirrorCollection().values()}
# Ensure most specific URLs (longest) are presented first # Ensure most specific URLs (longest) are presented first
mirror_url_keys = mirror_dict.keys() mirror_url_keys = mirror_dict.keys()
mirror_url_keys = sorted(mirror_url_keys, key=len, reverse=True) mirror_url_keys = sorted(mirror_url_keys, key=len, reverse=True)
@ -55,7 +55,7 @@ def get_mirror_s3_connection_info(connection):
if endpoint_url: if endpoint_url:
s3_client_args['endpoint_url'] = _parse_s3_endpoint_url(endpoint_url) s3_client_args['endpoint_url'] = _parse_s3_endpoint_url(endpoint_url)
elif s3_connection_is_dict and connection.get("endpoint_url"): elif s3_connection_is_dict and connection.get("endpoint_url"):
s3_client_args["endpoint_url"] = _parse_s3_endpoint_url(connection["endpoint_url"]) # noqa: E501 s3_client_args["endpoint_url"] = _parse_s3_endpoint_url(connection["endpoint_url"])
return (s3_connection, s3_client_args) return (s3_connection, s3_client_args)

View file

@ -670,7 +670,7 @@ def visit_Str(self, tree):
self.write(repr(tree.s)) self.write(repr(tree.s))
elif isinstance(tree.s, str): elif isinstance(tree.s, str):
self.write("b" + repr(tree.s)) self.write("b" + repr(tree.s))
elif isinstance(tree.s, unicode): # noqa elif isinstance(tree.s, unicode): # noqa: F821
self.write(repr(tree.s).lstrip("u")) self.write(repr(tree.s).lstrip("u"))
else: else:
assert False, "shouldn't get here" assert False, "shouldn't get here"

View file

@ -207,7 +207,7 @@ def push_to_url(
remote_path = remote_path[1:] remote_path = remote_path[1:]
s3 = s3_util.create_s3_session(remote_url, s3 = s3_util.create_s3_session(remote_url,
connection=s3_util.get_mirror_connection(remote_url)) # noqa: E501 connection=s3_util.get_mirror_connection(remote_url))
s3.upload_file(local_file_path, remote_url.netloc, s3.upload_file(local_file_path, remote_url.netloc,
remote_path, ExtraArgs=extra_args) remote_path, ExtraArgs=extra_args)
@ -234,7 +234,7 @@ def url_exists(url):
if url.scheme == 's3': if url.scheme == 's3':
# Check for URL specific connection information # Check for URL specific connection information
s3 = s3_util.create_s3_session(url, connection=s3_util.get_mirror_connection(url)) # noqa: E501 s3 = s3_util.create_s3_session(url, connection=s3_util.get_mirror_connection(url))
try: try:
s3.get_object(Bucket=url.netloc, Key=url.path.lstrip('/')) s3.get_object(Bucket=url.netloc, Key=url.path.lstrip('/'))
@ -280,7 +280,7 @@ def remove_url(url, recursive=False):
if url.scheme == 's3': if url.scheme == 's3':
# Try to find a mirror for potential connection information # Try to find a mirror for potential connection information
s3 = s3_util.create_s3_session(url, connection=s3_util.get_mirror_connection(url)) # noqa: E501 s3 = s3_util.create_s3_session(url, connection=s3_util.get_mirror_connection(url))
bucket = url.netloc bucket = url.netloc
if recursive: if recursive:
# Because list_objects_v2 can only return up to 1000 items # Because list_objects_v2 can only return up to 1000 items
@ -391,7 +391,7 @@ def list_url(url, recursive=False):
if os.path.isfile(os.path.join(local_path, subpath))] if os.path.isfile(os.path.join(local_path, subpath))]
if url.scheme == 's3': if url.scheme == 's3':
s3 = s3_util.create_s3_session(url, connection=s3_util.get_mirror_connection(url)) # noqa: E501 s3 = s3_util.create_s3_session(url, connection=s3_util.get_mirror_connection(url))
if recursive: if recursive:
return list(_iter_s3_prefix(s3, url)) return list(_iter_s3_prefix(s3, url))

View file

@ -26,5 +26,5 @@ class DepDiamondPatchMid2(Package):
# single patch file in repo # single patch file in repo
depends_on('patch', patches=[ depends_on('patch', patches=[
patch('http://example.com/urlpatch.patch', patch('http://example.com/urlpatch.patch',
sha256='mid21234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234'), # noqa: E501 sha256='mid21234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234'),
]) ])

View file

@ -138,7 +138,7 @@ def install(self, spec, prefix):
make('test') make('test')
make('install') make('install')
@when('@3.3.0') # noqa @when('@3.3.0')
def install(self, spec, prefix): def install(self, spec, prefix):
# Apparently autotools are not bootstrapped # Apparently autotools are not bootstrapped
which('libtoolize')() which('libtoolize')()

View file

@ -80,14 +80,19 @@ def patch(self):
kwargs = {'ignore_absent': False, 'backup': False, 'string': True} kwargs = {'ignore_absent': False, 'backup': False, 'string': True}
mf = FileFilter('Makefile-libbz2_so') mf = FileFilter('Makefile-libbz2_so')
mf.filter('$(CC) -shared -Wl,-soname -Wl,libbz2.so.{0} -o libbz2.so.{1} $(OBJS)' # noqa mf.filter(
.format(v2, v3), '$(CC) -shared -Wl,-soname -Wl,libbz2.so.{0} -o libbz2.so.{1} $(OBJS)'.format(
'$(CC) -dynamiclib -Wl,-install_name -Wl,@rpath/libbz2.{0}.dylib -current_version {1} -compatibility_version {2} -o libbz2.{3}.dylib $(OBJS)' # noqa v2, v3
.format(v1, v2, v3, v3), ),
**kwargs) (
'$(CC) -dynamiclib -Wl,-install_name -Wl,@rpath/libbz2.{0}.dylib '
'-current_version {1} -compatibility_version {2} -o libbz2.{3}.dylib $(OBJS)'
) .format(v1, v2, v3, v3),
**kwargs
)
mf.filter( mf.filter(
'$(CC) $(CFLAGS) -o bzip2-shared bzip2.c libbz2.so.{0}'.format(v3), # noqa '$(CC) $(CFLAGS) -o bzip2-shared bzip2.c libbz2.so.{0}'.format(v3),
'$(CC) $(CFLAGS) -o bzip2-shared bzip2.c libbz2.{0}.dylib' '$(CC) $(CFLAGS) -o bzip2-shared bzip2.c libbz2.{0}.dylib'
.format(v3), **kwargs) .format(v3), **kwargs)
mf.filter( mf.filter(

View file

@ -51,7 +51,7 @@ class Cctools(AutotoolsPackage):
# This is a belt and suspenders solution to the problem. # This is a belt and suspenders solution to the problem.
def patch(self): def patch(self):
before = '#if defined(__linux__) && defined(SYS_memfd_create)' before = '#if defined(__linux__) && defined(SYS_memfd_create)'
after = '#if defined(__linux__) && defined(SYS_memfd_create) && defined(__NR_memfd_create)' # noqa: E501 after = '#if defined(__linux__) && defined(SYS_memfd_create) && defined(__NR_memfd_create)'
f = 'dttools/src/memfdexe.c' f = 'dttools/src/memfdexe.c'
kwargs = {'ignore_absent': False, 'backup': True, 'string': True} kwargs = {'ignore_absent': False, 'backup': True, 'string': True}
filter_file(before, after, f, **kwargs) filter_file(before, after, f, **kwargs)

View file

@ -221,8 +221,14 @@ class Dealii(CMakePackage, CudaPackage):
# See https://github.com/symengine/symengine/issues/1516 # See https://github.com/symengine/symengine/issues/1516
# TODO: uncomment when the following is fixed # TODO: uncomment when the following is fixed
# https://github.com/spack/spack/issues/11160 # https://github.com/spack/spack/issues/11160
# depends_on("symengine@0.4: build_type=Release", when="@9.1:+symengine+trilinos^trilinos~debug") # NOQA: ignore=E501 # depends_on(
# depends_on("symengine@0.4: build_type=Debug", when="@9.1:+symengine+trilinos^trilinos+debug") # NOQA: ignore=E501 # "symengine@0.4: build_type=Release",
# when="@9.1:+symengine+trilinos^trilinos~debug"
# )
# depends_on(
# "symengine@0.4: build_type=Debug",
# when="@9.1:+symengine+trilinos^trilinos+debug"
# )
depends_on('symengine@0.4:', when='@9.1:+symengine') depends_on('symengine@0.4:', when='@9.1:+symengine')
depends_on('symengine@0.6:', when='@9.2:+symengine') depends_on('symengine@0.6:', when='@9.2:+symengine')
depends_on('tbb', when='+threads') depends_on('tbb', when='+threads')

View file

@ -76,9 +76,13 @@ def chmod_scripts(self):
def url_for_version(self, version): def url_for_version(self, version):
if version < Version('8.0.0'): if version < Version('8.0.0'):
return "http://www.earthsystemmodeling.org/esmf_releases/public/ESMF_{0}/esmf_{0}_src.tar.gz".format(version.underscored) return "http://www.earthsystemmodeling.org/esmf_releases/public/ESMF_{0}/esmf_{0}_src.tar.gz".format(
version.underscored
)
else: else:
return "https://github.com/esmf-org/esmf/archive/ESMF_{0}.tar.gz".format(version.underscored) return "https://github.com/esmf-org/esmf/archive/ESMF_{0}.tar.gz".format(
version.underscored
)
def edit(self, spec, prefix): def edit(self, spec, prefix):
# Installation instructions can be found at: # Installation instructions can be found at:
@ -217,7 +221,7 @@ def edit(self, spec, prefix):
# Specifies the linker directive needed to link the LAPACK library # Specifies the linker directive needed to link the LAPACK library
# to the application. # to the application.
os.environ['ESMF_LAPACK_LIBS'] = spec['lapack'].libs.link_flags # noqa os.environ['ESMF_LAPACK_LIBS'] = spec['lapack'].libs.link_flags
else: else:
os.environ['ESMF_LAPACK'] = 'internal' os.environ['ESMF_LAPACK'] = 'internal'

View file

@ -219,27 +219,34 @@ def patch(self):
# Adjust ParMGridGen - this is still a mess # Adjust ParMGridGen - this is still a mess
files = [ files = [
'src/dbns/Make/options', 'src/dbns/Make/options',
'src/fvAgglomerationMethods/MGridGenGamgAgglomeration/Make/options' # noqa: E501 'src/fvAgglomerationMethods/MGridGenGamgAgglomeration/Make/options'
] ]
for f in files: for f in files:
filter_file(r'-lMGridGen', r'-lmgrid', f, backup=False) filter_file(r'-lMGridGen', r'-lmgrid', f, backup=False)
# Adjust for flex version check # Adjust for flex version check
files = [ files = [
'src/thermophysicalModels/reactionThermo/chemistryReaders/chemkinReader/chemkinLexer.L', # noqa: E501 (
'src/surfMesh/surfaceFormats/stl/STLsurfaceFormatASCII.L', # noqa: E501 'src/thermophysicalModels/reactionThermo/chemistryReaders/chemkinReader/'
'src/meshTools/triSurface/triSurface/interfaces/STL/readSTLASCII.L', # noqa: E501 'chemkinLexer.L'
'applications/utilities/preProcessing/fluentDataToFoam/fluentDataToFoam.L', # noqa: E501 ),
'applications/utilities/mesh/conversion/gambitToFoam/gambitToFoam.L', # noqa: E501 'src/surfMesh/surfaceFormats/stl/STLsurfaceFormatASCII.L',
'applications/utilities/mesh/conversion/fluent3DMeshToFoam/fluent3DMeshToFoam.L', # noqa: E501 'src/meshTools/triSurface/triSurface/interfaces/STL/readSTLASCII.L',
'applications/utilities/mesh/conversion/ansysToFoam/ansysToFoam.L', # noqa: E501 'applications/utilities/preProcessing/fluentDataToFoam/fluentDataToFoam.L',
'applications/utilities/mesh/conversion/fluentMeshToFoam/fluentMeshToFoam.L', # noqa: E501 'applications/utilities/mesh/conversion/gambitToFoam/gambitToFoam.L',
'applications/utilities/mesh/conversion/fluent3DMeshToElmer/fluent3DMeshToElmer.L' # noqa: E501 'applications/utilities/mesh/conversion/fluent3DMeshToFoam/fluent3DMeshToFoam.L',
'applications/utilities/mesh/conversion/ansysToFoam/ansysToFoam.L',
'applications/utilities/mesh/conversion/fluentMeshToFoam/fluentMeshToFoam.L',
'applications/utilities/mesh/conversion/fluent3DMeshToElmer/fluent3DMeshToElmer.L'
] ]
for f in files: for f in files:
filter_file( filter_file(
r'#if YY_FLEX_SUBMINOR_VERSION < 34', r'#if YY_FLEX_SUBMINOR_VERSION < 34',
r'#if YY_FLEX_MAJOR_VERSION <= 2 && YY_FLEX_MINOR_VERSION <= 5 && YY_FLEX_SUBMINOR_VERSION < 34', # noqa: E501 (
r'#if YY_FLEX_MAJOR_VERSION <= 2 '
r'&& YY_FLEX_MINOR_VERSION <= 5 '
r'&& YY_FLEX_SUBMINOR_VERSION < 34'
),
f, backup=False) f, backup=False)
def configure(self, spec, prefix): def configure(self, spec, prefix):

View file

@ -50,7 +50,7 @@ class Gnuplot(AutotoolsPackage):
variant('libcerf', default=True, variant('libcerf', default=True,
description='Build with libcerf support') description='Build with libcerf support')
variant('pbm', default=False, variant('pbm', default=False,
description='Enable PBM (Portable Bit Map) and other older bitmap terminals') # NOQA: ignore=E501 description='Enable PBM (Portable Bit Map) and other older bitmap terminals')
variant('qt', default=False, variant('qt', default=False,
description='Build with QT') description='Build with QT')

View file

@ -35,7 +35,9 @@ class Grackle(Package):
def install(self, spec, prefix): def install(self, spec, prefix):
template_name = '{0.architecture}-{0.compiler.name}' template_name = '{0.architecture}-{0.compiler.name}'
grackle_architecture = template_name.format(spec) grackle_architecture = template_name.format(spec)
link_variables = 'MACH_AR = ar' if spec.version < Version(2.2) else 'MACH_LIBTOOL = libtool' # NOQA: ignore=E501 link_variables = (
'MACH_AR = ar' if spec.version < Version(2.2) else 'MACH_LIBTOOL = libtool'
)
substitutions = { substitutions = {
'@ARCHITECTURE': grackle_architecture, '@ARCHITECTURE': grackle_architecture,
'@CC': spec['mpi'].mpicc, '@CC': spec['mpi'].mpicc,

View file

@ -23,6 +23,6 @@ class Luit(AutotoolsPackage, XorgPackage):
depends_on('pkgconfig', type='build') depends_on('pkgconfig', type='build')
depends_on('util-macros', type='build') depends_on('util-macros', type='build')
# see https://www.linuxquestions.org/questions/linux-from-scratch-13/can't-compile-luit-xorg-applications-4175476308/ # noqa # see https://www.linuxquestions.org/questions/linux-from-scratch-13/can't-compile-luit-xorg-applications-4175476308/
def configure_args(self): def configure_args(self):
return ['CFLAGS=-U_XOPEN_SOURCE -D_XOPEN_SOURCE=600'] return ['CFLAGS=-U_XOPEN_SOURCE -D_XOPEN_SOURCE=600']

View file

@ -55,7 +55,7 @@ def configure(self, spec, prefix):
with open(config, 'w') as out: with open(config, 'w') as out:
out.write( out.write(
"""# Local tweaks for building """# Local tweaks for building
CPLUS_INCLUDE_PATH="{precice_dir}/include/precice${{CPLUS_INCLUDE_PATH:+:}}$CPLUS_INCLUDE_PATH" ## noqa: E501 CPLUS_INCLUDE_PATH="{precice_dir}/include/precice${{CPLUS_INCLUDE_PATH:+:}}$CPLUS_INCLUDE_PATH"
export CPLUS_INCLUDE_PATH export CPLUS_INCLUDE_PATH
# Local build (for user appbin, libbin) # Local build (for user appbin, libbin)
. ./change-userdir.sh $PWD/{user_dir} . ./change-userdir.sh $PWD/{user_dir}

View file

@ -33,9 +33,9 @@ class Parmetis(CMakePackage):
patch('enable_external_metis.patch') patch('enable_external_metis.patch')
# bug fixes from PETSc developers # bug fixes from PETSc developers
# https://bitbucket.org/petsc/pkg-parmetis/commits/1c1a9fd0f408dc4d42c57f5c3ee6ace411eb222b/raw/ # NOQA: E501 # https://bitbucket.org/petsc/pkg-parmetis/commits/1c1a9fd0f408dc4d42c57f5c3ee6ace411eb222b/raw/
patch('pkg-parmetis-1c1a9fd0f408dc4d42c57f5c3ee6ace411eb222b.patch') patch('pkg-parmetis-1c1a9fd0f408dc4d42c57f5c3ee6ace411eb222b.patch')
# https://bitbucket.org/petsc/pkg-parmetis/commits/82409d68aa1d6cbc70740d0f35024aae17f7d5cb/raw/ # NOQA: E501 # https://bitbucket.org/petsc/pkg-parmetis/commits/82409d68aa1d6cbc70740d0f35024aae17f7d5cb/raw/
patch('pkg-parmetis-82409d68aa1d6cbc70740d0f35024aae17f7d5cb.patch') patch('pkg-parmetis-82409d68aa1d6cbc70740d0f35024aae17f7d5cb.patch')
def flag_handler(self, name, flags): def flag_handler(self, name, flags):

View file

@ -19,7 +19,14 @@ class PyCvxopt(PythonPackage):
variant('gsl', default=False, description='Use GSL random number generators for constructing random matrices') variant('gsl', default=False, description='Use GSL random number generators for constructing random matrices')
variant('fftw', default=False, description='Install the cvxopt.fftw interface to FFTW') variant('fftw', default=False, description='Install the cvxopt.fftw interface to FFTW')
variant('glpk', default=False, description='Enable support for the linear programming solver GLPK') variant('glpk', default=False, description='Enable support for the linear programming solver GLPK')
# variant('mosek', default=False, description='Enable support for the linear, second-order cone, and quadratic programming solvers in MOSEK') # noqa # variant(
# 'mosek',
# default=False,
# description=(
# 'Enable support for the linear, second-order cone, and quadratic '
# 'programming solvers in MOSEK'
# ),
# )
variant('dsdp', default=False, description='Enable support for the semidefinite programming solver DSDP') variant('dsdp', default=False, description='Enable support for the semidefinite programming solver DSDP')
# Required dependencies # Required dependencies

View file

@ -638,7 +638,7 @@ def protobuf_deps():
filter_file( filter_file(
'"//third_party/systemlibs:protobuf.bzl": "protobuf.bzl",', '"//third_party/systemlibs:protobuf.bzl": "protobuf.bzl",',
'"//third_party/systemlibs:protobuf.bzl": "protobuf.bzl",\n' '"//third_party/systemlibs:protobuf.bzl": "protobuf.bzl",\n'
'"//third_party/systemlibs:protobuf_deps.bzl": "protobuf_deps.bzl",', # noqa: E501 '"//third_party/systemlibs:protobuf_deps.bzl": "protobuf_deps.bzl",',
file_to_patch, file_to_patch,
string=True) string=True)

View file

@ -42,7 +42,11 @@ class Tcsh(AutotoolsPackage):
fedora_patch('8a6066c901fb4fc75013dd488ba958387f00c74d', 'tcsh-6.20.00-009-fix-regexp-for-backlash-quoting-tests.patch', when='@6.20.00', sha256='d2358c930d5ab89e5965204dded499591b42a22d0a865e2149b8c0f1446fac34') fedora_patch('8a6066c901fb4fc75013dd488ba958387f00c74d', 'tcsh-6.20.00-009-fix-regexp-for-backlash-quoting-tests.patch', when='@6.20.00', sha256='d2358c930d5ab89e5965204dded499591b42a22d0a865e2149b8c0f1446fac34')
# Downstream patches # Downstream patches
fedora_patch('8a6066c901fb4fc75013dd488ba958387f00c74d', 'tcsh-6.20.00-manpage-memoryuse.patch', sha256='3a4e60fe56a450632140c48acbf14d22850c1d72835bf441e3f8514d6c617a9f') # noqa: E501 fedora_patch(
'8a6066c901fb4fc75013dd488ba958387f00c74d',
'tcsh-6.20.00-manpage-memoryuse.patch',
sha256='3a4e60fe56a450632140c48acbf14d22850c1d72835bf441e3f8514d6c617a9f',
)
depends_on('ncurses+termlib') depends_on('ncurses+termlib')

View file

@ -32,7 +32,7 @@ class Yambo(AutotoolsPackage):
variant( variant(
'io', values=any_combination_of('iotk', 'etsf-io'), 'io', values=any_combination_of('iotk', 'etsf-io'),
description='Activate support for different io formats (requires network access)', # noqa description='Activate support for different io formats (requires network access)',
) )
# MPI + OpenMP parallelism # MPI + OpenMP parallelism
@ -76,10 +76,10 @@ def filter_configure(self):
filter_file('config/report', report_abspath, 'configure') filter_file('config/report', report_abspath, 'configure')
def enable_or_disable_time(self, activated): def enable_or_disable_time(self, activated):
return '--enable-time-profile' if activated else '--disable-time-profile' # noqa: E501 return '--enable-time-profile' if activated else '--disable-time-profile'
def enable_or_disable_memory(self, activated): def enable_or_disable_memory(self, activated):
return '--enable-memory-profile' if activated else '--disable-memory-profile' # noqa: E501 return '--enable-memory-profile' if activated else '--disable-memory-profile'
def enable_or_disable_openmp(self, activated): def enable_or_disable_openmp(self, activated):
return '--enable-open-mp' if activated else '--disable-open-mp' return '--enable-open-mp' if activated else '--disable-open-mp'