style: make core comply with pep8-naming
parent 1713cb37da
commit 20e4038a72
35 changed files with 106 additions and 104 deletions
.flake8 (5 changed lines)

@@ -16,6 +16,9 @@
 # These are required to get the package.py files to test clean:
 # - F999: syntax error in doctest
 #
+# Exempt to allow decorator classes to be lowercase, but follow otherwise:
+# - N801: CapWords for class names.
+#
 [flake8]
-ignore = E129,E221,E241,E272,E731,F999
+ignore = E129,E221,E241,E272,E731,F999,N801
 max-line-length = 79

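A note on the new exemption: N801 is the pep8-naming check that requires CapWords class names. Ignoring it keeps flake8 from flagging decorator classes that are intentionally lowercase so that decorated call sites read like ordinary function calls. The snippet below is an illustrative sketch of that pattern, not code from this repository; the class name memoized is made up for the example.

class memoized(object):
    """Deliberately lowercase: used as a decorator, so usage reads like a
    function call. This is the style the N801 exemption allows."""

    def __init__(self, func):
        self.func = func
        self.cache = {}

    def __call__(self, *args):
        # Cache results keyed by the positional arguments.
        if args not in self.cache:
            self.cache[args] = self.func(*args)
        return self.cache[args]


@memoized
def fib(n):
    return n if n < 2 else fib(n - 1) + fib(n - 2)
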
@@ -282,8 +282,8 @@ def _cmp_key(self):
     def copy(self):
         """Type-agnostic clone method. Preserves subclass type."""
         # Construct a new dict of my type
-        T = type(self)
-        clone = T()
+        self_type = type(self)
+        clone = self_type()

         # Copy everything from this dict into it.
         for key in self:

@@ -39,10 +39,10 @@ def fun(pipe, x):
     return fun


-def parmap(f, X):
-    pipe = [Pipe() for x in X]
+def parmap(f, elements):
+    pipe = [Pipe() for x in elements]
     proc = [Process(target=spawn(f), args=(c, x))
-            for x, (p, c) in zip(X, pipe)]
+            for x, (p, c) in zip(elements, pipe)]
     [p.start() for p in proc]
     [p.join() for p in proc]
     return [p.recv() for (p, c) in pipe]

@@ -246,18 +246,18 @@ def hline(label=None, **kwargs):

 def terminal_size():
     """Gets the dimensions of the console: (rows, cols)."""
-    def ioctl_GWINSZ(fd):
+    def ioctl_gwinsz(fd):
         try:
             rc = struct.unpack('hh', fcntl.ioctl(
                 fd, termios.TIOCGWINSZ, '1234'))
         except BaseException:
             return
         return rc
-    rc = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
+    rc = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2)
     if not rc:
         try:
             fd = os.open(os.ctermid(), os.O_RDONLY)
-            rc = ioctl_GWINSZ(fd)
+            rc = ioctl_gwinsz(fd)
             os.close(fd)
         except BaseException:
             pass

@@ -193,14 +193,13 @@ def operating_system(self, name):
         return self.operating_sys.get(name, None)

     @classmethod
-    def setup_platform_environment(self, pkg, env):
+    def setup_platform_environment(cls, pkg, env):
         """ Subclass can override this method if it requires any
             platform-specific build environment modifications.
         """
-        pass

     @classmethod
-    def detect(self):
+    def detect(cls):
         """ Subclass is responsible for implementing this method.
             Returns True if the Platform class detects that
             it is the current platform

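Several hunks in this commit, starting with the one above, apply the same fix: a method decorated with @classmethod had its first parameter named self, which pep8-naming reports (the first argument of a classmethod should be named cls). The rename changes nothing at runtime, since the first argument is the class object either way. A small hypothetical sketch of the pattern; the class below is a stand-in, not Spack's Platform hierarchy.

import platform


class DarwinLike(object):
    # Before the rename this read 'def detect(self):', which the naming
    # check flags even though the decorator makes the first argument the
    # class itself rather than an instance.
    @classmethod
    def detect(cls):
        return 'darwin' in platform.system().lower()


print(DarwinLike.detect())  # True on macOS, False elsewhere
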
@@ -203,13 +203,13 @@ def tarball_path_name(spec, ext):

 def checksum_tarball(file):
     # calculate sha256 hash of tar file
-    BLOCKSIZE = 65536
+    block_size = 65536
     hasher = hashlib.sha256()
     with open(file, 'rb') as tfile:
-        buf = tfile.read(BLOCKSIZE)
+        buf = tfile.read(block_size)
         while len(buf) > 0:
             hasher.update(buf)
-            buf = tfile.read(BLOCKSIZE)
+            buf = tfile.read(block_size)
     return hasher.hexdigest()

@@ -676,8 +676,8 @@ def create(parser, args):
     build_system = get_build_system(args, guesser)

     # Create the package template object
-    PackageClass = templates[build_system]
-    package = PackageClass(name, url, versions)
+    package_class = templates[build_system]
+    package = package_class(name, url, versions)
     tty.msg("Created template for {0} package".format(package.name))

     # Create a directory for the new package

@@ -72,7 +72,7 @@ def fc_rpath_arg(self):
         return '-Wl,-Wl,,-rpath,,'

     @classmethod
-    def default_version(self, comp):
+    def default_version(cls, comp):
         """The ``-V`` option works for nag compilers.
         Output looks like this::

@@ -74,7 +74,7 @@ def fflags(self):
         return "-qzerosize"

     @classmethod
-    def default_version(self, comp):
+    def default_version(cls, comp):
         """The '-qversion' is the standard option fo XL compilers.
         Output looks like this::

@@ -542,9 +542,9 @@ def _validate_section(data, schema):
     """
     import jsonschema
     if not hasattr(_validate_section, 'validator'):
-        DefaultSettingValidator = _extend_with_default(
+        default_setting_validator = _extend_with_default(
             jsonschema.Draft4Validator)
-        _validate_section.validator = DefaultSettingValidator
+        _validate_section.validator = default_setting_validator

     try:
         _validate_section.validator(schema).validate(data)

@@ -80,7 +80,7 @@ class DirectiveMeta(type):
     _directive_names = set()
     _directives_to_be_executed = []

-    def __new__(mcs, name, bases, attr_dict):
+    def __new__(cls, name, bases, attr_dict):
         # Initialize the attribute containing the list of directives
         # to be executed. Here we go reversed because we want to execute
         # commands:
@@ -109,8 +109,8 @@ def __new__(mcs, name, bases, attr_dict):
                 DirectiveMeta._directives_to_be_executed)
             DirectiveMeta._directives_to_be_executed = []

-        return super(DirectiveMeta, mcs).__new__(
-            mcs, name, bases, attr_dict)
+        return super(DirectiveMeta, cls).__new__(
+            cls, name, bases, attr_dict)

     def __init__(cls, name, bases, attr_dict):
         # The class is being created: if it is a package we must ensure

@@ -1026,7 +1026,7 @@ class FsCache(object):
     def __init__(self, root):
         self.root = os.path.abspath(root)

-    def store(self, fetcher, relativeDst):
+    def store(self, fetcher, relative_dest):
         # skip fetchers that aren't cachable
         if not fetcher.cachable:
             return
@@ -1035,12 +1035,12 @@ def store(self, fetcher, relativeDst):
         if isinstance(fetcher, CacheURLFetchStrategy):
             return

-        dst = os.path.join(self.root, relativeDst)
+        dst = os.path.join(self.root, relative_dest)
         mkdirp(os.path.dirname(dst))
         fetcher.archive(dst)

-    def fetcher(self, targetPath, digest, **kwargs):
-        path = os.path.join(self.root, targetPath)
+    def fetcher(self, target_path, digest, **kwargs):
+        path = os.path.join(self.root, target_path)
         return CacheURLFetchStrategy(path, digest, **kwargs)

     def destroy(self):

@@ -44,7 +44,7 @@
 from spack.util.compression import allowed_archive


-def mirror_archive_filename(spec, fetcher, resourceId=None):
+def mirror_archive_filename(spec, fetcher, resource_id=None):
     """Get the name of the spec's archive in the mirror."""
     if not spec.version.concrete:
         raise ValueError("mirror.path requires spec with concrete version.")
@@ -87,18 +87,18 @@ def mirror_archive_filename(spec, fetcher, resourceId=None):
         # Otherwise we'll make a .tar.gz ourselves
         ext = 'tar.gz'

-    if resourceId:
-        filename = "%s-%s" % (resourceId, spec.version) + ".%s" % ext
+    if resource_id:
+        filename = "%s-%s" % (resource_id, spec.version) + ".%s" % ext
     else:
         filename = "%s-%s" % (spec.package.name, spec.version) + ".%s" % ext

     return filename


-def mirror_archive_path(spec, fetcher, resourceId=None):
+def mirror_archive_path(spec, fetcher, resource_id=None):
     """Get the relative path to the spec's archive within a mirror."""
     return os.path.join(
-        spec.name, mirror_archive_filename(spec, fetcher, resourceId))
+        spec.name, mirror_archive_filename(spec, fetcher, resource_id))


 def get_matching_versions(specs, **kwargs):

@@ -29,7 +29,7 @@


 # FIXME: store versions inside OperatingSystem as a Version instead of string
-def macOS_version():
+def macos_version():
     """temporary workaround to return a macOS version as a Version object
     """
     return Version('.'.join(py_platform.mac_ver()[0].split('.')[:2]))

|
|||
_InstallPhase_run_before = {}
|
||||
_InstallPhase_run_after = {}
|
||||
|
||||
def __new__(mcs, name, bases, attr_dict):
|
||||
def __new__(cls, name, bases, attr_dict):
|
||||
|
||||
if 'phases' in attr_dict:
|
||||
# Turn the strings in 'phases' into InstallPhase instances
|
||||
|
@ -176,7 +176,7 @@ def __new__(mcs, name, bases, attr_dict):
|
|||
def _flush_callbacks(check_name):
|
||||
# Name of the attribute I am going to check it exists
|
||||
attr_name = PackageMeta.phase_fmt.format(check_name)
|
||||
checks = getattr(mcs, attr_name)
|
||||
checks = getattr(cls, attr_name)
|
||||
if checks:
|
||||
for phase_name, funcs in checks.items():
|
||||
try:
|
||||
|
@ -202,12 +202,12 @@ def _flush_callbacks(check_name):
|
|||
PackageMeta.phase_fmt.format(phase_name)]
|
||||
getattr(phase, check_name).extend(funcs)
|
||||
# Clear the attribute for the next class
|
||||
setattr(mcs, attr_name, {})
|
||||
setattr(cls, attr_name, {})
|
||||
|
||||
_flush_callbacks('run_before')
|
||||
_flush_callbacks('run_after')
|
||||
|
||||
return super(PackageMeta, mcs).__new__(mcs, name, bases, attr_dict)
|
||||
return super(PackageMeta, cls).__new__(cls, name, bases, attr_dict)
|
||||
|
||||
@staticmethod
|
||||
def register_callback(check_type, *phases):
|
||||
|
@@ -1229,7 +1229,7 @@ def content_hash(self, content=None):
                        " if the associated spec is not concrete")
             raise spack.error.SpackError(err_msg)

-        hashContent = list()
+        hash_content = list()
         source_id = fs.for_package_version(self, self.version).source_id()
         if not source_id:
             # TODO? in cases where a digest or source_id isn't available,
@@ -1238,14 +1238,15 @@ def content_hash(self, content=None):
             # referenced by branch name rather than tag or commit ID.
             message = 'Missing a source id for {s.name}@{s.version}'
             tty.warn(message.format(s=self))
-            hashContent.append(''.encode('utf-8'))
+            hash_content.append(''.encode('utf-8'))
         else:
-            hashContent.append(source_id.encode('utf-8'))
-        hashContent.extend(':'.join((p.sha256, str(p.level))).encode('utf-8')
-                           for p in self.spec.patches)
-        hashContent.append(package_hash(self.spec, content))
+            hash_content.append(source_id.encode('utf-8'))
+        hash_content.extend(':'.join((p.sha256, str(p.level))).encode('utf-8')
+                            for p in self.spec.patches)
+        hash_content.append(package_hash(self.spec, content))
         return base64.b32encode(
-            hashlib.sha256(bytes().join(sorted(hashContent))).digest()).lower()
+            hashlib.sha256(bytes().join(
+                sorted(hash_content))).digest()).lower()

     @property
     def namespace(self):

@@ -53,5 +53,5 @@ def __init__(self):
         self.add_operating_system(str(back_distro), back_distro)

     @classmethod
-    def detect(self):
+    def detect(cls):
         return os.path.exists('/bgsys')

@@ -45,5 +45,5 @@ def __init__(self):
         self.add_operating_system(str(mac_os), mac_os)

     @classmethod
-    def detect(self):
+    def detect(cls):
         return 'darwin' in platform.system().lower()

@@ -49,5 +49,5 @@ def __init__(self):
         self.add_operating_system(str(linux_dist), linux_dist)

     @classmethod
-    def detect(self):
+    def detect(cls):
         return 'linux' in platform.system().lower()

@@ -23,7 +23,7 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 from spack.architecture import Platform, Target
-from spack.architecture import OperatingSystem as OS
+from spack.architecture import OperatingSystem


 class Test(Platform):
@@ -41,9 +41,11 @@ def __init__(self):
         self.add_target(self.default, Target(self.default))
         self.add_target(self.front_end, Target(self.front_end))

-        self.add_operating_system(self.default_os, OS('debian', 6))
-        self.add_operating_system(self.front_os, OS('redhat', 6))
+        self.add_operating_system(
+            self.default_os, OperatingSystem('debian', 6))
+        self.add_operating_system(
+            self.front_os, OperatingSystem('redhat', 6))

     @classmethod
-    def detect(self):
+    def detect(cls):
         return True

@@ -432,9 +432,9 @@ def generate_fetchers():
                 tty.debug(e)
                 continue
         else:
-            errMessage = "All fetchers failed for %s" % self.name
+            err_msg = "All fetchers failed for %s" % self.name
             self.fetcher = self.default_fetcher
-            raise fs.FetchError(errMessage, None)
+            raise fs.FetchError(err_msg, None)

     def check(self):
         """Check the downloaded archive against a checksum digest.

@@ -43,10 +43,10 @@ class ContextMeta(type):
     #: by the class that is being defined
     _new_context_properties = []

-    def __new__(mcs, name, bases, attr_dict):
+    def __new__(cls, name, bases, attr_dict):
         # Merge all the context properties that are coming from base classes
         # into a list without duplicates.
-        context_properties = list(mcs._new_context_properties)
+        context_properties = list(cls._new_context_properties)
         for x in bases:
             try:
                 context_properties.extend(x.context_properties)
@@ -55,20 +55,20 @@ def __new__(mcs, name, bases, attr_dict):
         context_properties = list(llnl.util.lang.dedupe(context_properties))

         # Flush the list
-        mcs._new_context_properties = []
+        cls._new_context_properties = []

         # Attach the list to the class being created
         attr_dict['context_properties'] = context_properties

-        return super(ContextMeta, mcs).__new__(mcs, name, bases, attr_dict)
+        return super(ContextMeta, cls).__new__(cls, name, bases, attr_dict)

     @classmethod
-    def context_property(mcs, func):
+    def context_property(cls, func):
         """Decorator that adds a function name to the list of new context
         properties, and then returns a property.
         """
         name = func.__name__
-        mcs._new_context_properties.append(name)
+        cls._new_context_properties.append(name)
         return property(func)

@@ -44,7 +44,7 @@
 import spack.repo
 import spack.stage
 import spack.util.executable
-import spack.util.pattern
+from spack.util.pattern import Bunch
 from spack.dependency import Dependency
 from spack.package import PackageBase
 from spack.fetch_strategy import FetchStrategyComposite, URLFetchStrategy
@@ -165,10 +165,10 @@ def mock_fetch_cache(monkeypatch):
     and raises on fetch.
     """
     class MockCache(object):
-        def store(self, copyCmd, relativeDst):
+        def store(self, copy_cmd, relative_dest):
             pass

-        def fetcher(self, targetPath, digest, **kwargs):
+        def fetcher(self, target_path, digest, **kwargs):
             return MockCacheFetcher()

     class MockCacheFetcher(object):

@@ -508,7 +508,6 @@ def mock_git_repository(tmpdir_factory):
     r1 = rev_hash(branch)
     r1_file = branch_file

-    Bunch = spack.util.pattern.Bunch
     checks = {
         'master': Bunch(
             revision='master', file=r0_file, args={'git': str(repodir)}
@@ -561,7 +560,6 @@ def mock_hg_repository(tmpdir_factory):
     hg('commit', '-m' 'revision 1', '-u', 'test')
     r1 = get_rev()

-    Bunch = spack.util.pattern.Bunch
     checks = {
         'default': Bunch(
             revision=r1, file=r1_file, args={'hg': str(repodir)}
@@ -618,7 +616,6 @@ def mock_svn_repository(tmpdir_factory):
     r0 = '1'
     r1 = '2'

-    Bunch = spack.util.pattern.Bunch
     checks = {
         'default': Bunch(
             revision=r1, file=r1_file, args={'svn': url}),

@@ -37,7 +37,7 @@ def temp_env():
     os.environ = old_env


-def add_O3_to_build_system_cflags(pkg, name, flags):
+def add_o3_to_build_system_cflags(pkg, name, flags):
     build_system_flags = []
     if name == 'cflags':
         build_system_flags.append('-O3')
@@ -137,7 +137,7 @@ def test_add_build_system_flags_autotools(self, temp_env):
         s = spack.spec.Spec('libelf cppflags=-g')
         s.concretize()
         pkg = spack.repo.get(s)
-        pkg.flag_handler = add_O3_to_build_system_cflags
+        pkg.flag_handler = add_o3_to_build_system_cflags
         spack.build_environment.setup_package(pkg, False)

         assert '-g' in os.environ['SPACK_CPPFLAGS']
@@ -149,7 +149,7 @@ def test_add_build_system_flags_cmake(self, temp_env):
         s = spack.spec.Spec('callpath cppflags=-g')
         s.concretize()
         pkg = spack.repo.get(s)
-        pkg.flag_handler = add_O3_to_build_system_cflags
+        pkg.flag_handler = add_o3_to_build_system_cflags
         spack.build_environment.setup_package(pkg, False)

         assert '-g' in os.environ['SPACK_CPPFLAGS']

@@ -263,7 +263,7 @@ def test_searching_order(search_fn, search_list, root, kwargs):
     # Now reverse the result and start discarding things
     # as soon as you have matches. In the end the list should
     # be emptied.
-    L = list(reversed(result))
+    rlist = list(reversed(result))

     # At this point make sure the search list is a sequence
     if isinstance(search_list, six.string_types):
@@ -272,14 +272,14 @@ def test_searching_order(search_fn, search_list, root, kwargs):
     # Discard entries in the order they appear in search list
     for x in search_list:
         try:
-            while fnmatch.fnmatch(L[-1], x) or x in L[-1]:
-                L.pop()
+            while fnmatch.fnmatch(rlist[-1], x) or x in rlist[-1]:
+                rlist.pop()
         except IndexError:
             # List is empty
             pass

     # List should be empty here
-    assert len(L) == 0
+    assert len(rlist) == 0


 @pytest.mark.parametrize('root,search_list,kwargs,expected', [

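The L-to-rlist rename above is the variable-name analogue of the same rule set: pep8-naming expects local variables inside functions to be lowercase, and a bare capital L also reads poorly next to list indexing. A tiny illustrative sketch of the flagged pattern and its fix (not code from the test suite):

def drain(result):
    # 'L = list(reversed(result))' would be flagged; a descriptive
    # lowercase name passes the check and is easier to read.
    rlist = list(reversed(result))
    while rlist:
        rlist.pop()
    return rlist


assert drain([1, 2, 3]) == []
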
@@ -42,13 +42,13 @@

 @pytest.fixture
 def save_env():
-    old_PATH = os.environ.get('PATH', None)
+    old_path = os.environ.get('PATH', None)
     old_bash_func = os.environ.get('BASH_FUNC_module()', None)

     yield

-    if old_PATH:
-        os.environ['PATH'] = old_PATH
+    if old_path:
+        os.environ['PATH'] = old_path
     if old_bash_func:
         os.environ['BASH_FUNC_module()'] = old_bash_func

@@ -171,11 +171,11 @@ def test_no_hash(self, factory, patch_configuration):
         path = module.layout.filename
         mpi_spec = spec['mpi']

-        mpiElement = "{0}/{1}-{2}/".format(
+        mpi_element = "{0}/{1}-{2}/".format(
             mpi_spec.name, mpi_spec.version, mpi_spec.dag_hash(length=7)
         )

-        assert mpiElement in path
+        assert mpi_element in path

         mpileaks_spec = spec
         mpileaks_element = "{0}/{1}.lua".format(

@@ -42,8 +42,8 @@ def extra_repo(tmpdir_factory):
     repo_dir = tmpdir_factory.mktemp(repo_namespace)
     repo_dir.ensure('packages', dir=True)

-    with open(str(repo_dir.join('repo.yaml')), 'w') as F:
-        F.write("""
+    with open(str(repo_dir.join('repo.yaml')), 'w') as f:
+        f.write("""
 repo:
   namespace: extra_test_repo
 """)

@@ -137,8 +137,8 @@ def python_and_extension_dirs(tmpdir):
     create_dir_structure(ext_prefix, ext_dirs)

     easy_install_location = 'lib/python2.7/site-packages/easy-install.pth'
-    with open(str(ext_prefix.join(easy_install_location)), 'w') as F:
-        F.write("""path/to/ext1.egg
+    with open(str(ext_prefix.join(easy_install_location)), 'w') as f:
+        f.write("""path/to/ext1.egg
 path/to/setuptools.egg""")

     return str(python_prefix), str(ext_prefix)
@@ -204,8 +204,8 @@ def test_python_activation_with_files(tmpdir, python_and_extension_dirs):
     assert os.path.exists(os.path.join(python_prefix, 'bin/py-ext-tool'))

     easy_install_location = 'lib/python2.7/site-packages/easy-install.pth'
-    with open(os.path.join(python_prefix, easy_install_location), 'r') as F:
-        easy_install_contents = F.read()
+    with open(os.path.join(python_prefix, easy_install_location), 'r') as f:
+        easy_install_contents = f.read()

     assert 'ext1.egg' in easy_install_contents
     assert 'setuptools.egg' not in easy_install_contents

@@ -130,7 +130,7 @@ def write_transaction(self, key):
         """
         class WriteContextManager(object):

-            def __enter__(cm):
+            def __enter__(cm):  # noqa
                 cm.orig_filename = self.cache_path(key)
                 cm.orig_file = None
                 if os.path.exists(cm.orig_filename):
@@ -141,7 +141,7 @@ def __enter__(cm):

                 return cm.orig_file, cm.tmp_file

-            def __exit__(cm, type, value, traceback):
+            def __exit__(cm, type, value, traceback):  # noqa
                 if cm.orig_file:
                     cm.orig_file.close()
                 cm.tmp_file.close()

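Here the fix is a # noqa marker rather than a rename: WriteContextManager is defined inside write_transaction, and its methods deliberately call their first argument cm so that the enclosing method's self stays available through the closure, which the naming check would otherwise complain about. A compressed, hypothetical sketch of the same closure-plus-inner-class pattern (FileCacheLike and its path attribute are invented for the example):

class FileCacheLike(object):

    def __init__(self, path):
        self.path = path

    def write_transaction(self):
        outer = self  # captured by the inner class's methods

        class WriteContextManager(object):

            def __enter__(cm):  # noqa
                # 'cm' is this context manager; 'outer' is the cache object.
                cm.target = outer.path
                return cm.target

            def __exit__(cm, exc_type, exc_value, traceback):  # noqa
                pass

        return WriteContextManager()


with FileCacheLike('/tmp/example.txt').write_transaction() as target:
    print(target)
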
@@ -194,9 +194,9 @@ def get_path_from_module(mod):

     # If it lists a -L instruction, use that
     for line in text:
-        L = line.find('-L/')
-        if L >= 0:
-            return line[L + 2:line.find('/lib')]
+        lib_paths = line.find('-L/')
+        if lib_paths >= 0:
+            return line[lib_paths + 2:line.find('/lib')]

     # If it sets the PATH, use it
     for line in text:

@@ -44,13 +44,13 @@ def remove_docstring(self, node):
         self.generic_visit(node)
         return node

-    def visit_FunctionDef(self, node):
+    def visit_FunctionDef(self, node):  # noqa
         return self.remove_docstring(node)

-    def visit_ClassDef(self, node):
+    def visit_ClassDef(self, node):  # noqa
         return self.remove_docstring(node)

-    def visit_Module(self, node):
+    def visit_Module(self, node):  # noqa
         return self.remove_docstring(node)

@@ -69,7 +69,7 @@ def is_spack_attr(self, node):
             node.targets and isinstance(node.targets[0], ast.Name) and
             node.targets[0].id in spack.package.Package.metadata_attrs)

-    def visit_ClassDef(self, node):
+    def visit_ClassDef(self, node):  # noqa
         if node.name == spack.util.naming.mod_to_class(self.spec.name):
             node.body = [
                 c for c in node.body
@@ -83,7 +83,7 @@ def __init__(self, spec):
         self.spec = spec
         self.methods = {}

-    def visit_FunctionDef(self, node):
+    def visit_FunctionDef(self, node):  # noqa
         nodes = self.methods.setdefault(node.name, [])
         if node.decorator_list:
             dec = node.decorator_list[0]
@@ -112,7 +112,7 @@ def resolve(self, node):
             result = n
         return result

-    def visit_FunctionDef(self, node):
+    def visit_FunctionDef(self, node):  # noqa
         if self.resolve(node) is node:
             node.decorator_list = []
             return node

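The visitor methods above keep their mixed-case names and get # noqa instead of a rename: ast.NodeVisitor dispatches by looking up a method literally named visit_<NodeClassName> (visit_FunctionDef, visit_ClassDef, visit_Module), so renaming them to satisfy the lowercase-function-name check would silently disable the visitor. A small self-contained sketch of that dispatch, independent of the Spack classes above:

import ast


class FunctionNameCollector(ast.NodeVisitor):
    """Collects the names of all function definitions in a module."""

    def __init__(self):
        self.names = []

    def visit_FunctionDef(self, node):  # noqa
        # ast.NodeVisitor.visit() dispatches to visit_<ClassName>, so this
        # method name must match the AST node class exactly.
        self.names.append(node.name)
        self.generic_visit(node)


source = '''
def foo():
    pass


def bar():
    pass
'''

collector = FunctionNameCollector()
collector.visit(ast.parse(source))
print(collector.names)  # ['foo', 'bar']
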
@@ -23,7 +23,7 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 from spack import *
-from spack.operating_systems.mac_os import macOS_version
+from spack.operating_systems.mac_os import macos_version
 import sys


@@ -42,7 +42,7 @@ class Bison(AutotoolsPackage):

     patch('pgi.patch', when='@3.0.4')

-    if sys.platform == 'darwin' and macOS_version() >= Version('10.13'):
+    if sys.platform == 'darwin' and macos_version() >= Version('10.13'):
         patch('secure_snprintf.patch', level=0, when='@3.0.4')

     build_directory = 'spack-build'

@@ -23,7 +23,7 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 from spack import *
-from spack.operating_systems.mac_os import macOS_version
+from spack.operating_systems.mac_os import macos_version
 from llnl.util import tty

 import glob
@@ -157,7 +157,7 @@ class Gcc(AutotoolsPackage):
     if sys.platform == 'darwin':
         # Fix parallel build on APFS filesystem
         # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=81797
-        if macOS_version() >= Version('10.13'):
+        if macos_version() >= Version('10.13'):
             patch('darwin/apfs.patch', when='@5.5.0,6.1:6.4,7.1:7.3')
         # from homebrew via macports
         # https://trac.macports.org/ticket/56502#no1

@@ -23,7 +23,7 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 from spack import *
-from spack.operating_systems.mac_os import macOS_version
+from spack.operating_systems.mac_os import macos_version
 import platform


@@ -68,7 +68,7 @@ class Oce(Package):
     # fix build with Xcode 8 "previous definition of CLOCK_REALTIME"
     # reported 27 Sep 2016 https://github.com/tpaviot/oce/issues/643
     if (platform.system() == "Darwin") and (
-            macOS_version() == Version('10.12')):
+            macos_version() == Version('10.12')):
         patch('sierra.patch', when='@0.17.2:0.18.0')

     def install(self, spec, prefix):
@@ -99,7 +99,7 @@ def install(self, spec, prefix):
         ])

         if platform.system() == 'Darwin' and (
-                macOS_version() >= Version('10.12')):
+                macos_version() >= Version('10.12')):
             # use @rpath on Sierra due to limit of dynamic loader
             options.append('-DCMAKE_MACOSX_RPATH=ON')
         else:

@@ -25,7 +25,7 @@
 import os
 import sys
 from spack import *
-from spack.operating_systems.mac_os import macOS_version
+from spack.operating_systems.mac_os import macos_version

 # Trilinos is complicated to build, as an inspiration a couple of links to
 # other repositories which build it:
@@ -703,7 +703,7 @@ def cmake_args(self):
             '-DTrilinos_ENABLE_FEI=OFF'
         ])

-        if sys.platform == 'darwin' and macOS_version() >= Version('10.12'):
+        if sys.platform == 'darwin' and macos_version() >= Version('10.12'):
             # use @rpath on Sierra due to limit of dynamic loader
             options.append('-DCMAKE_MACOSX_RPATH=ON')
         else: