Merge branch 'develop' of https://github.com/LLNL/spack into features/install_with_phases

Conflicts:
	lib/spack/llnl/util/tty/log.py
	lib/spack/spack/__init__.py
	lib/spack/spack/cmd/install.py
	lib/spack/spack/cmd/setup.py
	lib/spack/spack/package.py
	var/spack/repos/builtin/packages/blitz/package.py
	var/spack/repos/builtin/packages/gmp/package.py
	var/spack/repos/builtin/packages/qhull/package.py
	var/spack/repos/builtin/packages/szip/package.py
alalazo 2016-08-11 08:55:20 +02:00
commit b4b9ebe7d7
770 changed files with 14235 additions and 4355 deletions


@ -19,5 +19,5 @@
# - F999: name may be undefined, or defined from star imports.
#
[flake8]
ignore = E221,E241,E731,F403,F821,F999,F405
ignore = E129,E221,E241,E272,E731,F403,F821,F999,F405
max-line-length = 79
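For orientation, the newly ignored codes are pycodestyle checks (E129: visually indented line with same indent as next logical line; E272: multiple spaces before keyword). A minimal sketch of one check the list already suppressed, E731, which flags assigning a lambda to a name:

```python
# E731 ("do not assign a lambda expression, use a def") is on the
# ignore list above, so flake8 accepts assignments like this one:
compare = lambda a, b: a <= b
assert compare(1, 2)
```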

.gitignore

@ -1,5 +1,7 @@
/var/spack/stage
/var/spack/cache
/var/spack/repos/*/index.yaml
/var/spack/repos/*/lock
*.pyc
/opt
*~


@ -1,7 +1,17 @@
language: python
python:
- "2.6"
- "2.7"
env:
- TEST_TYPE=unit
- TEST_TYPE=flake8
# Exclude flake8 from python 2.6
matrix:
exclude:
- python: "2.6"
env: TEST_TYPE=flake8
# Use new Travis infrastructure (Docker can't sudo yet)
sudo: false
@ -20,20 +30,13 @@ before_install:
- git fetch origin develop:develop
script:
# Regular spack setup and tests
- . share/spack/setup-env.sh
- spack compilers
- spack config get compilers
- spack install -v libdwarf
# Run unit tests with code coverage
- coverage run bin/spack test
# Run unit tests with code coverage plus install libdwarf
- 'if [ "$TEST_TYPE" = "unit" ]; then share/spack/qa/run-unit-tests; fi'
# Run flake8 code style checks.
- share/spack/qa/run-flake8
- 'if [ "$TEST_TYPE" = "flake8" ]; then share/spack/qa/run-flake8; fi'
after_success:
- coveralls
- 'if [ "$TEST_TYPE" = "unit" ] && [ "$TRAVIS_PYTHON_VERSION" = "2.7" ]; then coveralls; fi'
notifications:
email:


@ -58,17 +58,24 @@ can join it here:
### Contributions
At the moment, contributing to Spack is relatively simple. Just send us
a [pull request](https://help.github.com/articles/using-pull-requests/).
Contributing to Spack is relatively easy. Just send us a
[pull request](https://help.github.com/articles/using-pull-requests/).
When you send your request, make ``develop`` the destination branch on the
[Spack repository](https://github.com/LLNL/spack).
Your contribution will need to pass all the tests run by the `spack test`
command, as well as the formatting checks in `share/spack/qa/run-flake8`.
You should run both of these before submitting your pull request, to
ensure that the online checks succeed.
Before you send a PR, your code should pass the following checks:
* Your contribution will need to pass the `spack test` command.
Run this before submitting your PR.
* Also run the `share/spack/qa/run-flake8` script to check for PEP8 compliance.
To encourage contributions and readability by a broad audience,
Spack uses the [PEP8](https://www.python.org/dev/peps/pep-0008/) coding
standard with [a few exceptions](https://github.com/LLNL/spack/blob/develop/.flake8).
We enforce these guidelines with [Travis CI](https://travis-ci.org/LLNL/spack).
Spack is using a rough approximation of the [Git
Spack uses a rough approximation of the [Git
Flow](http://nvie.com/posts/a-successful-git-branching-model/)
branching model. The ``develop`` branch contains the latest
contributions, and ``master`` is always tagged and points to the


@ -1,4 +1,5 @@
#!/usr/bin/env python
# flake8: noqa
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
@ -24,9 +25,10 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
if not sys.version_info[:2] >= (2,6):
if not sys.version_info[:2] >= (2, 6):
v_info = sys.version_info[:3]
sys.exit("Spack requires Python 2.6 or higher. This is Python %d.%d.%d." % v_info)
sys.exit("Spack requires Python 2.6 or higher. "
"This is Python %d.%d.%d." % v_info)
import os
@ -62,7 +64,8 @@ for pyc_file in orphaned_pyc_files:
try:
os.remove(pyc_file)
except OSError as e:
print "WARNING: Spack may fail mysteriously. Couldn't remove orphaned .pyc file: %s" % pyc_file
print ("WARNING: Spack may fail mysteriously. "
"Couldn't remove orphaned .pyc file: %s" % pyc_file)
# If there is no working directory, use the spack prefix.
try:
@ -128,6 +131,7 @@ if len(sys.argv) == 1:
# actually parse the args.
args = parser.parse_args()
def main():
# Set up environment based on args.
tty.set_verbose(args.verbose)
@ -148,7 +152,7 @@ def main():
# If the user asked for it, don't check ssl certs.
if args.insecure:
tty.warn("You asked for --insecure, which does not check SSL certificates.")
tty.warn("You asked for --insecure. Will NOT check SSL certificates.")
spack.curl.add_default_arg('-k')
# Try to load the particular command asked for and run it
@ -167,7 +171,8 @@ def main():
elif isinstance(return_val, int):
sys.exit(return_val)
else:
tty.die("Bad return value from command %s: %s" % (args.command, return_val))
tty.die("Bad return value from command %s: %s"
% (args.command, return_val))
if args.profile:
import cProfile
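Several of the rewrapped messages in this file rely on implicit adjacent-string concatenation rather than `+` or backslash continuations; a small self-contained sketch of the idiom (the file name is made up):

```python
# Adjacent string literals are concatenated at compile time, so long
# messages wrap across lines without '+' or backslashes.
pyc_file = 'example.pyc'  # hypothetical value, for illustration only
print("WARNING: Spack may fail mysteriously. "
      "Couldn't remove orphaned .pyc file: %s" % pyc_file)
```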


@ -1147,18 +1147,19 @@ packages use RPATH to find their dependencies: this can be true in
particular for Python extensions, which are currently *not* built with
RPATH.
Modules may be loaded recursively with the command:
Modules may be loaded recursively with the ``load`` command's
``--dependencies`` or ``-r`` argument:
.. code-block:: sh
$ module load `spack module tcl --dependencies <spec>...`
$ spack load --dependencies <spec> ...
More than one spec may be placed on the command line here.
Module Commands for Shell Scripts
``````````````````````````````````
Although Spack is flexbile, the ``module`` command is much faster.
Although Spack is flexible, the ``module`` command is much faster.
This could become an issue when emitting a series of ``spack load``
commands inside a shell script. By adding the ``--shell`` flag,
``spack module find`` may also be used to generate code that can be
@ -1866,6 +1867,10 @@ to call the Cray compiler wrappers during build time.
For more on compiler configuration, check out :ref:`compiler-config`.
Spack sets the default Cray link type to dynamic, to better match other
platforms. Individual packages can enable static linking (which is the
default outside of Spack on Cray systems) using the ``-static`` flag.
Setting defaults and using Cray modules
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~


@ -142,8 +142,9 @@ Here's an example packages.yaml file that sets preferred packages:
.. code-block:: sh
packages:
dyninst:
opencv:
compiler: [gcc@4.9]
variants: +debug
gperftools:
version: [2.2, 2.4, 2.3]
all:
@ -153,17 +154,17 @@ Here's an example packages.yaml file that sets preferred packages:
At a high level, this example is specifying how packages should be
concretized. The dyninst package should prefer using gcc 4.9.
The gperftools package should prefer version
concretized. The opencv package should prefer using gcc 4.9 and
be built with debug options. The gperftools package should prefer version
2.2 over 2.4. Every package on the system should prefer mvapich for
its MPI and gcc 4.4.7 (except for Dyninst, which overrides this by preferring gcc 4.9).
its MPI and gcc 4.4.7 (except for opencv, which overrides this by preferring gcc 4.9).
These options are used to fill in implicit defaults. Any of them can be overwritten
on the command line if explicitly requested.
Each packages.yaml file begins with the string ``packages:`` and
package names are specified on the next level. The special string ``all``
applies settings to each package. Underneath each package name is
one or more components: ``compiler``, ``version``,
one or more components: ``compiler``, ``variants``, ``version``,
or ``providers``. Each component has an ordered list of spec
``constraints``, with earlier entries in the list being preferred over
later entries.


@ -1307,9 +1307,9 @@ The dependency types are:
If not specified, ``type`` is assumed to be ``("build", "link")``. This is the
common case for compiled language usage. Also available are the aliases
``alldeps`` for all dependency types and ``nolink`` (``("build", "run")``) for
use by dependencies which are not expressed via a linker (e.g., Python or Lua
module loading).
``"alldeps"`` for all dependency types and ``"nolink"`` (``("build", "run")``)
for use by dependencies which are not expressed via a linker (e.g., Python or
Lua module loading).
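As a hedged illustration of the dependency types described above (the package and dependency names are hypothetical, not taken from this commit), a package file might declare:

```python
from spack import *  # noqa: F403 -- Spack package files conventionally star-import

class Example(Package):
    """Hypothetical package illustrating dependency types."""
    homepage = "http://www.example.com"
    url = "http://www.example.com/example-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    depends_on('cmake', type='build')    # needed at build time only
    depends_on('zlib')                   # default: ('build', 'link')
    depends_on('python', type='nolink')  # ('build', 'run'): loaded, never linked

    def install(self, spec, prefix):
        pass  # build logic omitted
```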
.. _setup-dependent-environment:

lib/spack/env/cray/CC (symbolic link)

@ -0,0 +1 @@
../cc

lib/spack/env/cray/cc (symbolic link)

@ -0,0 +1 @@
../cc

lib/spack/env/cray/ftn (symbolic link)

@ -0,0 +1 @@
../cc


@ -29,8 +29,9 @@
import stat
import errno
import getpass
from contextlib import contextmanager, closing
from contextlib import contextmanager
import subprocess
import fileinput
import llnl.util.tty as tty
@ -85,13 +86,14 @@ def groupid_to_group(x):
if ignore_absent and not os.path.exists(filename):
continue
# Create backup file. Don't overwrite an existing backup
# file in case this file is being filtered multiple times.
if not os.path.exists(backup_filename):
shutil.copy(filename, backup_filename)
try:
with closing(open(backup_filename)) as infile:
with closing(open(filename, 'w')) as outfile:
for line in infile:
foo = re.sub(regex, repl, line)
outfile.write(foo)
for line in fileinput.input(filename, inplace=True):
print(re.sub(regex, repl, line.rstrip('\n')))
except:
# clean up the original file on failure.
shutil.move(backup_filename, filename)
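The replacement body above leans on `fileinput`'s in-place mode; a standalone sketch of the same pattern (the file name is hypothetical):

```python
import fileinput
import re

# With inplace=True, stdout is redirected into the file being read, so
# print() rewrites each line of the file in place.
for line in fileinput.input('config.txt', inplace=True):
    print(re.sub(r'@VERSION@', '1.0', line.rstrip('\n')))
```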
@ -104,6 +106,7 @@ def groupid_to_group(x):
class FileFilter(object):
"""Convenience class for calling filter_file a lot."""
def __init__(self, *filenames):
self.filenames = filenames
@ -189,7 +192,7 @@ def install(src, dest):
def install_tree(src, dest, **kwargs):
"""Manually install a file to a particular location."""
"""Manually install a directory tree to a particular location."""
tty.debug("Installing %s to %s" % (src, dest))
shutil.copytree(src, dest, **kwargs)
@ -353,7 +356,8 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
# When follow_nonexisting isn't set, don't descend into dirs
# in source that do not exist in dest
if follow_nonexisting or os.path.exists(dest_child):
tuples = traverse_tree(source_root, dest_root, rel_child, **kwargs) # NOQA: ignore=E501
tuples = traverse_tree(
source_root, dest_root, rel_child, **kwargs)
for t in tuples:
yield t
@ -420,14 +424,20 @@ def fix_darwin_install_name(path):
libs = glob.glob(join_path(path, "*.dylib"))
for lib in libs:
# fix install name first:
subprocess.Popen(["install_name_tool", "-id", lib, lib], stdout=subprocess.PIPE).communicate()[0] # NOQA: ignore=E501
long_deps = subprocess.Popen(["otool", "-L", lib], stdout=subprocess.PIPE).communicate()[0].split('\n') # NOQA: ignore=E501
subprocess.Popen(
["install_name_tool", "-id", lib, lib],
stdout=subprocess.PIPE).communicate()[0]
long_deps = subprocess.Popen(
["otool", "-L", lib],
stdout=subprocess.PIPE).communicate()[0].split('\n')
deps = [dep.partition(' ')[0][1::] for dep in long_deps[2:-1]]
# fix all dependencies:
for dep in deps:
for loc in libs:
if dep == os.path.basename(loc):
subprocess.Popen(["install_name_tool", "-change", dep, loc, lib], stdout=subprocess.PIPE).communicate()[0] # NOQA: ignore=E501
subprocess.Popen(
["install_name_tool", "-change", dep, loc, lib],
stdout=subprocess.PIPE).communicate()[0]
break
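A hedged usage sketch of the helper being rewrapped above, as a package's install step might call it (the prefix path is made up):

```python
import sys
from llnl.util.filesystem import fix_darwin_install_name

# On macOS, rewrite install names so the .dylib files under the
# (hypothetical) prefix reference each other by absolute path.
if sys.platform == 'darwin':
    fix_darwin_install_name('/opt/example/lib')
```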


@ -24,7 +24,6 @@
##############################################################################
import os
import re
import sys
import functools
import collections
import inspect
@ -45,7 +44,8 @@ def index_by(objects, *funcs):
d = Spec(name="libdwarf", compiler="intel", arch="chaos_5_x86_64_ib")
list_of_specs = [a,b,c,d]
index1 = index_by(list_of_specs, lambda s: s.arch, lambda s: s.compiler)
index1 = index_by(list_of_specs, lambda s: s.arch,
lambda s: s.compiler)
index2 = index_by(list_of_specs, lambda s: s.compiler)
``index1'' now has two levels of dicts, with lists at the
@ -137,7 +137,7 @@ def get_calling_module_name():
finally:
del stack
if not '__module__' in caller_locals:
if '__module__' not in caller_locals:
raise RuntimeError("Must invoke get_calling_module_name() "
"from inside a class definition!")
@ -173,11 +173,11 @@ def has_method(cls, name):
class memoized(object):
"""Decorator that caches the results of a function, storing them
in an attribute of that function."""
def __init__(self, func):
self.func = func
self.cache = {}
def __call__(self, *args):
if not isinstance(args, collections.Hashable):
# Not hashable, so just call the function.
@ -187,12 +187,10 @@ def __call__(self, *args):
self.cache[args] = self.func(*args)
return self.cache[args]
def __get__(self, obj, objtype):
"""Support instance methods."""
return functools.partial(self.__call__, obj)
def clear(self):
"""Expunge cache so that self.func will be called again."""
self.cache.clear()
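A short usage sketch of the `memoized` decorator shown above:

```python
from llnl.util.lang import memoized

@memoized
def squared(x):
    print("computing %d" % x)  # runs once per distinct argument
    return x * x

squared(4)       # computes and caches
squared(4)       # answered from the cache; nothing is printed
squared.clear()  # empty the cache so the next call recomputes
```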
@ -237,13 +235,21 @@ def setter(name, value):
if not has_method(cls, '_cmp_key'):
raise TypeError("'%s' doesn't define _cmp_key()." % cls.__name__)
setter('__eq__', lambda s,o: (s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
setter('__lt__', lambda s,o: o is not None and s._cmp_key() < o._cmp_key())
setter('__le__', lambda s,o: o is not None and s._cmp_key() <= o._cmp_key())
setter('__eq__',
lambda s, o:
(s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
setter('__lt__',
lambda s, o: o is not None and s._cmp_key() < o._cmp_key())
setter('__le__',
lambda s, o: o is not None and s._cmp_key() <= o._cmp_key())
setter('__ne__', lambda s,o: (s is not o) and (o is None or s._cmp_key() != o._cmp_key()))
setter('__gt__', lambda s,o: o is None or s._cmp_key() > o._cmp_key())
setter('__ge__', lambda s,o: o is None or s._cmp_key() >= o._cmp_key())
setter('__ne__',
lambda s, o:
(s is not o) and (o is None or s._cmp_key() != o._cmp_key()))
setter('__gt__',
lambda s, o: o is None or s._cmp_key() > o._cmp_key())
setter('__ge__',
lambda s, o: o is None or s._cmp_key() >= o._cmp_key())
setter('__hash__', lambda self: hash(self._cmp_key()))
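A hedged sketch of a class using the `@key_ordering` decorator these setters implement (the class is illustrative, not one of Spack's):

```python
from llnl.util.lang import key_ordering

@key_ordering
class Pair(object):
    def __init__(self, major, minor):
        self.major, self.minor = major, minor

    def _cmp_key(self):
        # All six rich comparisons and hash() derive from this tuple.
        return (self.major, self.minor)

assert Pair(1, 2) < Pair(1, 10)
assert Pair(2, 0) == Pair(2, 0)
```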
@ -254,10 +260,10 @@ def setter(name, value):
class HashableMap(dict):
"""This is a hashable, comparable dictionary. Hash is performed on
a tuple of the values in the dictionary."""
def _cmp_key(self):
return tuple(sorted(self.values()))
def copy(self):
"""Type-agnostic clone method. Preserves subclass type."""
# Construct a new dict of my type
@ -336,24 +342,39 @@ def match(string):
return match
def DictWrapper(dictionary):
"""Returns a class that wraps a dictionary and enables it to be used
like an object."""
class wrapper(object):
def __getattr__(self, name): return dictionary[name]
def __setattr__(self, name, value): dictionary[name] = value
def setdefault(self, *args): return dictionary.setdefault(*args)
def get(self, *args): return dictionary.get(*args)
def keys(self): return dictionary.keys()
def values(self): return dictionary.values()
def items(self): return dictionary.items()
def __iter__(self): return iter(dictionary)
def __getattr__(self, name):
return dictionary[name]
def __setattr__(self, name, value):
dictionary[name] = value
def setdefault(self, *args):
return dictionary.setdefault(*args)
def get(self, *args):
return dictionary.get(*args)
def keys(self):
return dictionary.keys()
def values(self):
return dictionary.values()
def items(self):
return dictionary.items()
def __iter__(self):
return iter(dictionary)
return wrapper()
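A usage sketch of `DictWrapper` after the reformat:

```python
from llnl.util.lang import DictWrapper

settings = {'compiler': 'gcc', 'debug': True}
w = DictWrapper(settings)
assert w.compiler == 'gcc'  # attribute access reads the dict
w.debug = False             # attribute assignment writes through
assert settings['debug'] is False
```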
class RequiredAttributeError(ValueError):
def __init__(self, message):
super(RequiredAttributeError, self).__init__(message)


@ -23,12 +23,13 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""LinkTree class for setting up trees of symbolic links."""
__all__ = ['LinkTree']
import os
import shutil
from llnl.util.filesystem import *
__all__ = ['LinkTree']
empty_file_name = '.spack-empty'
@ -43,13 +44,13 @@ class LinkTree(object):
modified.
"""
def __init__(self, source_root):
if not os.path.exists(source_root):
raise IOError("No such file or directory: '%s'", source_root)
self._root = source_root
def find_conflict(self, dest_root, **kwargs):
"""Returns the first file in dest that conflicts with src"""
kwargs['follow_nonexisting'] = False
@ -61,9 +62,9 @@ def find_conflict(self, dest_root, **kwargs):
return dest
return None
def merge(self, dest_root, **kwargs):
"""Link all files in src into dest, creating directories if necessary."""
"""Link all files in src into dest, creating directories
if necessary."""
kwargs['order'] = 'pre'
for src, dest in traverse_tree(self._root, dest_root, **kwargs):
if os.path.isdir(src):
@ -83,7 +84,6 @@ def merge(self, dest_root, **kwargs):
assert(not os.path.exists(dest))
os.symlink(src, dest)
def unmerge(self, dest_root, **kwargs):
"""Unlink all files in dest that exist in src.


@ -28,6 +28,9 @@
import time
import socket
__all__ = ['Lock', 'LockTransaction', 'WriteTransaction', 'ReadTransaction',
'LockError']
# Default timeout in seconds, after which locks will raise exceptions.
_default_timeout = 60
@ -36,13 +39,21 @@
class Lock(object):
def __init__(self,file_path):
"""This is an implementation of a filesystem lock using Python's lockf.
In Python, `lockf` actually calls `fcntl`, so this should work with any
filesystem implementation that supports locking through the fcntl calls.
This includes distributed filesystems like Lustre (when flock is enabled)
and recent NFS versions.
"""
def __init__(self, file_path):
self._file_path = file_path
self._fd = None
self._reads = 0
self._writes = 0
def _lock(self, op, timeout):
"""This takes a lock using POSIX locks (``fnctl.lockf``).
@ -63,7 +74,9 @@ def _lock(self, op, timeout):
fcntl.lockf(self._fd, op | fcntl.LOCK_NB)
if op == fcntl.LOCK_EX:
os.write(self._fd, "pid=%s,host=%s" % (os.getpid(), socket.getfqdn()))
os.write(
self._fd,
"pid=%s,host=%s" % (os.getpid(), socket.getfqdn()))
return
except IOError as error:
@ -75,7 +88,6 @@ def _lock(self, op, timeout):
raise LockError("Timed out waiting for lock.")
def _unlock(self):
"""Releases a lock using POSIX locks (``fcntl.lockf``)
@ -83,11 +95,10 @@ def _unlock(self):
be masquerading as write locks, but this removes either.
"""
fcntl.lockf(self._fd,fcntl.LOCK_UN)
fcntl.lockf(self._fd, fcntl.LOCK_UN)
os.close(self._fd)
self._fd = None
def acquire_read(self, timeout=_default_timeout):
"""Acquires a recursive, shared lock for reading.
@ -107,7 +118,6 @@ def acquire_read(self, timeout=_default_timeout):
self._reads += 1
return False
def acquire_write(self, timeout=_default_timeout):
"""Acquires a recursive, exclusive lock for writing.
@ -127,7 +137,6 @@ def acquire_write(self, timeout=_default_timeout):
self._writes += 1
return False
def release_read(self):
"""Releases a read lock.
@ -148,7 +157,6 @@ def release_read(self):
self._reads -= 1
return False
def release_write(self):
"""Releases a write lock.
@ -170,6 +178,70 @@ def release_write(self):
return False
class LockTransaction(object):
"""Simple nested transaction context manager that uses a file lock.
This class can trigger actions when the lock is acquired for the
first time and released for the last.
If the acquire_fn returns a value, it is used as the return value for
__enter__, allowing it to be passed as the `as` argument of a `with`
statement.
If acquire_fn returns a context manager, *its* `__enter__` function will be
called in `__enter__` after acquire_fn, and its `__exit__` function will be
called before `release_fn` in `__exit__`, allowing you to nest a context
manager to be used along with the lock.
Timeout for lock is customizable.
"""
def __init__(self, lock, acquire_fn=None, release_fn=None,
timeout=_default_timeout):
self._lock = lock
self._timeout = timeout
self._acquire_fn = acquire_fn
self._release_fn = release_fn
self._as = None
def __enter__(self):
if self._enter() and self._acquire_fn:
self._as = self._acquire_fn()
if hasattr(self._as, '__enter__'):
return self._as.__enter__()
else:
return self._as
def __exit__(self, type, value, traceback):
suppress = False
if self._exit():
if self._as and hasattr(self._as, '__exit__'):
if self._as.__exit__(type, value, traceback):
suppress = True
if self._release_fn:
if self._release_fn(type, value, traceback):
suppress = True
return suppress
class ReadTransaction(LockTransaction):
def _enter(self):
return self._lock.acquire_read(self._timeout)
def _exit(self):
return self._lock.release_read()
class WriteTransaction(LockTransaction):
def _enter(self):
return self._lock.acquire_write(self._timeout)
def _exit(self):
return self._lock.release_write()
class LockError(Exception):
"""Raised when an attempt to acquire a lock times out."""
pass
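A hedged sketch of the new transaction classes in use (the lock-file path and callbacks are made up):

```python
from llnl.util.lock import Lock, WriteTransaction

open('/tmp/example.lock', 'a').close()  # lockf needs an existing file
lock = Lock('/tmp/example.lock')

def setup():
    print('outermost acquire')          # runs on the first acquire only

def cleanup(type, value, traceback):
    print('outermost release')          # runs on the last release only

with WriteTransaction(lock, acquire_fn=setup, release_fn=cleanup):
    pass  # exclusive access here; nesting another transaction is safe
```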


@ -36,6 +36,7 @@
_verbose = False
indent = " "
def is_verbose():
return _verbose
@ -148,7 +149,8 @@ def get_yes_or_no(prompt, **kwargs):
elif default_value is False:
prompt += ' [y/N] '
else:
raise ValueError("default for get_yes_no() must be True, False, or None.")
raise ValueError(
"default for get_yes_no() must be True, False, or None.")
result = None
while result is None:
@ -174,7 +176,8 @@ def hline(label=None, **kwargs):
char = kwargs.pop('char', '-')
max_width = kwargs.pop('max_width', 64)
if kwargs:
raise TypeError("'%s' is an invalid keyword argument for this function."
raise TypeError(
"'%s' is an invalid keyword argument for this function."
% next(kwargs.iterkeys()))
rows, cols = terminal_size()
@ -200,7 +203,8 @@ def terminal_size():
"""Gets the dimensions of the console: (rows, cols)."""
def ioctl_GWINSZ(fd):
try:
rc = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
rc = struct.unpack('hh', fcntl.ioctl(
fd, termios.TIOCGWINSZ, '1234'))
except:
return
return rc


@ -27,15 +27,14 @@
"""
import os
import sys
import fcntl
import termios
import struct
from StringIO import StringIO
from llnl.util.tty import terminal_size
from llnl.util.tty.color import clen, cextra
class ColumnConfig:
def __init__(self, cols):
self.cols = cols
self.line_length = 0
@ -43,7 +42,8 @@ def __init__(self, cols):
self.widths = [0] * cols # does not include ansi colors
def __repr__(self):
attrs = [(a,getattr(self, a)) for a in dir(self) if not a.startswith("__")]
attrs = [(a, getattr(self, a))
for a in dir(self) if not a.startswith("__")]
return "<Config: %s>" % ", ".join("%s: %r" % a for a in attrs)
@ -68,7 +68,7 @@ def config_variable_cols(elts, console_width, padding, cols=0):
max_cols = min(len(elts), max_cols)
# Range of column counts to try. If forced, use the supplied value.
col_range = [cols] if cols else xrange(1, max_cols+1)
col_range = [cols] if cols else xrange(1, max_cols + 1)
# Determine the most columns possible for the console width.
configs = [ColumnConfig(c) for c in col_range]
@ -106,7 +106,6 @@ def config_uniform_cols(elts, console_width, padding, cols=0):
# 'clen' ignores length of ansi color sequences.
max_len = max(clen(e) for e in elts) + padding
max_clen = max(len(e) for e in elts) + padding
if cols == 0:
cols = max(1, console_width / max_len)
cols = min(len(elts), cols)
@ -130,17 +129,19 @@ def colify(elts, **options):
output=<stream> A file object to write to. Default is sys.stdout.
indent=<int> Optionally indent all columns by some number of spaces.
padding=<int> Spaces between columns. Default is 2.
width=<int> Width of the output. Default is 80 if tty is not detected.
width=<int> Width of the output. Default is 80 if tty not detected.
cols=<int> Force number of columns. Default is to size to terminal,
or single-column if no tty
tty=<bool> Whether to attempt to write to a tty. Default is to
autodetect a tty. Set to False to force single-column output.
autodetect a tty. Set to False to force
single-column output.
method=<string> Method to use to fit columns. Options are variable or uniform.
Variable-width columns are tighter, uniform columns are all the
same width and fit less data on the screen.
method=<string> Method to use to fit columns. Options are variable or
uniform. Variable-width columns are tighter, uniform
columns are all the same width and fit less data on
the screen.
"""
# Get keyword arguments or set defaults
cols = options.pop("cols", 0)
@ -152,7 +153,8 @@ def colify(elts, **options):
console_cols = options.pop("width", None)
if options:
raise TypeError("'%s' is an invalid keyword argument for this function."
raise TypeError(
"'%s' is an invalid keyword argument for this function."
% next(options.iterkeys()))
# elts needs to be an array of strings so we can count the elements
@ -167,7 +169,8 @@ def colify(elts, **options):
r, c = env_size.split('x')
console_rows, console_cols = int(r), int(c)
tty = True
except: pass
except:
pass
# Use only one column if not a tty.
if not tty:
@ -228,6 +231,7 @@ def colify_table(table, **options):
raise ValueError("Table is empty in colify_table!")
columns = len(table[0])
def transpose():
for i in xrange(columns):
for row in table:
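A short usage sketch of `colify` with the keyword options documented above:

```python
import sys
from llnl.util.tty.colify import colify

# Lay the strings out in variable-width columns for an 80-column display.
colify(['foo', 'bar', 'baz', 'quux'], output=sys.stdout,
       width=80, padding=2, method='variable')
```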


@ -75,25 +75,27 @@
import re
import sys
class ColorParseError(Exception):
"""Raised when a color format fails to parse."""
def __init__(self, message):
super(ColorParseError, self).__init__(message)
# Text styles for ansi codes
styles = {'*' : '1', # bold
'_' : '4', # underline
None : '0' } # plain
styles = {'*': '1', # bold
'_': '4', # underline
None: '0'} # plain
# Dim and bright ansi colors
colors = {'k' : 30, 'K' : 90, # black
'r' : 31, 'R' : 91, # red
'g' : 32, 'G' : 92, # green
'y' : 33, 'Y' : 93, # yellow
'b' : 34, 'B' : 94, # blue
'm' : 35, 'M' : 95, # magenta
'c' : 36, 'C' : 96, # cyan
'w' : 37, 'W' : 97 } # white
colors = {'k': 30, 'K': 90, # black
'r': 31, 'R': 91, # red
'g': 32, 'G': 92, # green
'y': 33, 'Y': 93, # yellow
'b': 34, 'B': 94, # blue
'm': 35, 'M': 95, # magenta
'c': 36, 'C': 96, # cyan
'w': 37, 'W': 97} # white
# Regex to be used for color formatting
color_re = r'@(?:@|\.|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)'
@ -104,6 +106,7 @@ def __init__(self, message):
class match_to_ansi(object):
def __init__(self, color=True):
self.color = color
@ -179,12 +182,14 @@ def cprint(string, stream=sys.stdout, color=None):
"""Same as cwrite, but writes a trailing newline to the stream."""
cwrite(string + "\n", stream, color)
def cescape(string):
"""Replace all @ with @@ in the string provided."""
return str(string).replace('@', '@@')
class ColorStream(object):
def __init__(self, stream, color=None):
self._stream = stream
self._color = color
@ -196,7 +201,7 @@ def write(self, string, **kwargs):
color = self._color
if self._color is None:
if raw:
color=True
color = True
else:
color = self._stream.isatty() or _force_color
raw_write(colorize(string, color=color))
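A hedged sketch of the color format these style and color tables drive (the message text is made up):

```python
from llnl.util.tty.color import colorize, cescape

# '@*g{...}' selects bold ('*') green ('g'); cescape doubles literal '@'s.
print(colorize('@*g{==>} installed %s' % cescape('example@1.0')))
```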


@ -37,6 +37,7 @@
# Use this to strip escape sequences
_escape = re.compile(r'\x1b[^m]*m|\x1b\[?1034h')
def _strip(line):
"""Strip color and control characters from a line."""
return _escape.sub('', line)
@ -59,10 +60,10 @@ class keyboard_input(object):
When the with block completes, this will restore settings before
canonical and echo were disabled.
"""
def __init__(self, stream):
self.stream = stream
def __enter__(self):
self.old_cfg = None
@ -87,10 +88,9 @@ def __enter__(self):
# Apply new settings for terminal
termios.tcsetattr(fd, termios.TCSADRAIN, self.new_cfg)
except Exception, e:
except Exception:
pass # Some OS's do not support termios, so ignore.
def __exit__(self, exc_type, exception, traceback):
# If termios was available, restore old settings after the
# with block


@ -1,3 +1,4 @@
# flake8: noqa
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
@ -50,8 +51,15 @@
share_path = join_path(spack_root, "share", "spack")
cache_path = join_path(var_path, "cache")
# User configuration location
user_config_path = os.path.expanduser('~/.spack')
import spack.fetch_strategy
cache = spack.fetch_strategy.FsCache(cache_path)
fetch_cache = spack.fetch_strategy.FsCache(cache_path)
from spack.file_cache import FileCache
user_cache_path = join_path(user_config_path, 'cache')
user_cache = FileCache(user_cache_path)
prefix = spack_root
opt_path = join_path(prefix, "opt")
@ -140,7 +148,7 @@
_tmp_candidates = (_default_tmp, '/nfs/tmp2', '/tmp', '/var/tmp')
for path in _tmp_candidates:
# don't add a second username if it's already unique by user.
if not _tmp_user in path:
if _tmp_user not in path:
tmp_dirs.append(join_path(path, '%u', 'spack-stage'))
else:
tmp_dirs.append(join_path(path, 'spack-stage'))
@ -172,9 +180,10 @@
# Spack internal code should call 'import spack' and accesses other
# variables (spack.repo, paths, etc.) directly.
#
# TODO: maybe this should be separated out and should go in build_environment.py?
# TODO: it's not clear where all the stuff that needs to be included in packages
# should live. This file is overloaded for spack core vs. for packages.
# TODO: maybe this should be separated out to build_environment.py?
# TODO: it's not clear where all the stuff that needs to be included in
# packages should live. This file is overloaded for spack core vs.
# for packages.
#
__all__ = ['Package',
'CMakePackage',
@ -204,8 +213,8 @@
__all__ += spack.util.executable.__all__
from spack.package import \
install_dependency_symlinks, flatten_dependencies, DependencyConflictError, \
InstallError, ExternalPackageError
install_dependency_symlinks, flatten_dependencies, \
DependencyConflictError, InstallError, ExternalPackageError
__all__ += [
'install_dependency_symlinks', 'flatten_dependencies', 'DependencyConflictError',
'InstallError', 'ExternalPackageError']
'install_dependency_symlinks', 'flatten_dependencies',
'DependencyConflictError', 'InstallError', 'ExternalPackageError']


@ -30,15 +30,15 @@
from spack.util.executable import Executable, ProcessError
from llnl.util.lang import memoized
class ABI(object):
"""This class provides methods to test ABI compatibility between specs.
The current implementation is rather rough and could be improved."""
def architecture_compatible(self, parent, child):
"""Returns true iff the parent and child specs have ABI compatible targets."""
return not parent.architecture or not child.architecture \
or parent.architecture == child.architecture
"""Return true if parent and child have ABI compatible targets."""
return not parent.architecture or not child.architecture or \
parent.architecture == child.architecture
@memoized
def _gcc_get_libstdcxx_version(self, version):
@ -61,8 +61,9 @@ def _gcc_get_libstdcxx_version(self, version):
else:
return None
try:
output = rungcc("--print-file-name=%s" % libname, return_output=True)
except ProcessError, e:
output = rungcc("--print-file-name=%s" % libname,
return_output=True)
except ProcessError:
return None
if not output:
return None
@ -71,7 +72,6 @@ def _gcc_get_libstdcxx_version(self, version):
return None
return os.path.basename(libpath)
@memoized
def _gcc_compiler_compare(self, pversion, cversion):
"""Returns true iff the gcc version pversion and cversion
@ -82,7 +82,6 @@ def _gcc_compiler_compare(self, pversion, cversion):
return False
return plib == clib
def _intel_compiler_compare(self, pversion, cversion):
"""Returns true iff the intel version pversion and cversion
are ABI compatible"""
@ -92,9 +91,8 @@ def _intel_compiler_compare(self, pversion, cversion):
return False
return pversion.version[:2] == cversion.version[:2]
def compiler_compatible(self, parent, child, **kwargs):
"""Returns true iff the compilers for parent and child specs are ABI compatible"""
"""Return true if compilers for parent and child are ABI compatible."""
if not parent.compiler or not child.compiler:
return True
@ -109,8 +107,8 @@ def compiler_compatible(self, parent, child, **kwargs):
# TODO: into compiler classes?
for pversion in parent.compiler.versions:
for cversion in child.compiler.versions:
# For a few compilers use specialized comparisons. Otherwise
# match on version match.
# For a few compilers use specialized comparisons.
# Otherwise match on version match.
if pversion.satisfies(cversion):
return True
elif (parent.compiler.name == "gcc" and
@ -121,7 +119,6 @@ def compiler_compatible(self, parent, child, **kwargs):
return True
return False
def compatible(self, parent, child, **kwargs):
"""Returns true iff a parent and child spec are ABI compatible"""
loosematch = kwargs.get('loose', False)


@ -76,7 +76,6 @@
will be responsible for compiler detection.
"""
import os
import imp
import inspect
from llnl.util.lang import memoized, list_modules, key_ordering
@ -92,6 +91,7 @@
class NoPlatformError(serr.SpackError):
def __init__(self):
super(NoPlatformError, self).__init__(
"Could not determine a platform for this machine.")
@ -190,6 +190,12 @@ def operating_system(self, name):
return self.operating_sys.get(name, None)
@classmethod
def setup_platform_environment(self, pkg, env):
""" Subclass can override this method if it requires any
platform-specific build environment modifications.
"""
pass
@classmethod
def detect(self):
@ -200,15 +206,12 @@ def detect(self):
"""
raise NotImplementedError()
def __repr__(self):
return self.__str__()
def __str__(self):
return self.name
def _cmp_key(self):
t_keys = ''.join(str(t._cmp_key()) for t in
sorted(self.targets.values()))
@ -279,7 +282,7 @@ def find_compilers(self, *paths):
# ensure all the version calls we made are cached in the parent
# process, as well. This speeds up Spack a lot.
clist = reduce(lambda x, y: x+y, compiler_lists)
clist = reduce(lambda x, y: x + y, compiler_lists)
return clist
def find_compiler(self, cmp_cls, *path):
@ -337,6 +340,7 @@ def to_dict(self):
d['version'] = self.version
return d
@key_ordering
class Arch(object):
"""Architecture is now a class to help with setting attributes.
@ -377,10 +381,15 @@ def __str__(self):
else:
return ''
def __contains__(self, string):
return string in str(self)
# TODO: make this unnecessary: don't include an empty arch on *every* spec.
def __nonzero__(self):
return (self.platform is not None or
self.platform_os is not None or
self.target is not None)
__bool__ = __nonzero__
def _cmp_key(self):
if isinstance(self.platform, Platform):
@ -424,7 +433,7 @@ def _operating_system_from_dict(os_name, plat=None):
if isinstance(os_name, dict):
name = os_name['name']
version = os_name['version']
return plat.operating_system(name+version)
return plat.operating_system(name + version)
else:
return plat.operating_system(os_name)


@ -436,6 +436,7 @@ def setup_package(pkg, dirty=False):
set_compiler_environment_variables(pkg, spack_env)
set_build_environment_variables(pkg, spack_env, dirty)
pkg.spec.architecture.platform.setup_platform_environment(pkg, spack_env)
load_external_modules(pkg)
# traverse in postorder so package can use vars from its dependencies
spec = pkg.spec


@ -240,4 +240,4 @@ def fmt(s):
else:
raise ValueError(
"Invalid mode for display_specs: %s. Must be one of (paths,"
"deps, short)." % mode) # NOQA: ignore=E501
"deps, short)." % mode)


@ -29,12 +29,14 @@
description = "Activate a package extension."
def setup_parser(subparser):
subparser.add_argument(
'-f', '--force', action='store_true',
help="Activate without first activating dependencies.")
subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help="spec of package extension to activate.")
'spec', nargs=argparse.REMAINDER,
help="spec of package extension to activate.")
def activate(parser, args):


@ -22,10 +22,10 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import spack
import spack.architecture as architecture
description = "Print the architecture for this machine"
def arch(parser, args):
print architecture.sys_type()


@ -25,7 +25,8 @@
import spack.cmd.location
import spack.modules
description="cd to spack directories in the shell."
description = "cd to spack directories in the shell."
def setup_parser(subparser):
"""This is for decoration -- spack cd is used through spack's


@ -31,6 +31,7 @@
description = "Remove build stage and source tarball for packages."
def setup_parser(subparser):
subparser.add_argument('packages', nargs=argparse.REMAINDER,
help="specs of packages to clean")


@ -35,7 +35,7 @@
def add_common_arguments(parser, list_of_arguments):
for argument in list_of_arguments:
if argument not in _arguments:
message = 'Trying to add the non existing argument "{0}" to a command' # NOQA: ignore=E501
message = 'Trying to add non existing argument "{0}" to a command'
raise KeyError(message.format(argument))
x = _arguments[argument]
parser.add_argument(*x.flags, **x.kwargs)
@ -82,7 +82,7 @@ def __call__(self, parser, namespace, values, option_string=None):
kwargs={
'action': 'store_true',
'dest': 'yes_to_all',
'help': 'Assume "yes" is the answer to every confirmation asked to the user.' # NOQA: ignore=E501
'help': 'Assume "yes" is the answer to every confirmation request.'
})
_arguments['yes_to_all'] = parms


@ -37,6 +37,7 @@
description = "Manage compilers"
def setup_parser(subparser):
sp = subparser.add_subparsers(
metavar='SUBCOMMAND', dest='compiler_command')
@ -44,34 +45,44 @@ def setup_parser(subparser):
scopes = spack.config.config_scopes
# Find
find_parser = sp.add_parser('find', aliases=['add'], help='Search the system for compilers to add to the Spack configuration.')
find_parser = sp.add_parser(
'find', aliases=['add'],
help='Search the system for compilers to add to Spack configuration.')
find_parser.add_argument('add_paths', nargs=argparse.REMAINDER)
find_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope,
find_parser.add_argument(
'--scope', choices=scopes, default=spack.cmd.default_modify_scope,
help="Configuration scope to modify.")
# Remove
remove_parser = sp.add_parser('remove', aliases=['rm'], help='Remove compiler by spec.')
remove_parser = sp.add_parser(
'remove', aliases=['rm'], help='Remove compiler by spec.')
remove_parser.add_argument(
'-a', '--all', action='store_true', help='Remove ALL compilers that match spec.')
'-a', '--all', action='store_true',
help='Remove ALL compilers that match spec.')
remove_parser.add_argument('compiler_spec')
remove_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope,
remove_parser.add_argument(
'--scope', choices=scopes, default=spack.cmd.default_modify_scope,
help="Configuration scope to modify.")
# List
list_parser = sp.add_parser('list', help='list available compilers')
list_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_list_scope,
list_parser.add_argument(
'--scope', choices=scopes, default=spack.cmd.default_list_scope,
help="Configuration scope to read from.")
# Info
info_parser = sp.add_parser('info', help='Show compiler paths.')
info_parser.add_argument('compiler_spec')
info_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_list_scope,
info_parser.add_argument(
'--scope', choices=scopes, default=spack.cmd.default_list_scope,
help="Configuration scope to read from.")
def compiler_find(args):
"""Search either $PATH or a list of paths OR MODULES for compilers and add them
to Spack's configuration."""
"""Search either $PATH or a list of paths OR MODULES for compilers and
add them to Spack's configuration.
"""
paths = args.add_paths
if not paths:
paths = get_path('PATH')
@ -103,11 +114,12 @@ def compiler_remove(args):
elif not args.all and len(compilers) > 1:
tty.error("Multiple compilers match spec %s. Choose one:" % cspec)
colify(reversed(sorted([c.spec for c in compilers])), indent=4)
tty.msg("Or, you can use `spack compiler remove -a` to remove all of them.")
tty.msg("Or, use `spack compiler remove -a` to remove all of them.")
sys.exit(1)
for compiler in compilers:
spack.compilers.remove_compiler_from_config(compiler.spec, scope=args.scope)
spack.compilers.remove_compiler_from_config(
compiler.spec, scope=args.scope)
tty.msg("Removed compiler %s" % compiler.spec)
@ -133,7 +145,8 @@ def compiler_list(args):
tty.msg("Available compilers")
index = index_by(spack.compilers.all_compilers(scope=args.scope), 'name')
for i, (name, compilers) in enumerate(index.items()):
if i >= 1: print
if i >= 1:
print
cname = "%s{%s}" % (spack.spec.compiler_color, name)
tty.hline(colorize(cname), char='-')
@ -141,10 +154,10 @@ def compiler_list(args):
def compiler(parser, args):
action = {'add' : compiler_find,
'find' : compiler_find,
'remove' : compiler_remove,
'rm' : compiler_remove,
'info' : compiler_info,
'list' : compiler_list }
action = {'add': compiler_find,
'find': compiler_find,
'remove': compiler_remove,
'rm': compiler_remove,
'info': compiler_info,
'list': compiler_list}
action[args.compiler_command](args)


@ -22,18 +22,16 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
from llnl.util.lang import index_by
import spack
from spack.cmd.compiler import compiler_list
description = "List available compilers. Same as 'spack compiler list'."
def setup_parser(subparser):
subparser.add_argument('--scope', choices=spack.config.config_scopes,
help="Configuration scope to read/modify.")
def compilers(parser, args):
compiler_list(args)


@ -22,15 +22,11 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
import argparse
import llnl.util.tty as tty
import spack.config
description = "Get and set configuration options."
def setup_parser(subparser):
# User can only choose one
scope_group = subparser.add_mutually_exclusive_group()
@ -64,6 +60,6 @@ def config_edit(args):
def config(parser, args):
action = { 'get' : config_get,
'edit' : config_edit }
action = {'get': config_get,
'edit': config_edit}
action[args.config_command](args)


@ -96,8 +96,7 @@ class ${class_name}(Package):
${versions}
# FIXME: Add additional dependencies if required.
${dependencies}
${dependencies}
def install(self, spec, prefix):
${install}
@ -105,13 +104,39 @@ def install(self, spec, prefix):
# Build dependencies and extensions
dependencies_dict = {
'autotools': "# depends_on('foo')",
'cmake': "depends_on('cmake')",
'scons': "depends_on('scons')",
'python': "extends('python')",
'R': "extends('R')",
'octave': "extends('octave')",
'unknown': "# depends_on('foo')"
'autotools': """\
# FIXME: Add dependencies if required.
# depends_on('foo')""",
'cmake': """\
# FIXME: Add additional dependencies if required.
depends_on('cmake', type='build')""",
'scons': """\
# FIXME: Add additional dependencies if required.
depends_on('scons', type='build')""",
'python': """\
extends('python')
# FIXME: Add additional dependencies if required.
# depends_on('py-foo', type=nolink)""",
'R': """\
extends('R')
# FIXME: Add additional dependencies if required.
# depends_on('r-foo', type=nolink)""",
'octave': """\
extends('octave')
# FIXME: Add additional dependencies if required.
# depends_on('octave-foo', type=nolink)""",
'unknown': """\
# FIXME: Add dependencies if required.
# depends_on('foo')"""
}
# Default installation instructions
@ -140,7 +165,7 @@ def install(self, spec, prefix):
'python': """\
# FIXME: Add logic to build and install here.
python('setup.py', 'install', '--prefix={0}'.format(prefix))""",
setup_py('install', '--prefix={0}'.format(prefix))""",
'R': """\
# FIXME: Add logic to build and install here.
@ -192,6 +217,7 @@ def setup_parser(subparser):
class BuildSystemGuesser(object):
def __call__(self, stage, url):
"""Try to guess the type of build system used by a project based on
the contents of its archive or the URL it was downloaded from."""


@ -31,6 +31,7 @@
description = "Deactivate a package extension."
def setup_parser(subparser):
subparser.add_argument(
'-f', '--force', action='store_true',
@ -40,7 +41,8 @@ def setup_parser(subparser):
help="Deactivate all extensions of an extendable package, or "
"deactivate an extension AND its dependencies.")
subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help="spec of package extension to deactivate.")
'spec', nargs=argparse.REMAINDER,
help="spec of package extension to deactivate.")
def deactivate(parser, args):
@ -65,7 +67,8 @@ def deactivate(parser, args):
if not args.force and not spec.package.activated:
tty.die("%s is not activated." % pkg.spec.short_spec)
tty.msg("Deactivating %s and all dependencies." % pkg.spec.short_spec)
tty.msg("Deactivating %s and all dependencies." %
pkg.spec.short_spec)
topo_order = topological_sort(spec)
index = spec.index()
@ -79,7 +82,9 @@ def deactivate(parser, args):
epkg.do_deactivate(force=args.force)
else:
tty.die("spack deactivate --all requires an extendable package or an extension.")
tty.die(
"spack deactivate --all requires an extendable package "
"or an extension.")
else:
if not pkg.is_extension:


@ -31,9 +31,11 @@
description = "Show installed packages that depend on another."
def setup_parser(subparser):
subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help="specs to list dependencies of.")
'spec', nargs=argparse.REMAINDER,
help="specs to list dependencies of.")
def dependents(parser, args):
@ -42,5 +44,6 @@ def dependents(parser, args):
tty.die("spack dependents takes only one spec.")
fmt = '$_$@$%@$+$=$#'
deps = [d.format(fmt, color=True) for d in specs[0].package.installed_dependents]
deps = [d.format(fmt, color=True)
for d in specs[0].package.installed_dependents]
tty.msg("Dependents of %s" % specs[0].format(fmt, color=True), *deps)


@ -35,6 +35,7 @@
description = "Do-It-Yourself: build from an existing source directory."
def setup_parser(subparser):
subparser.add_argument(
'-i', '--ignore-dependencies', action='store_true', dest='ignore_deps',
@ -76,14 +77,17 @@ def diy(self, args):
return
if not spec.versions.concrete:
tty.die("spack diy spec must have a single, concrete version. Did you forget a package version number?")
tty.die(
"spack diy spec must have a single, concrete version. "
"Did you forget a package version number?")
spec.concretize()
package = spack.repo.get(spec)
if package.installed:
tty.error("Already installed in %s" % package.prefix)
tty.msg("Uninstall or try adding a version suffix for this DIY build.")
tty.msg("Uninstall or try adding a version suffix for this "
"DIY build.")
sys.exit(1)
# Forces the build to run out of the current directory.


@ -25,6 +25,7 @@
description = "Run pydoc from within spack."
def setup_parser(subparser):
subparser.add_argument('entity', help="Run pydoc help on entity")


@ -68,7 +68,7 @@ def edit_package(name, repo_path, namespace, force=False):
if os.path.exists(path):
if not os.path.isfile(path):
tty.die("Something's wrong. '%s' is not a file!" % path)
if not os.access(path, os.R_OK|os.W_OK):
if not os.access(path, os.R_OK | os.W_OK):
tty.die("Insufficient permissions on '%s'!" % path)
elif not force:
tty.die("No package '%s'. Use spack create, or supply -f/--force "
@ -93,19 +93,23 @@ def setup_parser(subparser):
# Various filetypes you can edit directly from the cmd line.
excl_args.add_argument(
'-c', '--command', dest='path', action='store_const',
const=spack.cmd.command_path, help="Edit the command with the supplied name.")
const=spack.cmd.command_path,
help="Edit the command with the supplied name.")
excl_args.add_argument(
'-t', '--test', dest='path', action='store_const',
const=spack.test_path, help="Edit the test with the supplied name.")
excl_args.add_argument(
'-m', '--module', dest='path', action='store_const',
const=spack.module_path, help="Edit the main spack module with the supplied name.")
const=spack.module_path,
help="Edit the main spack module with the supplied name.")
# Options for editing packages
excl_args.add_argument(
'-r', '--repo', default=None, help="Path to repo to edit package in.")
'-r', '--repo', default=None,
help="Path to repo to edit package in.")
excl_args.add_argument(
'-N', '--namespace', default=None, help="Namespace of package to edit.")
'-N', '--namespace', default=None,
help="Namespace of package to edit.")
subparser.add_argument(
'name', nargs='?', default=None, help="name of package to edit")


@ -28,11 +28,13 @@
import spack.cmd
import spack.build_environment as build_env
description = "Run a command with the environment for a particular spec's install."
description = "Run a command with the install environment for a spec."
def setup_parser(subparser):
subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help="specs of package environment to emulate.")
'spec', nargs=argparse.REMAINDER,
help="specs of package environment to emulate.")
def env(parser, args):
@ -47,7 +49,7 @@ def env(parser, args):
if sep in args.spec:
s = args.spec.index(sep)
spec = args.spec[:s]
cmd = args.spec[s+1:]
cmd = args.spec[s + 1:]
else:
spec = args.spec[0]
cmd = args.spec[1:]


@ -22,7 +22,6 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
import argparse
import llnl.util.tty as tty
@ -34,6 +33,7 @@
description = "List extensions for package."
def setup_parser(subparser):
format_group = subparser.add_mutually_exclusive_group()
format_group.add_argument(
@ -47,7 +47,8 @@ def setup_parser(subparser):
help='Show full dependency DAG of extensions')
subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help='Spec of package to list extensions for')
'spec', nargs=argparse.REMAINDER,
help='Spec of package to list extensions for')
def extensions(parser, args):
@ -85,7 +86,8 @@ def extensions(parser, args):
#
# List specs of installed extensions.
#
installed = [s.spec for s in spack.installed_db.installed_extensions_for(spec)]
installed = [
s.spec for s in spack.installed_db.installed_extensions_for(spec)]
print
if not installed:
tty.msg("None installed.")
@ -102,4 +104,5 @@ def extensions(parser, args):
tty.msg("None activated.")
return
tty.msg("%d currently activated:" % len(activated))
spack.cmd.find.display_specs(activated.values(), mode=args.mode, long=args.long)
spack.cmd.find.display_specs(
activated.values(), mode=args.mode, long=args.long)


@ -29,16 +29,21 @@
description = "Fetch archives for packages"
def setup_parser(subparser):
subparser.add_argument(
'-n', '--no-checksum', action='store_true', dest='no_checksum',
help="Do not check packages against checksum")
subparser.add_argument(
'-m', '--missing', action='store_true', help="Also fetch all missing dependencies")
'-m', '--missing', action='store_true',
help="Also fetch all missing dependencies")
subparser.add_argument(
'-D', '--dependencies', action='store_true', help="Also fetch all dependencies")
'-D', '--dependencies', action='store_true',
help="Also fetch all dependencies")
subparser.add_argument(
'packages', nargs=argparse.REMAINDER, help="specs of packages to fetch")
'packages', nargs=argparse.REMAINDER,
help="specs of packages to fetch")
def fetch(parser, args):
if not args.packages:
@ -50,7 +55,6 @@ def fetch(parser, args):
specs = spack.cmd.parse_specs(args.packages, concretize=True)
for spec in specs:
if args.missing or args.dependencies:
to_fetch = set()
for s in spec.traverse(deptype_query=spack.alldeps):
package = spack.repo.get(s)
if args.missing and package.installed:


@ -30,6 +30,7 @@
description = "Generate graphs of package dependency relationships."
def setup_parser(subparser):
setup_parser.parser = subparser
@ -42,10 +43,12 @@ def setup_parser(subparser):
help="Generate graph in dot format and print to stdout.")
subparser.add_argument(
'--concretize', action='store_true', help="Concretize specs before graphing.")
'--concretize', action='store_true',
help="Concretize specs before graphing.")
subparser.add_argument(
'specs', nargs=argparse.REMAINDER, help="specs of packages to graph.")
'specs', nargs=argparse.REMAINDER,
help="specs of packages to graph.")
def graph(parser, args):


@ -22,14 +22,14 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
description = "Get help on spack and its commands"
def setup_parser(subparser):
subparser.add_argument('help_command', nargs='?', default=None,
help='command to get help on')
def help(parser, args):
if args.help_command:
parser.parse_args([args.help_command, '-h'])


@ -31,6 +31,7 @@
description = "Build and install packages"
def setup_parser(subparser):
subparser.add_argument(
'-i', '--ignore-dependencies', action='store_true', dest='ignore_deps',
@ -52,7 +53,7 @@ def setup_parser(subparser):
help="Display verbose build output while installing.")
subparser.add_argument(
'--fake', action='store_true', dest='fake',
help="Fake install. Just remove the prefix and touch a fake file in it.")
help="Fake install. Just remove prefix and create a fake file.")
subparser.add_argument(
'--dirty', action='store_true', dest='dirty',
help="Install a package *without* cleaning the environment.")
@ -60,13 +61,13 @@ def setup_parser(subparser):
'--stop-at', help="Stop at a particular phase of installation"
)
subparser.add_argument(
'packages', nargs=argparse.REMAINDER, help="specs of packages to install")
'packages', nargs=argparse.REMAINDER,
help="specs of packages to install")
subparser.add_argument(
'--run-tests', action='store_true', dest='run_tests',
help="Run tests during installation of a package.")
def install(parser, args):
if not args.packages:
tty.die("install requires at least one package argument")


@ -25,13 +25,16 @@
import argparse
import spack.modules
description ="Add package to environment using modules."
description = "Add package to environment using modules."
def setup_parser(subparser):
"""Parser is only constructed so that this prints a nice help
message with -h. """
subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help="Spec of package to load with modules. (If -, read specs from STDIN)")
'spec', nargs=argparse.REMAINDER,
help="Spec of package to load with modules. "
"(If -, read specs from STDIN)")
def load(parser, args):


@ -22,8 +22,6 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import sys
import argparse
import llnl.util.tty as tty
@ -32,16 +30,19 @@
import spack
import spack.cmd
description="Print out locations of various directories used by Spack"
description = "Print out locations of various directories used by Spack"
def setup_parser(subparser):
global directories
directories = subparser.add_mutually_exclusive_group()
directories.add_argument(
'-m', '--module-dir', action='store_true', help="Spack python module directory.")
'-m', '--module-dir', action='store_true',
help="Spack python module directory.")
directories.add_argument(
'-r', '--spack-root', action='store_true', help="Spack installation root.")
'-r', '--spack-root', action='store_true',
help="Spack installation root.")
directories.add_argument(
'-i', '--install-dir', action='store_true',
@ -53,15 +54,19 @@ def setup_parser(subparser):
'-P', '--packages', action='store_true',
help="Top-level packages directory for Spack.")
directories.add_argument(
'-s', '--stage-dir', action='store_true', help="Stage directory for a spec.")
'-s', '--stage-dir', action='store_true',
help="Stage directory for a spec.")
directories.add_argument(
'-S', '--stages', action='store_true', help="Top level Stage directory.")
'-S', '--stages', action='store_true',
help="Top level Stage directory.")
directories.add_argument(
'-b', '--build-dir', action='store_true',
help="Checked out or expanded source directory for a spec (requires it to be staged first).")
help="Checked out or expanded source directory for a spec "
"(requires it to be staged first).")
subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help="spec of package to fetch directory for.")
'spec', nargs=argparse.REMAINDER,
help="spec of package to fetch directory for.")
def location(parser, args):
@ -106,7 +111,7 @@ def location(parser, args):
else: # args.build_dir is the default.
if not pkg.stage.source_path:
tty.die("Build directory does not exist yet. Run this to create it:",
tty.die("Build directory does not exist yet. "
"Run this to create it:",
"spack stage " + " ".join(args.spec))
print pkg.stage.source_path


@ -23,7 +23,6 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import sys
from datetime import datetime
import argparse
@ -40,6 +39,7 @@
description = "Manage mirrors."
def setup_parser(subparser):
subparser.add_argument(
'-n', '--no-checksum', action='store_true', dest='no_checksum',
@ -61,8 +61,9 @@ def setup_parser(subparser):
'-D', '--dependencies', action='store_true',
help="Also fetch all dependencies")
create_parser.add_argument(
'-o', '--one-version-per-spec', action='store_const', const=1, default=0,
help="Only fetch one 'preferred' version per spec, not all known versions.")
'-o', '--one-version-per-spec', action='store_const',
const=1, default=0,
help="Only fetch one 'preferred' version per spec, not all known.")
scopes = spack.config.config_scopes
@ -70,7 +71,7 @@ def setup_parser(subparser):
add_parser = sp.add_parser('add', help=mirror_add.__doc__)
add_parser.add_argument('name', help="Mnemonic name for mirror.")
add_parser.add_argument(
'url', help="URL of mirror directory created by 'spack mirror create'.")
'url', help="URL of mirror directory from 'spack mirror create'.")
add_parser.add_argument(
'--scope', choices=scopes, default=spack.cmd.default_modify_scope,
help="Configuration scope to modify.")
@ -107,7 +108,7 @@ def mirror_add(args):
tty.die("Mirror with url %s already exists." % url)
# should only be one item per mirror dict.
items = [(n,u) for n,u in mirrors.items()]
items = [(n, u) for n, u in mirrors.items()]
items.insert(0, (args.name, url))
mirrors = syaml_dict(items)
spack.config.update_config('mirrors', mirrors, scope=args.scope)
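
Aside: the hunk above keeps mirrors in an insertion-ordered mapping and pushes the new entry to the front so it takes precedence. A minimal sketch of that idea, using collections.OrderedDict as a stand-in for Spack's syaml_dict (the helper name below is illustrative, not Spack's API):

    from collections import OrderedDict

    def add_mirror_first(mirrors, name, url):
        # Rebuild the mapping with the new mirror at index 0 so it is
        # consulted before any previously configured mirror.
        items = [(n, u) for n, u in mirrors.items()]
        items.insert(0, (name, url))
        return OrderedDict(items)

    mirrors = OrderedDict([('lab', 'https://mirror.example.com/lab')])
    mirrors = add_mirror_first(mirrors, 'local', 'file:///tmp/mirror')
    assert list(mirrors)[0] == 'local'  # highest-priority mirror comes first
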
@ -121,7 +122,7 @@ def mirror_remove(args):
if not mirrors:
mirrors = syaml_dict()
if not name in mirrors:
if name not in mirrors:
tty.die("No mirror with name %s" % name)
old_value = mirrors.pop(name)
@ -152,7 +153,7 @@ def _read_specs_from_file(filename):
s.package
specs.append(s)
except SpackError, e:
tty.die("Parse error in %s, line %d:" % (args.file, i+1),
tty.die("Parse error in %s, line %d:" % (args.file, i + 1),
">>> " + string, str(e))
return specs
@ -214,10 +215,10 @@ def mirror_create(args):
def mirror(parser, args):
action = { 'create' : mirror_create,
'add' : mirror_add,
'remove' : mirror_remove,
'rm' : mirror_remove,
'list' : mirror_list }
action = {'create': mirror_create,
'add': mirror_add,
'remove': mirror_remove,
'rm': mirror_remove,
'list': mirror_list}
action[args.mirror_command](args)


@ -118,7 +118,8 @@ def loads(mtype, specs, args):
seen_add = seen.add
for spec in specs_from_user_constraint:
specs.extend(
[item for item in spec.traverse(order='post', cover='nodes') if not (item in seen or seen_add(item))] # NOQA: ignore=E501
[item for item in spec.traverse(order='post', cover='nodes')
if not (item in seen or seen_add(item))]
)
module_cls = module_types[mtype]
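
Aside: the comprehension above relies on a small order-preserving de-duplication idiom: set.add returns None (falsy), so the membership test and the insertion share one expression. A self-contained sketch:

    def unique_in_order(items):
        seen = set()
        seen_add = seen.add  # bound once to avoid an attribute lookup per item
        return [x for x in items if not (x in seen or seen_add(x))]

    assert unique_in_order(['b', 'a', 'b', 'c', 'a']) == ['b', 'a', 'c']
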
@ -178,7 +179,9 @@ def rm(mtype, specs, args):
# Ask for confirmation
if not args.yes_to_all:
tty.msg('You are about to remove {0} module files the following specs:\n'.format(mtype)) # NOQA: ignore=E501
tty.msg(
'You are about to remove {0} module files for the following specs:\n'
.format(mtype))
spack.cmd.display_specs(specs_with_modules, long=True)
print('')
spack.cmd.ask_for_confirmation('Do you want to proceed ? ')
@ -197,7 +200,9 @@ def refresh(mtype, specs, args):
return
if not args.yes_to_all:
tty.msg('You are about to regenerate {name} module files for the following specs:\n'.format(name=mtype)) # NOQA: ignore=E501
tty.msg(
'You are about to regenerate {name} module files for:\n'
.format(name=mtype))
spack.cmd.display_specs(specs, long=True)
print('')
spack.cmd.ask_for_confirmation('Do you want to proceed ? ')
@ -245,11 +250,13 @@ def module(parser, args):
try:
callbacks[args.subparser_name](module_type, args.specs, args)
except MultipleMatches:
message = 'the constraint \'{query}\' matches multiple packages, and this is not allowed in this context' # NOQA: ignore=E501
message = ('the constraint \'{query}\' matches multiple packages, '
'and this is not allowed in this context')
tty.error(message.format(query=constraint))
for s in args.specs:
sys.stderr.write(s.format(color=True) + '\n')
raise SystemExit(1)
except NoMatch:
message = 'the constraint \'{query}\' match no package, and this is not allowed in this context' # NOQA: ignore=E501
message = ('the constraint \'{query}\' matches no package, '
'and this is not allowed in this context')
tty.die(message.format(query=constraint))


@ -32,7 +32,7 @@
def github_url(pkg):
"""Link to a package file on github."""
url = "https://github.com/llnl/spack/blob/master/var/spack/packages/%s/package.py" # NOQA: ignore=E501
url = "https://github.com/llnl/spack/blob/master/var/spack/packages/%s/package.py"
return (url % pkg.name)


@ -29,14 +29,16 @@
import spack
description="Patch expanded archive sources in preparation for install"
description = "Patch expanded archive sources in preparation for install"
def setup_parser(subparser):
subparser.add_argument(
'-n', '--no-checksum', action='store_true', dest='no_checksum',
help="Do not check downloaded packages against checksum")
subparser.add_argument(
'packages', nargs=argparse.REMAINDER, help="specs of packages to stage")
'packages', nargs=argparse.REMAINDER,
help="specs of packages to stage")
def patch(parser, args):


@ -33,6 +33,7 @@
description = "Query packages associated with particular git revisions."
def setup_parser(subparser):
sp = subparser.add_subparsers(
metavar='SUBCOMMAND', dest='pkg_command')
@ -46,21 +47,27 @@ def setup_parser(subparser):
help="Revision to list packages for.")
diff_parser = sp.add_parser('diff', help=pkg_diff.__doc__)
diff_parser.add_argument('rev1', nargs='?', default='HEAD^',
diff_parser.add_argument(
'rev1', nargs='?', default='HEAD^',
help="Revision to compare against.")
diff_parser.add_argument('rev2', nargs='?', default='HEAD',
diff_parser.add_argument(
'rev2', nargs='?', default='HEAD',
help="Revision to compare to rev1 (default is HEAD).")
add_parser = sp.add_parser('added', help=pkg_added.__doc__)
add_parser.add_argument('rev1', nargs='?', default='HEAD^',
add_parser.add_argument(
'rev1', nargs='?', default='HEAD^',
help="Revision to compare against.")
add_parser.add_argument('rev2', nargs='?', default='HEAD',
add_parser.add_argument(
'rev2', nargs='?', default='HEAD',
help="Revision to compare to rev1 (default is HEAD).")
rm_parser = sp.add_parser('removed', help=pkg_removed.__doc__)
rm_parser.add_argument('rev1', nargs='?', default='HEAD^',
rm_parser.add_argument(
'rev1', nargs='?', default='HEAD^',
help="Revision to compare against.")
rm_parser.add_argument('rev2', nargs='?', default='HEAD',
rm_parser.add_argument(
'rev2', nargs='?', default='HEAD',
help="Revision to compare to rev1 (default is HEAD).")
@ -88,7 +95,8 @@ def pkg_add(args):
for pkg_name in args.packages:
filename = spack.repo.filename_for_package_name(pkg_name)
if not os.path.isfile(filename):
tty.die("No such package: %s. Path does not exist:" % pkg_name, filename)
tty.die("No such package: %s. Path does not exist:" %
pkg_name, filename)
git = get_git()
git('-C', spack.packages_path, 'add', filename)
@ -112,7 +120,8 @@ def pkg_diff(args):
if u1:
print "%s:" % args.rev1
colify(sorted(u1), indent=4)
if u1: print
if u1:
print
if u2:
print "%s:" % args.rev2
@ -122,19 +131,21 @@ def pkg_diff(args):
def pkg_removed(args):
"""Show packages removed since a commit."""
u1, u2 = diff_packages(args.rev1, args.rev2)
if u1: colify(sorted(u1))
if u1:
colify(sorted(u1))
def pkg_added(args):
"""Show packages added since a commit."""
u1, u2 = diff_packages(args.rev1, args.rev2)
if u2: colify(sorted(u2))
if u2:
colify(sorted(u2))
def pkg(parser, args):
action = { 'add' : pkg_add,
'diff' : pkg_diff,
'list' : pkg_list,
'removed' : pkg_removed,
'added' : pkg_added }
action = {'add': pkg_add,
'diff': pkg_diff,
'list': pkg_list,
'removed': pkg_removed,
'added': pkg_added}
action[args.pkg_command](args)


@ -22,7 +22,6 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import argparse
from llnl.util.tty.colify import colify
@ -30,10 +29,12 @@
import spack
import spack.cmd
description ="List packages that provide a particular virtual package"
description = "List packages that provide a particular virtual package"
def setup_parser(subparser):
subparser.add_argument('vpkg_spec', metavar='VPACKAGE_SPEC', nargs=argparse.REMAINDER,
subparser.add_argument(
'vpkg_spec', metavar='VPACKAGE_SPEC', nargs=argparse.REMAINDER,
help='Find packages that provide this virtual package')


@ -33,7 +33,11 @@ def setup_parser(subparser):
'-s', '--stage', action='store_true', default=True,
help="Remove all temporary build stages (default).")
subparser.add_argument(
'-c', '--cache', action='store_true', help="Remove cached downloads.")
'-d', '--downloads', action='store_true',
help="Remove cached downloads.")
subparser.add_argument(
'-u', '--user-cache', action='store_true',
help="Remove caches in user home directory. Includes virtual indices.")
subparser.add_argument(
'-a', '--all', action='store_true',
help="Remove all of the above.")
@ -41,12 +45,14 @@ def setup_parser(subparser):
def purge(parser, args):
# Special case: no flags.
if not any((args.stage, args.cache, args.all)):
if not any((args.stage, args.downloads, args.user_cache, args.all)):
stage.purge()
return
# handle other flags with fall through.
if args.stage or args.all:
stage.purge()
if args.cache or args.all:
spack.cache.destroy()
if args.downloads or args.all:
spack.fetch_cache.destroy()
if args.user_cache or args.all:
spack.user_cache.destroy()


@ -30,18 +30,22 @@
import spack
def setup_parser(subparser):
subparser.add_argument(
'-c', dest='python_command', help='Command to execute.')
subparser.add_argument(
'python_args', nargs=argparse.REMAINDER, help="File to run plus arguments.")
'python_args', nargs=argparse.REMAINDER,
help="File to run plus arguments.")
description = "Launch an interpreter as spack would launch a command"
def python(parser, args):
# Fake a main python shell by setting __name__ to __main__.
console = code.InteractiveConsole({'__name__' : '__main__',
'spack' : spack})
console = code.InteractiveConsole({'__name__': '__main__',
'spack': spack})
if "PYTHONSTARTUP" in os.environ:
startup_file = os.environ["PYTHONSTARTUP"]


@ -22,10 +22,10 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import argparse
import spack
description = "Rebuild Spack's package database."
def reindex(parser, args):
spack.installed_db.reindex(spack.install_layout)


@ -23,20 +23,16 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import re
import shutil
import argparse
import llnl.util.tty as tty
from llnl.util.filesystem import join_path, mkdirp
import spack.spec
import spack.config
from spack.util.environment import get_path
from spack.repository import *
description = "Manage package source repositories."
def setup_parser(subparser):
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='repo_command')
scopes = spack.config.config_scopes
@ -57,13 +53,15 @@ def setup_parser(subparser):
# Add
add_parser = sp.add_parser('add', help=repo_add.__doc__)
add_parser.add_argument('path', help="Path to a Spack package repository directory.")
add_parser.add_argument(
'path', help="Path to a Spack package repository directory.")
add_parser.add_argument(
'--scope', choices=scopes, default=spack.cmd.default_modify_scope,
help="Configuration scope to modify.")
# Remove
remove_parser = sp.add_parser('remove', help=repo_remove.__doc__, aliases=['rm'])
remove_parser = sp.add_parser(
'remove', help=repo_remove.__doc__, aliases=['rm'])
remove_parser.add_argument(
'path_or_namespace',
help="Path or namespace of a Spack package repository.")
@ -100,7 +98,8 @@ def repo_add(args):
# If that succeeds, finally add it to the configuration.
repos = spack.config.get_config('repos', args.scope)
if not repos: repos = []
if not repos:
repos = []
if repo.root in repos or path in repos:
tty.die("Repository is already registered with Spack: %s" % path)
@ -135,7 +134,7 @@ def repo_remove(args):
tty.msg("Removed repository %s with namespace '%s'."
% (repo.root, repo.namespace))
return
except RepoError as e:
except RepoError:
continue
tty.die("No repository with path or namespace: %s"
@ -149,7 +148,7 @@ def repo_list(args):
for r in roots:
try:
repos.append(Repo(r))
except RepoError as e:
except RepoError:
continue
msg = "%d package repositor" % len(repos)
@ -166,9 +165,9 @@ def repo_list(args):
def repo(parser, args):
action = { 'create' : repo_create,
'list' : repo_list,
'add' : repo_add,
'remove' : repo_remove,
'rm' : repo_remove}
action = {'create': repo_create,
'list': repo_list,
'add': repo_add,
'remove': repo_remove,
'rm': repo_remove}
action[args.repo_command](args)


@ -31,6 +31,7 @@
description = "Revert checked out package source code."
def setup_parser(subparser):
subparser.add_argument('packages', nargs=argparse.REMAINDER,
help="specs of packages to restage")


@ -143,7 +143,8 @@ def setup(self, args):
if not spec.versions.concrete:
tty.die(
"spack setup spec must have a single, concrete version. Did you forget a package version number?")
"spack setup spec must have a single, concrete version. "
"Did you forget a package version number?")
spec.concretize()
package = spack.repo.get(spec)


@ -25,23 +25,22 @@
import argparse
import spack.cmd
import llnl.util.tty as tty
import spack
import spack.url as url
description = "print out abstract and concrete versions of a spec."
def setup_parser(subparser):
subparser.add_argument('-i', '--ids', action='store_true',
help="show numerical ids for dependencies.")
subparser.add_argument('specs', nargs=argparse.REMAINDER, help="specs of packages")
subparser.add_argument(
'specs', nargs=argparse.REMAINDER, help="specs of packages")
def spec(parser, args):
kwargs = { 'ids' : args.ids,
'indent' : 2,
'color' : True }
kwargs = {'ids': args.ids,
'indent': 2,
'color': True}
for spec in spack.cmd.parse_specs(args.specs):
print "Input spec"


@ -22,14 +22,14 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import argparse
import llnl.util.tty as tty
import spack
import spack.cmd
description="Expand downloaded archive in preparation for install"
description = "Expand downloaded archive in preparation for install"
def setup_parser(subparser):
subparser.add_argument(


@ -36,25 +36,25 @@
from spack.build_environment import InstallError
from spack.fetch_strategy import FetchError
description = "Run package installation as a unit test, output formatted results."
description = "Run package install as a unit test, output formatted results."
def setup_parser(subparser):
subparser.add_argument('-j',
'--jobs',
action='store',
type=int,
subparser.add_argument(
'-j', '--jobs', action='store', type=int,
help="Explicitly set number of make jobs. Default is #cpus.")
subparser.add_argument('-n',
'--no-checksum',
action='store_true',
dest='no_checksum',
subparser.add_argument(
'-n', '--no-checksum', action='store_true', dest='no_checksum',
help="Do not check packages against checksum")
subparser.add_argument('-o', '--output', action='store', help="test output goes in this file")
subparser.add_argument(
'-o', '--output', action='store',
help="test output goes in this file")
subparser.add_argument('package', nargs=argparse.REMAINDER, help="spec of package to install")
subparser.add_argument(
'package', nargs=argparse.REMAINDER,
help="spec of package to install")
class TestResult(object):
@ -65,6 +65,7 @@ class TestResult(object):
class TestSuite(object):
def __init__(self, filename):
self.filename = filename
self.root = ET.Element('testsuite')
@ -75,14 +76,17 @@ def __enter__(self):
def append(self, item):
if not isinstance(item, TestCase):
raise TypeError('only TestCase instances may be appended to a TestSuite instance')
raise TypeError(
'only TestCase instances may be appended to TestSuite')
self.tests.append(item) # Append the item to the list of tests
def __exit__(self, exc_type, exc_val, exc_tb):
# Prepare the header for the entire test suite
number_of_errors = sum(x.result_type == TestResult.ERRORED for x in self.tests)
number_of_errors = sum(
x.result_type == TestResult.ERRORED for x in self.tests)
self.root.set('errors', str(number_of_errors))
number_of_failures = sum(x.result_type == TestResult.FAILED for x in self.tests)
number_of_failures = sum(
x.result_type == TestResult.FAILED for x in self.tests)
self.root.set('failures', str(number_of_failures))
self.root.set('tests', str(len(self.tests)))
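
Aside: the TestSuite wrapper above aggregates results into JUnit-style XML via xml.etree.ElementTree. A rough standalone sketch of the output it builds (element and attribute names follow the JUnit convention; the spec names here are made up):

    import xml.etree.ElementTree as ET

    root = ET.Element('testsuite')
    root.set('errors', '0')
    root.set('failures', '1')
    root.set('tests', '2')

    case = ET.SubElement(root, 'testcase',
                         classname='libelf', name='libelf@0.8.13')
    ET.SubElement(case, 'failure', message='Installation failure')

    print(ET.tostring(root))
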
@ -112,7 +116,8 @@ def __init__(self, classname, name, time=None):
self.element.set('time', str(time))
self.result_type = None
def set_result(self, result_type, message=None, error_type=None, text=None):
def set_result(self, result_type,
message=None, error_type=None, text=None):
self.result_type = result_type
result = TestCase.results[self.result_type]
if result is not None and result is not TestResult.PASSED:
@ -155,13 +160,19 @@ def install_single_spec(spec, number_of_jobs):
# If it is already installed, skip the test
if spack.repo.get(spec).installed:
testcase = TestCase(package.name, package.spec.short_spec, time=0.0)
testcase.set_result(TestResult.SKIPPED, message='Skipped [already installed]', error_type='already_installed')
testcase.set_result(
TestResult.SKIPPED,
message='Skipped [already installed]',
error_type='already_installed')
return testcase
# If it relies on dependencies that did not install, skip
if failed_dependencies(spec):
testcase = TestCase(package.name, package.spec.short_spec, time=0.0)
testcase.set_result(TestResult.SKIPPED, message='Skipped [failed dependencies]', error_type='dep_failed')
testcase.set_result(
TestResult.SKIPPED,
message='Skipped [failed dependencies]',
error_type='dep_failed')
return testcase
# Otherwise try to install the spec
@ -177,26 +188,30 @@ def install_single_spec(spec, number_of_jobs):
testcase = TestCase(package.name, package.spec.short_spec, duration)
testcase.set_result(TestResult.PASSED)
except InstallError:
# An InstallError is considered a failure (the recipe didn't work correctly)
# An InstallError is considered a failure (the recipe didn't work
# correctly)
duration = time.time() - start_time
# Try to get the log
lines = fetch_log(package.build_log_path)
text = '\n'.join(lines)
testcase = TestCase(package.name, package.spec.short_spec, duration)
testcase.set_result(TestResult.FAILED, message='Installation failure', text=text)
testcase.set_result(TestResult.FAILED,
message='Installation failure', text=text)
except FetchError:
# A FetchError is considered an error (we didn't even start building)
duration = time.time() - start_time
testcase = TestCase(package.name, package.spec.short_spec, duration)
testcase.set_result(TestResult.ERRORED, message='Unable to fetch package')
testcase.set_result(TestResult.ERRORED,
message='Unable to fetch package')
return testcase
def get_filename(args, top_spec):
if not args.output:
fname = 'test-{x.name}-{x.version}-{hash}.xml'.format(x=top_spec, hash=top_spec.dag_hash())
fname = 'test-{x.name}-{x.version}-{hash}.xml'.format(
x=top_spec, hash=top_spec.dag_hash())
output_directory = join_path(os.getcwd(), 'test-output')
if not os.path.exists(output_directory):
os.mkdir(output_directory)


@ -23,23 +23,23 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
from pprint import pprint
from llnl.util.filesystem import join_path, mkdirp
from llnl.util.tty.colify import colify
from llnl.util.lang import list_modules
import spack
import spack.test
from spack.fetch_strategy import FetchError
description ="Run unit tests"
description = "Run unit tests"
def setup_parser(subparser):
subparser.add_argument(
'names', nargs='*', help="Names of tests to run.")
subparser.add_argument(
'-l', '--list', action='store_true', dest='list', help="Show available tests")
'-l', '--list', action='store_true', dest='list',
help="Show available tests")
subparser.add_argument(
'--createXmlOutput', action='store_true', dest='createXmlOutput',
help="Create JUnit XML from test results")
@ -52,6 +52,7 @@ def setup_parser(subparser):
class MockCache(object):
def store(self, copyCmd, relativeDst):
pass
@ -60,6 +61,7 @@ def fetcher(self, targetPath, digest):
class MockCacheFetcher(object):
def set_stage(self, stage):
pass
@ -69,6 +71,7 @@ def fetch(self):
def __str__(self):
return "[mock fetcher]"
def test(parser, args):
if args.list:
print "Available tests:"
@ -85,5 +88,5 @@ def test(parser, args):
if not os.path.exists(outputDir):
mkdirp(outputDir)
spack.cache = MockCache()
spack.fetch_cache = MockCache()
spack.test.run(args.names, outputDir, args.verbose)


@ -50,25 +50,27 @@ def setup_parser(subparser):
subparser.add_argument(
'-f', '--force', action='store_true', dest='force',
help="Remove regardless of whether other packages depend on this one.")
subparser.add_argument(
'-a', '--all', action='store_true', dest='all',
help="USE CAREFULLY. Remove ALL installed packages that match each " +
"supplied spec. i.e., if you say uninstall libelf, ALL versions of " + # NOQA: ignore=E501
"libelf are uninstalled. This is both useful and dangerous, like rm -r.") # NOQA: ignore=E501
help="USE CAREFULLY. Remove ALL installed packages that match each "
"supplied spec. i.e., if you say uninstall libelf, ALL versions "
"of libelf are uninstalled. This is both useful and dangerous, "
"like rm -r.")
subparser.add_argument(
'-d', '--dependents', action='store_true', dest='dependents',
help='Also uninstall any packages that depend on the ones given via command line.' # NOQA: ignore=E501
)
help='Also uninstall any packages that depend on the ones given '
'via command line.')
subparser.add_argument(
'-y', '--yes-to-all', action='store_true', dest='yes_to_all',
help='Assume "yes" is the answer to every confirmation asked to the user.' # NOQA: ignore=E501
help='Assume "yes" is the answer to every confirmation requested')
)
subparser.add_argument(
'packages',
nargs=argparse.REMAINDER,
help="specs of packages to uninstall"
)
help="specs of packages to uninstall")
def concretize_specs(specs, allow_multiple_matches=False, force=False):
@ -184,7 +186,8 @@ def uninstall(parser, args):
uninstall_list = list(set(uninstall_list))
if has_error:
tty.die('You can use spack uninstall --dependents to uninstall these dependencies as well') # NOQA: ignore=E501
tty.die('You can use spack uninstall --dependents '
'to uninstall these dependencies as well')
if not args.yes_to_all:
tty.msg("The following packages will be uninstalled : ")


@ -25,13 +25,15 @@
import argparse
import spack.modules
description ="Remove package from environment using module."
description = "Remove package from environment using module."
def setup_parser(subparser):
"""Parser is only constructed so that this prints a nice help
message with -h. """
subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help='Spec of package to unload with modules.')
'spec', nargs=argparse.REMAINDER,
help='Spec of package to unload with modules.')
def unload(parser, args):


@ -25,13 +25,15 @@
import argparse
import spack.modules
description ="Remove package from environment using dotkit."
description = "Remove package from environment using dotkit."
def setup_parser(subparser):
"""Parser is only constructed so that this prints a nice help
message with -h. """
subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help='Spec of package to unuse with dotkit.')
'spec', nargs=argparse.REMAINDER,
help='Spec of package to unuse with dotkit.')
def unuse(parser, args):


@ -22,28 +22,28 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
import llnl.util.tty as tty
import spack
import spack.url
from spack.util.web import find_versions_of_archive
description = "Show parsing of a URL, optionally spider web for other versions."
description = "Show parsing of a URL, optionally spider web for versions."
def setup_parser(subparser):
subparser.add_argument('url', help="url of a package archive")
subparser.add_argument(
'-s', '--spider', action='store_true', help="Spider the source page for versions.")
'-s', '--spider', action='store_true',
help="Spider the source page for versions.")
def print_name_and_version(url):
name, ns, nl, ntup, ver, vs, vl, vtup = spack.url.substitution_offsets(url)
underlines = [" "] * max(ns+nl, vs+vl)
for i in range(ns, ns+nl):
underlines = [" "] * max(ns + nl, vs + vl)
for i in range(ns, ns + nl):
underlines[i] = '-'
for i in range(vs, vs+vl):
for i in range(vs, vs + vl):
underlines[i] = '~'
print " %s" % url


@ -22,12 +22,12 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
import spack
import spack.url
description = "Inspect urls used by packages in spack."
def setup_parser(subparser):
subparser.add_argument(
'-c', '--color', action='store_true',
@ -53,6 +53,7 @@ def urls(parser, args):
for url in sorted(urls):
if args.color or args.extrapolation:
print spack.url.color_url(url, subs=args.extrapolation, errors=True)
print spack.url.color_url(
url, subs=args.extrapolation, errors=True)
else:
print url


@ -25,13 +25,15 @@
import argparse
import spack.modules
description ="Add package to environment using dotkit."
description = "Add package to environment using dotkit."
def setup_parser(subparser):
"""Parser is only constructed so that this prints a nice help
message with -h. """
subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help='Spec of package to use with dotkit.')
'spec', nargs=argparse.REMAINDER,
help='Spec of package to use with dotkit.')
def use(parser, args):


@ -22,15 +22,16 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
from llnl.util.tty.colify import colify
import llnl.util.tty as tty
import spack
description ="List available versions of a package"
description = "List available versions of a package"
def setup_parser(subparser):
subparser.add_argument('package', metavar='PACKAGE', help='Package to list versions for')
subparser.add_argument('package', metavar='PACKAGE',
help='Package to list versions for')
def versions(parser, args):


@ -25,10 +25,8 @@
import os
import re
import itertools
from datetime import datetime
import llnl.util.tty as tty
from llnl.util.lang import memoized
from llnl.util.filesystem import join_path
import spack.error
@ -37,10 +35,10 @@
from spack.util.multiproc import parmap
from spack.util.executable import *
from spack.util.environment import get_path
from spack.version import Version
__all__ = ['Compiler', 'get_compiler_version']
def _verify_executables(*paths):
for path in paths:
if not os.path.isfile(path) and os.access(path, os.X_OK):
@ -49,8 +47,9 @@ def _verify_executables(*paths):
_version_cache = {}
def get_compiler_version(compiler_path, version_arg, regex='(.*)'):
if not compiler_path in _version_cache:
if compiler_path not in _version_cache:
compiler = Executable(compiler_path)
output = compiler(version_arg, output=str, error=str)
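
Aside: get_compiler_version memoizes results in a module-level dict keyed by compiler path, since probing a compiler forks a process. A hedged sketch of the same pattern, using subprocess in place of Spack's Executable wrapper:

    import re
    import subprocess

    _version_cache = {}

    def get_version(compiler_path, version_arg='--version', regex='(.*)'):
        # Run the compiler once per path; later lookups hit the cache.
        if compiler_path not in _version_cache:
            out = subprocess.check_output([compiler_path, version_arg])
            match = re.search(regex, out.decode('utf-8', 'replace'))
            _version_cache[compiler_path] = match.group(1) if match else None
        return _version_cache[compiler_path]
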
@ -130,11 +129,6 @@ def check(exe):
else:
self.fc = check(paths[3])
#self.cc = check(cc)
#self.cxx = check(cxx)
#self.f77 = check(f77)
#self.fc = check(fc)
# Unfortunately have to make sure these params are accepted
# in the same order they are returned by sorted(flags)
# in compilers/__init__.py
@ -158,32 +152,31 @@ def version(self):
@property
def openmp_flag(self):
# If it is not overridden, assume it is not supported and warn the user
tty.die("The compiler you have chosen does not currently support OpenMP.",
tty.die(
"The compiler you have chosen does not currently support OpenMP.",
"If you think it should, please edit the compiler subclass and",
"submit a pull request or issue.")
# This property should be overridden in the compiler subclass if
# C++11 is supported by that compiler
@property
def cxx11_flag(self):
# If it is not overridden, assume it is not supported and warn the user
tty.die("The compiler you have chosen does not currently support C++11.",
tty.die(
"The compiler you have chosen does not currently support C++11.",
"If you think it should, please edit the compiler subclass and",
"submit a pull request or issue.")
# This property should be overridden in the compiler subclass if
# C++14 is supported by that compiler
@property
def cxx14_flag(self):
# If it is not overridden, assume it is not supported and warn the user
tty.die("The compiler you have chosen does not currently support C++14.",
tty.die(
"The compiler you have chosen does not currently support C++14.",
"If you think it should, please edit the compiler subclass and",
"submit a pull request or issue.")
#
# Compiler classes have methods for querying the version of
# specific compiler executables. This is used when discovering compilers.
@ -191,7 +184,6 @@ def cxx14_flag(self):
# Compiler *instances* are just data objects, and can only be
# constructed from an actual set of executables.
#
@classmethod
def default_version(cls, cc):
"""Override just this to override all compiler version functions."""
@ -258,16 +250,19 @@ def check(key):
version = detect_version(full_path)
return (version, prefix, suffix, full_path)
except ProcessError, e:
tty.debug("Couldn't get version for compiler %s" % full_path, e)
tty.debug(
"Couldn't get version for compiler %s" % full_path, e)
return None
except Exception, e:
# Catching "Exception" here is fine because it just
# means something went wrong running a candidate executable.
tty.debug("Error while executing candidate compiler %s" % full_path,
"%s: %s" %(e.__class__.__name__, e))
tty.debug("Error while executing candidate compiler %s"
% full_path,
"%s: %s" % (e.__class__.__name__, e))
return None
successful = [key for key in parmap(check, checks) if key is not None]
successful = [k for k in parmap(check, checks) if k is not None]
# The 'successful' list is ordered like the input paths.
# Reverse it here so that the dict creation (last insert wins)
# does not spoil the intended precedence.
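
Aside: the comment above refers to a property of building a dict from key/value pairs: later pairs overwrite earlier ones. Reversing a priority-ordered list therefore makes the highest-priority entry the last insert, so it wins. A tiny sketch (paths are made up):

    pairs = [('gcc@4.9', '/usr/bin/gcc'),        # highest priority first
             ('gcc@4.9', '/opt/gcc/bin/gcc')]    # lower-priority duplicate
    chosen = dict(reversed(pairs))
    assert chosen['gcc@4.9'] == '/usr/bin/gcc'   # last insert wins
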
@ -278,20 +273,23 @@ def __repr__(self):
"""Return a string representation of the compiler toolchain."""
return self.__str__()
def __str__(self):
"""Return a string representation of the compiler toolchain."""
return "%s(%s)" % (
self.name, '\n '.join((str(s) for s in (self.cc, self.cxx, self.f77, self.fc, self.modules, str(self.operating_system)))))
self.name, '\n '.join((str(s) for s in (
self.cc, self.cxx, self.f77, self.fc, self.modules,
str(self.operating_system)))))
class CompilerAccessError(spack.error.SpackError):
def __init__(self, path):
super(CompilerAccessError, self).__init__(
"'%s' is not a valid compiler." % path)
class InvalidCompilerError(spack.error.SpackError):
def __init__(self):
super(InvalidCompilerError, self).__init__(
"Compiler has no executables.")


@ -26,15 +26,9 @@
system and configuring Spack to use multiple compilers.
"""
import imp
import os
import platform
import copy
import hashlib
import base64
import yaml
import sys
from llnl.util.lang import memoized, list_modules
from llnl.util.lang import list_modules
from llnl.util.filesystem import join_path
import spack
@ -43,11 +37,7 @@
import spack.config
import spack.architecture
from spack.util.multiproc import parmap
from spack.compiler import Compiler
from spack.util.executable import which
from spack.util.naming import mod_to_class
from spack.util.environment import get_path
_imported_compilers_module = 'spack.compilers'
_path_instance_vars = ['cc', 'cxx', 'f77', 'fc']
@ -73,7 +63,8 @@ def _to_dict(compiler):
"""Return a dict version of compiler suitable to insert in YAML."""
d = {}
d['spec'] = str(compiler.spec)
d['paths'] = dict( (attr, getattr(compiler, attr, None)) for attr in _path_instance_vars )
d['paths'] = dict((attr, getattr(compiler, attr, None))
for attr in _path_instance_vars)
d['operating_system'] = str(compiler.operating_system)
d['modules'] = compiler.modules if compiler.modules else []
@ -140,14 +131,18 @@ def remove_compiler_from_config(compiler_spec, scope=None):
- compiler_specs: a list of CompilerSpec objects.
- scope: configuration scope to modify.
"""
# Need a better way for this
global _cache_config_file
compiler_config = get_compiler_config(scope)
config_length = len(compiler_config)
filtered_compiler_config = [comp for comp in compiler_config
filtered_compiler_config = [
comp for comp in compiler_config
if spack.spec.CompilerSpec(comp['compiler']['spec']) != compiler_spec]
# Need a better way for this
global _cache_config_file
_cache_config_file = filtered_compiler_config # Update the cache for changes
# Update the cache for changes
_cache_config_file = filtered_compiler_config
if len(filtered_compiler_config) == config_length: # No items removed
CompilerSpecInsufficientlySpecificError(compiler_spec)
spack.config.update_config('compilers', filtered_compiler_config, scope)
@ -158,7 +153,8 @@ def all_compilers_config(scope=None, init_config=True):
available to build with. These are instances of CompilerSpec.
"""
# Get compilers for this architecture.
global _cache_config_file #Create a cache of the config file so we don't load all the time.
# Create a cache of the config file so we don't load all the time.
global _cache_config_file
if not _cache_config_file:
_cache_config_file = get_compiler_config(scope, init_config)
return _cache_config_file
@ -236,7 +232,8 @@ def get_compilers(cspec):
continue
items = items['compiler']
if not ('paths' in items and all(n in items['paths'] for n in _path_instance_vars)):
if not ('paths' in items and
all(n in items['paths'] for n in _path_instance_vars)):
raise InvalidCompilerConfigurationError(cspec)
cls = class_for_compiler_name(cspec.name)
@ -254,10 +251,10 @@ def get_compilers(cspec):
mods = []
if 'operating_system' in items:
operating_system = spack.architecture._operating_system_from_dict(items['operating_system'], platform)
os = spack.architecture._operating_system_from_dict(
items['operating_system'], platform)
else:
operating_system = None
os = None
alias = items['alias'] if 'alias' in items else None
@ -266,7 +263,8 @@ def get_compilers(cspec):
if f in items:
flags[f] = items[f]
compilers.append(cls(cspec, operating_system, compiler_paths, mods, alias, **flags))
compilers.append(
cls(cspec, os, compiler_paths, mods, alias, **flags))
return compilers
@ -275,7 +273,6 @@ def get_compilers(cspec):
for cspec in matches:
compilers.extend(get_compilers(cspec))
return compilers
# return [get_compilers(cspec) for cspec in matches]
@_auto_compiler_spec
@ -285,7 +282,8 @@ def compiler_for_spec(compiler_spec, arch):
operating_system = arch.platform_os
assert(compiler_spec.concrete)
compilers = [c for c in compilers_for_spec(compiler_spec, platform=arch.platform)
compilers = [
c for c in compilers_for_spec(compiler_spec, platform=arch.platform)
if c.operating_system == operating_system]
if len(compilers) < 1:
raise NoCompilerForSpecError(compiler_spec, operating_system)
@ -321,11 +319,13 @@ def all_os_classes():
return classes
def all_compiler_types():
return [class_for_compiler_name(c) for c in supported_compilers()]
class InvalidCompilerConfigurationError(spack.error.SpackError):
def __init__(self, compiler_spec):
super(InvalidCompilerConfigurationError, self).__init__(
"Invalid configuration for [compiler \"%s\"]: " % compiler_spec,
@ -335,14 +335,18 @@ def __init__(self, compiler_spec):
class NoCompilersError(spack.error.SpackError):
def __init__(self):
super(NoCompilersError, self).__init__("Spack could not find any compilers!")
super(NoCompilersError, self).__init__(
"Spack could not find any compilers!")
class NoCompilerForSpecError(spack.error.SpackError):
def __init__(self, compiler_spec, target):
super(NoCompilerForSpecError, self).__init__("No compilers for operating system %s satisfy spec %s" % (
target, compiler_spec))
super(NoCompilerForSpecError, self).__init__(
"No compilers for operating system %s satisfy spec %s"
% (target, compiler_spec))
class CompilerSpecInsufficientlySpecificError(spack.error.SpackError):
def __init__(self, compiler_spec):
super(CompilerSpecInsufficientlySpecificError, self).__init__("Multiple compilers satisfy spec %s",
compiler_spec)
super(CompilerSpecInsufficientlySpecificError, self).__init__(
"Multiple compilers satisfy spec %s" % compiler_spec)


@ -29,6 +29,7 @@
import llnl.util.tty as tty
from spack.version import ver
class Clang(Compiler):
# Subclasses use possible names of C compiler
cc_names = ['clang']
@ -43,11 +44,12 @@ class Clang(Compiler):
fc_names = []
# Named wrapper links within spack.build_env_path
link_paths = { 'cc' : 'clang/clang',
'cxx' : 'clang/clang++',
# Use default wrappers for fortran, in case provided in compilers.yaml
'f77' : 'f77',
'fc' : 'f90' }
link_paths = {'cc': 'clang/clang',
'cxx': 'clang/clang++',
# Use default wrappers for fortran, in case provided in
# compilers.yaml
'f77': 'f77',
'fc': 'f90'}
@property
def is_apple(self):


@ -1,34 +1,33 @@
##############################################################################}
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import llnl.util.tty as tty
#from spack.build_environment import load_module
from spack.compiler import *
#from spack.version import ver
class Craype(Compiler):
"""Cray programming environment compiler."""
# Subclasses use possible names of C compiler
cc_names = ['cc']
@ -47,12 +46,11 @@ class Craype(Compiler):
PrgEnv = 'PrgEnv-cray'
PrgEnv_compiler = 'craype'
link_paths = { 'cc' : 'cc',
'cxx' : 'c++',
'f77' : 'f77',
'fc' : 'fc'}
link_paths = {'cc': 'cc',
'cxx': 'c++',
'f77': 'f77',
'fc': 'fc'}
@classmethod
def default_version(cls, comp):
return get_compiler_version(comp, r'([Vv]ersion).*(\d+(\.\d+)+)')


@ -26,6 +26,7 @@
from spack.compiler import *
from spack.version import ver
class Gcc(Compiler):
# Subclasses use possible names of C compiler
cc_names = ['gcc']
@ -44,10 +45,10 @@ class Gcc(Compiler):
suffixes = [r'-mp-\d\.\d', r'-\d\.\d', r'-\d']
# Named wrapper links within spack.build_env_path
link_paths = {'cc' : 'gcc/gcc',
'cxx' : 'gcc/g++',
'f77' : 'gcc/gfortran',
'fc' : 'gcc/gfortran' }
link_paths = {'cc': 'gcc/gcc',
'cxx': 'gcc/g++',
'f77': 'gcc/gfortran',
'fc': 'gcc/gfortran'}
PrgEnv = 'PrgEnv-gnu'
PrgEnv_compiler = 'gcc'
@ -79,7 +80,6 @@ def fc_version(cls, fc):
# older gfortran versions don't have simple dumpversion output.
r'(?:GNU Fortran \(GCC\))?(\d+\.\d+(?:\.\d+)?)')
@classmethod
def f77_version(cls, f77):
return cls.fc_version(f77)


@ -26,6 +26,7 @@
import llnl.util.tty as tty
from spack.version import ver
class Intel(Compiler):
# Subclasses use possible names of C compiler
cc_names = ['icc']
@ -40,10 +41,10 @@ class Intel(Compiler):
fc_names = ['ifort']
# Named wrapper links within spack.build_env_path
link_paths = { 'cc' : 'intel/icc',
'cxx' : 'intel/icpc',
'f77' : 'intel/ifort',
'fc' : 'intel/ifort' }
link_paths = {'cc': 'intel/icc',
'cxx': 'intel/icpc',
'f77': 'intel/ifort',
'fc': 'intel/ifort'}
PrgEnv = 'PrgEnv-intel'
PrgEnv_compiler = 'intel'
@ -64,7 +65,6 @@ def cxx11_flag(self):
else:
return "-std=c++11"
@classmethod
def default_version(cls, comp):
"""The '--version' option seems to be the most consistent one


@ -23,7 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack.compiler import *
import llnl.util.tty as tty
class Nag(Compiler):
# Subclasses use possible names of C compiler
@ -39,11 +39,12 @@ class Nag(Compiler):
fc_names = ['nagfor']
# Named wrapper links within spack.build_env_path
link_paths = { # Use default wrappers for C and C++, in case provided in compilers.yaml
'cc' : 'cc',
'cxx' : 'c++',
'f77' : 'nag/nagfor',
'fc' : 'nag/nagfor' }
# Use default wrappers for C and C++, in case provided in compilers.yaml
link_paths = {
'cc': 'cc',
'cxx': 'c++',
'f77': 'nag/nagfor',
'fc': 'nag/nagfor'}
@property
def openmp_flag(self):
@ -73,7 +74,6 @@ def default_version(self, comp):
NAG Fortran Compiler Release 6.0(Hibiya) Build 1037
Product NPL6A60NA for x86-64 Linux
Copyright 1990-2015 The Numerical Algorithms Group Ltd., Oxford, U.K.
"""
return get_compiler_version(
comp, '-V', r'NAG Fortran Compiler Release ([0-9.]+)')


@ -23,7 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack.compiler import *
import llnl.util.tty as tty
class Pgi(Compiler):
# Subclasses use possible names of C compiler
@ -39,17 +39,14 @@ class Pgi(Compiler):
fc_names = ['pgfortran', 'pgf95', 'pgf90']
# Named wrapper links within spack.build_env_path
link_paths = { 'cc' : 'pgi/pgcc',
'cxx' : 'pgi/pgc++',
'f77' : 'pgi/pgfortran',
'fc' : 'pgi/pgfortran' }
link_paths = {'cc': 'pgi/pgcc',
'cxx': 'pgi/pgc++',
'f77': 'pgi/pgfortran',
'fc': 'pgi/pgfortran'}
PrgEnv = 'PrgEnv-pgi'
PrgEnv_compiler = 'pgi'
@property
def openmp_flag(self):
return "-mp"


@ -26,24 +26,26 @@
import llnl.util.tty as tty
from spack.version import ver
class Xl(Compiler):
# Subclasses use possible names of C compiler
cc_names = ['xlc','xlc_r']
cc_names = ['xlc', 'xlc_r']
# Subclasses use possible names of C++ compiler
cxx_names = ['xlC','xlC_r','xlc++','xlc++_r']
cxx_names = ['xlC', 'xlC_r', 'xlc++', 'xlc++_r']
# Subclasses use possible names of Fortran 77 compiler
f77_names = ['xlf','xlf_r']
f77_names = ['xlf', 'xlf_r']
# Subclasses use possible names of Fortran 90 compiler
fc_names = ['xlf90','xlf90_r','xlf95','xlf95_r','xlf2003','xlf2003_r','xlf2008','xlf2008_r']
fc_names = ['xlf90', 'xlf90_r', 'xlf95', 'xlf95_r',
'xlf2003', 'xlf2003_r', 'xlf2008', 'xlf2008_r']
# Named wrapper links within spack.build_env_path
link_paths = { 'cc' : 'xl/xlc',
'cxx' : 'xl/xlc++',
'f77' : 'xl/xlf',
'fc' : 'xl/xlf90' }
link_paths = {'cc': 'xl/xlc',
'cxx': 'xl/xlc++',
'f77': 'xl/xlf',
'fc': 'xl/xlf90'}
@property
def openmp_flag(self):
@ -56,7 +58,6 @@ def cxx11_flag(self):
else:
return "-qlanglvl=extended0x"
@classmethod
def default_version(cls, comp):
"""The '-qversion' is the standard option fo XL compilers.
@ -82,29 +83,28 @@ def default_version(cls, comp):
"""
return get_compiler_version(
comp, '-qversion',r'([0-9]?[0-9]\.[0-9])')
comp, '-qversion', r'([0-9]?[0-9]\.[0-9])')
@classmethod
def fc_version(cls, fc):
"""The fortran and C/C++ versions of the XL compiler are always two units apart.
By this we mean that the fortran release that goes with XL C/C++ 11.1 is 13.1.
Having such a difference in version number is confusing spack quite a lot.
Most notably if you keep the versions as is the default xl compiler will only
have fortran and no C/C++.
So we associate the Fortran compiler with the version associated to the C/C++
compiler.
One last stumble. Version numbers over 10 have at least a .1 those under 10
a .0. There is no xlf 9.x or under currently available. BG/P and BG/L can
such a compiler mix and possibly older version of AIX and linux on power.
"""The fortran and C/C++ versions of the XL compiler are always
two units apart. By this we mean that the fortran release that
goes with XL C/C++ 11.1 is 13.1. Having such a difference in
version number is confusing spack quite a lot. Most notably,
if you keep the versions as is, the default xl compiler will
only have fortran and no C/C++. So we associate the Fortran
compiler with the version associated to the C/C++ compiler.
One last stumble. Version numbers over 10 have at least a .1,
those under 10 a .0. There is no xlf 9.x or under currently
available. BG/P and BG/L can have such a compiler mix, as can
possibly older versions of AIX and Linux on Power.
"""
fver = get_compiler_version(fc, '-qversion',r'([0-9]?[0-9]\.[0-9])')
fver = get_compiler_version(fc, '-qversion', r'([0-9]?[0-9]\.[0-9])')
cver = float(fver) - 2
if cver < 10 :
if cver < 10:
cver = cver - 0.1
return str(cver)
@classmethod
def f77_version(cls, f77):
return cls.fc_version(f77)
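
Aside: the arithmetic in fc_version is compact; a standalone restatement of the mapping it implements, under the same assumptions spelled out in the docstring:

    def xl_c_version_from_fortran(fver):
        # XL Fortran releases run two units ahead of C/C++ releases;
        # C/C++ versions below 10 end in .0 rather than .1.
        cver = float(fver) - 2
        if cver < 10:
            cver = cver - 0.1
        return str(cver)

    assert xl_c_version_from_fortran('13.1') == '11.1'
    assert xl_c_version_from_fortran('11.1') == '9.0'
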


@ -40,12 +40,12 @@
import spack.error
from spack.version import *
from functools import partial
from spec import DependencyMap
from itertools import chain
from spack.config import *
class DefaultConcretizer(object):
"""This class doesn't have any state, it just provides some methods for
concretization. You can subclass it to override just some of the
default concretization strategies, or you can override all of them.
@ -61,14 +61,19 @@ def _valid_virtuals_and_externals(self, spec):
if not providers:
raise UnsatisfiableProviderSpecError(providers[0], spec)
spec_w_preferred_providers = find_spec(
spec, lambda(x): spack.pkgsort.spec_has_preferred_provider(x.name, spec.name))
spec,
lambda x: spack.pkgsort.spec_has_preferred_provider(
x.name, spec.name))
if not spec_w_preferred_providers:
spec_w_preferred_providers = spec
provider_cmp = partial(spack.pkgsort.provider_compare, spec_w_preferred_providers.name, spec.name)
provider_cmp = partial(spack.pkgsort.provider_compare,
spec_w_preferred_providers.name,
spec.name)
candidates = sorted(providers, cmp=provider_cmp)
# For each candidate package, if it has externals, add those to the usable list.
# if it's not buildable, then *only* add the externals.
# For each candidate package, if it has externals, add those
# to the usable list. if it's not buildable, then *only* add
# the externals.
usable = []
for cspec in candidates:
if is_spec_buildable(cspec):
@ -114,26 +119,26 @@ def choose_virtual_or_external(self, spec):
# Find the nearest spec in the dag that has a compiler. We'll
# use that spec to calibrate compiler compatibility.
abi_exemplar = find_spec(spec, lambda(x): x.compiler)
abi_exemplar = find_spec(spec, lambda x: x.compiler)
if not abi_exemplar:
abi_exemplar = spec.root
# Make a list including ABI compatibility of specs with the exemplar.
strict = [spack.abi.compatible(c, abi_exemplar) for c in candidates]
loose = [spack.abi.compatible(c, abi_exemplar, loose=True) for c in candidates]
loose = [spack.abi.compatible(c, abi_exemplar, loose=True)
for c in candidates]
keys = zip(strict, loose, candidates)
# Sort candidates from most to least compatibility.
# Note:
# 1. We reverse because True > False.
# 2. Sort is stable, so c's keep their order.
keys.sort(key=lambda k:k[:2], reverse=True)
keys.sort(key=lambda k: k[:2], reverse=True)
# Pull the candidates back out and return them in order
candidates = [c for s,l,c in keys]
candidates = [c for s, l, c in keys]
return candidates
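
Aside: the sort above exploits two facts: True > False, so reverse=True puts compatible candidates first, and Python's sort is stable, so candidates keep their original relative order within equal keys. A sketch with made-up providers:

    candidates = ['mpich', 'openmpi', 'mvapich2']
    strict = [False, True, False]   # strict ABI compatibility per candidate
    loose = [True, True, False]     # loose ABI compatibility per candidate

    keys = list(zip(strict, loose, candidates))
    keys.sort(key=lambda k: k[:2], reverse=True)
    ordered = [c for s, l, c in keys]
    assert ordered == ['openmpi', 'mpich', 'mvapich2']
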
def concretize_version(self, spec):
"""If the spec is already concrete, return. Otherwise take
the preferred version from spackconfig, and default to the package's
@ -167,7 +172,8 @@ def prefer_key(v):
if valid_versions:
# Disregard @develop and take the next valid version
if ver(valid_versions[0]) == ver('develop') and len(valid_versions) > 1:
if ver(valid_versions[0]) == ver('develop') and \
len(valid_versions) > 1:
spec.versions = ver([valid_versions[1]])
else:
spec.versions = ver([valid_versions[0]])
@ -193,40 +199,45 @@ def prefer_key(v):
return True # Things changed
def _concretize_operating_system(self, spec):
platform = spec.architecture.platform
if spec.architecture.platform_os is not None and isinstance(
spec.architecture.platform_os,spack.architecture.OperatingSystem):
spec.architecture.platform_os,
spack.architecture.OperatingSystem):
return False
if spec.root.architecture and spec.root.architecture.platform_os:
if isinstance(spec.root.architecture.platform_os,spack.architecture.OperatingSystem):
spec.architecture.platform_os = spec.root.architecture.platform_os
if isinstance(spec.root.architecture.platform_os,
spack.architecture.OperatingSystem):
spec.architecture.platform_os = \
spec.root.architecture.platform_os
else:
spec.architecture.platform_os = spec.architecture.platform.operating_system('default_os')
return True #changed
spec.architecture.platform_os = \
spec.architecture.platform.operating_system('default_os')
return True # changed
def _concretize_target(self, spec):
platform = spec.architecture.platform
if spec.architecture.target is not None and isinstance(
spec.architecture.target, spack.architecture.Target):
return False
if spec.root.architecture and spec.root.architecture.target:
if isinstance(spec.root.architecture.target,spack.architecture.Target):
if isinstance(spec.root.architecture.target,
spack.architecture.Target):
spec.architecture.target = spec.root.architecture.target
else:
spec.architecture.target = spec.architecture.platform.target('default_target')
return True #changed
spec.architecture.target = spec.architecture.platform.target(
'default_target')
return True # changed
def _concretize_platform(self, spec):
if spec.architecture.platform is not None and isinstance(
spec.architecture.platform, spack.architecture.Platform):
return False
if spec.root.architecture and spec.root.architecture.platform:
if isinstance(spec.root.architecture.platform,spack.architecture.Platform):
if isinstance(spec.root.architecture.platform,
spack.architecture.Platform):
spec.architecture.platform = spec.root.architecture.platform
else:
spec.architecture.platform = spack.architecture.platform()
return True #changed?
return True # changed?
def concretize_architecture(self, spec):
"""If the spec is empty provide the defaults of the platform. If the
@ -250,20 +261,24 @@ def concretize_architecture(self, spec):
self._concretize_target(spec)))
return ret
def concretize_variants(self, spec):
"""If the spec already has variants filled in, return. Otherwise, add
the default variants from the package specification.
the user preferences from packages.yaml or the default variants from
the package specification.
"""
changed = False
preferred_variants = spack.pkgsort.spec_preferred_variants(
spec.package_class.name)
for name, variant in spec.package_class.variants.items():
if name not in spec.variants:
spec.variants[name] = spack.spec.VariantSpec(name, variant.default)
changed = True
if name in preferred_variants:
spec.variants[name] = preferred_variants.get(name)
else:
spec.variants[name] = \
spack.spec.VariantSpec(name, variant.default)
return changed
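
Aside: concretize_variants now consults user preferences before falling back to the package's declared default. The lookup order reduces to a sketch like this (plain dicts stand in for Spack's variant objects):

    def pick_variants(declared_defaults, preferred):
        # A preference from packages.yaml wins; otherwise keep the
        # default declared by the package itself.
        return dict((name, preferred.get(name, default))
                    for name, default in declared_defaults.items())

    assert pick_variants({'shared': True, 'debug': False},
                         {'debug': True}) == {'shared': True, 'debug': True}
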
def concretize_compiler(self, spec):
"""If the spec already has a compiler, we're done. If not, then take
the compiler used for the nearest ancestor with a compiler
@ -278,20 +293,21 @@ def concretize_compiler(self, spec):
"""
# Pass on concretizing the compiler if the target is not yet determined
if not spec.architecture.platform_os:
#Although this usually means changed, this means awaiting other changes
# Although this usually means changed, this means awaiting other
# changes
return True
# Only use a matching compiler if it is of the proper style
# Takes advantage of the proper logic already existing in compiler_for_spec
# Should think whether this can be more efficient
# Takes advantage of the proper logic already existing in
# compiler_for_spec Should think whether this can be more
# efficient
def _proper_compiler_style(cspec, arch):
platform = arch.platform
compilers = spack.compilers.compilers_for_spec(cspec,
platform=platform)
return filter(lambda c: c.operating_system ==
arch.platform_os, compilers)
#return compilers
# return compilers
all_compilers = spack.compilers.all_compilers()
@ -300,8 +316,9 @@ def _proper_compiler_style(cspec, arch):
spec.compiler in all_compilers):
return False
#Find the another spec that has a compiler, or the root if none do
other_spec = spec if spec.compiler else find_spec(spec, lambda(x) : x.compiler)
# Find the another spec that has a compiler, or the root if none do
other_spec = spec if spec.compiler else find_spec(
spec, lambda x: x.compiler)
if not other_spec:
other_spec = spec.root
@ -313,24 +330,30 @@ def _proper_compiler_style(cspec, arch):
spec.compiler = other_compiler.copy()
return True
# Filter the compilers into a sorted list based on the compiler_order from spackconfig
compiler_list = all_compilers if not other_compiler else spack.compilers.find(other_compiler)
cmp_compilers = partial(spack.pkgsort.compiler_compare, other_spec.name)
# Filter the compilers into a sorted list based on the compiler_order
# from spackconfig
compiler_list = all_compilers if not other_compiler else \
spack.compilers.find(other_compiler)
cmp_compilers = partial(
spack.pkgsort.compiler_compare, other_spec.name)
matches = sorted(compiler_list, cmp=cmp_compilers)
if not matches:
raise UnavailableCompilerVersionError(other_compiler)
arch = spec.architecture
raise UnavailableCompilerVersionError(other_compiler,
arch.platform_os)
# copy concrete version into other_compiler
index = 0
while not _proper_compiler_style(matches[index], spec.architecture):
index += 1
if index == len(matches) - 1:
raise NoValidVersionError(spec)
arch = spec.architecture
raise UnavailableCompilerVersionError(spec.compiler,
arch.platform_os)
spec.compiler = matches[index].copy()
assert(spec.compiler.concrete)
return True # things changed.
def concretize_compiler_flags(self, spec):
"""
The compiler flags are updated to match those of the spec whose
@ -338,53 +361,65 @@ def concretize_compiler_flags(self, spec):
Default specs set at the compiler level will still be added later.
"""
if not spec.architecture.platform_os:
#Although this usually means changed, this means awaiting other changes
# Although this usually means changed, this means awaiting other
# changes
return True
ret = False
for flag in spack.spec.FlagMap.valid_compiler_flags():
try:
nearest = next(p for p in spec.traverse(direction='parents')
if ((p.compiler == spec.compiler and p is not spec)
and flag in p.compiler_flags))
if not flag in spec.compiler_flags or \
not (sorted(spec.compiler_flags[flag]) >= sorted(nearest.compiler_flags[flag])):
if ((p.compiler == spec.compiler and
p is not spec) and
flag in p.compiler_flags))
if flag not in spec.compiler_flags or \
not (sorted(spec.compiler_flags[flag]) >=
sorted(nearest.compiler_flags[flag])):
if flag in spec.compiler_flags:
spec.compiler_flags[flag] = list(set(spec.compiler_flags[flag]) |
spec.compiler_flags[flag] = list(
set(spec.compiler_flags[flag]) |
set(nearest.compiler_flags[flag]))
else:
spec.compiler_flags[flag] = nearest.compiler_flags[flag]
spec.compiler_flags[
flag] = nearest.compiler_flags[flag]
ret = True
except StopIteration:
if (flag in spec.root.compiler_flags and ((not flag in spec.compiler_flags) or
sorted(spec.compiler_flags[flag]) != sorted(spec.root.compiler_flags[flag]))):
if (flag in spec.root.compiler_flags and
((flag not in spec.compiler_flags) or
sorted(spec.compiler_flags[flag]) !=
sorted(spec.root.compiler_flags[flag]))):
if flag in spec.compiler_flags:
spec.compiler_flags[flag] = list(set(spec.compiler_flags[flag]) |
spec.compiler_flags[flag] = list(
set(spec.compiler_flags[flag]) |
set(spec.root.compiler_flags[flag]))
else:
spec.compiler_flags[flag] = spec.root.compiler_flags[flag]
spec.compiler_flags[
flag] = spec.root.compiler_flags[flag]
ret = True
else:
if not flag in spec.compiler_flags:
if flag not in spec.compiler_flags:
spec.compiler_flags[flag] = []
# Include the compiler flag defaults from the config files
# This ensures that spack will detect conflicts that stem from a change
# in default compiler flags.
compiler = spack.compilers.compiler_for_spec(spec.compiler, spec.architecture)
compiler = spack.compilers.compiler_for_spec(
spec.compiler, spec.architecture)
for flag in compiler.flags:
if flag not in spec.compiler_flags:
spec.compiler_flags[flag] = compiler.flags[flag]
if compiler.flags[flag] != []:
ret = True
else:
if ((sorted(spec.compiler_flags[flag]) != sorted(compiler.flags[flag])) and
(not set(spec.compiler_flags[flag]) >= set(compiler.flags[flag]))):
if ((sorted(spec.compiler_flags[flag]) !=
sorted(compiler.flags[flag])) and
(not set(spec.compiler_flags[flag]) >=
set(compiler.flags[flag]))):
ret = True
spec.compiler_flags[flag] = list(set(spec.compiler_flags[flag]) |
spec.compiler_flags[flag] = list(
set(spec.compiler_flags[flag]) |
set(compiler.flags[flag]))
return ret
@ -406,8 +441,10 @@ def find_spec(spec, condition):
# Then search all other relatives in the DAG *except* spec
for relative in spec.root.traverse(deptypes=spack.alldeps):
if relative is spec: continue
if id(relative) in visited: continue
if relative is spec:
continue
if id(relative) in visited:
continue
if condition(relative):
return relative
@ -454,25 +491,33 @@ def cmp_specs(lhs, rhs):
class UnavailableCompilerVersionError(spack.error.SpackError):
"""Raised when there is no available compiler that satisfies a
compiler spec."""
def __init__(self, compiler_spec):
def __init__(self, compiler_spec, operating_system):
super(UnavailableCompilerVersionError, self).__init__(
"No available compiler version matches '%s'" % compiler_spec,
"No available compiler version matches '%s' on operating_system %s"
% (compiler_spec, operating_system),
"Run 'spack compilers' to see available compiler Options.")
class NoValidVersionError(spack.error.SpackError):
"""Raised when there is no way to have a concrete version for a
particular spec."""
def __init__(self, spec):
super(NoValidVersionError, self).__init__(
"There are no valid versions for %s that match '%s'" % (spec.name, spec.versions))
"There are no valid versions for %s that match '%s'"
% (spec.name, spec.versions))
class NoBuildError(spack.error.SpackError):
"""Raised when a package is configured with the buildable option False, but
no satisfactory external versions can be found"""
def __init__(self, spec):
super(NoBuildError, self).__init__(
"The spec '%s' is configured as not buildable, and no matching external installs were found" % spec.name)
msg = ("The spec '%s' is configured as not buildable, "
"and no matching external installs were found")
super(NoBuildError, self).__init__(msg % spec.name)

View file

@ -1,4 +1,3 @@
# flake8: noqa
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
@ -123,15 +122,18 @@
import re
import sys
import jsonschema
import llnl.util.tty as tty
import spack
import yaml
from jsonschema import Draft4Validator, validators
from llnl.util.filesystem import mkdirp
from ordereddict_backport import OrderedDict
from spack.error import SpackError
import jsonschema
from yaml.error import MarkedYAMLError
from jsonschema import Draft4Validator, validators
from ordereddict_backport import OrderedDict
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
import spack
from spack.error import SpackError
import spack.schema
# Hacked yaml for configuration files preserves line numbers.
import spack.util.spack_yaml as syaml
@ -139,251 +141,12 @@
"""Dict from section names -> schema for that section."""
section_schemas = {
'compilers': {
'$schema': 'http://json-schema.org/schema#',
'title': 'Spack compiler configuration file schema',
'type': 'object',
'additionalProperties': False,
'patternProperties': {
'compilers:?': { # optional colon for overriding site config.
'type': 'array',
'items': {
'compiler': {
'type': 'object',
'additionalProperties': False,
'required': ['paths', 'spec', 'modules', 'operating_system'],
'properties': {
'paths': {
'type': 'object',
'required': ['cc', 'cxx', 'f77', 'fc'],
'additionalProperties': False,
'properties': {
'cc': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
'cxx': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
'f77': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
'fc': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
'cflags': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
'cxxflags': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
'fflags': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
'cppflags': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
'ldflags': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
'ldlibs': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]}}},
'spec': { 'type': 'string'},
'operating_system': { 'type': 'string'},
'alias': { 'anyOf': [ {'type' : 'string'},
{'type' : 'null' }]},
'modules': { 'anyOf': [ {'type' : 'string'},
{'type' : 'null' },
{'type': 'array'},
]}
},},},},},},
'mirrors': {
'$schema': 'http://json-schema.org/schema#',
'title': 'Spack mirror configuration file schema',
'type': 'object',
'additionalProperties': False,
'patternProperties': {
r'mirrors:?': {
'type': 'object',
'default': {},
'additionalProperties': False,
'patternProperties': {
r'\w[\w-]*': {
'type': 'string'},},},},},
'repos': {
'$schema': 'http://json-schema.org/schema#',
'title': 'Spack repository configuration file schema',
'type': 'object',
'additionalProperties': False,
'patternProperties': {
r'repos:?': {
'type': 'array',
'default': [],
'items': {
'type': 'string'},},},},
'packages': {
'$schema': 'http://json-schema.org/schema#',
'title': 'Spack package configuration file schema',
'type': 'object',
'additionalProperties': False,
'patternProperties': {
r'packages:?': {
'type': 'object',
'default': {},
'additionalProperties': False,
'patternProperties': {
r'\w[\w-]*': { # package name
'type': 'object',
'default': {},
'additionalProperties': False,
'properties': {
'version': {
'type' : 'array',
'default' : [],
'items' : { 'anyOf' : [ { 'type' : 'string' },
{ 'type' : 'number'}]}}, #version strings
'compiler': {
'type' : 'array',
'default' : [],
'items' : { 'type' : 'string' } }, #compiler specs
'buildable': {
'type': 'boolean',
'default': True,
},
'modules': {
'type' : 'object',
'default' : {},
},
'providers': {
'type': 'object',
'default': {},
'additionalProperties': False,
'patternProperties': {
r'\w[\w-]*': {
'type' : 'array',
'default' : [],
'items' : { 'type' : 'string' },},},},
'paths': {
'type' : 'object',
'default' : {},
}
},},},},},},
'modules': {
'$schema': 'http://json-schema.org/schema#',
'title': 'Spack module file configuration file schema',
'type': 'object',
'additionalProperties': False,
'definitions': {
'array_of_strings': {
'type': 'array',
'default': [],
'items': {
'type': 'string'
}
},
'dictionary_of_strings': {
'type': 'object',
'patternProperties': {
r'\w[\w-]*': { # key
'type': 'string'
}
}
},
'dependency_selection': {
'type': 'string',
'enum': ['none', 'direct', 'all']
},
'module_file_configuration': {
'type': 'object',
'default': {},
'additionalProperties': False,
'properties': {
'filter': {
'type': 'object',
'default': {},
'additionalProperties': False,
'properties': {
'environment_blacklist': {
'type': 'array',
'default': [],
'items': {
'type': 'string'
}
}
}
},
'autoload': {'$ref': '#/definitions/dependency_selection'},
'prerequisites': {'$ref': '#/definitions/dependency_selection'},
'conflict': {'$ref': '#/definitions/array_of_strings'},
'load': {'$ref': '#/definitions/array_of_strings'},
'suffixes': {'$ref': '#/definitions/dictionary_of_strings'},
'environment': {
'type': 'object',
'default': {},
'additionalProperties': False,
'properties': {
'set': {'$ref': '#/definitions/dictionary_of_strings'},
'unset': {'$ref': '#/definitions/array_of_strings'},
'prepend_path': {'$ref': '#/definitions/dictionary_of_strings'},
'append_path': {'$ref': '#/definitions/dictionary_of_strings'}
}
}
}
},
'module_type_configuration': {
'type': 'object',
'default': {},
'anyOf': [
{
'properties': {
'hash_length': {
'type': 'integer',
'minimum': 0,
'default': 7
},
'whitelist': {'$ref': '#/definitions/array_of_strings'},
'blacklist': {'$ref': '#/definitions/array_of_strings'},
'naming_scheme': {
'type': 'string' # Can we be more specific here?
}
}
},
{
'patternProperties': {r'\w[\w-]*': {'$ref': '#/definitions/module_file_configuration'}}
}
]
}
},
'patternProperties': {
r'modules:?': {
'type': 'object',
'default': {},
'additionalProperties': False,
'properties': {
'prefix_inspections': {
'type': 'object',
'patternProperties': {
r'\w[\w-]*': { # path to be inspected for existence (relative to prefix)
'$ref': '#/definitions/array_of_strings'
}
}
},
'enable': {
'type': 'array',
'default': [],
'items': {
'type': 'string',
'enum': ['tcl', 'dotkit']
}
},
'tcl': {
'allOf': [
{'$ref': '#/definitions/module_type_configuration'}, # Base configuration
{} # Specific tcl extensions
]
},
'dotkit': {
'allOf': [
{'$ref': '#/definitions/module_type_configuration'}, # Base configuration
{} # Specific dotkit extensions
]
},
}
},
},
},
'compilers': spack.schema.compilers.schema,
'mirrors': spack.schema.mirrors.schema,
'repos': spack.schema.repos.schema,
'packages': spack.schema.packages.schema,
'targets': spack.schema.targets.schema,
'modules': spack.schema.modules.schema,
}
"""OrderedDict of config scopes keyed by name.
@ -400,7 +163,7 @@ def validate_section_name(section):
def extend_with_default(validator_class):
"""Add support for the 'default' attribute for properties and patternProperties.
"""Add support for the 'default' attr for properties and patternProperties.
jsonschema does not handle this out of the box -- it only
validates. This allows us to set default values for configs
@ -409,13 +172,15 @@ def extend_with_default(validator_class):
"""
validate_properties = validator_class.VALIDATORS["properties"]
validate_pattern_properties = validator_class.VALIDATORS["patternProperties"]
validate_pattern_properties = validator_class.VALIDATORS[
"patternProperties"]
def set_defaults(validator, properties, instance, schema):
for property, subschema in properties.iteritems():
if "default" in subschema:
instance.setdefault(property, subschema["default"])
for err in validate_properties(validator, properties, instance, schema):
for err in validate_properties(
validator, properties, instance, schema):
yield err
def set_pp_defaults(validator, properties, instance, schema):
@ -426,7 +191,8 @@ def set_pp_defaults(validator, properties, instance, schema):
if re.match(property, key) and val is None:
instance[key] = subschema["default"]
for err in validate_pattern_properties(validator, properties, instance, schema):
for err in validate_pattern_properties(
validator, properties, instance, schema):
yield err
return validators.extend(validator_class, {
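The extended class behaves like any other jsonschema validator; a minimal
usage sketch (the schema and data below are hypothetical, not Spack's real
config schema):

    # Hedged sketch: wrap Draft4Validator so 'default' values are filled
    # into the instance during validation. Schema and data are made up.
    DefaultingValidator = extend_with_default(Draft4Validator)
    schema = {'type': 'object',
              'properties': {'timeout': {'type': 'integer', 'default': 3}}}
    data = {}
    DefaultingValidator(schema).validate(data)
    print data   # {'timeout': 3}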
@ -491,7 +257,8 @@ def write_section(self, section):
except jsonschema.ValidationError as e:
raise ConfigSanityError(e, data)
except (yaml.YAMLError, IOError) as e:
raise ConfigFileError("Error writing to config file: '%s'" % str(e))
raise ConfigFileError(
"Error writing to config file: '%s'" % str(e))
def clear(self):
"""Empty cached config information."""
@ -506,7 +273,7 @@ def clear(self):
ConfigScope('site', os.path.join(spack.etc_path, 'spack'))
"""User configuration can override both spack defaults and site config."""
ConfigScope('user', os.path.expanduser('~/.spack'))
ConfigScope('user', spack.user_config_path)
def highest_precedence_scope():
@ -689,7 +456,7 @@ def print_section(section):
data = syaml.syaml_dict()
data[section] = get_config(section)
syaml.dump(data, stream=sys.stdout, default_flow_style=False)
except (yaml.YAMLError, IOError) as e:
except (yaml.YAMLError, IOError):
raise ConfigError("Error reading configuration: %s" % section)
@ -720,7 +487,8 @@ def spec_externals(spec):
path = get_path_from_module(module)
external_spec = spack.spec.Spec(external_spec, external=path, external_module=module)
external_spec = spack.spec.Spec(
external_spec, external=path, external_module=module)
if external_spec.satisfies(spec):
external_specs.append(external_spec)
@ -754,6 +522,7 @@ def get_path(path, data):
class ConfigFormatError(ConfigError):
"""Raised when a configuration format does not match its schema."""
def __init__(self, validation_error, data):
# Try to get line number from erroneous instance and its parent
instance_mark = getattr(validation_error.instance, '_start_mark', None)

View file

@ -119,6 +119,7 @@ def from_dict(cls, spec, dictionary):
class Database(object):
def __init__(self, root, db_dir=None):
"""Create a Database for Spack installations under ``root``.
@ -165,11 +166,11 @@ def __init__(self, root, db_dir=None):
def write_transaction(self, timeout=_db_lock_timeout):
"""Get a write lock context manager for use in a `with` block."""
return WriteTransaction(self, self._read, self._write, timeout)
return WriteTransaction(self.lock, self._read, self._write, timeout)
def read_transaction(self, timeout=_db_lock_timeout):
"""Get a read lock context manager for use in a `with` block."""
return ReadTransaction(self, self._read, None, timeout)
return ReadTransaction(self.lock, self._read, timeout=timeout)
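A hedged usage sketch of these transactions (the Database instance and the
calls inside the blocks are hypothetical):

    # Both transactions are context managers around the database file lock.
    db = Database(spack.install_path)   # root under which packages install
    with db.read_transaction():
        results = db.query()      # hypothetical read-only call, read lock
    with db.write_transaction():
        db._add(spec, path)       # hypothetical mutating call; the index
                                  # is re-read on enter, written on exit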
def _write_to_yaml(self, stream):
"""Write out the databsae to a YAML file.
@ -352,12 +353,22 @@ def _check_ref_counts(self):
"Invalid ref_count: %s: %d (expected %d), in DB %s" %
(key, found, expected, self._index_path))
def _write(self):
def _write(self, type, value, traceback):
"""Write the in-memory database index to its file path.
Does no locking.
This is a helper function called by the WriteTransaction context
manager. If there is an exception while the write lock is active,
nothing will be written to the database file, but the in-memory
database *may* be left in an inconsistent state. It will be consistent
after the start of the next transaction, when it is read from disk again.
This routine does no locking.
"""
# Do not write if exceptions were raised
if type is not None:
return
temp_file = self._index_path + (
'.%s.%s.temp' % (socket.getfqdn(), os.getpid()))
@ -589,50 +600,8 @@ def missing(self, spec):
return key in self._data and not self._data[key].installed
class _Transaction(object):
"""Simple nested transaction context manager that uses a file lock.
This class can trigger actions when the lock is acquired for the
first time and released for the last.
Timeout for lock is customizable.
"""
def __init__(self, db,
acquire_fn=None,
release_fn=None,
timeout=_db_lock_timeout):
self._db = db
self._timeout = timeout
self._acquire_fn = acquire_fn
self._release_fn = release_fn
def __enter__(self):
if self._enter() and self._acquire_fn:
self._acquire_fn()
def __exit__(self, type, value, traceback):
if self._exit() and self._release_fn:
self._release_fn()
class ReadTransaction(_Transaction):
def _enter(self):
return self._db.lock.acquire_read(self._timeout)
def _exit(self):
return self._db.lock.release_read()
class WriteTransaction(_Transaction):
def _enter(self):
return self._db.lock.acquire_write(self._timeout)
def _exit(self):
return self._db.lock.release_write()
class CorruptDatabaseError(SpackError):
def __init__(self, path, msg=''):
super(CorruptDatabaseError, self).__init__(
"Spack database is corrupt: %s. %s." % (path, msg),
@ -640,6 +609,7 @@ def __init__(self, path, msg=''):
class InvalidDatabaseVersionError(SpackError):
def __init__(self, expected, found):
super(InvalidDatabaseVersionError, self).__init__(
"Expected database version %s but found version %s."

View file

@ -189,7 +189,7 @@ def _depends_on(pkg, spec, when=None, type=None):
type = ('build', 'link')
if isinstance(type, str):
type = (type,)
type = spack.spec.special_types.get(type, (type,))
for deptype in type:
if deptype not in spack.spec.alldeps:
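For illustration, the alias lookup above behaves like this (the alias table
values are an assumption for the sketch, not necessarily Spack's exact
definitions):

    # Hedged sketch: a named alias expands to a tuple of dependency types,
    # and anything else is wrapped in a one-element tuple unchanged.
    special_types = {'alldeps': ('build', 'link', 'run'),   # assumed values
                     'nolink': ('build', 'run')}
    print special_types.get('alldeps', ('alldeps',))  # ('build', 'link', 'run')
    print special_types.get('link', ('link',))        # ('link',)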
@ -349,9 +349,10 @@ def __init__(self, directive, package):
class UnknownDependencyTypeError(DirectiveError):
"""This is raised when a dependency is of an unknown type."""
def __init__(self, directive, package, deptype):
super(UnknownDependencyTypeError, self).__init__(
directive,
"Package '%s' cannot depend on a package via %s." %
(package, deptype))
"Package '%s' cannot depend on a package via %s."
% (package, deptype))
self.package = package

View file

@ -22,16 +22,13 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import re
import os
import exceptions
import hashlib
import shutil
import glob
import tempfile
import yaml
import llnl.util.tty as tty
from llnl.util.filesystem import join_path, mkdirp
import spack
@ -51,10 +48,10 @@ class DirectoryLayout(object):
install, and they can use this to customize the nesting structure of
spack installs.
"""
def __init__(self, root):
self.root = root
@property
def hidden_file_paths(self):
"""Return a list of hidden files used by the directory layout.
@ -67,25 +64,21 @@ def hidden_file_paths(self):
"""
raise NotImplementedError()
def all_specs(self):
"""To be implemented by subclasses to traverse all specs for which there is
a directory within the root.
"""
raise NotImplementedError()
def relative_path_for_spec(self, spec):
"""Implemented by subclasses to return a relative path from the install
root to a unique location for the provided spec."""
raise NotImplementedError()
def create_install_directory(self, spec):
"""Creates the installation directory for a spec."""
raise NotImplementedError()
def check_installed(self, spec):
"""Checks whether a spec is installed.
@ -95,7 +88,6 @@ def check_installed(self, spec):
"""
raise NotImplementedError()
def extension_map(self, spec):
"""Get a dict of currently installed extension packages for a spec.
@ -104,7 +96,6 @@ def extension_map(self, spec):
"""
raise NotImplementedError()
def check_extension_conflict(self, spec, ext_spec):
"""Ensure that ext_spec can be activated in spec.
@ -113,7 +104,6 @@ def check_extension_conflict(self, spec, ext_spec):
"""
raise NotImplementedError()
def check_activated(self, spec, ext_spec):
"""Ensure that ext_spec can be removed from spec.
@ -121,26 +111,22 @@ def check_activated(self, spec, ext_spec):
"""
raise NotImplementedError()
def add_extension(self, spec, ext_spec):
"""Add to the list of currently installed extensions."""
raise NotImplementedError()
def remove_extension(self, spec, ext_spec):
"""Remove from the list of currently installed extensions."""
raise NotImplementedError()
def path_for_spec(self, spec):
"""Return an absolute path from the root to a directory for the spec."""
"""Return absolute path from the root to a directory for the spec."""
_check_concrete(spec)
path = self.relative_path_for_spec(spec)
assert(not path.startswith(self.root))
return os.path.join(self.root, path)
def remove_install_directory(self, spec):
"""Removes a prefix and any empty parent directories from the root.
Raises RemoveFailedError if something goes wrong.
@ -177,6 +163,7 @@ class YamlDirectoryLayout(DirectoryLayout):
only enabled variants are included in the install path.
Disabled variants are omitted.
"""
def __init__(self, root, **kwargs):
super(YamlDirectoryLayout, self).__init__(root)
self.metadata_dir = kwargs.get('metadata_dir', '.spack')
@ -191,12 +178,10 @@ def __init__(self, root, **kwargs):
# Cache of already written/read extension maps.
self._extension_maps = {}
@property
def hidden_file_paths(self):
return (self.metadata_dir,)
def relative_path_for_spec(self, spec):
_check_concrete(spec)
@ -208,20 +193,19 @@ def relative_path_for_spec(self, spec):
spec.version,
spec.dag_hash(self.hash_len))
path = join_path(spec.architecture,
path = join_path(
spec.architecture,
"%s-%s" % (spec.compiler.name, spec.compiler.version),
dir_name)
return path
def write_spec(self, spec, path):
"""Write a spec out to a file."""
_check_concrete(spec)
with open(path, 'w') as f:
spec.to_yaml(f)
def read_spec(self, path):
"""Read the contents of a file and parse them as a spec"""
try:
@ -237,32 +221,26 @@ def read_spec(self, path):
spec._mark_concrete()
return spec
def spec_file_path(self, spec):
"""Gets full path to spec file"""
_check_concrete(spec)
return join_path(self.metadata_path(spec), self.spec_file_name)
def metadata_path(self, spec):
return join_path(self.path_for_spec(spec), self.metadata_dir)
def build_log_path(self, spec):
return join_path(self.path_for_spec(spec), self.metadata_dir,
self.build_log_name)
def build_env_path(self, spec):
return join_path(self.path_for_spec(spec), self.metadata_dir,
self.build_env_name)
def build_packages_path(self, spec):
return join_path(self.path_for_spec(spec), self.metadata_dir,
self.packages_dir)
def create_install_directory(self, spec):
_check_concrete(spec)
@ -273,7 +251,6 @@ def create_install_directory(self, spec):
mkdirp(self.metadata_path(spec))
self.write_spec(spec, self.spec_file_path(spec))
def check_installed(self, spec):
_check_concrete(spec)
path = self.path_for_spec(spec)
@ -284,7 +261,7 @@ def check_installed(self, spec):
if not os.path.isfile(spec_file_path):
raise InconsistentInstallDirectoryError(
'Inconsistent state: install prefix exists but contains no spec.yaml:',
'Install prefix exists but contains no spec.yaml:',
" " + path)
installed_spec = self.read_spec(spec_file_path)
@ -297,7 +274,6 @@ def check_installed(self, spec):
raise InconsistentInstallDirectoryError(
'Spec file in %s does not match hash!' % spec_file_path)
def all_specs(self):
if not os.path.isdir(self.root):
return []
@ -307,20 +283,17 @@ def all_specs(self):
spec_files = glob.glob(pattern)
return [self.read_spec(s) for s in spec_files]
def specs_by_hash(self):
by_hash = {}
for spec in self.all_specs():
by_hash[spec.dag_hash()] = spec
return by_hash
def extension_file_path(self, spec):
"""Gets full path to an installed package's extension file"""
_check_concrete(spec)
return join_path(self.metadata_path(spec), self.extension_file_name)
def _write_extensions(self, spec, extensions):
path = self.extension_file_path(spec)
@ -332,23 +305,22 @@ def _write_extensions(self, spec, extensions):
# write tmp file
with tmp:
yaml.dump({
'extensions' : [
{ ext.name : {
'hash' : ext.dag_hash(),
'path' : str(ext.prefix)
'extensions': [
{ext.name: {
'hash': ext.dag_hash(),
'path': str(ext.prefix)
}} for ext in sorted(extensions.values())]
}, tmp, default_flow_style=False)
# Atomic update by moving tmpfile on top of old one.
os.rename(tmp.name, path)
def _extension_map(self, spec):
"""Get a dict<name -> spec> for all extensions currently
installed for this package."""
_check_concrete(spec)
if not spec in self._extension_maps:
if spec not in self._extension_maps:
path = self.extension_file_path(spec)
if not os.path.exists(path):
self._extension_maps[spec] = {}
@ -363,14 +335,14 @@ def _extension_map(self, spec):
dag_hash = entry[name]['hash']
prefix = entry[name]['path']
if not dag_hash in by_hash:
if dag_hash not in by_hash:
raise InvalidExtensionSpecError(
"Spec %s not found in %s" % (dag_hash, prefix))
ext_spec = by_hash[dag_hash]
if not prefix == ext_spec.prefix:
if prefix != ext_spec.prefix:
raise InvalidExtensionSpecError(
"Prefix %s does not match spec with hash %s: %s"
"Prefix %s does not match spec hash %s: %s"
% (prefix, dag_hash, ext_spec))
exts[ext_spec.name] = ext_spec
@ -378,13 +350,11 @@ def _extension_map(self, spec):
return self._extension_maps[spec]
def extension_map(self, spec):
"""Defensive copying version of _extension_map() for external API."""
_check_concrete(spec)
return self._extension_map(spec).copy()
def check_extension_conflict(self, spec, ext_spec):
exts = self._extension_map(spec)
if ext_spec.name in exts:
@ -394,13 +364,11 @@ def check_extension_conflict(self, spec, ext_spec):
else:
raise ExtensionConflictError(spec, ext_spec, installed_spec)
def check_activated(self, spec, ext_spec):
exts = self._extension_map(spec)
if (not ext_spec.name in exts) or (ext_spec != exts[ext_spec.name]):
if (ext_spec.name not in exts) or (ext_spec != exts[ext_spec.name]):
raise NoSuchExtensionError(spec, ext_spec)
def add_extension(self, spec, ext_spec):
_check_concrete(spec)
_check_concrete(ext_spec)
@ -413,7 +381,6 @@ def add_extension(self, spec, ext_spec):
exts[ext_spec.name] = ext_spec
self._write_extensions(spec, exts)
def remove_extension(self, spec, ext_spec):
_check_concrete(spec)
_check_concrete(ext_spec)
@ -429,12 +396,14 @@ def remove_extension(self, spec, ext_spec):
class DirectoryLayoutError(SpackError):
"""Superclass for directory layout errors."""
def __init__(self, message, long_msg=None):
super(DirectoryLayoutError, self).__init__(message, long_msg)
class SpecHashCollisionError(DirectoryLayoutError):
"""Raised when there is a hash collision in an install layout."""
def __init__(self, installed_spec, new_spec):
super(SpecHashCollisionError, self).__init__(
'Specs %s and %s have the same SHA-1 prefix!'
@ -443,6 +412,7 @@ def __init__(self, installed_spec, new_spec):
class RemoveFailedError(DirectoryLayoutError):
"""Raised when a DirectoryLayout cannot remove an install prefix."""
def __init__(self, installed_spec, prefix, error):
super(RemoveFailedError, self).__init__(
'Could not remove prefix %s for %s : %s'
@ -452,12 +422,15 @@ def __init__(self, installed_spec, prefix, error):
class InconsistentInstallDirectoryError(DirectoryLayoutError):
"""Raised when a package seems to be installed to the wrong place."""
def __init__(self, message, long_msg=None):
super(InconsistentInstallDirectoryError, self).__init__(message, long_msg)
super(InconsistentInstallDirectoryError, self).__init__(
message, long_msg)
class InstallDirectoryAlreadyExistsError(DirectoryLayoutError):
"""Raised when create_install_directory is called unnecessarily."""
def __init__(self, path):
super(InstallDirectoryAlreadyExistsError, self).__init__(
"Install path %s already exists!")
@ -473,22 +446,26 @@ class InvalidExtensionSpecError(DirectoryLayoutError):
class ExtensionAlreadyInstalledError(DirectoryLayoutError):
"""Raised when an extension is added to a package that already has it."""
def __init__(self, spec, ext_spec):
super(ExtensionAlreadyInstalledError, self).__init__(
"%s is already installed in %s" % (ext_spec.short_spec, spec.short_spec))
"%s is already installed in %s"
% (ext_spec.short_spec, spec.short_spec))
class ExtensionConflictError(DirectoryLayoutError):
"""Raised when an extension is added to a package that already has it."""
def __init__(self, spec, ext_spec, conflict):
super(ExtensionConflictError, self).__init__(
"%s cannot be installed in %s because it conflicts with %s"% (
ext_spec.short_spec, spec.short_spec, conflict.short_spec))
"%s cannot be installed in %s because it conflicts with %s"
% (ext_spec.short_spec, spec.short_spec, conflict.short_spec))
class NoSuchExtensionError(DirectoryLayoutError):
"""Raised when an extension isn't there on deactivate."""
def __init__(self, spec, ext_spec):
super(NoSuchExtensionError, self).__init__(
"%s cannot be removed from %s because it's not activated."% (
ext_spec.short_spec, spec.short_spec))
"%s cannot be removed from %s because it's not activated."
% (ext_spec.short_spec, spec.short_spec))

View file

@ -1,4 +1,4 @@
#
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
@ -21,7 +21,7 @@
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
##############################################################################
import collections
import inspect
import json
@ -37,6 +37,10 @@ def __init__(self, name, **kwargs):
self.args = {'name': name}
self.args.update(kwargs)
def update_args(self, **kwargs):
self.__dict__.update(kwargs)
self.args.update(kwargs)
class NameValueModifier(object):
@ -44,7 +48,11 @@ def __init__(self, name, value, **kwargs):
self.name = name
self.value = value
self.separator = kwargs.get('separator', ':')
self.args = {'name': name, 'value': value, 'delim': self.separator}
self.args = {'name': name, 'value': value, 'separator': self.separator}
self.args.update(kwargs)
def update_args(self, **kwargs):
self.__dict__.update(kwargs)
self.args.update(kwargs)
@ -279,7 +287,10 @@ def from_sourcing_files(*args, **kwargs):
shell = '{shell}'.format(**info)
shell_options = '{shell_options}'.format(**info)
source_file = '{source_command} {file} {concatenate_on_success}'
dump_environment = 'python -c "import os, json; print json.dumps(dict(os.environ))"' # NOQA: ignore=E501
dump_cmd = "import os, json; print json.dumps(dict(os.environ))"
dump_environment = 'python -c "%s"' % dump_cmd
# Construct the command that will be executed
command = [source_file.format(file=file, **info) for file in args]
command.append(dump_environment)
@ -318,8 +329,10 @@ def from_sourcing_files(*args, **kwargs):
for x in unset_variables:
env.unset(x)
# Variables that have been modified
common_variables = set(this_environment).intersection(set(after_source_env)) # NOQA: ignore=E501
modified_variables = [x for x in common_variables if this_environment[x] != after_source_env[x]] # NOQA: ignore=E501
common_variables = set(
this_environment).intersection(set(after_source_env))
modified_variables = [x for x in common_variables
if this_environment[x] != after_source_env[x]]
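A standalone sketch of the source-then-dump technique (the sourced file path
is fabricated for illustration):

    # Hedged sketch: source a file in a subshell, dump the resulting
    # environment as JSON, then diff it against the current environment.
    import json
    import os
    import subprocess

    dump = 'python -c "import os, json; print json.dumps(dict(os.environ))"'
    cmd = 'source /path/to/setup.sh > /dev/null 2>&1 ; %s' % dump
    after_env = json.loads(subprocess.check_output(['bash', '-c', cmd]))
    modified = [k for k in set(os.environ) & set(after_env)
                if os.environ[k] != after_env[k]]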
def return_separator_if_any(first_value, second_value):
separators = ':', ';'
@ -397,7 +410,7 @@ def set_or_unset_not_first(variable, changes, errstream):
if indexes:
good = '\t \t{context} at {filename}:{lineno}'
nogood = '\t--->\t{context} at {filename}:{lineno}'
message = 'Suspicious requests to set or unset the variable \'{var}\' found' # NOQA: ignore=E501
message = "Suspicious requests to set or unset '{var}' found"
errstream(message.format(var=variable))
for ii, item in enumerate(changes):
print_format = nogood if ii in indexes else good

View file

@ -27,21 +27,21 @@
import llnl.util.tty as tty
import spack
class SpackError(Exception):
"""This is the superclass for all Spack errors.
Subclasses can be found in the modules they pertain to.
"""
def __init__(self, message, long_message=None):
super(SpackError, self).__init__()
self.message = message
self._long_message = long_message
@property
def long_message(self):
return self._long_message
def die(self):
if spack.debug:
sys.excepthook(*sys.exc_info())
@ -52,21 +52,23 @@ def die(self):
print self.long_message
os._exit(1)
def __str__(self):
msg = self.message
if self._long_message:
msg += "\n %s" % self._long_message
return msg
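A small usage sketch (message text is made up):

    # Hedged sketch: the long message is appended below the short one
    # when the error is converted to a string.
    try:
        raise SpackError("Fetch failed", "all mirrors were unreachable")
    except SpackError as e:
        print e
        # Fetch failed
        #   all mirrors were unreachable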
class UnsupportedPlatformError(SpackError):
"""Raised by packages when a platform is not supported"""
def __init__(self, message):
super(UnsupportedPlatformError, self).__init__(message)
class NoNetworkConnectionError(SpackError):
"""Raised when an operation needs an internet connection."""
def __init__(self, message, url):
super(NoNetworkConnectionError, self).__init__(
"No network connection: " + str(message),

View file

@ -356,6 +356,7 @@ def __str__(self):
class CacheURLFetchStrategy(URLFetchStrategy):
"""The resource associated with a cache URL may be out of date."""
def __init__(self, *args, **kwargs):
super(CacheURLFetchStrategy, self).__init__(*args, **kwargs)
@ -836,6 +837,7 @@ def for_package_version(pkg, version):
class FsCache(object):
def __init__(self, root):
self.root = os.path.abspath(root)

View file

@ -0,0 +1,185 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import shutil
from llnl.util.filesystem import *
from llnl.util.lock import *
from spack.error import SpackError
class FileCache(object):
"""This class manages cached data in the filesystem.
- Cache files are fetched and stored by unique keys. Keys can be relative
paths, so that there can be some hierarchy in the cache.
- The FileCache handles locking cache files for reading and writing, so
client code need not manage locks for cache entries.
"""
def __init__(self, root):
"""Create a file cache object.
This will create the cache directory if it does not exist yet.
"""
self.root = root.rstrip(os.path.sep)
if not os.path.exists(self.root):
mkdirp(self.root)
self._locks = {}
def destroy(self):
"""Remove all files under the cache root."""
for f in os.listdir(self.root):
path = join_path(self.root, f)
if os.path.isdir(path):
shutil.rmtree(path, True)
else:
os.remove(path)
def cache_path(self, key):
"""Path to the file in the cache for a particular key."""
return join_path(self.root, key)
def _lock_path(self, key):
"""Path to the file in the cache for a particular key."""
keyfile = os.path.basename(key)
keydir = os.path.dirname(key)
return join_path(self.root, keydir, '.' + keyfile + '.lock')
def _get_lock(self, key):
"""Create a lock for a key, if necessary, and return a lock object."""
if key not in self._locks:
lock_file = self._lock_path(key)
if not os.path.exists(lock_file):
touch(lock_file)
self._locks[key] = Lock(lock_file)
return self._locks[key]
def init_entry(self, key):
"""Ensure we can access a cache file. Create a lock for it if needed.
Return whether the cache file exists yet or not.
"""
cache_path = self.cache_path(key)
exists = os.path.exists(cache_path)
if exists:
if not os.path.isfile(cache_path):
raise CacheError("Cache file is not a file: %s" % cache_path)
if not os.access(cache_path, os.R_OK | os.W_OK):
raise CacheError("Cannot access cache file: %s" % cache_path)
else:
# if the file is hierarchical, make parent directories
parent = os.path.dirname(cache_path)
if parent.rstrip(os.path.sep) != self.root:
mkdirp(parent)
if not os.access(parent, os.R_OK | os.W_OK):
raise CacheError("Cannot access cache directory: %s" % parent)
# ensure lock is created for this key
self._get_lock(key)
return exists
def read_transaction(self, key):
"""Get a read transaction on a file cache item.
Returns a ReadTransaction context manager and opens the cache file for
reading. You can use it like this:
with spack.user_cache.read_transaction(key) as cache_file:
cache_file.read()
"""
return ReadTransaction(
self._get_lock(key), lambda: open(self.cache_path(key)))
def write_transaction(self, key):
"""Get a write transaction on a file cache item.
Returns a WriteTransaction context manager that opens a temporary file
for writing. Once the context manager finishes, if nothing went wrong,
moves the file into place on top of the old file atomically.
"""
class WriteContextManager(object):
def __enter__(cm):
cm.orig_filename = self.cache_path(key)
cm.orig_file = None
if os.path.exists(cm.orig_filename):
cm.orig_file = open(cm.orig_filename, 'r')
cm.tmp_filename = self.cache_path(key) + '.tmp'
cm.tmp_file = open(cm.tmp_filename, 'w')
return cm.orig_file, cm.tmp_file
def __exit__(cm, type, value, traceback):
if cm.orig_file:
cm.orig_file.close()
cm.tmp_file.close()
if value:
# remove tmp on exception & raise it
shutil.rmtree(cm.tmp_filename, True)
raise value
else:
os.rename(cm.tmp_filename, cm.orig_filename)
return WriteTransaction(self._get_lock(key), WriteContextManager)
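A hedged usage sketch of a write transaction (cache root and key are
hypothetical):

    # Hedged sketch: the old file (if any) is open for reading, the new
    # contents go to a temp file that atomically replaces the original
    # only if the block exits without an exception.
    cache = FileCache('/tmp/demo-cache')    # hypothetical root
    cache.init_entry('repo/index.yaml')     # make parent dirs and the lock
    with cache.write_transaction('repo/index.yaml') as (old, new):
        data = old.read() if old else ''
        new.write(data + 'one more line\n')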
def mtime(self, key):
"""Return modification time of cache file, or 0 if it does not exist.
Time is in units returned by os.stat in the mtime field, which is
platform-dependent.
"""
if not self.init_entry(key):
return 0
else:
sinfo = os.stat(self.cache_path(key))
return sinfo.st_mtime
def remove(self, key):
lock = self._get_lock(key)
try:
lock.acquire_write()
os.unlink(self.cache_path(key))
finally:
lock.release_write()
os.unlink(self._lock_path(key))
class CacheError(SpackError):
pass

View file

@ -61,7 +61,6 @@
can take a number of specs as input.
"""
__all__ = ['topological_sort', 'graph_ascii', 'AsciiGraph', 'graph_dot']
from heapq import *
@ -71,6 +70,8 @@
import spack
from spack.spec import Spec
__all__ = ['topological_sort', 'graph_ascii', 'AsciiGraph', 'graph_dot']
def topological_sort(spec, **kwargs):
"""Topological sort for specs.
@ -94,6 +95,7 @@ def topological_sort(spec, **kwargs):
nodes = spec.index()
topo_order = []
par = dict((name, parents(nodes[name])) for name in nodes.keys())
remaining = [name for name in nodes.keys() if not parents(nodes[name])]
heapify(remaining)
@ -102,12 +104,12 @@ def topological_sort(spec, **kwargs):
topo_order.append(name)
node = nodes[name]
for dep in children(node).values():
del parents(dep)[node.name]
if not parents(dep):
for dep in children(node):
par[dep.name].remove(node)
if not par[dep.name]:
heappush(remaining, dep.name)
if any(parents(s) for s in spec.traverse()):
if any(par.get(s.name, []) for s in spec.traverse()):
raise ValueError("Spec has cycles!")
else:
return topo_order
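The same idea on a plain adjacency dict, for illustration (the graph is made
up; this is Kahn's algorithm with a heap to keep the order deterministic):

    # Hedged sketch: repeatedly emit a node with no remaining parents,
    # removing it from its children's parent lists as we go.
    from heapq import heapify, heappop, heappush

    children = {'a': ['b', 'c'], 'b': ['c'], 'c': []}
    par = dict((n, [p for p in children if n in children[p]])
               for n in children)
    remaining = [n for n in children if not par[n]]
    heapify(remaining)
    order = []
    while remaining:
        n = heappop(remaining)
        order.append(n)
        for d in children[n]:
            par[d].remove(n)
            if not par[d]:
                heappush(remaining, d)
    print order   # ['a', 'b', 'c']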
@ -132,7 +134,9 @@ def find(seq, predicate):
states = ('node', 'collapse', 'merge-right', 'expand-right', 'back-edge')
NODE, COLLAPSE, MERGE_RIGHT, EXPAND_RIGHT, BACK_EDGE = states
class AsciiGraph(object):
def __init__(self):
# These can be set after initialization or after a call to
# graph() to change behavior.
@ -153,18 +157,15 @@ def __init__(self):
self._prev_state = None # State of previous line
self._prev_index = None # Index of expansion point of prev line
def _indent(self):
self._out.write(self.indent * ' ')
def _write_edge(self, string, index, sub=0):
"""Write a colored edge to the output stream."""
name = self._frontier[index][sub]
edge = "@%s{%s}" % (self._name_to_color[name], string)
self._out.write(edge)
def _connect_deps(self, i, deps, label=None):
"""Connect dependencies to existing edges in the frontier.
@ -199,7 +200,8 @@ def _connect_deps(self, i, deps, label=None):
collapse = True
if self._prev_state == EXPAND_RIGHT:
# Special case where previous line expanded and i is off by 1.
self._back_edge_line([], j, i+1, True, label + "-1.5 " + str((i+1,j)))
self._back_edge_line([], j, i + 1, True,
label + "-1.5 " + str((i + 1, j)))
collapse = False
else:
@ -207,19 +209,20 @@ def _connect_deps(self, i, deps, label=None):
if self._prev_state == NODE and self._prev_index < i:
i += 1
if i-j > 1:
if i - j > 1:
# We need two lines to connect if distance > 1
self._back_edge_line([], j, i, True, label + "-1 " + str((i,j)))
self._back_edge_line([], j, i, True,
label + "-1 " + str((i, j)))
collapse = False
self._back_edge_line([j], -1, -1, collapse, label + "-2 " + str((i,j)))
self._back_edge_line([j], -1, -1, collapse,
label + "-2 " + str((i, j)))
return True
elif deps:
self._frontier.insert(i, deps)
return False
def _set_state(self, state, index, label=None):
if state not in states:
raise ValueError("Invalid graph state!")
@ -233,7 +236,6 @@ def _set_state(self, state, index, label=None):
self._out.write("%-20s" % (str(label) if label else ''))
self._out.write("%s" % self._frontier)
def _back_edge_line(self, prev_ends, end, start, collapse, label=None):
"""Write part of a backwards edge in the graph.
@ -287,27 +289,26 @@ def advance(to_pos, edges):
self._indent()
for p in prev_ends:
advance(p, lambda: [("| ", self._pos)] )
advance(p+1, lambda: [("|/", self._pos)] )
advance(p, lambda: [("| ", self._pos)])
advance(p + 1, lambda: [("|/", self._pos)])
if end >= 0:
advance(end + 1, lambda: [("| ", self._pos)] )
advance(start - 1, lambda: [("|", self._pos), ("_", end)] )
advance(end + 1, lambda: [("| ", self._pos)])
advance(start - 1, lambda: [("|", self._pos), ("_", end)])
else:
advance(start - 1, lambda: [("| ", self._pos)] )
advance(start - 1, lambda: [("| ", self._pos)])
if start >= 0:
advance(start, lambda: [("|", self._pos), ("/", end)] )
advance(start, lambda: [("|", self._pos), ("/", end)])
if collapse:
advance(flen, lambda: [(" /", self._pos)] )
advance(flen, lambda: [(" /", self._pos)])
else:
advance(flen, lambda: [("| ", self._pos)] )
advance(flen, lambda: [("| ", self._pos)])
self._set_state(BACK_EDGE, end, label)
self._out.write("\n")
def _node_line(self, index, name):
"""Writes a line with a node at index."""
self._indent()
@ -316,14 +317,13 @@ def _node_line(self, index, name):
self._out.write("%s " % self.node_character)
for c in range(index+1, len(self._frontier)):
for c in range(index + 1, len(self._frontier)):
self._write_edge("| ", c)
self._out.write(" %s" % name)
self._set_state(NODE, index)
self._out.write("\n")
def _collapse_line(self, index):
"""Write a collapsing line after a node was added at index."""
self._indent()
@ -335,36 +335,33 @@ def _collapse_line(self, index):
self._set_state(COLLAPSE, index)
self._out.write("\n")
def _merge_right_line(self, index):
"""Edge at index is same as edge to right. Merge directly with '\'"""
self._indent()
for c in range(index):
self._write_edge("| ", c)
self._write_edge("|", index)
self._write_edge("\\", index+1)
for c in range(index+1, len(self._frontier)):
self._write_edge("| ", c )
self._write_edge("\\", index + 1)
for c in range(index + 1, len(self._frontier)):
self._write_edge("| ", c)
self._set_state(MERGE_RIGHT, index)
self._out.write("\n")
def _expand_right_line(self, index):
self._indent()
for c in range(index):
self._write_edge("| ", c)
self._write_edge("|", index)
self._write_edge("\\", index+1)
self._write_edge("\\", index + 1)
for c in range(index+2, len(self._frontier)):
for c in range(index + 2, len(self._frontier)):
self._write_edge(" \\", c)
self._set_state(EXPAND_RIGHT, index)
self._out.write("\n")
def write(self, spec, **kwargs):
"""Write out an ascii graph of the provided spec.
@ -407,7 +404,8 @@ def write(self, spec, **kwargs):
i = find(self._frontier, lambda f: len(f) > 1)
if i >= 0:
# Expand frontier until there are enough columns for all children.
# Expand frontier until there are enough columns for all
# children.
# Figure out how many back connections there are and
# sort them so we do them in order
@ -424,8 +422,9 @@ def write(self, spec, **kwargs):
prev_ends = []
for j, (b, d) in enumerate(back):
self._frontier[i].remove(d)
if i-b > 1:
self._back_edge_line(prev_ends, b, i, False, 'left-1')
if i - b > 1:
self._back_edge_line(prev_ends, b, i, False,
'left-1')
del prev_ends[:]
prev_ends.append(b)
@ -439,12 +438,13 @@ def write(self, spec, **kwargs):
elif len(self._frontier[i]) > 1:
# Expand forward after doing all back connections
if (i+1 < len(self._frontier) and len(self._frontier[i+1]) == 1
and self._frontier[i+1][0] in self._frontier[i]):
if (i + 1 < len(self._frontier) and
len(self._frontier[i + 1]) == 1 and
self._frontier[i + 1][0] in self._frontier[i]):
# We need to connect to the element to the right.
# Keep lines straight by connecting directly and
# avoiding unnecessary expand/contract.
name = self._frontier[i+1][0]
name = self._frontier[i + 1][0]
self._frontier[i].remove(name)
self._merge_right_line(i)
@ -458,9 +458,8 @@ def write(self, spec, **kwargs):
self._frontier.pop(i)
self._connect_deps(i, deps, "post-expand")
# Handle any remaining back edges to the right
j = i+1
j = i + 1
while j < len(self._frontier):
deps = self._frontier.pop(j)
if not self._connect_deps(j, deps, "back-from-right"):
@ -477,8 +476,9 @@ def write(self, spec, **kwargs):
# Replace node with its dependencies
self._frontier.pop(i)
if node.dependencies:
deps = sorted((d for d in node.dependencies), reverse=True)
if node.dependencies():
deps = sorted((d.name for d in node.dependencies()),
reverse=True)
self._connect_deps(i, deps, "new-deps") # anywhere.
elif self._frontier:
@ -501,7 +501,6 @@ def graph_ascii(spec, **kwargs):
graph.write(spec, color=color, out=out)
def graph_dot(*specs, **kwargs):
"""Generate a graph in dot format of all provided specs.

View file

@ -45,6 +45,7 @@
from llnl.util.filesystem import join_path
import spack
@memoized
def all_hook_modules():
modules = []
@ -58,6 +59,7 @@ def all_hook_modules():
class HookRunner(object):
def __init__(self, hook_name):
self.hook_name = hook_name

View file

@ -23,8 +23,6 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import spack
def pre_uninstall(pkg):
assert(pkg.spec.concrete)

View file

@ -23,6 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import stat
import re
import llnl.util.tty as tty
@ -62,10 +63,21 @@ def filter_shebang(path):
if re.search(r'^#!(/[^/]*)*lua\b', original):
original = re.sub(r'^#', '--', original)
# Change non-writable files to be writable if needed.
saved_mode = None
if not os.access(path, os.W_OK):
st = os.stat(path)
saved_mode = st.st_mode
os.chmod(path, saved_mode | stat.S_IWRITE)
with open(path, 'w') as new_file:
new_file.write(new_sbang_line)
new_file.write(original)
# Restore original permissions.
if saved_mode is not None:
os.chmod(path, saved_mode)
tty.warn("Patched overlong shebang in %s" % path)

View file

@ -40,9 +40,8 @@
import spack.url as url
import spack.fetch_strategy as fs
from spack.spec import Spec
from spack.stage import Stage
from spack.version import *
from spack.util.compression import extension, allowed_archive
from spack.util.compression import allowed_archive
def mirror_archive_filename(spec, fetcher):
@ -52,10 +51,10 @@ def mirror_archive_filename(spec, fetcher):
if isinstance(fetcher, fs.URLFetchStrategy):
if fetcher.expand_archive:
# If we fetch this version with a URLFetchStrategy, use URL's archive type
# If we fetch with a URLFetchStrategy, use URL's archive type
ext = url.downloaded_file_extension(fetcher.url)
else:
# If the archive shouldn't be expanded, don't check for its extension.
# If the archive shouldn't be expanded, don't check extension.
ext = None
else:
# Otherwise we'll make a .tar.gz ourselves
@ -106,7 +105,9 @@ def get_matching_versions(specs, **kwargs):
def suggest_archive_basename(resource):
"""
Return a tentative basename for an archive. Raise an exception if the name is among the allowed archive types.
Return a tentative basename for an archive.
Raises an exception if the name is not an allowed archive type.
:param fetcher:
:return:
@ -170,7 +171,7 @@ def create(path, specs, **kwargs):
'error': []
}
# Iterate through packages and download all the safe tarballs for each of them
# Iterate through packages and download all safe tarballs for each
for spec in version_specs:
add_single_spec(spec, mirror_root, categories, **kwargs)
@ -190,12 +191,15 @@ def add_single_spec(spec, mirror_root, categories, **kwargs):
fetcher = stage.fetcher
if ii == 0:
# create a subdirectory for the current package@version
archive_path = os.path.abspath(join_path(mirror_root, mirror_archive_path(spec, fetcher)))
archive_path = os.path.abspath(join_path(
mirror_root, mirror_archive_path(spec, fetcher)))
name = spec.format("$_$@")
else:
resource = stage.resource
archive_path = join_path(subdir, suggest_archive_basename(resource))
name = "{resource} ({pkg}).".format(resource=resource.name, pkg=spec.format("$_$@"))
archive_path = join_path(
subdir, suggest_archive_basename(resource))
name = "{resource} ({pkg}).".format(
resource=resource.name, pkg=spec.format("$_$@"))
subdir = os.path.dirname(archive_path)
mkdirp(subdir)
@ -217,15 +221,18 @@ def add_single_spec(spec, mirror_root, categories, **kwargs):
categories['present'].append(spec)
else:
categories['mirrored'].append(spec)
except Exception as e:
if spack.debug:
sys.excepthook(*sys.exc_info())
else:
tty.warn("Error while fetching %s" % spec.format('$_$@'), e.message)
tty.warn("Error while fetching %s"
% spec.format('$_$@'), e.message)
categories['error'].append(spec)
class MirrorError(spack.error.SpackError):
"""Superclass of all mirror-creation related errors."""
def __init__(self, msg, long_msg=None):
super(MirrorError, self).__init__(msg, long_msg)

View file

@ -272,13 +272,25 @@ def naming_scheme(self):
@property
def tokens(self):
"""Tokens that can be substituted in environment variable values
and naming schemes
"""
tokens = {
'name': self.spec.name,
'version': self.spec.version,
'compiler': self.spec.compiler
'compiler': self.spec.compiler,
'prefix': self.spec.package.prefix
}
return tokens
@property
def upper_tokens(self):
"""Tokens that can be substituted in environment variable names"""
upper_tokens = {
'name': self.spec.name.replace('-', '_').upper()
}
return upper_tokens
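A minimal sketch of how these tokens expand a naming scheme (the values are
hypothetical; the scheme string matches the default naming format used
below):

    # Hedged sketch: str.format with the token dictionary, including
    # attribute access into the compiler token.
    from collections import namedtuple

    Compiler = namedtuple('Compiler', ['name', 'version'])
    tokens = {'name': 'zlib', 'version': '1.2.8',
              'compiler': Compiler('gcc', '4.9.3')}
    scheme = '{name}-{version}-{compiler.name}-{compiler.version}'
    print scheme.format(**tokens)   # zlib-1.2.8-gcc-4.9.3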
@property
def use_name(self):
"""
@ -438,11 +450,17 @@ def prerequisite(self, spec):
def process_environment_command(self, env):
for command in env:
# Token expansion from configuration file
name = command.args.get('name', '').format(**self.upper_tokens)
value = str(command.args.get('value', '')).format(**self.tokens)
command.update_args(name=name, value=value)
# Format the line int the module file
try:
yield self.environment_modifications_formats[type(
command)].format(**command.args)
except KeyError:
message = 'Cannot handle command of type {command} : skipping request' # NOQA: ignore=E501
message = ('Cannot handle command of type {command}: '
'skipping request')
details = '{context} at {filename}:{lineno}'
tty.warn(message.format(command=type(command)))
tty.warn(details.format(**command.args))
@ -471,12 +489,14 @@ class Dotkit(EnvModule):
path = join_path(spack.share_path, 'dotkit')
environment_modifications_formats = {
PrependPath: 'dk_alter {name} {value}\n',
RemovePath: 'dk_unalter {name} {value}\n',
SetEnv: 'dk_setenv {name} {value}\n'
}
autoload_format = 'dk_op {module_file}\n'
default_naming_format = '{name}-{version}-{compiler.name}-{compiler.version}' # NOQA: ignore=E501
default_naming_format = \
'{name}-{version}-{compiler.name}-{compiler.version}'
@property
def file_name(self):
@ -502,7 +522,8 @@ def header(self):
def prerequisite(self, spec):
tty.warn('prerequisites: not supported by dotkit module files')
tty.warn('\tYou may want to check ~/.spack/modules.yaml')
tty.warn('\tYou may want to check %s/modules.yaml'
% spack.user_config_path)
return ''
@ -510,9 +531,9 @@ class TclModule(EnvModule):
name = 'tcl'
path = join_path(spack.share_path, "modules")
environment_modifications_formats = {
PrependPath: 'prepend-path --delim "{delim}" {name} \"{value}\"\n',
AppendPath: 'append-path --delim "{delim}" {name} \"{value}\"\n',
RemovePath: 'remove-path --delim "{delim}" {name} \"{value}\"\n',
PrependPath: 'prepend-path --delim "{separator}" {name} \"{value}\"\n',
AppendPath: 'append-path --delim "{separator}" {name} \"{value}\"\n',
RemovePath: 'remove-path --delim "{separator}" {name} \"{value}\"\n',
SetEnv: 'setenv {name} \"{value}\"\n',
UnsetEnv: 'unsetenv {name}\n'
}
@ -524,7 +545,8 @@ class TclModule(EnvModule):
prerequisite_format = 'prereq {module_file}\n'
default_naming_format = '{name}-{version}-{compiler.name}-{compiler.version}' # NOQA: ignore=E501
default_naming_format = \
'{name}-{version}-{compiler.name}-{compiler.version}'
@property
def file_name(self):
@ -535,7 +557,7 @@ def header(self):
timestamp = datetime.datetime.now()
# TCL Modulefile header
header = '#%Module1.0\n'
header += '## Module file created by spack (https://github.com/LLNL/spack) on %s\n' % timestamp # NOQA: ignore=E501
header += '## Module file created by spack (https://github.com/LLNL/spack) on %s\n' % timestamp
header += '##\n'
header += '## %s\n' % self.spec.short_spec
header += '##\n'
@ -565,10 +587,12 @@ def module_specific_content(self, configuration):
for naming_dir, conflict_dir in zip(
self.naming_scheme.split('/'), item.split('/')):
if naming_dir != conflict_dir:
message = 'conflict scheme does not match naming scheme [{spec}]\n\n' # NOQA: ignore=E501
message = 'conflict scheme does not match naming '
message += 'scheme [{spec}]\n\n'
message += 'naming scheme : "{nformat}"\n'
message += 'conflict scheme : "{cformat}"\n\n'
message += '** You may want to check your `modules.yaml` configuration file **\n' # NOQA: ignore=E501
message += '** You may want to check your '
message += '`modules.yaml` configuration file **\n'
tty.error(message.format(spec=self.spec,
nformat=self.naming_scheme,
cformat=item))

View file

@ -43,15 +43,13 @@
depending on the scenario, regular old conditionals might be clearer,
so package authors should use their judgement.
"""
import sys
import functools
import collections
from llnl.util.lang import *
import spack.architecture
import spack.error
from spack.spec import parse_anonymous_spec, Spec
from spack.spec import parse_anonymous_spec
class SpecMultiMethod(object):
@ -89,13 +87,13 @@ class SpecMultiMethod(object):
See the docs for decorators below for more details.
"""
def __init__(self, default=None):
self.method_list = []
self.default = default
if default:
functools.update_wrapper(self, default)
def register(self, spec, method):
"""Register a version of a method for a particular sys_type."""
self.method_list.append((spec, method))
@ -105,12 +103,10 @@ def register(self, spec, method):
else:
assert(self.__name__ == method.__name__)
def __get__(self, obj, objtype):
"""This makes __call__ support instance methods."""
return functools.partial(self.__call__, obj)
def __call__(self, package_self, *args, **kwargs):
"""Find the first method with a spec that matches the
package's spec. If none is found, call the default
@ -127,7 +123,6 @@ def __call__(self, package_self, *args, **kwargs):
type(package_self), self.__name__, spec,
[m[0] for m in self.method_list])
def __str__(self):
return "SpecMultiMethod {\n\tdefault: %s,\n\tspecs: %s\n}" % (
self.default, self.method_list)
@ -195,11 +190,13 @@ def install(self, prefix):
platform-specific versions. There's not much we can do to get
around this because of the way decorators work.
"""
def __init__(self, spec):
pkg = get_calling_module_name()
if spec is True:
spec = pkg
self.spec = parse_anonymous_spec(spec, pkg) if spec is not False else None
self.spec = (parse_anonymous_spec(spec, pkg)
if spec is not False else None)
def __call__(self, method):
# Get the first definition of the method in the calling scope
@ -218,12 +215,14 @@ def __call__(self, method):
class MultiMethodError(spack.error.SpackError):
"""Superclass for multimethod dispatch errors"""
def __init__(self, message):
super(MultiMethodError, self).__init__(message)
class NoSuchMethodError(spack.error.SpackError):
"""Raised when we can't find a version of a multi-method."""
def __init__(self, cls, method_name, spec, possible_specs):
super(NoSuchMethodError, self).__init__(
"Package %s does not support %s called with %s. Options are: %s"

View file

@ -7,6 +7,7 @@
from spack.util.multiproc import parmap
import spack.compilers
class Cnl(OperatingSystem):
""" Compute Node Linux (CNL) is the operating system used for the Cray XC
series supercomputers. It is a very stripped-down version of GNU/Linux.
@ -14,22 +15,25 @@ class Cnl(OperatingSystem):
modules. If updated, the user must make sure that the version and name are
updated to indicate that the OS has been upgraded (or downgraded)
"""
def __init__(self):
name = 'CNL'
version = '10'
super(Cnl, self).__init__(name, version)
def __str__(self):
return self.name
def find_compilers(self, *paths):
types = spack.compilers.all_compiler_types()
compiler_lists = parmap(lambda cmp_cls: self.find_compiler(cmp_cls, *paths), types)
compiler_lists = parmap(
lambda cmp_cls: self.find_compiler(cmp_cls, *paths), types)
# ensure all the version calls we made are cached in the parent
# process, as well. This speeds up Spack a lot.
clist = reduce(lambda x,y: x+y, compiler_lists)
clist = reduce(lambda x, y: x + y, compiler_lists)
return clist
def find_compiler(self, cmp_cls, *paths):
compilers = []
if cmp_cls.PrgEnv:
@ -46,12 +50,15 @@ def find_compiler(self, cmp_cls, *paths):
module_paths = ':' + ':'.join(p for p in paths)
os.environ['MODULEPATH'] = module_paths
output = modulecmd('avail', cmp_cls.PrgEnv_compiler, output=str, error=str)
matches = re.findall(r'(%s)/([\d\.]+[\d])' % cmp_cls.PrgEnv_compiler, output)
output = modulecmd(
'avail', cmp_cls.PrgEnv_compiler, output=str, error=str)
matches = re.findall(
r'(%s)/([\d\.]+[\d])' % cmp_cls.PrgEnv_compiler, output)
for name, version in matches:
v = version
comp = cmp_cls(spack.spec.CompilerSpec(name + '@' + v), self,
['cc', 'CC', 'ftn'], [cmp_cls.PrgEnv, name +'/' + v])
comp = cmp_cls(
spack.spec.CompilerSpec(name + '@' + v), self,
['cc', 'CC', 'ftn'], [cmp_cls.PrgEnv, name + '/' + v])
compilers.append(comp)
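
The regex above does the heavy lifting: it pulls (name, version) pairs out of `module avail` output. A standalone check (editor's sketch with fabricated output; real PrgEnv compiler names vary by site):

    import re

    # Fabricated 'module avail' listing for a hypothetical compiler 'gcc'.
    output = "gcc/4.9.3  gcc/5.3.0(default)  gcc/6.1.0"
    matches = re.findall(r'(%s)/([\d\.]+[\d])' % 'gcc', output)
    print(matches)   # [('gcc', '4.9.3'), ('gcc', '5.3.0'), ('gcc', '6.1.0')]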

View file

@ -2,6 +2,7 @@
import platform as py_platform
from spack.architecture import OperatingSystem
class LinuxDistro(OperatingSystem):
""" This class will represent the autodetected operating system
for a Linux system. Since there are many different flavors of
@ -9,6 +10,7 @@ class LinuxDistro(OperatingSystem):
autodetection using the Python module platform and the method
platform.dist()
"""
def __init__(self):
distname, version, _ = py_platform.linux_distribution(
full_distribution_name=False)
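
For reference, the platform call this class wraps returns a (distname, version, id) triple; a quick probe (editor's sketch — values vary by machine, and platform.linux_distribution only exists on Python 2.6 through 3.7):

    import platform as py_platform

    distname, version, _ = py_platform.linux_distribution(
        full_distribution_name=False)
    print((distname, version))   # e.g. ('centos', '7.2')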

View file

@ -1,6 +1,7 @@
import platform as py_platform
from spack.architecture import OperatingSystem
class MacOs(OperatingSystem):
"""This class represents the macOS operating system. This will be
auto-detected using the Python function platform.mac_ver. The macOS
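
Similarly, platform.mac_ver is a one-liner to probe (editor's sketch; output varies by machine):

    import platform as py_platform

    version, _, machine = py_platform.mac_ver()
    print((version, machine))   # e.g. ('10.11.6', 'x86_64') on El Capitan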

View file

@ -39,6 +39,7 @@
import os
import platform
import re
import sys
import textwrap
import time
from StringIO import StringIO
@ -64,7 +65,7 @@
from spack.stage import Stage, ResourceStage, StageComposite
from spack.util.compression import allowed_archive
from spack.util.environment import dump_environment
from spack.util.executable import ProcessError, which
from spack.util.executable import ProcessError
from spack.version import *
"""Allowed URL schemes for spack packages."""
@ -330,12 +331,10 @@ def install(self, spec, prefix):
Most software comes in nicely packaged tarballs, like this one:
http://www.cmake.org/files/v2.8/cmake-2.8.10.2.tar.gz
Taking a page from Homebrew, Spack deduces pretty much everything it
needs to know from the URL above. If you simply type this:
spack create http://www.cmake.org/files/v2.8/cmake-2.8.10.2.tar.gz
Spack will download the tarball, generate an MD5 hash, figure out the
version and the name of the package from the URL, and create a new
package file for you with all the names and attributes set correctly.
@ -785,50 +784,12 @@ def activated(self):
exts = spack.install_layout.extension_map(self.extendee_spec)
return (self.name in exts) and (exts[self.name] == self.spec)
def preorder_traversal(self, visited=None, **kwargs):
"""This does a preorder traversal of the package's dependence DAG."""
virtual = kwargs.get("virtual", False)
if visited is None:
visited = set()
if self.name in visited:
return
visited.add(self.name)
if not virtual:
yield self
for name in sorted(self.dependencies.keys()):
dep_spec = self.get_dependency(name)
spec = dep_spec.spec
# Currently, we do not descend into virtual dependencies, as this
# makes doing a sensible traversal much harder. We just assume
# that ANY of the virtual deps will work, which might not be true
# (due to conflicts or unsatisfiable specs). For now this is ok,
# but we might want to reinvestigate if we start using a lot of
# complicated virtual dependencies
# TODO: reinvestigate this.
if spec.virtual:
if virtual:
yield spec
continue
for pkg in spack.repo.get(name).preorder_traversal(visited,
**kwargs):
yield pkg
def provides(self, vpkg_name):
"""
True if this package provides a virtual package with the specified name
"""
return any(s.name == vpkg_name for s in self.provided)
def virtual_dependencies(self, visited=None):
for spec in sorted(set(self.preorder_traversal(virtual=True))):
yield spec
@property
def installed(self):
return os.path.isdir(self.prefix)
@ -898,13 +859,13 @@ def do_fetch(self, mirror_only=False):
# Ask the user whether to skip the checksum if we're
# interactive, but just fail if non-interactive.
checksum_msg = "Add a checksum or use --no-checksum to skip this check." # NOQA: ignore=E501
ck_msg = "Add a checksum or use --no-checksum to skip this check."
ignore_checksum = False
if sys.stdout.isatty():
ignore_checksum = tty.get_yes_or_no(" Fetch anyway?",
default=False)
if ignore_checksum:
tty.msg("Fetching with no checksum.", checksum_msg)
tty.msg("Fetching with no checksum.", ck_msg)
if not ignore_checksum:
raise FetchError("Will not fetch %s" %
@ -1396,6 +1357,14 @@ def setup_dependent_package(self, module, dependent_spec):
def do_uninstall(self, force=False):
if not self.installed:
# prefix may not exist, but DB may be inconsistent. Try to fix by
# removing, but omit hooks.
specs = spack.installed_db.query(self.spec, installed=True)
if specs:
spack.installed_db.remove(specs[0])
tty.msg("Removed stale DB entry for %s" % self.spec.short_spec)
return
else:
raise InstallError(str(self.spec) + " is not installed.")
if not force:
@ -1495,9 +1464,10 @@ def do_deactivate(self, **kwargs):
continue
for dep in aspec.traverse(deptype='run'):
if self.spec == dep:
msg = ("Cannot deactivate %s because %s is activated "
"and depends on it.")
raise ActivationError(
"Cannot deactivate %s because %s is activated and depends on it." # NOQA: ignore=E501
% (self.spec.short_spec, aspec.short_spec))
msg % (self.spec.short_spec, aspec.short_spec))
self.extendee_spec.package.deactivate(self, **self.extendee_args)
@ -1726,6 +1696,7 @@ def use_cray_compiler_names():
os.environ['FC'] = 'ftn'
os.environ['F77'] = 'ftn'
def flatten_dependencies(spec, flat_dir):
"""Make each dependency of spec present in dir via symlink."""
for dep in spec.traverse(root=False):
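
The body of this loop symlinks each dependency prefix into flat_dir. A self-contained sketch of the same idea, with Spack's Spec traversal replaced by a plain list (editor's example; names are hypothetical):

    import os

    def flatten(dep_prefixes, flat_dir):
        """Symlink every (name, prefix) pair into one flat directory."""
        for name, prefix in dep_prefixes:
            dest = os.path.join(flat_dir, name)
            if not os.path.islink(dest):
                os.symlink(prefix, dest)   # expose dep as flat_dir/<name>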
@ -1890,12 +1861,14 @@ class ExtensionError(PackageError):
class ExtensionConflictError(ExtensionError):
def __init__(self, path):
super(ExtensionConflictError, self).__init__(
"Extension blocked by file: %s" % path)
class ActivationError(ExtensionError):
def __init__(self, msg, long_msg=None):
super(ActivationError, self).__init__(msg, long_msg)

View file

@ -29,6 +29,7 @@
class Token:
"""Represents tokens; generated from input by lexer and fed to parse()."""
def __init__(self, type, value='', start=0, end=0):
self.type = type
self.value = value
@ -51,11 +52,13 @@ def __cmp__(self, other):
class Lexer(object):
"""Base class for Lexers that keep track of line numbers."""
def __init__(self, lexicon):
self.scanner = re.Scanner(lexicon)
def token(self, type, value=''):
return Token(type, value, self.scanner.match.start(0), self.scanner.match.end(0))
return Token(type, value,
self.scanner.match.start(0), self.scanner.match.end(0))
def lex(self, text):
tokens, remainder = self.scanner.scan(text)
@ -66,9 +69,10 @@ def lex(self, text):
class Parser(object):
"""Base class for simple recursive descent parsers."""
def __init__(self, lexer):
self.tokens = iter([]) # iterators over tokens, handled in order. Starts empty.
self.token = Token(None) # last accepted token starts at beginning of file
self.tokens = iter([]) # iterators over tokens, handled in order.
self.token = Token(None) # last accepted token
self.next = None # next token
self.lexer = lexer
self.text = None
@ -82,11 +86,12 @@ def gettok(self):
def push_tokens(self, iterable):
"""Adds all tokens in some iterable to the token stream."""
self.tokens = itertools.chain(iter(iterable), iter([self.next]), self.tokens)
self.tokens = itertools.chain(
iter(iterable), iter([self.next]), self.tokens)
self.gettok()
def accept(self, id):
"""Puts the next symbol in self.token if we like it. Then calls gettok()"""
"""Put the next symbol in self.token if accepted, then call gettok()"""
if self.next and self.next.is_a(id):
self.token = self.next
self.gettok()
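
The Lexer above is a thin wrapper over re.Scanner. A minimal, self-contained lexicon in the same style (editor's sketch; the token names and patterns are illustrative, not Spack's real spec grammar):

    import re

    scanner = re.Scanner([
        (r'[a-zA-Z_][a-zA-Z_0-9]*', lambda s, t: ('ID', t)),
        (r'@',                      lambda s, t: ('AT', t)),
        (r'[0-9][0-9.]*',           lambda s, t: ('VER', t)),
        (r'\s+',                    None),           # skip whitespace
    ])

    tokens, remainder = scanner.scan('mpich@3.0.4')
    print(tokens)      # [('ID', 'mpich'), ('AT', '@'), ('VER', '3.0.4')]
    print(remainder)   # '' when the whole string lexed cleanly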
@ -124,9 +129,9 @@ def parse(self, text):
return self.do_parse()
class ParseError(spack.error.SpackError):
"""Raised when we don't hit an error while parsing."""
def __init__(self, message, string, pos):
super(ParseError, self).__init__(message)
self.string = string
@ -135,5 +140,6 @@ def __init__(self, message, string, pos):
class LexError(ParseError):
"""Raised when we don't know how to lex something."""
def __init__(self, message, string, pos):
super(LexError, self).__init__(message, string, pos)

View file

@ -24,7 +24,6 @@
##############################################################################
import os
import llnl.util.tty as tty
from llnl.util.filesystem import join_path
import spack
@ -59,7 +58,6 @@ def __init__(self, pkg, path_or_url, level):
if not os.path.isfile(self.path):
raise NoSuchPatchFileError(pkg_name, self.path)
def apply(self, stage):
"""Fetch this patch, if necessary, and apply it to the source
code in the supplied stage.
@ -84,9 +82,9 @@ def apply(self, stage):
patch_stage.destroy()
class NoSuchPatchFileError(spack.error.SpackError):
"""Raised when user specifies a patch file that doesn't exist."""
def __init__(self, package, path):
super(NoSuchPatchFileError, self).__init__(
"No such patch file for package %s: %s" % (package, path))

Some files were not shown because too many files have changed in this diff.