Merge branch 'develop' of https://github.com/LLNL/spack into features/install_with_phases

Conflicts:
	lib/spack/llnl/util/tty/log.py
	lib/spack/spack/__init__.py
	lib/spack/spack/cmd/install.py
	lib/spack/spack/cmd/setup.py
	lib/spack/spack/package.py
	var/spack/repos/builtin/packages/blitz/package.py
	var/spack/repos/builtin/packages/gmp/package.py
	var/spack/repos/builtin/packages/qhull/package.py
	var/spack/repos/builtin/packages/szip/package.py
Author: alalazo
Date: 2016-08-11 08:55:20 +02:00
Commit: b4b9ebe7d7
770 changed files with 14235 additions and 4355 deletions

.flake8

@@ -19,5 +19,5 @@
 # - F999: name name be undefined or undefined from star imports.
 #
 [flake8]
-ignore = E221,E241,E731,F403,F821,F999,F405
+ignore = E129,E221,E241,E272,E731,F403,F821,F999,F405
 max-line-length = 79

.gitignore

@@ -1,5 +1,7 @@
 /var/spack/stage
 /var/spack/cache
+/var/spack/repos/*/index.yaml
+/var/spack/repos/*/lock
 *.pyc
 /opt
 *~

.travis.yml

@@ -1,7 +1,17 @@
 language: python
 python:
   - "2.6"
   - "2.7"
+
+env:
+  - TEST_TYPE=unit
+  - TEST_TYPE=flake8
+
+# Exclude flake8 from python 2.6
+matrix:
+  exclude:
+    - python: "2.6"
+      env: TEST_TYPE=flake8

 # Use new Travis infrastructure (Docker can't sudo yet)
 sudo: false
@@ -20,20 +30,13 @@ before_install:
   - git fetch origin develop:develop

 script:
-  # Regular spack setup and tests
-  - . share/spack/setup-env.sh
-  - spack compilers
-  - spack config get compilers
-  - spack install -v libdwarf
-  # Run unit tests with code coverage
-  - coverage run bin/spack test
+  # Run unit tests with code coverage plus install libdwarf
+  - 'if [ "$TEST_TYPE" = "unit" ]; then share/spack/qa/run-unit-tests; fi'
   # Run flake8 code style checks.
-  - share/spack/qa/run-flake8
+  - 'if [ "$TEST_TYPE" = "flake8" ]; then share/spack/qa/run-flake8; fi'

 after_success:
-  - coveralls
+  - 'if [ "$TEST_TYPE" = "unit" ] && [ "$TRAVIS_PYTHON_VERSION" = "2.7" ]; then coveralls; fi'

 notifications:
   email:

README.md

@@ -58,17 +58,24 @@ can join it here:

 ### Contributions
-At the moment, contributing to Spack is relatively simple. Just send us
-a [pull request](https://help.github.com/articles/using-pull-requests/).
+Contributing to Spack is relatively easy.  Just send us a
+[pull request](https://help.github.com/articles/using-pull-requests/).
 When you send your request, make ``develop`` the destination branch on the
 [Spack repository](https://github.com/LLNL/spack).

-Your contribution will need to pass all the tests run by the `spack test`
-command, as well as the formatting checks in `share/spack/qa/run-flake8`.
-You should run both of these before submitting your pull request, to
-ensure that the online checks succeed.
+Before you send a PR, your code should pass the following checks:

-Spack is using a rough approximation of the [Git
+* Your contribution will need to pass the `spack test` command.
+  Run this before submitting your PR.
+
+* Also run the `share/spack/qa/run-flake8` script to check for PEP8 compliance.
+  To encourage contributions and readability by a broad audience,
+  Spack uses the [PEP8](https://www.python.org/dev/peps/pep-0008/) coding
+  standard with [a few exceptions](https://github.com/LLNL/spack/blob/develop/.flake8).
+
+We enforce these guidelines with [Travis CI](https://travis-ci.org/LLNL/spack).
+
+Spack uses a rough approximation of the [Git
 Flow](http://nvie.com/posts/a-successful-git-branching-model/)
 branching model.  The ``develop`` branch contains the latest
 contributions, and ``master`` is always tagged and points to the

bin/spack

@@ -1,4 +1,5 @@
 #!/usr/bin/env python
+# flake8: noqa
 ##############################################################################
 # Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
 # Produced at the Lawrence Livermore National Laboratory.
@@ -24,9 +25,10 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 import sys
-if not sys.version_info[:2] >= (2,6):
+if not sys.version_info[:2] >= (2, 6):
     v_info = sys.version_info[:3]
-    sys.exit("Spack requires Python 2.6 or higher. This is Python %d.%d.%d." % v_info)
+    sys.exit("Spack requires Python 2.6 or higher. "
+             "This is Python %d.%d.%d." % v_info)

 import os
@@ -62,7 +64,8 @@ for pyc_file in orphaned_pyc_files:
     try:
         os.remove(pyc_file)
     except OSError as e:
-        print "WARNING: Spack may fail mysteriously. Couldn't remove orphaned .pyc file: %s" % pyc_file
+        print ("WARNING: Spack may fail mysteriously. "
+               "Couldn't remove orphaned .pyc file: %s" % pyc_file)

 # If there is no working directory, use the spack prefix.
 try:
@@ -128,6 +131,7 @@ if len(sys.argv) == 1:
 # actually parse the args.
 args = parser.parse_args()

+
 def main():
     # Set up environment based on args.
     tty.set_verbose(args.verbose)
@@ -148,7 +152,7 @@ def main():
     # If the user asked for it, don't check ssl certs.
     if args.insecure:
-        tty.warn("You asked for --insecure, which does not check SSL certificates.")
+        tty.warn("You asked for --insecure. Will NOT check SSL certificates.")
         spack.curl.add_default_arg('-k')

     # Try to load the particular command asked for and run it
@@ -167,7 +171,8 @@ def main():
     elif isinstance(return_val, int):
         sys.exit(return_val)
     else:
-        tty.die("Bad return value from command %s: %s" % (args.command, return_val))
+        tty.die("Bad return value from command %s: %s"
+                % (args.command, return_val))

     if args.profile:
         import cProfile

lib/spack/docs/basic_usage.rst

@@ -1147,18 +1147,19 @@ packages use RPATH to find their dependencies: this can be true in
 particular for Python extensions, which are currently *not* built with
 RPATH.

-Modules may be loaded recursively with the command:
+Modules may be loaded recursively with the ``load`` command's
+``--dependencies`` or ``-r`` argument:

 .. code-block:: sh

-    $ module load `spack module tcl --dependencies <spec>...
+    $ spack load --dependencies <spec> ...

 More than one spec may be placed on the command line here.

 Module Commands for Shell Scripts
 ``````````````````````````````````

-Although Spack is flexbile, the ``module`` command is much faster.
+Although Spack is flexible, the ``module`` command is much faster.
 This could become an issue when emitting a series of ``spack load``
 commands inside a shell script.  By adding the ``--shell`` flag,
 ``spack module find`` may also be used to generate code that can be
@@ -1866,6 +1867,10 @@ to call the Cray compiler wrappers during build time.
 For more on compiler configuration, check out :ref:`compiler-config`.

+Spack sets the default Cray link type to dynamic, to better match
+other platforms. Individual packages can enable static linking (which is
+the default outside of Spack on Cray systems) using the ``-static`` flag.
+
 Setting defaults and using Cray modules
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

lib/spack/docs/configuration.rst

@@ -142,8 +142,9 @@ Here's an example packages.yaml file that sets preferred packages:
 .. code-block:: sh

    packages:
-     dyninst:
+     opencv:
        compiler: [gcc@4.9]
+       variants: +debug
      gperftools:
        version: [2.2, 2.4, 2.3]
      all:
@@ -153,17 +154,17 @@ Here's an example packages.yaml file that sets preferred packages:
 At a high level, this example is specifying how packages should be
-concretized.  The dyninst package should prefer using gcc 4.9.
-The gperftools package should prefer version
+concretized.  The opencv package should prefer using gcc 4.9 and
+be built with debug options.  The gperftools package should prefer version
 2.2 over 2.4.  Every package on the system should prefer mvapich for
-its MPI and gcc 4.4.7 (except for Dyninst, which overrides this by preferring gcc 4.9).
+its MPI and gcc 4.4.7 (except for opencv, which overrides this by preferring gcc 4.9).
 These options are used to fill in implicit defaults.  Any of them can be overwritten
 on the command line if explicitly requested.

 Each packages.yaml file begins with the string ``packages:`` and
 package names are specified on the next level.  The special string ``all``
 applies settings to each package.  Underneath each package name is
-one or more components: ``compiler``, ``version``,
+one or more components: ``compiler``, ``variants``, ``version``,
 or ``providers``.  Each component has an ordered list of spec
 ``constraints``, with earlier entries in the list being preferred over
 later entries.

lib/spack/docs/packaging_guide.rst

@@ -1307,9 +1307,9 @@ The dependency types are:

 If not specified, ``type`` is assumed to be ``("build", "link")``. This is the
 common case for compiled language usage.  Also available are the aliases
-``alldeps`` for all dependency types and ``nolink`` (``("build", "run")``) for
-use by dependencies which are not expressed via a linker (e.g., Python or Lua
-module loading).
+``"alldeps"`` for all dependency types and ``"nolink"`` (``("build", "run")``)
+for use by dependencies which are not expressed via a linker (e.g., Python or
+Lua module loading).

 .. _setup-dependent-environment:
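
For concreteness, a minimal sketch of how these dependency types appear in a
package recipe (the package and its dependencies here are illustrative, not a
recipe from the repo):

    from spack import *


    class Mypackage(Package):
        """Hypothetical package showing the dependency types above."""

        homepage = "http://www.example.com"
        url      = "http://www.example.com/mypackage-1.0.tar.gz"

        version('1.0', '0123456789abcdef0123456789abcdef')

        depends_on('zlib')                   # default type=("build", "link")
        depends_on('cmake', type='build')    # needed only at build time
        depends_on('python', type='nolink')  # loaded at runtime, never linked

        def install(self, spec, prefix):
            make()
            make('install')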

lib/spack/env/cray/CC (new symbolic link)

@@ -0,0 +1 @@
+../cc

lib/spack/env/cray/cc (new symbolic link)

@@ -0,0 +1 @@
+../cc

lib/spack/env/cray/ftn (new symbolic link)

@@ -0,0 +1 @@
+../cc

lib/spack/llnl/util/filesystem.py

@@ -29,8 +29,9 @@
 import stat
 import errno
 import getpass
-from contextlib import contextmanager, closing
+from contextlib import contextmanager
 import subprocess
+import fileinput

 import llnl.util.tty as tty
@@ -85,13 +86,14 @@ def groupid_to_group(x):
         if ignore_absent and not os.path.exists(filename):
             continue

-        shutil.copy(filename, backup_filename)
+        # Create backup file. Don't overwrite an existing backup
+        # file in case this file is being filtered multiple times.
+        if not os.path.exists(backup_filename):
+            shutil.copy(filename, backup_filename)

         try:
-            with closing(open(backup_filename)) as infile:
-                with closing(open(filename, 'w')) as outfile:
-                    for line in infile:
-                        foo = re.sub(regex, repl, line)
-                        outfile.write(foo)
+            for line in fileinput.input(filename, inplace=True):
+                print(re.sub(regex, repl, line.rstrip('\n')))
         except:
             # clean up the original file on failure.
             shutil.move(backup_filename, filename)
@@ -104,6 +106,7 @@ def groupid_to_group(x):

 class FileFilter(object):
     """Convenience class for calling filter_file a lot."""
+
     def __init__(self, *filenames):
         self.filenames = filenames
@@ -189,7 +192,7 @@ def install(src, dest):

 def install_tree(src, dest, **kwargs):
-    """Manually install a file to a particular location."""
+    """Manually install a directory tree to a particular location."""
     tty.debug("Installing %s to %s" % (src, dest))
     shutil.copytree(src, dest, **kwargs)
@@ -353,7 +356,8 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
             # When follow_nonexisting isn't set, don't descend into dirs
             # in source that do not exist in dest
             if follow_nonexisting or os.path.exists(dest_child):
-                tuples = traverse_tree(source_root, dest_root, rel_child, **kwargs)  # NOQA: ignore=E501
+                tuples = traverse_tree(
+                    source_root, dest_root, rel_child, **kwargs)
                 for t in tuples:
                     yield t
@@ -420,14 +424,20 @@ def fix_darwin_install_name(path):
     libs = glob.glob(join_path(path, "*.dylib"))
     for lib in libs:
         # fix install name first:
-        subprocess.Popen(["install_name_tool", "-id", lib, lib], stdout=subprocess.PIPE).communicate()[0]  # NOQA: ignore=E501
-        long_deps = subprocess.Popen(["otool", "-L", lib], stdout=subprocess.PIPE).communicate()[0].split('\n')  # NOQA: ignore=E501
+        subprocess.Popen(
+            ["install_name_tool", "-id", lib, lib],
+            stdout=subprocess.PIPE).communicate()[0]
+        long_deps = subprocess.Popen(
+            ["otool", "-L", lib],
+            stdout=subprocess.PIPE).communicate()[0].split('\n')
         deps = [dep.partition(' ')[0][1::] for dep in long_deps[2:-1]]
         # fix all dependencies:
         for dep in deps:
             for loc in libs:
                 if dep == os.path.basename(loc):
-                    subprocess.Popen(["install_name_tool", "-change", dep, loc, lib], stdout=subprocess.PIPE).communicate()[0]  # NOQA: ignore=E501
+                    subprocess.Popen(
+                        ["install_name_tool", "-change", dep, loc, lib],
+                        stdout=subprocess.PIPE).communicate()[0]
                     break
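
The new filter_file body above leans on the standard library's in-place
editing mode. As a self-contained sketch of that pattern (simple_filter and
the backup suffix are illustrative, not Spack's API):

    import os
    import re
    import shutil
    import fileinput


    def simple_filter(regex, repl, filename):
        # Keep the first backup only, so filtering the same file twice
        # can still restore the true original.
        backup = filename + '~'
        if not os.path.exists(backup):
            shutil.copy(filename, backup)
        try:
            # inplace=True redirects stdout into `filename`, so each
            # print() writes the (possibly substituted) line back to it.
            for line in fileinput.input(filename, inplace=True):
                print(re.sub(regex, repl, line.rstrip('\n')))
        except:
            shutil.move(backup, filename)  # restore original on failure
            raise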

lib/spack/llnl/util/lang.py

@@ -24,7 +24,6 @@
 ##############################################################################
 import os
 import re
-import sys
 import functools
 import collections
 import inspect
@@ -39,14 +38,15 @@ def index_by(objects, *funcs):
     Values are used as keys. For example, suppose you have four
     objects with attributes that look like this:

         a = Spec(name="boost",    compiler="gcc",   arch="bgqos_0")
         b = Spec(name="mrnet",    compiler="intel", arch="chaos_5_x86_64_ib")
         c = Spec(name="libelf",   compiler="xlc",   arch="bgqos_0")
         d = Spec(name="libdwarf", compiler="intel", arch="chaos_5_x86_64_ib")

         list_of_specs = [a,b,c,d]
-        index1 = index_by(list_of_specs, lambda s: s.arch, lambda s: s.compiler)
+        index1 = index_by(list_of_specs, lambda s: s.arch,
+                          lambda s: s.compiler)
         index2 = index_by(list_of_specs, lambda s: s.compiler)

     ``index1'' now has two levels of dicts, with lists at the
     leaves, like this:
@@ -137,7 +137,7 @@ def get_calling_module_name():
     finally:
         del stack

-    if not '__module__' in caller_locals:
+    if '__module__' not in caller_locals:
         raise RuntimeError("Must invoke get_calling_module_name() "
                            "from inside a class definition!")
@@ -173,11 +173,11 @@ def has_method(cls, name):
 class memoized(object):
     """Decorator that caches the results of a function, storing them
        in an attribute of that function."""
+
     def __init__(self, func):
         self.func = func
         self.cache = {}

-
     def __call__(self, *args):
         if not isinstance(args, collections.Hashable):
             # Not hashable, so just call the function.
@@ -187,12 +185,10 @@ def __call__(self, *args):
             self.cache[args] = self.func(*args)
         return self.cache[args]

-
     def __get__(self, obj, objtype):
         """Support instance methods."""
         return functools.partial(self.__call__, obj)

-
     def clear(self):
         """Expunge cache so that self.func will be called again."""
         self.cache.clear()
@@ -237,13 +235,21 @@ def setter(name, value):
     if not has_method(cls, '_cmp_key'):
         raise TypeError("'%s' doesn't define _cmp_key()." % cls.__name__)

-    setter('__eq__', lambda s,o: (s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
-    setter('__lt__', lambda s,o: o is not None and s._cmp_key() < o._cmp_key())
-    setter('__le__', lambda s,o: o is not None and s._cmp_key() <= o._cmp_key())
+    setter('__eq__',
+           lambda s, o:
+           (s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
+    setter('__lt__',
+           lambda s, o: o is not None and s._cmp_key() < o._cmp_key())
+    setter('__le__',
+           lambda s, o: o is not None and s._cmp_key() <= o._cmp_key())

-    setter('__ne__', lambda s,o: (s is not o) and (o is None or s._cmp_key() != o._cmp_key()))
-    setter('__gt__', lambda s,o: o is None or s._cmp_key() > o._cmp_key())
-    setter('__ge__', lambda s,o: o is None or s._cmp_key() >= o._cmp_key())
+    setter('__ne__',
+           lambda s, o:
+           (s is not o) and (o is None or s._cmp_key() != o._cmp_key()))
+    setter('__gt__',
+           lambda s, o: o is None or s._cmp_key() > o._cmp_key())
+    setter('__ge__',
+           lambda s, o: o is None or s._cmp_key() >= o._cmp_key())

     setter('__hash__', lambda self: hash(self._cmp_key()))
@@ -254,10 +260,10 @@ def setter(name, value):
 class HashableMap(dict):
     """This is a hashable, comparable dictionary.  Hash is performed on
        a tuple of the values in the dictionary."""
+
     def _cmp_key(self):
         return tuple(sorted(self.values()))

-
     def copy(self):
         """Type-agnostic clone method.  Preserves subclass type."""
         # Construct a new dict of my type
@@ -336,24 +342,39 @@ def match(string):
     return match


 def DictWrapper(dictionary):
     """Returns a class that wraps a dictionary and enables it to be used
        like an object."""
+
     class wrapper(object):
-        def __getattr__(self, name): return dictionary[name]
-        def __setattr__(self, name, value): dictionary[name] = value
-        def setdefault(self, *args): return dictionary.setdefault(*args)
-        def get(self, *args): return dictionary.get(*args)
-        def keys(self): return dictionary.keys()
-        def values(self): return dictionary.values()
-        def items(self): return dictionary.items()
-        def __iter__(self): return iter(dictionary)
+
+        def __getattr__(self, name):
+            return dictionary[name]
+
+        def __setattr__(self, name, value):
+            dictionary[name] = value
+
+        def setdefault(self, *args):
+            return dictionary.setdefault(*args)
+
+        def get(self, *args):
+            return dictionary.get(*args)
+
+        def keys(self):
+            return dictionary.keys()
+
+        def values(self):
+            return dictionary.values()
+
+        def items(self):
+            return dictionary.items()
+
+        def __iter__(self):
+            return iter(dictionary)

     return wrapper()


 class RequiredAttributeError(ValueError):
+
     def __init__(self, message):
         super(RequiredAttributeError, self).__init__(message)
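
What the block of setter() calls above buys: a class defines only _cmp_key(),
and @key_ordering derives all six rich comparisons plus __hash__ from it. A
minimal sketch (the Point class is hypothetical):

    from llnl.util.lang import key_ordering


    @key_ordering
    class Point(object):
        def __init__(self, x, y):
            self.x, self.y = x, y

        def _cmp_key(self):
            # single source of truth for ==, !=, <, <=, >, >= and hash()
            return (self.x, self.y)


    assert Point(1, 2) == Point(1, 2)
    assert Point(1, 2) < Point(1, 3)
    assert hash(Point(0, 0)) == hash(Point(0, 0))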

lib/spack/llnl/util/link_tree.py

@@ -23,12 +23,13 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 """LinkTree class for setting up trees of symbolic links."""
-__all__ = ['LinkTree']

 import os
 import shutil

 from llnl.util.filesystem import *

+__all__ = ['LinkTree']
+
 empty_file_name = '.spack-empty'
@@ -43,13 +44,13 @@ class LinkTree(object):
        modified.
     """
+
     def __init__(self, source_root):
         if not os.path.exists(source_root):
             raise IOError("No such file or directory: '%s'", source_root)

         self._root = source_root

-
     def find_conflict(self, dest_root, **kwargs):
         """Returns the first file in dest that conflicts with src"""
         kwargs['follow_nonexisting'] = False
@@ -61,9 +62,9 @@ def find_conflict(self, dest_root, **kwargs):
                 return dest
         return None

-
     def merge(self, dest_root, **kwargs):
-        """Link all files in src into dest, creating directories if necessary."""
+        """Link all files in src into dest, creating directories
+           if necessary."""
         kwargs['order'] = 'pre'
         for src, dest in traverse_tree(self._root, dest_root, **kwargs):
             if os.path.isdir(src):
@@ -83,7 +84,6 @@ def merge(self, dest_root, **kwargs):
                 assert(not os.path.exists(dest))
                 os.symlink(src, dest)

-
     def unmerge(self, dest_root, **kwargs):
         """Unlink all files in dest that exist in src.

lib/spack/llnl/util/lock.py

@@ -28,6 +28,9 @@
 import time
 import socket

+__all__ = ['Lock', 'LockTransaction', 'WriteTransaction', 'ReadTransaction',
+           'LockError']
+
 # Default timeout in seconds, after which locks will raise exceptions.
 _default_timeout = 60
@@ -36,13 +39,21 @@

 class Lock(object):
-    def __init__(self,file_path):
+    """This is an implementation of a filesystem lock using Python's lockf.
+
+    In Python, `lockf` actually calls `fcntl`, so this should work with any
+    filesystem implementation that supports locking through the fcntl calls.
+    This includes distributed filesystems like Lustre (when flock is enabled)
+    and recent NFS versions.
+    """
+
+    def __init__(self, file_path):
         self._file_path = file_path
         self._fd = None
         self._reads = 0
         self._writes = 0

-
     def _lock(self, op, timeout):
         """This takes a lock using POSIX locks (``fnctl.lockf``).
@@ -63,7 +74,9 @@ def _lock(self, op, timeout):
                 fcntl.lockf(self._fd, op | fcntl.LOCK_NB)
                 if op == fcntl.LOCK_EX:
-                    os.write(self._fd, "pid=%s,host=%s" % (os.getpid(), socket.getfqdn()))
+                    os.write(
+                        self._fd,
+                        "pid=%s,host=%s" % (os.getpid(), socket.getfqdn()))
                 return

             except IOError as error:
@@ -75,7 +88,6 @@ def _lock(self, op, timeout):
         raise LockError("Timed out waiting for lock.")

-
     def _unlock(self):
         """Releases a lock using POSIX locks (``fcntl.lockf``)
@@ -83,11 +95,10 @@ def _unlock(self):
            be masquerading as write locks, but this removes either.
         """
-        fcntl.lockf(self._fd,fcntl.LOCK_UN)
+        fcntl.lockf(self._fd, fcntl.LOCK_UN)
         os.close(self._fd)
         self._fd = None

-
     def acquire_read(self, timeout=_default_timeout):
         """Acquires a recursive, shared lock for reading.
@@ -107,7 +118,6 @@ def acquire_read(self, timeout=_default_timeout):
             self._reads += 1
             return False

-
     def acquire_write(self, timeout=_default_timeout):
         """Acquires a recursive, exclusive lock for writing.
@@ -127,7 +137,6 @@ def acquire_write(self, timeout=_default_timeout):
             self._writes += 1
             return False

-
     def release_read(self):
         """Releases a read lock.
@@ -148,7 +157,6 @@ def release_read(self):
             self._reads -= 1
             return False

-
     def release_write(self):
         """Releases a write lock.
@@ -170,6 +178,70 @@ def release_write(self):
             return False


+class LockTransaction(object):
+    """Simple nested transaction context manager that uses a file lock.
+
+    This class can trigger actions when the lock is acquired for the
+    first time and released for the last.
+
+    If the acquire_fn returns a value, it is used as the return value for
+    __enter__, allowing it to be passed as the `as` argument of a `with`
+    statement.
+
+    If acquire_fn returns a context manager, *its* `__enter__` function will be
+    called in `__enter__` after acquire_fn, and its `__exit__` function will be
+    called before `release_fn` in `__exit__`, allowing you to nest a context
+    manager to be used along with the lock.
+
+    Timeout for lock is customizable.
+    """
+
+    def __init__(self, lock, acquire_fn=None, release_fn=None,
+                 timeout=_default_timeout):
+        self._lock = lock
+        self._timeout = timeout
+        self._acquire_fn = acquire_fn
+        self._release_fn = release_fn
+        self._as = None
+
+    def __enter__(self):
+        if self._enter() and self._acquire_fn:
+            self._as = self._acquire_fn()
+            if hasattr(self._as, '__enter__'):
+                return self._as.__enter__()
+            else:
+                return self._as
+
+    def __exit__(self, type, value, traceback):
+        suppress = False
+        if self._exit():
+            if self._as and hasattr(self._as, '__exit__'):
+                if self._as.__exit__(type, value, traceback):
+                    suppress = True
+            if self._release_fn:
+                if self._release_fn(type, value, traceback):
+                    suppress = True
+        return suppress
+
+
+class ReadTransaction(LockTransaction):
+    def _enter(self):
+        return self._lock.acquire_read(self._timeout)
+
+    def _exit(self):
+        return self._lock.release_read()
+
+
+class WriteTransaction(LockTransaction):
+    def _enter(self):
+        return self._lock.acquire_write(self._timeout)
+
+    def _exit(self):
+        return self._lock.release_write()
+
+
 class LockError(Exception):
     """Raised when an attempt to acquire a lock times out."""
+    pass
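
A short sketch of how the new transaction classes compose with Lock (the lock
file path and the reread/flush callbacks are placeholders): acquire_fn runs
only when the outermost transaction actually takes the lock, and release_fn
only when the last one releases it, so transactions nest cheaply.

    from llnl.util.lock import Lock, ReadTransaction, WriteTransaction

    lock = Lock('/tmp/example.lock')  # placeholder path


    def reread():
        pass  # e.g., reload an index from disk


    def flush(type, value, traceback):
        pass  # e.g., write the index back out


    with ReadTransaction(lock, acquire_fn=reread):
        # shared access; a nested ReadTransaction here would neither
        # re-acquire the lock nor re-run reread()
        pass

    with WriteTransaction(lock, acquire_fn=reread, release_fn=flush):
        pass  # exclusive access; flush runs on the final release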

lib/spack/llnl/util/tty/__init__.py

@@ -36,6 +36,7 @@
 _verbose = False
 indent = "  "

+
 def is_verbose():
     return _verbose
@@ -148,7 +149,8 @@ def get_yes_or_no(prompt, **kwargs):
     elif default_value is False:
         prompt += ' [y/N] '
     else:
-        raise ValueError("default for get_yes_no() must be True, False, or None.")
+        raise ValueError(
+            "default for get_yes_no() must be True, False, or None.")

     result = None
     while result is None:
@@ -174,8 +176,9 @@ def hline(label=None, **kwargs):
     char = kwargs.pop('char', '-')
     max_width = kwargs.pop('max_width', 64)
     if kwargs:
-        raise TypeError("'%s' is an invalid keyword argument for this function."
-                        % next(kwargs.iterkeys()))
+        raise TypeError(
+            "'%s' is an invalid keyword argument for this function."
+            % next(kwargs.iterkeys()))

     rows, cols = terminal_size()
     if not cols:
@@ -200,7 +203,8 @@ def terminal_size():
     """Gets the dimensions of the console: (rows, cols)."""
     def ioctl_GWINSZ(fd):
         try:
-            rc = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
+            rc = struct.unpack('hh', fcntl.ioctl(
+                fd, termios.TIOCGWINSZ, '1234'))
         except:
             return
         return rc

lib/spack/llnl/util/tty/colify.py

@@ -27,15 +27,14 @@
 """
 import os
 import sys
-import fcntl
-import termios
-import struct
 from StringIO import StringIO

 from llnl.util.tty import terminal_size
 from llnl.util.tty.color import clen, cextra


 class ColumnConfig:
+
     def __init__(self, cols):
         self.cols = cols
         self.line_length = 0
@@ -43,7 +42,8 @@ def __init__(self, cols):
         self.widths = [0] * cols   # does not include ansi colors

     def __repr__(self):
-        attrs = [(a,getattr(self, a)) for a in dir(self) if not a.startswith("__")]
+        attrs = [(a, getattr(self, a))
+                 for a in dir(self) if not a.startswith("__")]
         return "<Config: %s>" % ", ".join("%s: %r" % a for a in attrs)
@@ -68,7 +68,7 @@ def config_variable_cols(elts, console_width, padding, cols=0):
     max_cols = min(len(elts), max_cols)

     # Range of column counts to try.  If forced, use the supplied value.
-    col_range = [cols] if cols else xrange(1, max_cols+1)
+    col_range = [cols] if cols else xrange(1, max_cols + 1)

     # Determine the most columns possible for the console width.
     configs = [ColumnConfig(c) for c in col_range]
@@ -106,7 +106,6 @@ def config_uniform_cols(elts, console_width, padding, cols=0):
     # 'clen' ignores length of ansi color sequences.
     max_len = max(clen(e) for e in elts) + padding
-    max_clen = max(len(e) for e in elts) + padding
     if cols == 0:
         cols = max(1, console_width / max_len)
     cols = min(len(elts), cols)
@@ -130,17 +129,19 @@ def colify(elts, **options):
       output=<stream>   A file object to write to.  Default is sys.stdout.
      indent=<int>      Optionally indent all columns by some number of spaces.
      padding=<int>     Spaces between columns.  Default is 2.
-      width=<int>       Width of the output.  Default is 80 if tty is not detected.
+      width=<int>       Width of the output.  Default is 80 if tty not detected.

       cols=<int>        Force number of columns. Default is to size to terminal,
                         or single-column if no tty

       tty=<bool>        Whether to attempt to write to a tty.  Default is to
-                        autodetect a tty. Set to False to force single-column output.
+                        autodetect a tty. Set to False to force
+                        single-column output.

-      method=<string>   Method to use to fit columns. Options are variable or uniform.
-                        Variable-width columns are tighter, uniform columns are all the
-                        same width and fit less data on the screen.
+      method=<string>   Method to use to fit columns. Options are variable or
+                        uniform. Variable-width columns are tighter, uniform
+                        columns are all the same width and fit less data on
+                        the screen.
     """
     # Get keyword arguments or set defaults
     cols = options.pop("cols", 0)
@@ -152,8 +153,9 @@ def colify(elts, **options):
     console_cols = options.pop("width", None)

     if options:
-        raise TypeError("'%s' is an invalid keyword argument for this function."
-                        % next(options.iterkeys()))
+        raise TypeError(
+            "'%s' is an invalid keyword argument for this function."
+            % next(options.iterkeys()))

     # elts needs to be an array of strings so we can count the elements
     elts = [str(elt) for elt in elts]
@@ -167,7 +169,8 @@ def colify(elts, **options):
             r, c = env_size.split('x')
             console_rows, console_cols = int(r), int(c)
             tty = True
-        except: pass
+        except:
+            pass

     # Use only one column if not a tty.
     if not tty:
@@ -228,6 +231,7 @@ def colify_table(table, **options):
         raise ValueError("Table is empty in colify_table!")

     columns = len(table[0])
+
     def transpose():
         for i in xrange(columns):
             for row in table:
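
For reference, a quick sketch of calling colify with the options documented in
the docstring above (the element list is made up):

    from llnl.util.tty.colify import colify

    elts = ['libelf', 'libdwarf', 'mpileaks', 'callpath', 'mpich']

    colify(elts, indent=4)                  # fit columns to the terminal
    colify(elts, tty=False)                 # force single-column output
    colify(elts, cols=2, method='uniform')  # two equal-width columns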

lib/spack/llnl/util/tty/color.py

@@ -75,25 +75,27 @@
 import re
 import sys


 class ColorParseError(Exception):
     """Raised when a color format fails to parse."""
+
     def __init__(self, message):
         super(ColorParseError, self).__init__(message)

 # Text styles for ansi codes
-styles = {'*'  : '1',   # bold
-          '_'  : '4',   # underline
-          None : '0' }  # plain
+styles = {'*': '1',   # bold
+          '_': '4',   # underline
+          None: '0'}  # plain

 # Dim and bright ansi colors
-colors = {'k' : 30, 'K' : 90,  # black
-          'r' : 31, 'R' : 91,  # red
-          'g' : 32, 'G' : 92,  # green
-          'y' : 33, 'Y' : 93,  # yellow
-          'b' : 34, 'B' : 94,  # blue
-          'm' : 35, 'M' : 95,  # magenta
-          'c' : 36, 'C' : 96,  # cyan
-          'w' : 37, 'W' : 97 } # white
+colors = {'k': 30, 'K': 90,  # black
+          'r': 31, 'R': 91,  # red
+          'g': 32, 'G': 92,  # green
+          'y': 33, 'Y': 93,  # yellow
+          'b': 34, 'B': 94,  # blue
+          'm': 35, 'M': 95,  # magenta
+          'c': 36, 'C': 96,  # cyan
+          'w': 37, 'W': 97}  # white

 # Regex to be used for color formatting
 color_re = r'@(?:@|\.|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)'
@@ -104,6 +106,7 @@ def __init__(self, message):

 class match_to_ansi(object):
+
     def __init__(self, color=True):
         self.color = color
@@ -179,12 +182,14 @@ def cprint(string, stream=sys.stdout, color=None):
     """Same as cwrite, but writes a trailing newline to the stream."""
     cwrite(string + "\n", stream, color)


 def cescape(string):
     """Replace all @ with @@ in the string provided."""
     return str(string).replace('@', '@@')


 class ColorStream(object):
+
     def __init__(self, stream, color=None):
         self._stream = stream
         self._color = color
@@ -196,7 +201,7 @@ def write(self, string, **kwargs):
         color = self._color
         if self._color is None:
             if raw:
-                color=True
+                color = True
             else:
                 color = self._stream.isatty() or _force_color
         raw_write(colorize(string, color=color))

lib/spack/llnl/util/tty/log.py

@@ -37,6 +37,7 @@
 # Use this to strip escape sequences
 _escape = re.compile(r'\x1b[^m]*m|\x1b\[?1034h')

+
 def _strip(line):
     """Strip color and control characters from a line."""
     return _escape.sub('', line)
@@ -59,10 +60,10 @@ class keyboard_input(object):
        When the with block completes, this will restore settings before
        canonical and echo were disabled.
     """
+
     def __init__(self, stream):
         self.stream = stream

-
     def __enter__(self):
         self.old_cfg = None
@@ -87,10 +88,9 @@ def __enter__(self):
             # Apply new settings for terminal
             termios.tcsetattr(fd, termios.TCSADRAIN, self.new_cfg)

-        except Exception, e:
+        except Exception:
             pass  # Some OS's do not support termios, so ignore.

-
     def __exit__(self, exc_type, exception, traceback):
         # If termios was avaialble, restore old settings after the
         # with block

lib/spack/spack/__init__.py

@@ -1,3 +1,4 @@
+# flake8: noqa
 ##############################################################################
 # Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
 # Produced at the Lawrence Livermore National Laboratory.
@@ -50,8 +51,15 @@
 share_path = join_path(spack_root, "share", "spack")
 cache_path = join_path(var_path, "cache")

+# User configuration location
+user_config_path = os.path.expanduser('~/.spack')
+
 import spack.fetch_strategy
-cache = spack.fetch_strategy.FsCache(cache_path)
+fetch_cache = spack.fetch_strategy.FsCache(cache_path)
+
+from spack.file_cache import FileCache
+user_cache_path = join_path(user_config_path, 'cache')
+user_cache = FileCache(user_cache_path)

 prefix = spack_root
 opt_path = join_path(prefix, "opt")
@@ -140,7 +148,7 @@
 _tmp_candidates = (_default_tmp, '/nfs/tmp2', '/tmp', '/var/tmp')
 for path in _tmp_candidates:
     # don't add a second username if it's already unique by user.
-    if not _tmp_user in path:
+    if _tmp_user not in path:
         tmp_dirs.append(join_path(path, '%u', 'spack-stage'))
     else:
         tmp_dirs.append(join_path(path, 'spack-stage'))
@@ -172,9 +180,10 @@
 # Spack internal code should call 'import spack' and accesses other
 # variables (spack.repo, paths, etc.) directly.
 #
-# TODO: maybe this should be separated out and should go in build_environment.py?
-# TODO: it's not clear where all the stuff that needs to be included in packages
-#       should live.  This file is overloaded for spack core vs. for packages.
+# TODO: maybe this should be separated out to build_environment.py?
+# TODO: it's not clear where all the stuff that needs to be included in
+#       packages should live.  This file is overloaded for spack core vs.
+#       for packages.
 #
 __all__ = ['Package',
            'CMakePackage',
@@ -204,8 +213,8 @@
 __all__ += spack.util.executable.__all__

 from spack.package import \
-    install_dependency_symlinks, flatten_dependencies, DependencyConflictError, \
-    InstallError, ExternalPackageError
+    install_dependency_symlinks, flatten_dependencies, \
+    DependencyConflictError, InstallError, ExternalPackageError

 __all__ += [
-    'install_dependency_symlinks', 'flatten_dependencies', 'DependencyConflictError',
-    'InstallError', 'ExternalPackageError']
+    'install_dependency_symlinks', 'flatten_dependencies',
+    'DependencyConflictError', 'InstallError', 'ExternalPackageError']

lib/spack/spack/abi.py

@@ -30,15 +30,15 @@
 from spack.util.executable import Executable, ProcessError
 from llnl.util.lang import memoized


 class ABI(object):
     """This class provides methods to test ABI compatibility between specs.
        The current implementation is rather rough and could be improved."""

     def architecture_compatible(self, parent, child):
-        """Returns true iff the parent and child specs have ABI compatible targets."""
-        return not parent.architecture or not child.architecture \
-            or parent.architecture == child.architecture
+        """Return true if parent and child have ABI compatible targets."""
+        return not parent.architecture or not child.architecture or \
+            parent.architecture == child.architecture

     @memoized
     def _gcc_get_libstdcxx_version(self, version):
@@ -61,8 +61,9 @@ def _gcc_get_libstdcxx_version(self, version):
         else:
             return None
         try:
-            output = rungcc("--print-file-name=%s" % libname, return_output=True)
-        except ProcessError, e:
+            output = rungcc("--print-file-name=%s" % libname,
+                            return_output=True)
+        except ProcessError:
             return None
         if not output:
             return None
@@ -71,7 +72,6 @@ def _gcc_get_libstdcxx_version(self, version):
             return None
         return os.path.basename(libpath)

-
     @memoized
     def _gcc_compiler_compare(self, pversion, cversion):
         """Returns true iff the gcc version pversion and cversion
@@ -82,7 +82,6 @@ def _gcc_compiler_compare(self, pversion, cversion):
             return False
         return plib == clib

-
     def _intel_compiler_compare(self, pversion, cversion):
         """Returns true iff the intel version pversion and cversion
            are ABI compatible"""
@@ -92,9 +91,8 @@ def _intel_compiler_compare(self, pversion, cversion):
             return False
         return pversion.version[:2] == cversion.version[:2]

-
     def compiler_compatible(self, parent, child, **kwargs):
-        """Returns true iff the compilers for parent and child specs are ABI compatible"""
+        """Return true if compilers for parent and child are ABI compatible."""
         if not parent.compiler or not child.compiler:
             return True
@@ -109,8 +107,8 @@ def compiler_compatible(self, parent, child, **kwargs):
         # TODO: into compiler classes?
         for pversion in parent.compiler.versions:
             for cversion in child.compiler.versions:
-                # For a few compilers use specialized comparisons.  Otherwise
-                # match on version match.
+                # For a few compilers use specialized comparisons.
+                # Otherwise match on version match.
                 if pversion.satisfies(cversion):
                     return True
                 elif (parent.compiler.name == "gcc" and
@@ -121,9 +119,8 @@ def compiler_compatible(self, parent, child, **kwargs):
                     return True
         return False

-
     def compatible(self, parent, child, **kwargs):
         """Returns true iff a parent and child spec are ABI compatible"""
         loosematch = kwargs.get('loose', False)
         return self.architecture_compatible(parent, child) and \
             self.compiler_compatible(parent, child, loose=loosematch)

lib/spack/spack/architecture.py

@@ -76,7 +76,6 @@
     will be responsible for compiler detection.
 """
 import os
-import imp
 import inspect

 from llnl.util.lang import memoized, list_modules, key_ordering
@@ -92,6 +91,7 @@

 class NoPlatformError(serr.SpackError):
+
     def __init__(self):
         super(NoPlatformError, self).__init__(
             "Could not determine a platform for this machine.")
@@ -190,6 +190,12 @@ def operating_system(self, name):
         return self.operating_sys.get(name, None)

+    @classmethod
+    def setup_platform_environment(self, pkg, env):
+        """ Subclass can override this method if it requires any
+            platform-specific build environment modifications.
+        """
+        pass

     @classmethod
     def detect(self):
@@ -200,15 +206,12 @@ def detect(self):
         """
         raise NotImplementedError()

-
     def __repr__(self):
         return self.__str__()

-
     def __str__(self):
         return self.name

-
     def _cmp_key(self):
         t_keys = ''.join(str(t._cmp_key()) for t in
                          sorted(self.targets.values()))
@@ -279,7 +282,7 @@ def find_compilers(self, *paths):
         # ensure all the version calls we made are cached in the parent
         # process, as well.  This speeds up Spack a lot.
-        clist = reduce(lambda x, y: x+y, compiler_lists)
+        clist = reduce(lambda x, y: x + y, compiler_lists)
         return clist

     def find_compiler(self, cmp_cls, *path):
@@ -320,7 +323,7 @@ def find_compiler(self, cmp_cls, *path):
             # prefer the one with more compilers.
             prev_paths = [prev.cc, prev.cxx, prev.f77, prev.fc]
             newcount = len([p for p in paths if p is not None])
             prevcount = len([p for p in prev_paths if p is not None])

             # Don't add if it's not an improvement over prev compiler.
@@ -337,6 +340,7 @@ def to_dict(self):
         d['version'] = self.version
         return d

+
 @key_ordering
 class Arch(object):
     """Architecture is now a class to help with setting attributes.
@@ -377,10 +381,15 @@ def __str__(self):
         else:
             return ''

     def __contains__(self, string):
         return string in str(self)

+    # TODO: make this unnecessary: don't include an empty arch on *every* spec.
+    def __nonzero__(self):
+        return (self.platform is not None or
+                self.platform_os is not None or
+                self.target is not None)
+    __bool__ = __nonzero__

     def _cmp_key(self):
         if isinstance(self.platform, Platform):
@@ -424,7 +433,7 @@ def _operating_system_from_dict(os_name, plat=None):
     if isinstance(os_name, dict):
         name = os_name['name']
         version = os_name['version']
-        return plat.operating_system(name+version)
+        return plat.operating_system(name + version)
     else:
         return plat.operating_system(os_name)

lib/spack/spack/build_environment.py

@@ -436,6 +436,7 @@ def setup_package(pkg, dirty=False):
     set_compiler_environment_variables(pkg, spack_env)
     set_build_environment_variables(pkg, spack_env, dirty)
+    pkg.spec.architecture.platform.setup_platform_environment(pkg, spack_env)
     load_external_modules(pkg)
     # traverse in postorder so package can use vars from its dependencies
     spec = pkg.spec
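
The call added above is the consumer of the setup_platform_environment() hook
introduced in architecture.py. A sketch of a platform overriding it (the
subclass is illustrative, and the CRAYPE_LINK_TYPE setting simply mirrors the
Cray behavior described in the docs change above):

    from spack.architecture import Platform


    class MyCray(Platform):
        """Illustrative platform that tweaks the build environment."""

        def __init__(self):
            super(MyCray, self).__init__('mycray')

        @classmethod
        def setup_platform_environment(cls, pkg, env):
            # Make dynamic linking the default, as the docs describe.
            env.set('CRAYPE_LINK_TYPE', 'dynamic')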

lib/spack/spack/cmd/__init__.py

@@ -240,4 +240,4 @@ def fmt(s):
     else:
         raise ValueError(
             "Invalid mode for display_specs: %s. Must be one of (paths,"
-            "deps, short)." % mode)  # NOQA: ignore=E501
+            "deps, short)." % mode)

lib/spack/spack/cmd/activate.py

@@ -29,12 +29,14 @@
 description = "Activate a package extension."

+
 def setup_parser(subparser):
     subparser.add_argument(
         '-f', '--force', action='store_true',
         help="Activate without first activating dependencies.")
     subparser.add_argument(
-        'spec', nargs=argparse.REMAINDER, help="spec of package extension to activate.")
+        'spec', nargs=argparse.REMAINDER,
+        help="spec of package extension to activate.")


 def activate(parser, args):

lib/spack/spack/cmd/arch.py

@@ -22,10 +22,10 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-import spack
 import spack.architecture as architecture

 description = "Print the architecture for this machine"

+
 def arch(parser, args):
     print architecture.sys_type()

lib/spack/spack/cmd/cd.py

@@ -25,7 +25,8 @@
 import spack.cmd.location
 import spack.modules

-description="cd to spack directories in the shell."
+description = "cd to spack directories in the shell."
+

 def setup_parser(subparser):
     """This is for decoration -- spack cd is used through spack's

lib/spack/spack/cmd/clean.py

@@ -31,6 +31,7 @@
 description = "Remove build stage and source tarball for packages."

+
 def setup_parser(subparser):
     subparser.add_argument('packages', nargs=argparse.REMAINDER,
                            help="specs of packages to clean")

lib/spack/spack/cmd/common/arguments.py

@@ -35,7 +35,7 @@
 def add_common_arguments(parser, list_of_arguments):
     for argument in list_of_arguments:
         if argument not in _arguments:
-            message = 'Trying to add the non existing argument "{0}" to a command'  # NOQA: ignore=E501
+            message = 'Trying to add non existing argument "{0}" to a command'
             raise KeyError(message.format(argument))
         x = _arguments[argument]
         parser.add_argument(*x.flags, **x.kwargs)
@@ -82,7 +82,7 @@ def __call__(self, parser, namespace, values, option_string=None):
     kwargs={
         'action': 'store_true',
         'dest': 'yes_to_all',
-        'help': 'Assume "yes" is the answer to every confirmation asked to the user.'  # NOQA: ignore=E501
+        'help': 'Assume "yes" is the answer to every confirmation request.'
     })
 _arguments['yes_to_all'] = parms

View file

@ -37,6 +37,7 @@
description = "Manage compilers" description = "Manage compilers"
def setup_parser(subparser): def setup_parser(subparser):
sp = subparser.add_subparsers( sp = subparser.add_subparsers(
metavar='SUBCOMMAND', dest='compiler_command') metavar='SUBCOMMAND', dest='compiler_command')
@ -44,34 +45,44 @@ def setup_parser(subparser):
scopes = spack.config.config_scopes scopes = spack.config.config_scopes
# Find # Find
find_parser = sp.add_parser('find', aliases=['add'], help='Search the system for compilers to add to the Spack configuration.') find_parser = sp.add_parser(
'find', aliases=['add'],
help='Search the system for compilers to add to Spack configuration.')
find_parser.add_argument('add_paths', nargs=argparse.REMAINDER) find_parser.add_argument('add_paths', nargs=argparse.REMAINDER)
find_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope, find_parser.add_argument(
help="Configuration scope to modify.") '--scope', choices=scopes, default=spack.cmd.default_modify_scope,
help="Configuration scope to modify.")
# Remove # Remove
remove_parser = sp.add_parser('remove', aliases=['rm'], help='Remove compiler by spec.') remove_parser = sp.add_parser(
'remove', aliases=['rm'], help='Remove compiler by spec.')
remove_parser.add_argument( remove_parser.add_argument(
'-a', '--all', action='store_true', help='Remove ALL compilers that match spec.') '-a', '--all', action='store_true',
help='Remove ALL compilers that match spec.')
remove_parser.add_argument('compiler_spec') remove_parser.add_argument('compiler_spec')
remove_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope, remove_parser.add_argument(
help="Configuration scope to modify.") '--scope', choices=scopes, default=spack.cmd.default_modify_scope,
help="Configuration scope to modify.")
# List # List
list_parser = sp.add_parser('list', help='list available compilers') list_parser = sp.add_parser('list', help='list available compilers')
list_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_list_scope, list_parser.add_argument(
help="Configuration scope to read from.") '--scope', choices=scopes, default=spack.cmd.default_list_scope,
help="Configuration scope to read from.")
# Info # Info
info_parser = sp.add_parser('info', help='Show compiler paths.') info_parser = sp.add_parser('info', help='Show compiler paths.')
info_parser.add_argument('compiler_spec') info_parser.add_argument('compiler_spec')
info_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_list_scope, info_parser.add_argument(
help="Configuration scope to read from.") '--scope', choices=scopes, default=spack.cmd.default_list_scope,
help="Configuration scope to read from.")
def compiler_find(args): def compiler_find(args):
"""Search either $PATH or a list of paths OR MODULES for compilers and add them """Search either $PATH or a list of paths OR MODULES for compilers and
to Spack's configuration.""" add them to Spack's configuration.
"""
paths = args.add_paths paths = args.add_paths
if not paths: if not paths:
paths = get_path('PATH') paths = get_path('PATH')
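Per its docstring, compiler_find falls back to scanning $PATH when the user supplies no paths. A minimal sketch of that fallback, assuming get_path simply splits a PATH-like environment variable into its entries:

    import os

    def get_path(var):
        # assumed behaviour of the imported get_path helper
        return os.environ.get(var, '').split(os.pathsep)

    paths = []              # stand-in for args.add_paths
    if not paths:
        paths = get_path('PATH')
    print(paths[:3])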
@ -85,7 +96,7 @@ def compiler_find(args):
scope=args.scope, init_config=False)] scope=args.scope, init_config=False)]
if compilers: if compilers:
spack.compilers.add_compilers_to_config(compilers, scope=args.scope, spack.compilers.add_compilers_to_config(compilers, scope=args.scope,
init_config=False) init_config=False)
n = len(compilers) n = len(compilers)
s = 's' if n > 1 else '' s = 's' if n > 1 else ''
filename = spack.config.get_config_filename(args.scope, 'compilers') filename = spack.config.get_config_filename(args.scope, 'compilers')
@ -103,11 +114,12 @@ def compiler_remove(args):
elif not args.all and len(compilers) > 1: elif not args.all and len(compilers) > 1:
tty.error("Multiple compilers match spec %s. Choose one:" % cspec) tty.error("Multiple compilers match spec %s. Choose one:" % cspec)
colify(reversed(sorted([c.spec for c in compilers])), indent=4) colify(reversed(sorted([c.spec for c in compilers])), indent=4)
tty.msg("Or, you can use `spack compiler remove -a` to remove all of them.") tty.msg("Or, use `spack compiler remove -a` to remove all of them.")
sys.exit(1) sys.exit(1)
for compiler in compilers: for compiler in compilers:
spack.compilers.remove_compiler_from_config(compiler.spec, scope=args.scope) spack.compilers.remove_compiler_from_config(
compiler.spec, scope=args.scope)
tty.msg("Removed compiler %s" % compiler.spec) tty.msg("Removed compiler %s" % compiler.spec)
@ -133,7 +145,8 @@ def compiler_list(args):
tty.msg("Available compilers") tty.msg("Available compilers")
index = index_by(spack.compilers.all_compilers(scope=args.scope), 'name') index = index_by(spack.compilers.all_compilers(scope=args.scope), 'name')
for i, (name, compilers) in enumerate(index.items()): for i, (name, compilers) in enumerate(index.items()):
if i >= 1: print if i >= 1:
print
cname = "%s{%s}" % (spack.spec.compiler_color, name) cname = "%s{%s}" % (spack.spec.compiler_color, name)
tty.hline(colorize(cname), char='-') tty.hline(colorize(cname), char='-')
@ -141,10 +154,10 @@ def compiler_list(args):
def compiler(parser, args): def compiler(parser, args):
action = {'add' : compiler_find, action = {'add': compiler_find,
'find' : compiler_find, 'find': compiler_find,
'remove' : compiler_remove, 'remove': compiler_remove,
'rm' : compiler_remove, 'rm': compiler_remove,
'info' : compiler_info, 'info': compiler_info,
'list' : compiler_list } 'list': compiler_list}
action[args.compiler_command](args) action[args.compiler_command](args)
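Each subcommand-style file in this commit ends with the same dispatch idiom: a dict from subcommand name to handler function, indexed by the argparse dest. The idiom in isolation, with placeholder handlers:

    def compiler_find(args):
        print('would search for compilers')   # placeholder body

    def compiler_list(args):
        print('would list compilers')         # placeholder body

    action = {'add': compiler_find,    # 'add' and 'find' share a handler
              'find': compiler_find,
              'list': compiler_list}

    subcommand = 'list'                # stands in for args.compiler_command
    action[subcommand](None)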

View file

@ -22,18 +22,16 @@
# License along with this program; if not, write to the Free Software # License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################## ##############################################################################
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
from llnl.util.lang import index_by
import spack import spack
from spack.cmd.compiler import compiler_list from spack.cmd.compiler import compiler_list
description = "List available compilers. Same as 'spack compiler list'." description = "List available compilers. Same as 'spack compiler list'."
def setup_parser(subparser): def setup_parser(subparser):
subparser.add_argument('--scope', choices=spack.config.config_scopes, subparser.add_argument('--scope', choices=spack.config.config_scopes,
help="Configuration scope to read/modify.") help="Configuration scope to read/modify.")
def compilers(parser, args): def compilers(parser, args):
compiler_list(args) compiler_list(args)

View file

@ -22,15 +22,11 @@
# License along with this program; if not, write to the Free Software # License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################## ##############################################################################
import sys
import argparse
import llnl.util.tty as tty
import spack.config import spack.config
description = "Get and set configuration options." description = "Get and set configuration options."
def setup_parser(subparser): def setup_parser(subparser):
# User can only choose one # User can only choose one
scope_group = subparser.add_mutually_exclusive_group() scope_group = subparser.add_mutually_exclusive_group()
@ -64,6 +60,6 @@ def config_edit(args):
def config(parser, args): def config(parser, args):
action = { 'get' : config_get, action = {'get': config_get,
'edit' : config_edit } 'edit': config_edit}
action[args.config_command](args) action[args.config_command](args)

View file

@ -96,8 +96,7 @@ class ${class_name}(Package):
${versions} ${versions}
# FIXME: Add additional dependencies if required. ${dependencies}
${dependencies}
def install(self, spec, prefix): def install(self, spec, prefix):
${install} ${install}
@ -105,13 +104,39 @@ def install(self, spec, prefix):
# Build dependencies and extensions # Build dependencies and extensions
dependencies_dict = { dependencies_dict = {
'autotools': "# depends_on('foo')", 'autotools': """\
'cmake': "depends_on('cmake')", # FIXME: Add dependencies if required.
'scons': "depends_on('scons')", # depends_on('foo')""",
'python': "extends('python')",
'R': "extends('R')", 'cmake': """\
'octave': "extends('octave')", # FIXME: Add additional dependencies if required.
'unknown': "# depends_on('foo')" depends_on('cmake', type='build')""",
'scons': """\
# FIXME: Add additional dependencies if required.
depends_on('scons', type='build')""",
'python': """\
extends('python')
# FIXME: Add additional dependencies if required.
# depends_on('py-foo', type=nolink)""",
'R': """\
extends('R')
# FIXME: Add additional dependencies if required.
# depends_on('r-foo', type=nolink)""",
'octave': """\
extends('octave')
# FIXME: Add additional dependencies if required.
# depends_on('octave-foo', type=nolink)""",
'unknown': """\
# FIXME: Add dependencies if required.
# depends_on('foo')"""
} }
# Default installation instructions # Default installation instructions
@ -140,7 +165,7 @@ def install(self, spec, prefix):
'python': """\ 'python': """\
# FIXME: Add logic to build and install here. # FIXME: Add logic to build and install here.
python('setup.py', 'install', '--prefix={0}'.format(prefix))""", setup_py('install', '--prefix={0}'.format(prefix))""",
'R': """\ 'R': """\
# FIXME: Add logic to build and install here. # FIXME: Add logic to build and install here.
@ -192,6 +217,7 @@ def setup_parser(subparser):
class BuildSystemGuesser(object): class BuildSystemGuesser(object):
def __call__(self, stage, url): def __call__(self, stage, url):
"""Try to guess the type of build system used by a project based on """Try to guess the type of build system used by a project based on
the contents of its archive or the URL it was downloaded from.""" the contents of its archive or the URL it was downloaded from."""
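BuildSystemGuesser's job, per its docstring, is to pick one of the dependencies_dict keys above from the archive contents or the download URL. A simplified file-name-based guesser; the clue list is illustrative, not the exact set the real class checks:

    def guess_build_system(filenames):
        # Map tell-tale files to the dependencies_dict keys used above.
        clues = [('CMakeLists.txt', 'cmake'),
                 ('SConstruct', 'scons'),
                 ('setup.py', 'python'),
                 ('NAMESPACE', 'R'),
                 ('configure', 'autotools')]
        for hint, system in clues:
            if any(f.endswith(hint) for f in filenames):
                return system
        return 'unknown'

    print(guess_build_system(['foo-1.0/setup.py']))   # -> python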

View file

@ -31,6 +31,7 @@
description = "Deactivate a package extension." description = "Deactivate a package extension."
def setup_parser(subparser): def setup_parser(subparser):
subparser.add_argument( subparser.add_argument(
'-f', '--force', action='store_true', '-f', '--force', action='store_true',
@ -40,7 +41,8 @@ def setup_parser(subparser):
help="Deactivate all extensions of an extendable package, or " help="Deactivate all extensions of an extendable package, or "
"deactivate an extension AND its dependencies.") "deactivate an extension AND its dependencies.")
subparser.add_argument( subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help="spec of package extension to deactivate.") 'spec', nargs=argparse.REMAINDER,
help="spec of package extension to deactivate.")
def deactivate(parser, args): def deactivate(parser, args):
@ -65,7 +67,8 @@ def deactivate(parser, args):
if not args.force and not spec.package.activated: if not args.force and not spec.package.activated:
tty.die("%s is not activated." % pkg.spec.short_spec) tty.die("%s is not activated." % pkg.spec.short_spec)
tty.msg("Deactivating %s and all dependencies." % pkg.spec.short_spec) tty.msg("Deactivating %s and all dependencies." %
pkg.spec.short_spec)
topo_order = topological_sort(spec) topo_order = topological_sort(spec)
index = spec.index() index = spec.index()
@ -79,7 +82,9 @@ def deactivate(parser, args):
epkg.do_deactivate(force=args.force) epkg.do_deactivate(force=args.force)
else: else:
tty.die("spack deactivate --all requires an extendable package or an extension.") tty.die(
"spack deactivate --all requires an extendable package "
"or an extension.")
else: else:
if not pkg.is_extension: if not pkg.is_extension:

View file

@ -31,9 +31,11 @@
description = "Show installed packages that depend on another." description = "Show installed packages that depend on another."
def setup_parser(subparser): def setup_parser(subparser):
subparser.add_argument( subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help="specs to list dependencies of.") 'spec', nargs=argparse.REMAINDER,
help="specs to list dependencies of.")
def dependents(parser, args): def dependents(parser, args):
@ -42,5 +44,6 @@ def dependents(parser, args):
tty.die("spack dependents takes only one spec.") tty.die("spack dependents takes only one spec.")
fmt = '$_$@$%@$+$=$#' fmt = '$_$@$%@$+$=$#'
deps = [d.format(fmt, color=True) for d in specs[0].package.installed_dependents] deps = [d.format(fmt, color=True)
for d in specs[0].package.installed_dependents]
tty.msg("Dependents of %s" % specs[0].format(fmt, color=True), *deps) tty.msg("Dependents of %s" % specs[0].format(fmt, color=True), *deps)

View file

@ -35,6 +35,7 @@
description = "Do-It-Yourself: build from an existing source directory." description = "Do-It-Yourself: build from an existing source directory."
def setup_parser(subparser): def setup_parser(subparser):
subparser.add_argument( subparser.add_argument(
'-i', '--ignore-dependencies', action='store_true', dest='ignore_deps', '-i', '--ignore-dependencies', action='store_true', dest='ignore_deps',
@ -76,14 +77,17 @@ def diy(self, args):
return return
if not spec.versions.concrete: if not spec.versions.concrete:
tty.die("spack diy spec must have a single, concrete version. Did you forget a package version number?") tty.die(
"spack diy spec must have a single, concrete version. "
"Did you forget a package version number?")
spec.concretize() spec.concretize()
package = spack.repo.get(spec) package = spack.repo.get(spec)
if package.installed: if package.installed:
tty.error("Already installed in %s" % package.prefix) tty.error("Already installed in %s" % package.prefix)
tty.msg("Uninstall or try adding a version suffix for this DIY build.") tty.msg("Uninstall or try adding a version suffix for this "
"DIY build.")
sys.exit(1) sys.exit(1)
# Forces the build to run out of the current directory. # Forces the build to run out of the current directory.

View file

@ -25,6 +25,7 @@
description = "Run pydoc from within spack." description = "Run pydoc from within spack."
def setup_parser(subparser): def setup_parser(subparser):
subparser.add_argument('entity', help="Run pydoc help on entity") subparser.add_argument('entity', help="Run pydoc help on entity")

View file

@ -68,7 +68,7 @@ def edit_package(name, repo_path, namespace, force=False):
if os.path.exists(path): if os.path.exists(path):
if not os.path.isfile(path): if not os.path.isfile(path):
tty.die("Something's wrong. '%s' is not a file!" % path) tty.die("Something's wrong. '%s' is not a file!" % path)
if not os.access(path, os.R_OK|os.W_OK): if not os.access(path, os.R_OK | os.W_OK):
tty.die("Insufficient permissions on '%s'!" % path) tty.die("Insufficient permissions on '%s'!" % path)
elif not force: elif not force:
tty.die("No package '%s'. Use spack create, or supply -f/--force " tty.die("No package '%s'. Use spack create, or supply -f/--force "
@ -93,19 +93,23 @@ def setup_parser(subparser):
# Various filetypes you can edit directly from the cmd line. # Various filetypes you can edit directly from the cmd line.
excl_args.add_argument( excl_args.add_argument(
'-c', '--command', dest='path', action='store_const', '-c', '--command', dest='path', action='store_const',
const=spack.cmd.command_path, help="Edit the command with the supplied name.") const=spack.cmd.command_path,
help="Edit the command with the supplied name.")
excl_args.add_argument( excl_args.add_argument(
'-t', '--test', dest='path', action='store_const', '-t', '--test', dest='path', action='store_const',
const=spack.test_path, help="Edit the test with the supplied name.") const=spack.test_path, help="Edit the test with the supplied name.")
excl_args.add_argument( excl_args.add_argument(
'-m', '--module', dest='path', action='store_const', '-m', '--module', dest='path', action='store_const',
const=spack.module_path, help="Edit the main spack module with the supplied name.") const=spack.module_path,
help="Edit the main spack module with the supplied name.")
# Options for editing packages # Options for editing packages
excl_args.add_argument( excl_args.add_argument(
'-r', '--repo', default=None, help="Path to repo to edit package in.") '-r', '--repo', default=None,
help="Path to repo to edit package in.")
excl_args.add_argument( excl_args.add_argument(
'-N', '--namespace', default=None, help="Namespace of package to edit.") '-N', '--namespace', default=None,
help="Namespace of package to edit.")
subparser.add_argument( subparser.add_argument(
'name', nargs='?', default=None, help="name of package to edit") 'name', nargs='?', default=None, help="name of package to edit")

View file

@ -28,11 +28,13 @@
import spack.cmd import spack.cmd
import spack.build_environment as build_env import spack.build_environment as build_env
description = "Run a command with the environment for a particular spec's install." description = "Run a command with the install environment for a spec."
def setup_parser(subparser): def setup_parser(subparser):
subparser.add_argument( subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help="specs of package environment to emulate.") 'spec', nargs=argparse.REMAINDER,
help="specs of package environment to emulate.")
def env(parser, args): def env(parser, args):
@ -47,7 +49,7 @@ def env(parser, args):
if sep in args.spec: if sep in args.spec:
s = args.spec.index(sep) s = args.spec.index(sep)
spec = args.spec[:s] spec = args.spec[:s]
cmd = args.spec[s+1:] cmd = args.spec[s + 1:]
else: else:
spec = args.spec[0] spec = args.spec[0]
cmd = args.spec[1:] cmd = args.spec[1:]
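spack env splits its trailing arguments at a separator token so a spec can precede the command to run; everything before the separator is the spec, everything after is the command. A standalone version of the slicing (the separator's value is elided by the hunk, so '--' below is an assumption):

    argv = ['libelf%gcc', '--', 'make', '-j8']   # stand-in for args.spec
    sep = '--'                                   # assumed separator token

    if sep in argv:
        s = argv.index(sep)
        spec, cmd = argv[:s], argv[s + 1:]
    else:
        spec, cmd = argv[0], argv[1:]
    # spec -> ['libelf%gcc'], cmd -> ['make', '-j8']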

View file

@ -22,7 +22,6 @@
# License along with this program; if not, write to the Free Software # License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################## ##############################################################################
import sys
import argparse import argparse
import llnl.util.tty as tty import llnl.util.tty as tty
@ -34,6 +33,7 @@
description = "List extensions for package." description = "List extensions for package."
def setup_parser(subparser): def setup_parser(subparser):
format_group = subparser.add_mutually_exclusive_group() format_group = subparser.add_mutually_exclusive_group()
format_group.add_argument( format_group.add_argument(
@ -47,7 +47,8 @@ def setup_parser(subparser):
help='Show full dependency DAG of extensions') help='Show full dependency DAG of extensions')
subparser.add_argument( subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help='Spec of package to list extensions for') 'spec', nargs=argparse.REMAINDER,
help='Spec of package to list extensions for')
def extensions(parser, args): def extensions(parser, args):
@ -85,7 +86,8 @@ def extensions(parser, args):
# #
# List specs of installed extensions. # List specs of installed extensions.
# #
installed = [s.spec for s in spack.installed_db.installed_extensions_for(spec)] installed = [
s.spec for s in spack.installed_db.installed_extensions_for(spec)]
print print
if not installed: if not installed:
tty.msg("None installed.") tty.msg("None installed.")
@ -102,4 +104,5 @@ def extensions(parser, args):
tty.msg("None activated.") tty.msg("None activated.")
return return
tty.msg("%d currently activated:" % len(activated)) tty.msg("%d currently activated:" % len(activated))
spack.cmd.find.display_specs(activated.values(), mode=args.mode, long=args.long) spack.cmd.find.display_specs(
activated.values(), mode=args.mode, long=args.long)

View file

@ -29,16 +29,21 @@
description = "Fetch archives for packages" description = "Fetch archives for packages"
def setup_parser(subparser): def setup_parser(subparser):
subparser.add_argument( subparser.add_argument(
'-n', '--no-checksum', action='store_true', dest='no_checksum', '-n', '--no-checksum', action='store_true', dest='no_checksum',
help="Do not check packages against checksum") help="Do not check packages against checksum")
subparser.add_argument( subparser.add_argument(
'-m', '--missing', action='store_true', help="Also fetch all missing dependencies") '-m', '--missing', action='store_true',
help="Also fetch all missing dependencies")
subparser.add_argument( subparser.add_argument(
'-D', '--dependencies', action='store_true', help="Also fetch all dependencies") '-D', '--dependencies', action='store_true',
help="Also fetch all dependencies")
subparser.add_argument( subparser.add_argument(
'packages', nargs=argparse.REMAINDER, help="specs of packages to fetch") 'packages', nargs=argparse.REMAINDER,
help="specs of packages to fetch")
def fetch(parser, args): def fetch(parser, args):
if not args.packages: if not args.packages:
@ -50,7 +55,6 @@ def fetch(parser, args):
specs = spack.cmd.parse_specs(args.packages, concretize=True) specs = spack.cmd.parse_specs(args.packages, concretize=True)
for spec in specs: for spec in specs:
if args.missing or args.dependencies: if args.missing or args.dependencies:
to_fetch = set()
for s in spec.traverse(deptype_query=spack.alldeps): for s in spec.traverse(deptype_query=spack.alldeps):
package = spack.repo.get(s) package = spack.repo.get(s)
if args.missing and package.installed: if args.missing and package.installed:

View file

@ -30,6 +30,7 @@
description = "Generate graphs of package dependency relationships." description = "Generate graphs of package dependency relationships."
def setup_parser(subparser): def setup_parser(subparser):
setup_parser.parser = subparser setup_parser.parser = subparser
@ -42,10 +43,12 @@ def setup_parser(subparser):
help="Generate graph in dot format and print to stdout.") help="Generate graph in dot format and print to stdout.")
subparser.add_argument( subparser.add_argument(
'--concretize', action='store_true', help="Concretize specs before graphing.") '--concretize', action='store_true',
help="Concretize specs before graphing.")
subparser.add_argument( subparser.add_argument(
'specs', nargs=argparse.REMAINDER, help="specs of packages to graph.") 'specs', nargs=argparse.REMAINDER,
help="specs of packages to graph.")
def graph(parser, args): def graph(parser, args):
@ -56,11 +59,11 @@ def graph(parser, args):
setup_parser.parser.print_help() setup_parser.parser.print_help()
return 1 return 1
if args.dot: # Dot graph only if asked for. if args.dot: # Dot graph only if asked for.
graph_dot(*specs) graph_dot(*specs)
elif specs: # ascii is default: user doesn't need to provide it explicitly elif specs: # ascii is default: user doesn't need to provide it explicitly
graph_ascii(specs[0], debug=spack.debug) graph_ascii(specs[0], debug=spack.debug)
for spec in specs[1:]: for spec in specs[1:]:
print # extra line bt/w independent graphs print # extra line bt/w independent graphs
graph_ascii(spec, debug=spack.debug) graph_ascii(spec, debug=spack.debug)

View file

@ -22,14 +22,14 @@
# License along with this program; if not, write to the Free Software # License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################## ##############################################################################
import sys
description = "Get help on spack and its commands" description = "Get help on spack and its commands"
def setup_parser(subparser): def setup_parser(subparser):
subparser.add_argument('help_command', nargs='?', default=None, subparser.add_argument('help_command', nargs='?', default=None,
help='command to get help on') help='command to get help on')
def help(parser, args): def help(parser, args):
if args.help_command: if args.help_command:
parser.parse_args([args.help_command, '-h']) parser.parse_args([args.help_command, '-h'])

View file

@ -31,6 +31,7 @@
description = "Build and install packages" description = "Build and install packages"
def setup_parser(subparser): def setup_parser(subparser):
subparser.add_argument( subparser.add_argument(
'-i', '--ignore-dependencies', action='store_true', dest='ignore_deps', '-i', '--ignore-dependencies', action='store_true', dest='ignore_deps',
@ -52,7 +53,7 @@ def setup_parser(subparser):
help="Display verbose build output while installing.") help="Display verbose build output while installing.")
subparser.add_argument( subparser.add_argument(
'--fake', action='store_true', dest='fake', '--fake', action='store_true', dest='fake',
help="Fake install. Just remove the prefix and touch a fake file in it.") help="Fake install. Just remove prefix and create a fake file.")
subparser.add_argument( subparser.add_argument(
'--dirty', action='store_true', dest='dirty', '--dirty', action='store_true', dest='dirty',
help="Install a package *without* cleaning the environment.") help="Install a package *without* cleaning the environment.")
@ -60,13 +61,13 @@ def setup_parser(subparser):
'--stop-at', help="Stop at a particular phase of installation" '--stop-at', help="Stop at a particular phase of installation"
) )
subparser.add_argument( subparser.add_argument(
'packages', nargs=argparse.REMAINDER, help="specs of packages to install") 'packages', nargs=argparse.REMAINDER,
help="specs of packages to install")
subparser.add_argument( subparser.add_argument(
'--run-tests', action='store_true', dest='run_tests', '--run-tests', action='store_true', dest='run_tests',
help="Run tests during installation of a package.") help="Run tests during installation of a package.")
def install(parser, args): def install(parser, args):
if not args.packages: if not args.packages:
tty.die("install requires at least one package argument") tty.die("install requires at least one package argument")

View file

@ -25,13 +25,16 @@
import argparse import argparse
import spack.modules import spack.modules
description ="Add package to environment using modules." description = "Add package to environment using modules."
def setup_parser(subparser): def setup_parser(subparser):
"""Parser is only constructed so that this prints a nice help """Parser is only constructed so that this prints a nice help
message with -h. """ message with -h. """
subparser.add_argument( subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help="Spec of package to load with modules. (If -, read specs from STDIN)") 'spec', nargs=argparse.REMAINDER,
help="Spec of package to load with modules. "
"(If -, read specs from STDIN)")
def load(parser, args): def load(parser, args):

View file

@ -22,8 +22,6 @@
# License along with this program; if not, write to the Free Software # License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################## ##############################################################################
import os
import sys
import argparse import argparse
import llnl.util.tty as tty import llnl.util.tty as tty
@ -32,16 +30,19 @@
import spack import spack
import spack.cmd import spack.cmd
description="Print out locations of various directories used by Spack" description = "Print out locations of various directories used by Spack"
def setup_parser(subparser): def setup_parser(subparser):
global directories global directories
directories = subparser.add_mutually_exclusive_group() directories = subparser.add_mutually_exclusive_group()
directories.add_argument( directories.add_argument(
'-m', '--module-dir', action='store_true', help="Spack python module directory.") '-m', '--module-dir', action='store_true',
help="Spack python module directory.")
directories.add_argument( directories.add_argument(
'-r', '--spack-root', action='store_true', help="Spack installation root.") '-r', '--spack-root', action='store_true',
help="Spack installation root.")
directories.add_argument( directories.add_argument(
'-i', '--install-dir', action='store_true', '-i', '--install-dir', action='store_true',
@ -53,15 +54,19 @@ def setup_parser(subparser):
'-P', '--packages', action='store_true', '-P', '--packages', action='store_true',
help="Top-level packages directory for Spack.") help="Top-level packages directory for Spack.")
directories.add_argument( directories.add_argument(
'-s', '--stage-dir', action='store_true', help="Stage directory for a spec.") '-s', '--stage-dir', action='store_true',
help="Stage directory for a spec.")
directories.add_argument( directories.add_argument(
'-S', '--stages', action='store_true', help="Top level Stage directory.") '-S', '--stages', action='store_true',
help="Top level Stage directory.")
directories.add_argument( directories.add_argument(
'-b', '--build-dir', action='store_true', '-b', '--build-dir', action='store_true',
help="Checked out or expanded source directory for a spec (requires it to be staged first).") help="Checked out or expanded source directory for a spec "
"(requires it to be staged first).")
subparser.add_argument( subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help="spec of package to fetch directory for.") 'spec', nargs=argparse.REMAINDER,
help="spec of package to fetch directory for.")
def location(parser, args): def location(parser, args):
@ -104,9 +109,9 @@ def location(parser, args):
if args.stage_dir: if args.stage_dir:
print pkg.stage.path print pkg.stage.path
else: # args.build_dir is the default. else: # args.build_dir is the default.
if not pkg.stage.source_path: if not pkg.stage.source_path:
tty.die("Build directory does not exist yet. Run this to create it:", tty.die("Build directory does not exist yet. "
"Run this to create it:",
"spack stage " + " ".join(args.spec)) "spack stage " + " ".join(args.spec))
print pkg.stage.source_path print pkg.stage.source_path

View file

@ -23,7 +23,6 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################## ##############################################################################
import os import os
import sys
from datetime import datetime from datetime import datetime
import argparse import argparse
@ -40,6 +39,7 @@
description = "Manage mirrors." description = "Manage mirrors."
def setup_parser(subparser): def setup_parser(subparser):
subparser.add_argument( subparser.add_argument(
'-n', '--no-checksum', action='store_true', dest='no_checksum', '-n', '--no-checksum', action='store_true', dest='no_checksum',
@ -61,8 +61,9 @@ def setup_parser(subparser):
'-D', '--dependencies', action='store_true', '-D', '--dependencies', action='store_true',
help="Also fetch all dependencies") help="Also fetch all dependencies")
create_parser.add_argument( create_parser.add_argument(
'-o', '--one-version-per-spec', action='store_const', const=1, default=0, '-o', '--one-version-per-spec', action='store_const',
help="Only fetch one 'preferred' version per spec, not all known versions.") const=1, default=0,
help="Only fetch one 'preferred' version per spec, not all known.")
scopes = spack.config.config_scopes scopes = spack.config.config_scopes
@ -70,7 +71,7 @@ def setup_parser(subparser):
add_parser = sp.add_parser('add', help=mirror_add.__doc__) add_parser = sp.add_parser('add', help=mirror_add.__doc__)
add_parser.add_argument('name', help="Mnemonic name for mirror.") add_parser.add_argument('name', help="Mnemonic name for mirror.")
add_parser.add_argument( add_parser.add_argument(
'url', help="URL of mirror directory created by 'spack mirror create'.") 'url', help="URL of mirror directory from 'spack mirror create'.")
add_parser.add_argument( add_parser.add_argument(
'--scope', choices=scopes, default=spack.cmd.default_modify_scope, '--scope', choices=scopes, default=spack.cmd.default_modify_scope,
help="Configuration scope to modify.") help="Configuration scope to modify.")
@ -107,7 +108,7 @@ def mirror_add(args):
tty.die("Mirror with url %s already exists." % url) tty.die("Mirror with url %s already exists." % url)
# should only be one item per mirror dict. # should only be one item per mirror dict.
items = [(n,u) for n,u in mirrors.items()] items = [(n, u) for n, u in mirrors.items()]
items.insert(0, (args.name, url)) items.insert(0, (args.name, url))
mirrors = syaml_dict(items) mirrors = syaml_dict(items)
spack.config.update_config('mirrors', mirrors, scope=args.scope) spack.config.update_config('mirrors', mirrors, scope=args.scope)
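mirror_add rebuilds the mirrors mapping with the new entry inserted at position 0, which (assuming iteration order doubles as lookup precedence, as it appears to for Spack mirrors) makes the newest mirror win. The same ordering trick with a plain OrderedDict standing in for syaml_dict:

    from collections import OrderedDict

    mirrors = OrderedDict([('lab', 'https://mirror.lab.example/spack')])

    items = [(n, u) for n, u in mirrors.items()]
    items.insert(0, ('local', 'file:///data/spack-mirror'))  # new entry first
    mirrors = OrderedDict(items)

    print(list(mirrors))   # -> ['local', 'lab']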
@ -121,7 +122,7 @@ def mirror_remove(args):
if not mirrors: if not mirrors:
mirrors = syaml_dict() mirrors = syaml_dict()
if not name in mirrors: if name not in mirrors:
tty.die("No mirror with name %s" % name) tty.die("No mirror with name %s" % name)
old_value = mirrors.pop(name) old_value = mirrors.pop(name)
@ -152,7 +153,7 @@ def _read_specs_from_file(filename):
s.package s.package
specs.append(s) specs.append(s)
except SpackError, e: except SpackError, e:
tty.die("Parse error in %s, line %d:" % (args.file, i+1), tty.die("Parse error in %s, line %d:" % (args.file, i + 1),
">>> " + string, str(e)) ">>> " + string, str(e))
return specs return specs
@ -214,10 +215,10 @@ def mirror_create(args):
def mirror(parser, args): def mirror(parser, args):
action = { 'create' : mirror_create, action = {'create': mirror_create,
'add' : mirror_add, 'add': mirror_add,
'remove' : mirror_remove, 'remove': mirror_remove,
'rm' : mirror_remove, 'rm': mirror_remove,
'list' : mirror_list } 'list': mirror_list}
action[args.mirror_command](args) action[args.mirror_command](args)

View file

@ -118,7 +118,8 @@ def loads(mtype, specs, args):
seen_add = seen.add seen_add = seen.add
for spec in specs_from_user_constraint: for spec in specs_from_user_constraint:
specs.extend( specs.extend(
[item for item in spec.traverse(order='post', cover='nodes') if not (item in seen or seen_add(item))] # NOQA: ignore=E501 [item for item in spec.traverse(order='post', cover='nodes')
if not (item in seen or seen_add(item))]
) )
module_cls = module_types[mtype] module_cls = module_types[mtype]
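The seen/seen_add comprehension in the loads hunk above is the usual order-preserving de-duplication idiom: set.add returns None, so `item in seen or seen_add(item)` both tests and records in a single pass, and hoisting the bound method avoids a repeated attribute lookup. In isolation:

    def dedup(items):
        seen = set()
        seen_add = seen.add   # hoist the bound-method lookup out of the loop
        return [x for x in items if not (x in seen or seen_add(x))]

    print(dedup([3, 1, 3, 2, 1]))   # -> [3, 1, 2]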
@ -178,7 +179,9 @@ def rm(mtype, specs, args):
# Ask for confirmation # Ask for confirmation
if not args.yes_to_all: if not args.yes_to_all:
tty.msg('You are about to remove {0} module files the following specs:\n'.format(mtype)) # NOQA: ignore=E501 tty.msg(
'You are about to remove {0} module files for the following specs:\n'
.format(mtype))
spack.cmd.display_specs(specs_with_modules, long=True) spack.cmd.display_specs(specs_with_modules, long=True)
print('') print('')
spack.cmd.ask_for_confirmation('Do you want to proceed ? ') spack.cmd.ask_for_confirmation('Do you want to proceed ? ')
@ -197,7 +200,9 @@ def refresh(mtype, specs, args):
return return
if not args.yes_to_all: if not args.yes_to_all:
tty.msg('You are about to regenerate {name} module files for the following specs:\n'.format(name=mtype)) # NOQA: ignore=E501 tty.msg(
'You are about to regenerate {name} module files for:\n'
.format(name=mtype))
spack.cmd.display_specs(specs, long=True) spack.cmd.display_specs(specs, long=True)
print('') print('')
spack.cmd.ask_for_confirmation('Do you want to proceed ? ') spack.cmd.ask_for_confirmation('Do you want to proceed ? ')
@ -245,11 +250,13 @@ def module(parser, args):
try: try:
callbacks[args.subparser_name](module_type, args.specs, args) callbacks[args.subparser_name](module_type, args.specs, args)
except MultipleMatches: except MultipleMatches:
message = 'the constraint \'{query}\' matches multiple packages, and this is not allowed in this context' # NOQA: ignore=E501 message = ('the constraint \'{query}\' matches multiple packages, '
'and this is not allowed in this context')
tty.error(message.format(query=constraint)) tty.error(message.format(query=constraint))
for s in args.specs: for s in args.specs:
sys.stderr.write(s.format(color=True) + '\n') sys.stderr.write(s.format(color=True) + '\n')
raise SystemExit(1) raise SystemExit(1)
except NoMatch: except NoMatch:
message = 'the constraint \'{query}\' match no package, and this is not allowed in this context' # NOQA: ignore=E501 message = ('the constraint \'{query}\' matches no package, '
'and this is not allowed in this context')
tty.die(message.format(query=constraint)) tty.die(message.format(query=constraint))

View file

@ -32,7 +32,7 @@
def github_url(pkg): def github_url(pkg):
"""Link to a package file on github.""" """Link to a package file on github."""
url = "https://github.com/llnl/spack/blob/master/var/spack/packages/%s/package.py" # NOQA: ignore=E501 url = "https://github.com/llnl/spack/blob/master/var/spack/packages/%s/package.py"
return (url % pkg.name) return (url % pkg.name)

View file

@ -29,14 +29,16 @@
import spack import spack
description="Patch expanded archive sources in preparation for install" description = "Patch expanded archive sources in preparation for install"
def setup_parser(subparser): def setup_parser(subparser):
subparser.add_argument( subparser.add_argument(
'-n', '--no-checksum', action='store_true', dest='no_checksum', '-n', '--no-checksum', action='store_true', dest='no_checksum',
help="Do not check downloaded packages against checksum") help="Do not check downloaded packages against checksum")
subparser.add_argument( subparser.add_argument(
'packages', nargs=argparse.REMAINDER, help="specs of packages to stage") 'packages', nargs=argparse.REMAINDER,
help="specs of packages to stage")
def patch(parser, args): def patch(parser, args):

View file

@ -33,6 +33,7 @@
description = "Query packages associated with particular git revisions." description = "Query packages associated with particular git revisions."
def setup_parser(subparser): def setup_parser(subparser):
sp = subparser.add_subparsers( sp = subparser.add_subparsers(
metavar='SUBCOMMAND', dest='pkg_command') metavar='SUBCOMMAND', dest='pkg_command')
@ -46,22 +47,28 @@ def setup_parser(subparser):
help="Revision to list packages for.") help="Revision to list packages for.")
diff_parser = sp.add_parser('diff', help=pkg_diff.__doc__) diff_parser = sp.add_parser('diff', help=pkg_diff.__doc__)
diff_parser.add_argument('rev1', nargs='?', default='HEAD^', diff_parser.add_argument(
help="Revision to compare against.") 'rev1', nargs='?', default='HEAD^',
diff_parser.add_argument('rev2', nargs='?', default='HEAD', help="Revision to compare against.")
help="Revision to compare to rev1 (default is HEAD).") diff_parser.add_argument(
'rev2', nargs='?', default='HEAD',
help="Revision to compare to rev1 (default is HEAD).")
add_parser = sp.add_parser('added', help=pkg_added.__doc__) add_parser = sp.add_parser('added', help=pkg_added.__doc__)
add_parser.add_argument('rev1', nargs='?', default='HEAD^', add_parser.add_argument(
help="Revision to compare against.") 'rev1', nargs='?', default='HEAD^',
add_parser.add_argument('rev2', nargs='?', default='HEAD', help="Revision to compare against.")
help="Revision to compare to rev1 (default is HEAD).") add_parser.add_argument(
'rev2', nargs='?', default='HEAD',
help="Revision to compare to rev1 (default is HEAD).")
rm_parser = sp.add_parser('removed', help=pkg_removed.__doc__) rm_parser = sp.add_parser('removed', help=pkg_removed.__doc__)
rm_parser.add_argument('rev1', nargs='?', default='HEAD^', rm_parser.add_argument(
help="Revision to compare against.") 'rev1', nargs='?', default='HEAD^',
rm_parser.add_argument('rev2', nargs='?', default='HEAD', help="Revision to compare against.")
help="Revision to compare to rev1 (default is HEAD).") rm_parser.add_argument(
'rev2', nargs='?', default='HEAD',
help="Revision to compare to rev1 (default is HEAD).")
def get_git(): def get_git():
@ -88,7 +95,8 @@ def pkg_add(args):
for pkg_name in args.packages: for pkg_name in args.packages:
filename = spack.repo.filename_for_package_name(pkg_name) filename = spack.repo.filename_for_package_name(pkg_name)
if not os.path.isfile(filename): if not os.path.isfile(filename):
tty.die("No such package: %s. Path does not exist:" % pkg_name, filename) tty.die("No such package: %s. Path does not exist:" %
pkg_name, filename)
git = get_git() git = get_git()
git('-C', spack.packages_path, 'add', filename) git('-C', spack.packages_path, 'add', filename)
@ -112,7 +120,8 @@ def pkg_diff(args):
if u1: if u1:
print "%s:" % args.rev1 print "%s:" % args.rev1
colify(sorted(u1), indent=4) colify(sorted(u1), indent=4)
if u1: print if u1:
print
if u2: if u2:
print "%s:" % args.rev2 print "%s:" % args.rev2
@ -122,19 +131,21 @@ def pkg_diff(args):
def pkg_removed(args): def pkg_removed(args):
"""Show packages removed since a commit.""" """Show packages removed since a commit."""
u1, u2 = diff_packages(args.rev1, args.rev2) u1, u2 = diff_packages(args.rev1, args.rev2)
if u1: colify(sorted(u1)) if u1:
colify(sorted(u1))
def pkg_added(args): def pkg_added(args):
"""Show packages added since a commit.""" """Show packages added since a commit."""
u1, u2 = diff_packages(args.rev1, args.rev2) u1, u2 = diff_packages(args.rev1, args.rev2)
if u2: colify(sorted(u2)) if u2:
colify(sorted(u2))
def pkg(parser, args): def pkg(parser, args):
action = { 'add' : pkg_add, action = {'add': pkg_add,
'diff' : pkg_diff, 'diff': pkg_diff,
'list' : pkg_list, 'list': pkg_list,
'removed' : pkg_removed, 'removed': pkg_removed,
'added' : pkg_added } 'added': pkg_added}
action[args.pkg_command](args) action[args.pkg_command](args)

View file

@ -22,7 +22,6 @@
# License along with this program; if not, write to the Free Software # License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################## ##############################################################################
import os
import argparse import argparse
from llnl.util.tty.colify import colify from llnl.util.tty.colify import colify
@ -30,11 +29,13 @@
import spack import spack
import spack.cmd import spack.cmd
description ="List packages that provide a particular virtual package" description = "List packages that provide a particular virtual package"
def setup_parser(subparser): def setup_parser(subparser):
subparser.add_argument('vpkg_spec', metavar='VPACKAGE_SPEC', nargs=argparse.REMAINDER, subparser.add_argument(
help='Find packages that provide this virtual package') 'vpkg_spec', metavar='VPACKAGE_SPEC', nargs=argparse.REMAINDER,
help='Find packages that provide this virtual package')
def providers(parser, args): def providers(parser, args):

View file

@ -33,7 +33,11 @@ def setup_parser(subparser):
'-s', '--stage', action='store_true', default=True, '-s', '--stage', action='store_true', default=True,
help="Remove all temporary build stages (default).") help="Remove all temporary build stages (default).")
subparser.add_argument( subparser.add_argument(
'-c', '--cache', action='store_true', help="Remove cached downloads.") '-d', '--downloads', action='store_true',
help="Remove cached downloads.")
subparser.add_argument(
'-u', '--user-cache', action='store_true',
help="Remove caches in user home directory. Includes virtual indices.")
subparser.add_argument( subparser.add_argument(
'-a', '--all', action='store_true', '-a', '--all', action='store_true',
help="Remove all of the above.") help="Remove all of the above.")
@ -41,12 +45,14 @@ def setup_parser(subparser):
def purge(parser, args): def purge(parser, args):
# Special case: no flags. # Special case: no flags.
if not any((args.stage, args.cache, args.all)): if not any((args.stage, args.downloads, args.user_cache, args.all)):
stage.purge() stage.purge()
return return
# handle other flags with fall through. # handle other flags with fall through.
if args.stage or args.all: if args.stage or args.all:
stage.purge() stage.purge()
if args.cache or args.all: if args.downloads or args.all:
spack.cache.destroy() spack.fetch_cache.destroy()
if args.user_cache or args.all:
spack.user_cache.destroy()
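The rewritten purge handles its flags as fall-through switches: no flags at all means the default stage cleanup, and each of the stage/downloads/user-cache flags (or --all for everything) adds an action. A compact sketch of the same control flow with the destroy calls reduced to labels:

    def purge(stage=False, downloads=False, user_cache=False, everything=False):
        # 'everything' stands in for args.all, renamed to avoid shadowing all()
        if not any((stage, downloads, user_cache, everything)):
            return ['stage']                      # default: purge stages only
        actions = []
        if stage or everything:
            actions.append('stage')
        if downloads or everything:
            actions.append('downloads')           # spack.fetch_cache.destroy()
        if user_cache or everything:
            actions.append('user_cache')          # spack.user_cache.destroy()
        return actions

    print(purge(everything=True))   # -> ['stage', 'downloads', 'user_cache']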

View file

@ -30,18 +30,22 @@
import spack import spack
def setup_parser(subparser): def setup_parser(subparser):
subparser.add_argument( subparser.add_argument(
'-c', dest='python_command', help='Command to execute.') '-c', dest='python_command', help='Command to execute.')
subparser.add_argument( subparser.add_argument(
'python_args', nargs=argparse.REMAINDER, help="File to run plus arguments.") 'python_args', nargs=argparse.REMAINDER,
help="File to run plus arguments.")
description = "Launch an interpreter as spack would launch a command" description = "Launch an interpreter as spack would launch a command"
def python(parser, args): def python(parser, args):
# Fake a main python shell by setting __name__ to __main__. # Fake a main python shell by setting __name__ to __main__.
console = code.InteractiveConsole({'__name__' : '__main__', console = code.InteractiveConsole({'__name__': '__main__',
'spack' : spack}) 'spack': spack})
if "PYTHONSTARTUP" in os.environ: if "PYTHONSTARTUP" in os.environ:
startup_file = os.environ["PYTHONSTARTUP"] startup_file = os.environ["PYTHONSTARTUP"]

View file

@ -22,10 +22,10 @@
# License along with this program; if not, write to the Free Software # License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################## ##############################################################################
import argparse
import spack import spack
description = "Rebuild Spack's package database." description = "Rebuild Spack's package database."
def reindex(parser, args): def reindex(parser, args):
spack.installed_db.reindex(spack.install_layout) spack.installed_db.reindex(spack.install_layout)

View file

@ -23,20 +23,16 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################## ##############################################################################
import os import os
import re
import shutil
import argparse
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.filesystem import join_path, mkdirp
import spack.spec import spack.spec
import spack.config import spack.config
from spack.util.environment import get_path
from spack.repository import * from spack.repository import *
description = "Manage package source repositories." description = "Manage package source repositories."
def setup_parser(subparser): def setup_parser(subparser):
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='repo_command') sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='repo_command')
scopes = spack.config.config_scopes scopes = spack.config.config_scopes
@ -57,13 +53,15 @@ def setup_parser(subparser):
# Add # Add
add_parser = sp.add_parser('add', help=repo_add.__doc__) add_parser = sp.add_parser('add', help=repo_add.__doc__)
add_parser.add_argument('path', help="Path to a Spack package repository directory.") add_parser.add_argument(
'path', help="Path to a Spack package repository directory.")
add_parser.add_argument( add_parser.add_argument(
'--scope', choices=scopes, default=spack.cmd.default_modify_scope, '--scope', choices=scopes, default=spack.cmd.default_modify_scope,
help="Configuration scope to modify.") help="Configuration scope to modify.")
# Remove # Remove
remove_parser = sp.add_parser('remove', help=repo_remove.__doc__, aliases=['rm']) remove_parser = sp.add_parser(
'remove', help=repo_remove.__doc__, aliases=['rm'])
remove_parser.add_argument( remove_parser.add_argument(
'path_or_namespace', 'path_or_namespace',
help="Path or namespace of a Spack package repository.") help="Path or namespace of a Spack package repository.")
@ -100,7 +98,8 @@ def repo_add(args):
# If that succeeds, finally add it to the configuration. # If that succeeds, finally add it to the configuration.
repos = spack.config.get_config('repos', args.scope) repos = spack.config.get_config('repos', args.scope)
if not repos: repos = [] if not repos:
repos = []
if repo.root in repos or path in repos: if repo.root in repos or path in repos:
tty.die("Repository is already registered with Spack: %s" % path) tty.die("Repository is already registered with Spack: %s" % path)
@ -135,7 +134,7 @@ def repo_remove(args):
tty.msg("Removed repository %s with namespace '%s'." tty.msg("Removed repository %s with namespace '%s'."
% (repo.root, repo.namespace)) % (repo.root, repo.namespace))
return return
except RepoError as e: except RepoError:
continue continue
tty.die("No repository with path or namespace: %s" tty.die("No repository with path or namespace: %s"
@ -149,7 +148,7 @@ def repo_list(args):
for r in roots: for r in roots:
try: try:
repos.append(Repo(r)) repos.append(Repo(r))
except RepoError as e: except RepoError:
continue continue
msg = "%d package repositor" % len(repos) msg = "%d package repositor" % len(repos)
@ -166,9 +165,9 @@ def repo_list(args):
def repo(parser, args): def repo(parser, args):
action = { 'create' : repo_create, action = {'create': repo_create,
'list' : repo_list, 'list': repo_list,
'add' : repo_add, 'add': repo_add,
'remove' : repo_remove, 'remove': repo_remove,
'rm' : repo_remove} 'rm': repo_remove}
action[args.repo_command](args) action[args.repo_command](args)

View file

@ -31,6 +31,7 @@
description = "Revert checked out package source code." description = "Revert checked out package source code."
def setup_parser(subparser): def setup_parser(subparser):
subparser.add_argument('packages', nargs=argparse.REMAINDER, subparser.add_argument('packages', nargs=argparse.REMAINDER,
help="specs of packages to restage") help="specs of packages to restage")

View file

@ -143,7 +143,8 @@ def setup(self, args):
if not spec.versions.concrete: if not spec.versions.concrete:
tty.die( tty.die(
"spack setup spec must have a single, concrete version. Did you forget a package version number?") "spack setup spec must have a single, concrete version. "
"Did you forget a package version number?")
spec.concretize() spec.concretize()
package = spack.repo.get(spec) package = spack.repo.get(spec)

View file

@ -25,23 +25,22 @@
import argparse import argparse
import spack.cmd import spack.cmd
import llnl.util.tty as tty
import spack import spack
import spack.url as url
description = "print out abstract and concrete versions of a spec." description = "print out abstract and concrete versions of a spec."
def setup_parser(subparser): def setup_parser(subparser):
subparser.add_argument('-i', '--ids', action='store_true', subparser.add_argument('-i', '--ids', action='store_true',
help="show numerical ids for dependencies.") help="show numerical ids for dependencies.")
subparser.add_argument('specs', nargs=argparse.REMAINDER, help="specs of packages") subparser.add_argument(
'specs', nargs=argparse.REMAINDER, help="specs of packages")
def spec(parser, args): def spec(parser, args):
kwargs = { 'ids' : args.ids, kwargs = {'ids': args.ids,
'indent' : 2, 'indent': 2,
'color' : True } 'color': True}
for spec in spack.cmd.parse_specs(args.specs): for spec in spack.cmd.parse_specs(args.specs):
print "Input spec" print "Input spec"

View file

@ -22,14 +22,14 @@
# License along with this program; if not, write to the Free Software # License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################## ##############################################################################
import os
import argparse import argparse
import llnl.util.tty as tty import llnl.util.tty as tty
import spack import spack
import spack.cmd import spack.cmd
description="Expand downloaded archive in preparation for install" description = "Expand downloaded archive in preparation for install"
def setup_parser(subparser): def setup_parser(subparser):
subparser.add_argument( subparser.add_argument(

View file

@ -36,25 +36,25 @@
from spack.build_environment import InstallError from spack.build_environment import InstallError
from spack.fetch_strategy import FetchError from spack.fetch_strategy import FetchError
description = "Run package installation as a unit test, output formatted results." description = "Run package install as a unit test, output formatted results."
def setup_parser(subparser): def setup_parser(subparser):
subparser.add_argument('-j', subparser.add_argument(
'--jobs', '-j', '--jobs', action='store', type=int,
action='store', help="Explicitly set number of make jobs. Default is #cpus.")
type=int,
help="Explicitly set number of make jobs. Default is #cpus.")
subparser.add_argument('-n', subparser.add_argument(
'--no-checksum', '-n', '--no-checksum', action='store_true', dest='no_checksum',
action='store_true', help="Do not check packages against checksum")
dest='no_checksum',
help="Do not check packages against checksum")
subparser.add_argument('-o', '--output', action='store', help="test output goes in this file") subparser.add_argument(
'-o', '--output', action='store',
help="test output goes in this file")
subparser.add_argument('package', nargs=argparse.REMAINDER, help="spec of package to install") subparser.add_argument(
'package', nargs=argparse.REMAINDER,
help="spec of package to install")
class TestResult(object): class TestResult(object):
@ -65,6 +65,7 @@ class TestResult(object):
class TestSuite(object): class TestSuite(object):
def __init__(self, filename): def __init__(self, filename):
self.filename = filename self.filename = filename
self.root = ET.Element('testsuite') self.root = ET.Element('testsuite')
@ -75,14 +76,17 @@ def __enter__(self):
def append(self, item): def append(self, item):
if not isinstance(item, TestCase): if not isinstance(item, TestCase):
raise TypeError('only TestCase instances may be appended to a TestSuite instance') raise TypeError(
'only TestCase instances may be appended to TestSuite')
self.tests.append(item) # Append the item to the list of tests self.tests.append(item) # Append the item to the list of tests
def __exit__(self, exc_type, exc_val, exc_tb): def __exit__(self, exc_type, exc_val, exc_tb):
         # Prepare the header for the entire test suite
-        number_of_errors = sum(x.result_type == TestResult.ERRORED for x in self.tests)
+        number_of_errors = sum(
+            x.result_type == TestResult.ERRORED for x in self.tests)
         self.root.set('errors', str(number_of_errors))
-        number_of_failures = sum(x.result_type == TestResult.FAILED for x in self.tests)
+        number_of_failures = sum(
+            x.result_type == TestResult.FAILED for x in self.tests)
         self.root.set('failures', str(number_of_failures))
         self.root.set('tests', str(len(self.tests)))

@@ -112,7 +116,8 @@ def __init__(self, classname, name, time=None):
         self.element.set('time', str(time))
         self.result_type = None

-    def set_result(self, result_type, message=None, error_type=None, text=None):
+    def set_result(self, result_type,
+                   message=None, error_type=None, text=None):
         self.result_type = result_type
         result = TestCase.results[self.result_type]
         if result is not None and result is not TestResult.PASSED:

@@ -155,13 +160,19 @@ def install_single_spec(spec, number_of_jobs):
     # If it is already installed, skip the test
     if spack.repo.get(spec).installed:
         testcase = TestCase(package.name, package.spec.short_spec, time=0.0)
-        testcase.set_result(TestResult.SKIPPED, message='Skipped [already installed]', error_type='already_installed')
+        testcase.set_result(
+            TestResult.SKIPPED,
+            message='Skipped [already installed]',
+            error_type='already_installed')
         return testcase

     # If it relies on dependencies that did not install, skip
     if failed_dependencies(spec):
         testcase = TestCase(package.name, package.spec.short_spec, time=0.0)
-        testcase.set_result(TestResult.SKIPPED, message='Skipped [failed dependencies]', error_type='dep_failed')
+        testcase.set_result(
+            TestResult.SKIPPED,
+            message='Skipped [failed dependencies]',
+            error_type='dep_failed')
         return testcase

     # Otherwise try to install the spec

@@ -177,26 +188,30 @@ def install_single_spec(spec, number_of_jobs):
         testcase = TestCase(package.name, package.spec.short_spec, duration)
         testcase.set_result(TestResult.PASSED)
     except InstallError:
-        # An InstallError is considered a failure (the recipe didn't work correctly)
+        # An InstallError is considered a failure (the recipe didn't work
+        # correctly)
        duration = time.time() - start_time
         # Try to get the log
         lines = fetch_log(package.build_log_path)
         text = '\n'.join(lines)
         testcase = TestCase(package.name, package.spec.short_spec, duration)
-        testcase.set_result(TestResult.FAILED, message='Installation failure', text=text)
+        testcase.set_result(TestResult.FAILED,
+                            message='Installation failure', text=text)
     except FetchError:
         # A FetchError is considered an error (we didn't even start building)
         duration = time.time() - start_time
         testcase = TestCase(package.name, package.spec.short_spec, duration)
-        testcase.set_result(TestResult.ERRORED, message='Unable to fetch package')
+        testcase.set_result(TestResult.ERRORED,
+                            message='Unable to fetch package')
     return testcase


 def get_filename(args, top_spec):
     if not args.output:
-        fname = 'test-{x.name}-{x.version}-{hash}.xml'.format(x=top_spec, hash=top_spec.dag_hash())
+        fname = 'test-{x.name}-{x.version}-{hash}.xml'.format(
+            x=top_spec, hash=top_spec.dag_hash())
         output_directory = join_path(os.getcwd(), 'test-output')
         if not os.path.exists(output_directory):
             os.mkdir(output_directory)
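
The hunks above all feed a JUnit-style XML report: the suite element carries `errors`, `failures`, and `tests` counts, and each `TestCase` becomes a child element. For readers unfamiliar with the format, here is a minimal, self-contained sketch of the same pattern using the standard-library `xml.etree.ElementTree`; the package names and result values are hypothetical, not Spack's actual implementation:

import xml.etree.ElementTree as ET

# Hypothetical per-package outcomes standing in for TestResult values.
results = {'libdwarf': 'PASSED', 'mpileaks': 'FAILED', 'qhull': 'ERRORED'}

root = ET.Element('testsuite')
root.set('errors', str(sum(r == 'ERRORED' for r in results.values())))
root.set('failures', str(sum(r == 'FAILED' for r in results.values())))
root.set('tests', str(len(results)))

for name, result in results.items():
    # One <testcase> per spec, as in TestCase(classname, name, time).
    case = ET.SubElement(root, 'testcase', classname=name,
                         name=name, time='0.0')
    if result == 'FAILED':
        ET.SubElement(case, 'failure', message='Installation failure')
    elif result == 'ERRORED':
        ET.SubElement(case, 'error', message='Unable to fetch package')

print(ET.tostring(root))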

View file

@@ -23,23 +23,23 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 import os
-from pprint import pprint

 from llnl.util.filesystem import join_path, mkdirp
 from llnl.util.tty.colify import colify
-from llnl.util.lang import list_modules

 import spack
 import spack.test
 from spack.fetch_strategy import FetchError

-description ="Run unit tests"
+description = "Run unit tests"


 def setup_parser(subparser):
     subparser.add_argument(
         'names', nargs='*', help="Names of tests to run.")
     subparser.add_argument(
-        '-l', '--list', action='store_true', dest='list', help="Show available tests")
+        '-l', '--list', action='store_true', dest='list',
+        help="Show available tests")
     subparser.add_argument(
         '--createXmlOutput', action='store_true', dest='createXmlOutput',
         help="Create JUnit XML from test results")

@@ -52,6 +52,7 @@ def setup_parser(subparser):

 class MockCache(object):
     def store(self, copyCmd, relativeDst):
         pass

@@ -60,6 +61,7 @@ def fetcher(self, targetPath, digest):

 class MockCacheFetcher(object):
     def set_stage(self, stage):
         pass

@@ -69,6 +71,7 @@ def fetch(self):
     def __str__(self):
         return "[mock fetcher]"


 def test(parser, args):
     if args.list:
         print "Available tests:"

@@ -85,5 +88,5 @@ def test(parser, args):
         if not os.path.exists(outputDir):
             mkdirp(outputDir)

-    spack.cache = MockCache()
+    spack.fetch_cache = MockCache()
     spack.test.run(args.names, outputDir, args.verbose)
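
The `MockCache` swap at the end of this hunk works by duck typing: the real fetch cache is replaced by any object that answers `store` and `fetcher`, and the fetcher it returns only needs `set_stage`, `fetch`, and `__str__`. A minimal sketch of the pattern, assuming nothing beyond what the diff shows:

class MockCacheFetcher(object):
    def set_stage(self, stage):
        pass

    def fetch(self):
        # Tests should never be satisfied from the download cache.
        raise RuntimeError('cache fetch attempted during unit tests')

    def __str__(self):
        return "[mock fetcher]"


class MockCache(object):
    def store(self, copyCmd, relativeDst):
        pass  # deliberately a no-op: tests must not write the cache

    def fetcher(self, targetPath, digest):
        return MockCacheFetcher()


# Anything that reads the module-level cache now gets the mock:
fetch_cache = MockCache()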

View file

@@ -50,25 +50,27 @@ def setup_parser(subparser):
     subparser.add_argument(
         '-f', '--force', action='store_true', dest='force',
         help="Remove regardless of whether other packages depend on this one.")
     subparser.add_argument(
         '-a', '--all', action='store_true', dest='all',
-        help="USE CAREFULLY. Remove ALL installed packages that match each " +
-        "supplied spec. i.e., if you say uninstall libelf, ALL versions of " +  # NOQA: ignore=E501
-        "libelf are uninstalled. This is both useful and dangerous, like rm -r.")  # NOQA: ignore=E501
+        help="USE CAREFULLY. Remove ALL installed packages that match each "
+             "supplied spec. i.e., if you say uninstall libelf, ALL versions "
+             "of libelf are uninstalled. This is both useful and dangerous, "
+             "like rm -r.")
     subparser.add_argument(
         '-d', '--dependents', action='store_true', dest='dependents',
-        help='Also uninstall any packages that depend on the ones given via command line.'  # NOQA: ignore=E501
-    )
+        help='Also uninstall any packages that depend on the ones given '
+             'via command line.')
     subparser.add_argument(
         '-y', '--yes-to-all', action='store_true', dest='yes_to_all',
-        help='Assume "yes" is the answer to every confirmation asked to the user.'  # NOQA: ignore=E501
-    )
+        help='Assume "yes" is the answer to every confirmation requested')
     subparser.add_argument(
         'packages',
         nargs=argparse.REMAINDER,
-        help="specs of packages to uninstall"
-    )
+        help="specs of packages to uninstall")


 def concretize_specs(specs, allow_multiple_matches=False, force=False):

@@ -184,7 +186,8 @@ def uninstall(parser, args):
     uninstall_list = list(set(uninstall_list))

     if has_error:
-        tty.die('You can use spack uninstall --dependents to uninstall these dependencies as well')  # NOQA: ignore=E501
+        tty.die('You can use spack uninstall --dependents '
+                'to uninstall these dependencies as well')

     if not args.yes_to_all:
         tty.msg("The following packages will be uninstalled : ")

View file

@@ -25,13 +25,15 @@
 import argparse
 import spack.modules

-description ="Remove package from environment using module."
+description = "Remove package from environment using module."


 def setup_parser(subparser):
     """Parser is only constructed so that this prints a nice help
        message with -h. """
     subparser.add_argument(
-        'spec', nargs=argparse.REMAINDER, help='Spec of package to unload with modules.')
+        'spec', nargs=argparse.REMAINDER,
+        help='Spec of package to unload with modules.')


 def unload(parser, args):

View file

@@ -25,13 +25,15 @@
 import argparse
 import spack.modules

-description ="Remove package from environment using dotkit."
+description = "Remove package from environment using dotkit."


 def setup_parser(subparser):
     """Parser is only constructed so that this prints a nice help
        message with -h. """
     subparser.add_argument(
-        'spec', nargs=argparse.REMAINDER, help='Spec of package to unuse with dotkit.')
+        'spec', nargs=argparse.REMAINDER,
+        help='Spec of package to unuse with dotkit.')


 def unuse(parser, args):

View file

@@ -22,28 +22,28 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-import sys

 import llnl.util.tty as tty

 import spack
 import spack.url
 from spack.util.web import find_versions_of_archive

-description = "Show parsing of a URL, optionally spider web for other versions."
+description = "Show parsing of a URL, optionally spider web for versions."


 def setup_parser(subparser):
     subparser.add_argument('url', help="url of a package archive")
     subparser.add_argument(
-        '-s', '--spider', action='store_true', help="Spider the source page for versions.")
+        '-s', '--spider', action='store_true',
+        help="Spider the source page for versions.")


 def print_name_and_version(url):
     name, ns, nl, ntup, ver, vs, vl, vtup = spack.url.substitution_offsets(url)
-    underlines = [" "] * max(ns+nl, vs+vl)
-    for i in range(ns, ns+nl):
+    underlines = [" "] * max(ns + nl, vs + vl)
+    for i in range(ns, ns + nl):
         underlines[i] = '-'
-    for i in range(vs, vs+vl):
+    for i in range(vs, vs + vl):
         underlines[i] = '~'

     print "    %s" % url

View file

@@ -22,12 +22,12 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-import sys

 import spack
 import spack.url

 description = "Inspect urls used by packages in spack."


 def setup_parser(subparser):
     subparser.add_argument(
         '-c', '--color', action='store_true',

@@ -53,6 +53,7 @@ def urls(parser, args):
     for url in sorted(urls):
         if args.color or args.extrapolation:
-            print spack.url.color_url(url, subs=args.extrapolation, errors=True)
+            print spack.url.color_url(
+                url, subs=args.extrapolation, errors=True)
         else:
             print url

View file

@@ -25,13 +25,15 @@
 import argparse
 import spack.modules

-description ="Add package to environment using dotkit."
+description = "Add package to environment using dotkit."


 def setup_parser(subparser):
     """Parser is only constructed so that this prints a nice help
        message with -h. """
     subparser.add_argument(
-        'spec', nargs=argparse.REMAINDER, help='Spec of package to use with dotkit.')
+        'spec', nargs=argparse.REMAINDER,
+        help='Spec of package to use with dotkit.')


 def use(parser, args):

View file

@@ -22,15 +22,16 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-import os
 from llnl.util.tty.colify import colify
 import llnl.util.tty as tty
 import spack

-description ="List available versions of a package"
+description = "List available versions of a package"


 def setup_parser(subparser):
-    subparser.add_argument('package', metavar='PACKAGE', help='Package to list versions for')
+    subparser.add_argument('package', metavar='PACKAGE',
+                           help='Package to list versions for')


 def versions(parser, args):

View file

@@ -25,10 +25,8 @@
 import os
 import re
 import itertools
-from datetime import datetime

 import llnl.util.tty as tty
-from llnl.util.lang import memoized
 from llnl.util.filesystem import join_path

 import spack.error

@@ -37,10 +35,10 @@
 from spack.util.multiproc import parmap
 from spack.util.executable import *
 from spack.util.environment import get_path
-from spack.version import Version

 __all__ = ['Compiler', 'get_compiler_version']


 def _verify_executables(*paths):
     for path in paths:
         if not os.path.isfile(path) and os.access(path, os.X_OK):

@@ -49,8 +47,9 @@ def _verify_executables(*paths):
 _version_cache = {}


 def get_compiler_version(compiler_path, version_arg, regex='(.*)'):
-    if not compiler_path in _version_cache:
+    if compiler_path not in _version_cache:
         compiler = Executable(compiler_path)
         output = compiler(version_arg, output=str, error=str)

@@ -130,11 +129,6 @@ def check(exe):
         else:
             self.fc = check(paths[3])

-        #self.cc = check(cc)
-        #self.cxx = check(cxx)
-        #self.f77 = check(f77)
-        #self.fc = check(fc)

         # Unfortunately have to make sure these params are accepted
         # in the same order they are returned by sorted(flags)
         # in compilers/__init__.py

@@ -158,31 +152,30 @@ def version(self):
     @property
     def openmp_flag(self):
         # If it is not overridden, assume it is not supported and warn the user
-        tty.die("The compiler you have chosen does not currently support OpenMP.",
-                "If you think it should, please edit the compiler subclass and",
-                "submit a pull request or issue.")
+        tty.die(
+            "The compiler you have chosen does not currently support OpenMP.",
+            "If you think it should, please edit the compiler subclass and",
+            "submit a pull request or issue.")

     # This property should be overridden in the compiler subclass if
     # C++11 is supported by that compiler
     @property
     def cxx11_flag(self):
         # If it is not overridden, assume it is not supported and warn the user
-        tty.die("The compiler you have chosen does not currently support C++11.",
-                "If you think it should, please edit the compiler subclass and",
-                "submit a pull request or issue.")
+        tty.die(
+            "The compiler you have chosen does not currently support C++11.",
+            "If you think it should, please edit the compiler subclass and",
+            "submit a pull request or issue.")

     # This property should be overridden in the compiler subclass if
     # C++14 is supported by that compiler
     @property
     def cxx14_flag(self):
         # If it is not overridden, assume it is not supported and warn the user
-        tty.die("The compiler you have chosen does not currently support C++14.",
-                "If you think it should, please edit the compiler subclass and",
-                "submit a pull request or issue.")
+        tty.die(
+            "The compiler you have chosen does not currently support C++14.",
+            "If you think it should, please edit the compiler subclass and",
+            "submit a pull request or issue.")

     #
     # Compiler classes have methods for querying the version of

@@ -191,7 +184,6 @@ def cxx14_flag(self):
     # Compiler *instances* are just data objects, and can only be
     # constructed from an actual set of executables.
     #

     @classmethod
     def default_version(cls, cc):
         """Override just this to override all compiler version functions."""
@@ -258,16 +250,19 @@ def check(key):
                 version = detect_version(full_path)
                 return (version, prefix, suffix, full_path)
             except ProcessError, e:
-                tty.debug("Couldn't get version for compiler %s" % full_path, e)
+                tty.debug(
+                    "Couldn't get version for compiler %s" % full_path, e)
                 return None
             except Exception, e:
                 # Catching "Exception" here is fine because it just
                 # means something went wrong running a candidate executable.
-                tty.debug("Error while executing candidate compiler %s" % full_path,
-                          "%s: %s" %(e.__class__.__name__, e))
+                tty.debug("Error while executing candidate compiler %s"
+                          % full_path,
+                          "%s: %s" % (e.__class__.__name__, e))
                 return None

-    successful = [key for key in parmap(check, checks) if key is not None]
+    successful = [k for k in parmap(check, checks) if k is not None]

     # The 'successful' list is ordered like the input paths.
     # Reverse it here so that the dict creation (last insert wins)
     # does not spoil the intended precedence.

@@ -278,20 +273,23 @@ def __repr__(self):
         """Return a string representation of the compiler toolchain."""
         return self.__str__()

     def __str__(self):
         """Return a string representation of the compiler toolchain."""
         return "%s(%s)" % (
-            self.name, '\n     '.join((str(s) for s in (self.cc, self.cxx, self.f77, self.fc, self.modules, str(self.operating_system)))))
+            self.name, '\n     '.join((str(s) for s in (
+                self.cc, self.cxx, self.f77, self.fc, self.modules,
+                str(self.operating_system)))))


 class CompilerAccessError(spack.error.SpackError):

     def __init__(self, path):
         super(CompilerAccessError, self).__init__(
             "'%s' is not a valid compiler." % path)


 class InvalidCompilerError(spack.error.SpackError):

     def __init__(self):
         super(InvalidCompilerError, self).__init__(
             "Compiler has no executables.")

View file

@@ -26,15 +26,9 @@
 system and configuring Spack to use multiple compilers.
 """
 import imp
-import os
 import platform
-import copy
-import hashlib
-import base64
-import yaml
-import sys

-from llnl.util.lang import memoized, list_modules
+from llnl.util.lang import list_modules
 from llnl.util.filesystem import join_path

 import spack

@@ -43,11 +37,7 @@
 import spack.config
 import spack.architecture

-from spack.util.multiproc import parmap
-from spack.compiler import Compiler
-from spack.util.executable import which
 from spack.util.naming import mod_to_class
-from spack.util.environment import get_path

 _imported_compilers_module = 'spack.compilers'
 _path_instance_vars = ['cc', 'cxx', 'f77', 'fc']

@@ -73,7 +63,8 @@ def _to_dict(compiler):
     """Return a dict version of compiler suitable to insert in YAML."""
     d = {}
     d['spec'] = str(compiler.spec)
-    d['paths'] = dict( (attr, getattr(compiler, attr, None)) for attr in _path_instance_vars )
+    d['paths'] = dict((attr, getattr(compiler, attr, None))
+                      for attr in _path_instance_vars)
     d['operating_system'] = str(compiler.operating_system)
     d['modules'] = compiler.modules if compiler.modules else []

@@ -140,15 +131,19 @@ def remove_compiler_from_config(compiler_spec, scope=None):
       - compiler_specs: a list of CompilerSpec objects.
       - scope: configuration scope to modify.
     """
+    # Need a better way for this
+    global _cache_config_file

     compiler_config = get_compiler_config(scope)
     config_length = len(compiler_config)

-    filtered_compiler_config = [comp for comp in compiler_config
-        if spack.spec.CompilerSpec(comp['compiler']['spec']) != compiler_spec]
-    # Need a better way for this
-    global _cache_config_file
-    _cache_config_file = filtered_compiler_config  # Update the cache for changes
-    if len(filtered_compiler_config) == config_length:  # No items removed
+    filtered_compiler_config = [
+        comp for comp in compiler_config
+        if spack.spec.CompilerSpec(comp['compiler']['spec']) != compiler_spec]
+
+    # Update the cache for changes
+    _cache_config_file = filtered_compiler_config
+    if len(filtered_compiler_config) == config_length:  # No items removed
         CompilerSpecInsufficientlySpecificError(compiler_spec)
     spack.config.update_config('compilers', filtered_compiler_config, scope)

@@ -158,7 +153,8 @@ def all_compilers_config(scope=None, init_config=True):
     available to build with.  These are instances of CompilerSpec.
     """
     # Get compilers for this architecture.
-    global _cache_config_file  # Create a cache of the config file so we don't load all the time.
+    # Create a cache of the config file so we don't load all the time.
+    global _cache_config_file
     if not _cache_config_file:
         _cache_config_file = get_compiler_config(scope, init_config)
     return _cache_config_file

@@ -236,7 +232,8 @@ def get_compilers(cspec):
             continue
         items = items['compiler']

-        if not ('paths' in items and all(n in items['paths'] for n in _path_instance_vars)):
+        if not ('paths' in items and
+                all(n in items['paths'] for n in _path_instance_vars)):
             raise InvalidCompilerConfigurationError(cspec)

         cls = class_for_compiler_name(cspec.name)

@@ -254,10 +251,10 @@ def get_compilers(cspec):
             mods = []

         if 'operating_system' in items:
-            operating_system = spack.architecture._operating_system_from_dict(items['operating_system'], platform)
+            os = spack.architecture._operating_system_from_dict(
+                items['operating_system'], platform)
         else:
-            operating_system = None
+            os = None

         alias = items['alias'] if 'alias' in items else None

@@ -266,7 +263,8 @@ def get_compilers(cspec):
             if f in items:
                 flags[f] = items[f]

-        compilers.append(cls(cspec, operating_system, compiler_paths, mods, alias, **flags))
+        compilers.append(
+            cls(cspec, os, compiler_paths, mods, alias, **flags))

     return compilers

@@ -275,7 +273,6 @@ def get_compilers(cspec):
     for cspec in matches:
         compilers.extend(get_compilers(cspec))
     return compilers
-    # return [get_compilers(cspec) for cspec in matches]


 @_auto_compiler_spec

@@ -285,8 +282,9 @@ def compiler_for_spec(compiler_spec, arch):
     operating_system = arch.platform_os
     assert(compiler_spec.concrete)

-    compilers = [c for c in compilers_for_spec(compiler_spec, platform=arch.platform)
-                 if c.operating_system == operating_system]
+    compilers = [
+        c for c in compilers_for_spec(compiler_spec, platform=arch.platform)
+        if c.operating_system == operating_system]
     if len(compilers) < 1:
         raise NoCompilerForSpecError(compiler_spec, operating_system)
     if len(compilers) > 1:

@@ -321,11 +319,13 @@ def all_os_classes():
     return classes


 def all_compiler_types():
     return [class_for_compiler_name(c) for c in supported_compilers()]


 class InvalidCompilerConfigurationError(spack.error.SpackError):

     def __init__(self, compiler_spec):
         super(InvalidCompilerConfigurationError, self).__init__(
             "Invalid configuration for [compiler \"%s\"]: " % compiler_spec,

@@ -335,14 +335,18 @@ def __init__(self, compiler_spec):
 class NoCompilersError(spack.error.SpackError):

     def __init__(self):
-        super(NoCompilersError, self).__init__("Spack could not find any compilers!")
+        super(NoCompilersError, self).__init__(
+            "Spack could not find any compilers!")


 class NoCompilerForSpecError(spack.error.SpackError):

     def __init__(self, compiler_spec, target):
-        super(NoCompilerForSpecError, self).__init__("No compilers for operating system %s satisfy spec %s" % (
-            target, compiler_spec))
+        super(NoCompilerForSpecError, self).__init__(
+            "No compilers for operating system %s satisfy spec %s"
+            % (target, compiler_spec))


 class CompilerSpecInsufficientlySpecificError(spack.error.SpackError):

     def __init__(self, compiler_spec):
-        super(CompilerSpecInsufficientlySpecificError, self).__init__("Multiple compilers satisfy spec %s",
-                                                                      compiler_spec)
+        super(CompilerSpecInsufficientlySpecificError, self).__init__(
+            "Multiple compilers satisfy spec %s" % compiler_spec)
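
`_to_dict` in this file flattens a compiler object into plain dicts and lists so it can round-trip through YAML. A sketch of that shape against a stand-in object (`FakeCompiler` is hypothetical; the real input is a `spack.compiler.Compiler`):

_path_instance_vars = ['cc', 'cxx', 'f77', 'fc']


class FakeCompiler(object):
    spec = 'gcc@4.9.3'
    cc, cxx = '/usr/bin/gcc', '/usr/bin/g++'
    f77 = fc = '/usr/bin/gfortran'
    operating_system = 'ubuntu14'
    modules = None


def _to_dict(compiler):
    d = {}
    d['spec'] = str(compiler.spec)
    d['paths'] = dict((attr, getattr(compiler, attr, None))
                      for attr in _path_instance_vars)
    d['operating_system'] = str(compiler.operating_system)
    d['modules'] = compiler.modules if compiler.modules else []
    return {'compiler': d}

print(_to_dict(FakeCompiler()))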

View file

@@ -29,6 +29,7 @@
 import llnl.util.tty as tty
 from spack.version import ver


 class Clang(Compiler):
     # Subclasses use possible names of C compiler
     cc_names = ['clang']

@@ -43,11 +44,12 @@ class Clang(Compiler):
     fc_names = []

     # Named wrapper links within spack.build_env_path
-    link_paths = { 'cc' : 'clang/clang',
-                   'cxx' : 'clang/clang++',
-                   # Use default wrappers for fortran, in case provided in compilers.yaml
-                   'f77' : 'f77',
-                   'fc' : 'f90' }
+    link_paths = {'cc': 'clang/clang',
+                  'cxx': 'clang/clang++',
+                  # Use default wrappers for fortran, in case provided in
+                  # compilers.yaml
+                  'f77': 'f77',
+                  'fc': 'f90'}

     @property
     def is_apple(self):

View file

@@ -1,34 +1,33 @@
-##############################################################################}
+##############################################################################
-# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
 # Produced at the Lawrence Livermore National Laboratory.
 #
 # This file is part of Spack.
-# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
 # LLNL-CODE-647188
 #
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
 # Please also see the LICENSE file for our notice and the LGPL.
 #
 # This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License (as published by
-# the Free Software Foundation) version 2.1 dated February 1999.
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
 #
 # This program is distributed in the hope that it will be useful, but
 # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
-# conditions of the GNU General Public License for more details.
+# conditions of the GNU Lesser General Public License for more details.
 #
-# You should have received a copy of the GNU Lesser General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-import llnl.util.tty as tty
-#from spack.build_environment import load_module
 from spack.compiler import *
-#from spack.version import ver


 class Craype(Compiler):
+    """Cray programming environment compiler."""
+
     # Subclasses use possible names of C compiler
     cc_names = ['cc']

@@ -47,12 +46,11 @@ class Craype(Compiler):
     PrgEnv = 'PrgEnv-cray'
     PrgEnv_compiler = 'craype'

-    link_paths = { 'cc' : 'cc',
-                   'cxx' : 'c++',
-                   'f77' : 'f77',
-                   'fc' : 'fc'}
+    link_paths = {'cc': 'cc',
+                  'cxx': 'c++',
+                  'f77': 'f77',
+                  'fc': 'fc'}

     @classmethod
     def default_version(cls, comp):
         return get_compiler_version(comp, r'([Vv]ersion).*(\d+(\.\d+)+)')

View file

@@ -26,6 +26,7 @@
 from spack.compiler import *
 from spack.version import ver


 class Gcc(Compiler):
     # Subclasses use possible names of C compiler
     cc_names = ['gcc']

@@ -44,10 +45,10 @@ class Gcc(Compiler):
     suffixes = [r'-mp-\d\.\d', r'-\d\.\d', r'-\d']

     # Named wrapper links within spack.build_env_path
-    link_paths = {'cc' : 'gcc/gcc',
-                  'cxx' : 'gcc/g++',
-                  'f77' : 'gcc/gfortran',
-                  'fc' : 'gcc/gfortran' }
+    link_paths = {'cc': 'gcc/gcc',
+                  'cxx': 'gcc/g++',
+                  'f77': 'gcc/gfortran',
+                  'fc': 'gcc/gfortran'}

     PrgEnv = 'PrgEnv-gnu'
     PrgEnv_compiler = 'gcc'

@@ -79,7 +80,6 @@ def fc_version(cls, fc):
         # older gfortran versions don't have simple dumpversion output.
         r'(?:GNU Fortran \(GCC\))?(\d+\.\d+(?:\.\d+)?)')

     @classmethod
     def f77_version(cls, f77):
         return cls.fc_version(f77)
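
Every compiler class in these hunks follows the same contract: candidate executable names per language, plus a `link_paths` table mapping each language slot to its wrapper under `spack.build_env_path`. A sketch of a hypothetical subclass written against that convention (the base class here is a trimmed stand-in, not Spack's real `Compiler`):

class Compiler(object):
    """Trimmed stand-in for spack.compiler.Compiler."""
    cc_names = []
    cxx_names = []
    f77_names = []
    fc_names = []
    link_paths = {}


class Acme(Compiler):          # hypothetical vendor compiler
    cc_names = ['acmecc']
    cxx_names = ['acmec++']
    f77_names = ['acmef77']
    fc_names = ['acmef90']

    # Named wrapper links within spack.build_env_path
    link_paths = {'cc': 'acme/acmecc',
                  'cxx': 'acme/acmec++',
                  'f77': 'acme/acmef77',
                  'fc': 'acme/acmef90'}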

View file

@@ -26,6 +26,7 @@
 import llnl.util.tty as tty
 from spack.version import ver


 class Intel(Compiler):
     # Subclasses use possible names of C compiler
     cc_names = ['icc']

@@ -40,10 +41,10 @@ class Intel(Compiler):
     fc_names = ['ifort']

     # Named wrapper links within spack.build_env_path
-    link_paths = { 'cc' : 'intel/icc',
-                   'cxx' : 'intel/icpc',
-                   'f77' : 'intel/ifort',
-                   'fc' : 'intel/ifort' }
+    link_paths = {'cc': 'intel/icc',
+                  'cxx': 'intel/icpc',
+                  'f77': 'intel/ifort',
+                  'fc': 'intel/ifort'}

     PrgEnv = 'PrgEnv-intel'
     PrgEnv_compiler = 'intel'

@@ -64,7 +65,6 @@ def cxx11_flag(self):
         else:
             return "-std=c++11"

     @classmethod
     def default_version(cls, comp):
         """The '--version' option seems to be the most consistent one
View file

@@ -23,7 +23,7 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 from spack.compiler import *
-import llnl.util.tty as tty


 class Nag(Compiler):
     # Subclasses use possible names of C compiler

@@ -39,11 +39,12 @@ class Nag(Compiler):
     fc_names = ['nagfor']

     # Named wrapper links within spack.build_env_path
-    link_paths = { # Use default wrappers for C and C++, in case provided in compilers.yaml
-        'cc' : 'cc',
-        'cxx' : 'c++',
-        'f77' : 'nag/nagfor',
-        'fc' : 'nag/nagfor' }
+    # Use default wrappers for C and C++, in case provided in compilers.yaml
+    link_paths = {
+        'cc': 'cc',
+        'cxx': 'c++',
+        'f77': 'nag/nagfor',
+        'fc': 'nag/nagfor'}

     @property
     def openmp_flag(self):

@@ -71,9 +72,8 @@ def default_version(self, comp):
         """The '-V' option works for nag compilers.
            Output looks like this::

                NAG Fortran Compiler Release 6.0(Hibiya) Build 1037
                Product NPL6A60NA for x86-64 Linux
-               Copyright 1990-2015 The Numerical Algorithms Group Ltd., Oxford, U.K.
         """
         return get_compiler_version(
             comp, '-V', r'NAG Fortran Compiler Release ([0-9.]+)')

View file

@@ -23,7 +23,7 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 from spack.compiler import *
-import llnl.util.tty as tty


 class Pgi(Compiler):
     # Subclasses use possible names of C compiler

@@ -39,17 +39,14 @@ class Pgi(Compiler):
     fc_names = ['pgfortran', 'pgf95', 'pgf90']

     # Named wrapper links within spack.build_env_path
-    link_paths = { 'cc' : 'pgi/pgcc',
-                   'cxx' : 'pgi/pgc++',
-                   'f77' : 'pgi/pgfortran',
-                   'fc' : 'pgi/pgfortran' }
+    link_paths = {'cc': 'pgi/pgcc',
+                  'cxx': 'pgi/pgc++',
+                  'f77': 'pgi/pgfortran',
+                  'fc': 'pgi/pgfortran'}

     PrgEnv = 'PrgEnv-pgi'
     PrgEnv_compiler = 'pgi'

     @property
     def openmp_flag(self):
         return "-mp"

View file

@@ -26,24 +26,26 @@
 import llnl.util.tty as tty
 from spack.version import ver


 class Xl(Compiler):
     # Subclasses use possible names of C compiler
-    cc_names = ['xlc','xlc_r']
+    cc_names = ['xlc', 'xlc_r']

     # Subclasses use possible names of C++ compiler
-    cxx_names = ['xlC','xlC_r','xlc++','xlc++_r']
+    cxx_names = ['xlC', 'xlC_r', 'xlc++', 'xlc++_r']

     # Subclasses use possible names of Fortran 77 compiler
-    f77_names = ['xlf','xlf_r']
+    f77_names = ['xlf', 'xlf_r']

     # Subclasses use possible names of Fortran 90 compiler
-    fc_names = ['xlf90','xlf90_r','xlf95','xlf95_r','xlf2003','xlf2003_r','xlf2008','xlf2008_r']
+    fc_names = ['xlf90', 'xlf90_r', 'xlf95', 'xlf95_r',
+                'xlf2003', 'xlf2003_r', 'xlf2008', 'xlf2008_r']

     # Named wrapper links within spack.build_env_path
-    link_paths = { 'cc' : 'xl/xlc',
-                   'cxx' : 'xl/xlc++',
-                   'f77' : 'xl/xlf',
-                   'fc' : 'xl/xlf90' }
+    link_paths = {'cc': 'xl/xlc',
+                  'cxx': 'xl/xlc++',
+                  'f77': 'xl/xlf',
+                  'fc': 'xl/xlf90'}

     @property
     def openmp_flag(self):

@@ -56,7 +58,6 @@ def cxx11_flag(self):
         else:
             return "-qlanglvl=extended0x"

     @classmethod
     def default_version(cls, comp):
         """The '-qversion' is the standard option for XL compilers.

@@ -82,29 +83,28 @@ def default_version(cls, comp):
         """
         return get_compiler_version(
-            comp, '-qversion',r'([0-9]?[0-9]\.[0-9])')
+            comp, '-qversion', r'([0-9]?[0-9]\.[0-9])')
     @classmethod
     def fc_version(cls, fc):
-        """The fortran and C/C++ versions of the XL compiler are always two units apart.
-           By this we mean that the fortran release that goes with XL C/C++ 11.1 is 13.1.
-           Having such a difference in version number is confusing spack quite a lot.
-           Most notably if you keep the versions as is the default xl compiler will only
-           have fortran and no C/C++.
-           So we associate the Fortran compiler with the version associated to the C/C++
-           compiler.
-           One last stumble. Version numbers over 10 have at least a .1 those under 10
-           a .0. There is no xlf 9.x or under currently available. BG/P and BG/L can
-           such a compiler mix and possibly older version of AIX and linux on power.
+        """The fortran and C/C++ versions of the XL compiler are always
+           two units apart.  By this we mean that the fortran release that
+           goes with XL C/C++ 11.1 is 13.1.  Having such a difference in
+           version number is confusing spack quite a lot.  Most notably,
+           if you keep the versions as is, the default xl compiler will
+           only have fortran and no C/C++.  So we associate the Fortran
+           compiler with the version associated to the C/C++ compiler.
+           One last stumble.  Version numbers over 10 have at least a .1,
+           those under 10 a .0.  There is no xlf 9.x or under currently
+           available.  BG/P and BG/L can have such a compiler mix, and
+           possibly older versions of AIX and linux on power.
         """
-        fver = get_compiler_version(fc, '-qversion',r'([0-9]?[0-9]\.[0-9])')
+        fver = get_compiler_version(fc, '-qversion', r'([0-9]?[0-9]\.[0-9])')
         cver = float(fver) - 2
-        if cver < 10 :
+        if cver < 10:
             cver = cver - 0.1
         return str(cver)

     @classmethod
     def f77_version(cls, f77):
         return cls.fc_version(f77)
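
The docstring's offset rule is easier to see with numbers: XL Fortran 15.1 pairs with XL C/C++ 13.1 (subtract 2), while 11.1 pairs with 9.0 (subtract 2, then another 0.1 once the result drops below 10). A standalone sketch of just the arithmetic:

def fc_to_cc_version(fver):
    """Map an XL Fortran release to its companion C/C++ release."""
    cver = float(fver) - 2    # the two products are two units apart
    if cver < 10:
        cver = cver - 0.1     # releases under 10 end in .0, not .1
    return str(cver)

assert fc_to_cc_version('15.1') == '13.1'
assert fc_to_cc_version('11.1') == '9.0'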

View file

@@ -40,12 +40,12 @@
 import spack.error
 from spack.version import *
 from functools import partial
-from spec import DependencyMap
 from itertools import chain
 from spack.config import *


 class DefaultConcretizer(object):
     """This class doesn't have any state, it just provides some methods for
        concretization.  You can subclass it to override just some of the
        default concretization strategies, or you can override all of them.

@@ -61,14 +61,19 @@ def _valid_virtuals_and_externals(self, spec):
         if not providers:
             raise UnsatisfiableProviderSpecError(providers[0], spec)
         spec_w_preferred_providers = find_spec(
-            spec, lambda(x): spack.pkgsort.spec_has_preferred_provider(x.name, spec.name))
+            spec,
+            lambda x: spack.pkgsort.spec_has_preferred_provider(
+                x.name, spec.name))
         if not spec_w_preferred_providers:
             spec_w_preferred_providers = spec
-        provider_cmp = partial(spack.pkgsort.provider_compare, spec_w_preferred_providers.name, spec.name)
+        provider_cmp = partial(spack.pkgsort.provider_compare,
+                               spec_w_preferred_providers.name,
+                               spec.name)
         candidates = sorted(providers, cmp=provider_cmp)

-        # For each candidate package, if it has externals, add those to the usable list.
-        # if it's not buildable, then *only* add the externals.
+        # For each candidate package, if it has externals, add those
+        # to the usable list.  if it's not buildable, then *only* add
+        # the externals.
         usable = []
         for cspec in candidates:
             if is_spec_buildable(cspec):

@@ -85,7 +90,7 @@ def _valid_virtuals_and_externals(self, spec):
         def cmp_externals(a, b):
             if a.name != b.name and (not a.external or a.external_module and
                                      not b.external and b.external_module):
                 # We're choosing between different providers, so
                 # maintain order from provider sort
                 return candidates.index(a) - candidates.index(b)

@@ -114,26 +119,26 @@ def choose_virtual_or_external(self, spec):
         # Find the nearest spec in the dag that has a compiler.  We'll
         # use that spec to calibrate compiler compatibility.
-        abi_exemplar = find_spec(spec, lambda(x): x.compiler)
+        abi_exemplar = find_spec(spec, lambda x: x.compiler)
         if not abi_exemplar:
             abi_exemplar = spec.root

         # Make a list including ABI compatibility of specs with the exemplar.
         strict = [spack.abi.compatible(c, abi_exemplar) for c in candidates]
-        loose = [spack.abi.compatible(c, abi_exemplar, loose=True) for c in candidates]
+        loose = [spack.abi.compatible(c, abi_exemplar, loose=True)
+                 for c in candidates]
         keys = zip(strict, loose, candidates)

         # Sort candidates from most to least compatibility.
         #   Note:
         #     1. We reverse because True > False.
         #     2. Sort is stable, so c's keep their order.
-        keys.sort(key=lambda k:k[:2], reverse=True)
+        keys.sort(key=lambda k: k[:2], reverse=True)

         # Pull the candidates back out and return them in order
-        candidates = [c for s,l,c in keys]
+        candidates = [c for s, l, c in keys]
         return candidates
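
The `(strict, loose)` sort above is a two-level ranking: reversing works because `True > False`, and since Python's sort is stable, candidates that tie keep their provider order. A sketch with hypothetical compatibility data:

candidates = ['clang-built', 'gcc-built', 'system']
strict = [False, True, False]   # hypothetical strict-ABI matches
loose = [True, True, False]     # hypothetical loose-ABI matches

keys = list(zip(strict, loose, candidates))
# key=k[:2] sorts on the two booleans only; reverse puts True first.
keys.sort(key=lambda k: k[:2], reverse=True)

candidates = [c for s, l, c in keys]
print(candidates)   # ['gcc-built', 'clang-built', 'system']
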
     def concretize_version(self, spec):
         """If the spec is already concrete, return.  Otherwise take
            the preferred version from spackconfig, and default to the package's

@@ -167,7 +172,8 @@ def prefer_key(v):
         if valid_versions:
             # Disregard @develop and take the next valid version
-            if ver(valid_versions[0]) == ver('develop') and len(valid_versions) > 1:
+            if ver(valid_versions[0]) == ver('develop') and \
+                    len(valid_versions) > 1:
                 spec.versions = ver([valid_versions[1]])
             else:
                 spec.versions = ver([valid_versions[0]])

@@ -193,40 +199,45 @@ def prefer_key(v):
         return True   # Things changed

     def _concretize_operating_system(self, spec):
-        platform = spec.architecture.platform
         if spec.architecture.platform_os is not None and isinstance(
-                spec.architecture.platform_os,spack.architecture.OperatingSystem):
+                spec.architecture.platform_os,
+                spack.architecture.OperatingSystem):
             return False

         if spec.root.architecture and spec.root.architecture.platform_os:
-            if isinstance(spec.root.architecture.platform_os,spack.architecture.OperatingSystem):
-                spec.architecture.platform_os = spec.root.architecture.platform_os
+            if isinstance(spec.root.architecture.platform_os,
+                          spack.architecture.OperatingSystem):
+                spec.architecture.platform_os = \
+                    spec.root.architecture.platform_os
         else:
-            spec.architecture.platform_os = spec.architecture.platform.operating_system('default_os')
-        return True #changed
+            spec.architecture.platform_os = \
+                spec.architecture.platform.operating_system('default_os')
+        return True  # changed

     def _concretize_target(self, spec):
-        platform = spec.architecture.platform
         if spec.architecture.target is not None and isinstance(
                 spec.architecture.target, spack.architecture.Target):
             return False
         if spec.root.architecture and spec.root.architecture.target:
-            if isinstance(spec.root.architecture.target,spack.architecture.Target):
+            if isinstance(spec.root.architecture.target,
+                          spack.architecture.Target):
                 spec.architecture.target = spec.root.architecture.target
         else:
-            spec.architecture.target = spec.architecture.platform.target('default_target')
-        return True #changed
+            spec.architecture.target = spec.architecture.platform.target(
+                'default_target')
+        return True  # changed

     def _concretize_platform(self, spec):
         if spec.architecture.platform is not None and isinstance(
                 spec.architecture.platform, spack.architecture.Platform):
             return False
         if spec.root.architecture and spec.root.architecture.platform:
-            if isinstance(spec.root.architecture.platform,spack.architecture.Platform):
+            if isinstance(spec.root.architecture.platform,
+                          spack.architecture.Platform):
                 spec.architecture.platform = spec.root.architecture.platform
         else:
             spec.architecture.platform = spack.architecture.platform()
-        return True #changed?
+        return True  # changed?

     def concretize_architecture(self, spec):
         """If the spec is empty provide the defaults of the platform.  If the

@@ -245,25 +256,29 @@ def concretize_architecture(self, spec):
             return True

         # Concretize the operating_system and target based on the spec
         ret = any((self._concretize_platform(spec),
                    self._concretize_operating_system(spec),
                    self._concretize_target(spec)))
         return ret

     def concretize_variants(self, spec):
         """If the spec already has variants filled in, return.  Otherwise, add
-           the default variants from the package specification.
+           the user preferences from packages.yaml or the default variants from
+           the package specification.
         """
         changed = False
+        preferred_variants = spack.pkgsort.spec_preferred_variants(
+            spec.package_class.name)
         for name, variant in spec.package_class.variants.items():
             if name not in spec.variants:
-                spec.variants[name] = spack.spec.VariantSpec(name, variant.default)
                 changed = True
+                if name in preferred_variants:
+                    spec.variants[name] = preferred_variants.get(name)
+                else:
+                    spec.variants[name] = \
+                        spack.spec.VariantSpec(name, variant.default)
         return changed
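
The new `concretize_variants` body gives `packages.yaml` preferences priority over package defaults for any variant the spec leaves open. The lookup order reduces to a sketch like this (the variant data is hypothetical):

package_defaults = {'shared': True, 'debug': False}  # from package.py
preferred = {'debug': True}                          # from packages.yaml
spec_variants = {}                                   # what the user typed

for name, default in package_defaults.items():
    if name not in spec_variants:
        # A user preference wins; the package default is the fallback.
        spec_variants[name] = preferred.get(name, default)

print(sorted(spec_variants.items()))  # [('debug', True), ('shared', True)]
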
     def concretize_compiler(self, spec):
         """If the spec already has a compiler, we're done.  If not, then take
            the compiler used for the nearest ancestor with a compiler

@@ -278,30 +293,32 @@ def concretize_compiler(self, spec):
         """
         # Pass on concretizing the compiler if the target is not yet determined
         if not spec.architecture.platform_os:
-            #Although this usually means changed, this means awaiting other changes
+            # Although this usually means changed, this means awaiting other
+            # changes
             return True

         # Only use a matching compiler if it is of the proper style
-        # Takes advantage of the proper logic already existing in compiler_for_spec
-        # Should think whether this can be more efficient
+        # Takes advantage of the proper logic already existing in
+        # compiler_for_spec.  Should think whether this can be more
+        # efficient
         def _proper_compiler_style(cspec, arch):
             platform = arch.platform
             compilers = spack.compilers.compilers_for_spec(cspec,
                                                            platform=platform)
             return filter(lambda c: c.operating_system ==
                           arch.platform_os, compilers)
-            #return compilers
+            # return compilers

         all_compilers = spack.compilers.all_compilers()

         if (spec.compiler and
             spec.compiler.concrete and
                 spec.compiler in all_compilers):
             return False

-        #Find the another spec that has a compiler, or the root if none do
-        other_spec = spec if spec.compiler else find_spec(spec, lambda(x) : x.compiler)
+        # Find another spec that has a compiler, or the root if none do
+        other_spec = spec if spec.compiler else find_spec(
+            spec, lambda x: x.compiler)
         if not other_spec:
             other_spec = spec.root

@@ -313,24 +330,30 @@ def _proper_compiler_style(cspec, arch):
             spec.compiler = other_compiler.copy()
             return True

-        # Filter the compilers into a sorted list based on the compiler_order from spackconfig
-        compiler_list = all_compilers if not other_compiler else spack.compilers.find(other_compiler)
-        cmp_compilers = partial(spack.pkgsort.compiler_compare, other_spec.name)
+        # Filter the compilers into a sorted list based on the compiler_order
+        # from spackconfig
+        compiler_list = all_compilers if not other_compiler else \
+            spack.compilers.find(other_compiler)
+        cmp_compilers = partial(
+            spack.pkgsort.compiler_compare, other_spec.name)
         matches = sorted(compiler_list, cmp=cmp_compilers)
         if not matches:
-            raise UnavailableCompilerVersionError(other_compiler)
+            arch = spec.architecture
+            raise UnavailableCompilerVersionError(other_compiler,
+                                                  arch.platform_os)

         # copy concrete version into other_compiler
         index = 0
         while not _proper_compiler_style(matches[index], spec.architecture):
             index += 1
             if index == len(matches) - 1:
-                raise NoValidVersionError(spec)
+                arch = spec.architecture
+                raise UnavailableCompilerVersionError(spec.compiler,
+                                                      arch.platform_os)
         spec.compiler = matches[index].copy()
         assert(spec.compiler.concrete)
         return True  # things changed.
def concretize_compiler_flags(self, spec): def concretize_compiler_flags(self, spec):
""" """
The compiler flags are updated to match those of the spec whose The compiler flags are updated to match those of the spec whose
@ -338,54 +361,66 @@ def concretize_compiler_flags(self, spec):
Default specs set at the compiler level will still be added later. Default specs set at the compiler level will still be added later.
""" """
if not spec.architecture.platform_os: if not spec.architecture.platform_os:
#Although this usually means changed, this means awaiting other changes # Although this usually means changed, this means awaiting other
# changes
return True return True
ret = False ret = False
for flag in spack.spec.FlagMap.valid_compiler_flags(): for flag in spack.spec.FlagMap.valid_compiler_flags():
try: try:
nearest = next(p for p in spec.traverse(direction='parents') nearest = next(p for p in spec.traverse(direction='parents')
if ((p.compiler == spec.compiler and p is not spec) if ((p.compiler == spec.compiler and
and flag in p.compiler_flags)) p is not spec) and
if not flag in spec.compiler_flags or \ flag in p.compiler_flags))
not (sorted(spec.compiler_flags[flag]) >= sorted(nearest.compiler_flags[flag])): if flag not in spec.compiler_flags or \
not (sorted(spec.compiler_flags[flag]) >=
sorted(nearest.compiler_flags[flag])):
if flag in spec.compiler_flags: if flag in spec.compiler_flags:
spec.compiler_flags[flag] = list(set(spec.compiler_flags[flag]) | spec.compiler_flags[flag] = list(
set(nearest.compiler_flags[flag])) set(spec.compiler_flags[flag]) |
set(nearest.compiler_flags[flag]))
else: else:
spec.compiler_flags[flag] = nearest.compiler_flags[flag] spec.compiler_flags[
flag] = nearest.compiler_flags[flag]
ret = True ret = True
except StopIteration: except StopIteration:
if (flag in spec.root.compiler_flags and ((not flag in spec.compiler_flags) or if (flag in spec.root.compiler_flags and
sorted(spec.compiler_flags[flag]) != sorted(spec.root.compiler_flags[flag]))): ((flag not in spec.compiler_flags) or
sorted(spec.compiler_flags[flag]) !=
sorted(spec.root.compiler_flags[flag]))):
if flag in spec.compiler_flags: if flag in spec.compiler_flags:
spec.compiler_flags[flag] = list(set(spec.compiler_flags[flag]) | spec.compiler_flags[flag] = list(
set(spec.root.compiler_flags[flag])) set(spec.compiler_flags[flag]) |
set(spec.root.compiler_flags[flag]))
else: else:
spec.compiler_flags[flag] = spec.root.compiler_flags[flag] spec.compiler_flags[
flag] = spec.root.compiler_flags[flag]
ret = True ret = True
else: else:
if not flag in spec.compiler_flags: if flag not in spec.compiler_flags:
spec.compiler_flags[flag] = [] spec.compiler_flags[flag] = []
# Include the compiler flag defaults from the config files # Include the compiler flag defaults from the config files
# This ensures that spack will detect conflicts that stem from a change # This ensures that spack will detect conflicts that stem from a change
# in default compiler flags. # in default compiler flags.
compiler = spack.compilers.compiler_for_spec(spec.compiler, spec.architecture) compiler = spack.compilers.compiler_for_spec(
spec.compiler, spec.architecture)
for flag in compiler.flags: for flag in compiler.flags:
if flag not in spec.compiler_flags: if flag not in spec.compiler_flags:
spec.compiler_flags[flag] = compiler.flags[flag] spec.compiler_flags[flag] = compiler.flags[flag]
if compiler.flags[flag] != []: if compiler.flags[flag] != []:
ret = True ret = True
else: else:
if ((sorted(spec.compiler_flags[flag]) != sorted(compiler.flags[flag])) and if ((sorted(spec.compiler_flags[flag]) !=
(not set(spec.compiler_flags[flag]) >= set(compiler.flags[flag]))): sorted(compiler.flags[flag])) and
(not set(spec.compiler_flags[flag]) >=
set(compiler.flags[flag]))):
ret = True ret = True
spec.compiler_flags[flag] = list(set(spec.compiler_flags[flag]) | spec.compiler_flags[flag] = list(
set(compiler.flags[flag])) set(spec.compiler_flags[flag]) |
set(compiler.flags[flag]))
return ret return ret
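
The flag-merging rule above amounts to a set union of the child's flags with the nearest parent's; a minimal standalone sketch, with plain dicts standing in for Spack's FlagMap:

def merge_flags(child_flags, parent_flags, flag='cflags'):
    """Union the child's flag list with the nearest parent's."""
    merged = set(child_flags.get(flag, [])) | set(parent_flags.get(flag, []))
    return sorted(merged)

# The child keeps '-g' and inherits '-O2' from its parent.
print(merge_flags({'cflags': ['-g']}, {'cflags': ['-O2']}))  # ['-O2', '-g']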
@@ -396,8 +431,8 @@ def find_spec(spec, condition):
    # First search parents, then search children
    deptype = ('build', 'link')
    dagiter = chain(
        spec.traverse(direction='parents', deptype=deptype, root=False),
        spec.traverse(direction='children', deptype=deptype, root=False))

    visited = set()
    for relative in dagiter:
        if condition(relative):

@@ -406,8 +441,10 @@ def find_spec(spec, condition):
    # Then search all other relatives in the DAG *except* spec
    for relative in spec.root.traverse(deptypes=spack.alldeps):
        if relative is spec:
            continue
        if id(relative) in visited:
            continue
        if condition(relative):
            return relative

@@ -454,25 +491,33 @@ def cmp_specs(lhs, rhs):
class UnavailableCompilerVersionError(spack.error.SpackError):
    """Raised when there is no available compiler that satisfies a
    compiler spec."""

-   def __init__(self, compiler_spec):
+   def __init__(self, compiler_spec, operating_system):
        super(UnavailableCompilerVersionError, self).__init__(
-           "No available compiler version matches '%s'" % compiler_spec,
+           "No available compiler version matches '%s' on operating_system %s"
+           % (compiler_spec, operating_system),
            "Run 'spack compilers' to see available compiler Options.")


class NoValidVersionError(spack.error.SpackError):
    """Raised when there is no way to have a concrete version for a
    particular spec."""

    def __init__(self, spec):
        super(NoValidVersionError, self).__init__(
            "There are no valid versions for %s that match '%s'"
            % (spec.name, spec.versions))


class NoBuildError(spack.error.SpackError):
    """Raised when a package is configured with the buildable option False, but
    no satisfactory external versions can be found"""

    def __init__(self, spec):
        msg = ("The spec '%s' is configured as not buildable, "
               "and no matching external installs were found")
        super(NoBuildError, self).__init__(msg % spec.name)

View file

@@ -1,4 +1,3 @@
-# flake8: noqa
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
@@ -123,15 +122,18 @@
import re
import sys

-import jsonschema
-import llnl.util.tty as tty
-import spack
-import yaml
-from jsonschema import Draft4Validator, validators
-from llnl.util.filesystem import mkdirp
-from ordereddict_backport import OrderedDict
-from spack.error import SpackError
-from yaml.error import MarkedYAMLError
+import yaml
+import jsonschema
+from yaml.error import MarkedYAMLError
+from jsonschema import Draft4Validator, validators
+from ordereddict_backport import OrderedDict
+
+import llnl.util.tty as tty
+from llnl.util.filesystem import mkdirp
+
+import spack
+from spack.error import SpackError
+import spack.schema

# Hacked yaml for configuration files preserves line numbers.
import spack.util.spack_yaml as syaml
@@ -139,251 +141,12 @@

"""Dict from section names -> schema for that section."""
section_schemas = {
+   'compilers': spack.schema.compilers.schema,
+   'mirrors': spack.schema.mirrors.schema,
+   'repos': spack.schema.repos.schema,
+   'packages': spack.schema.packages.schema,
+   'targets': spack.schema.targets.schema,
+   'modules': spack.schema.modules.schema,
-   'compilers': {
-       '$schema': 'http://json-schema.org/schema#',
-       'title': 'Spack compiler configuration file schema',
-       'type': 'object',
-       'additionalProperties': False,
-       'patternProperties': {
'compilers:?': { # optional colon for overriding site config.
'type': 'array',
'items': {
'compiler': {
'type': 'object',
'additionalProperties': False,
'required': ['paths', 'spec', 'modules', 'operating_system'],
'properties': {
'paths': {
'type': 'object',
'required': ['cc', 'cxx', 'f77', 'fc'],
'additionalProperties': False,
'properties': {
'cc': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
'cxx': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
'f77': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
'fc': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
'cflags': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
'cxxflags': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
'fflags': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
'cppflags': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
'ldflags': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
'ldlibs': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]}}},
'spec': { 'type': 'string'},
'operating_system': { 'type': 'string'},
'alias': { 'anyOf': [ {'type' : 'string'},
{'type' : 'null' }]},
'modules': { 'anyOf': [ {'type' : 'string'},
{'type' : 'null' },
{'type': 'array'},
]}
},},},},},},
'mirrors': {
'$schema': 'http://json-schema.org/schema#',
'title': 'Spack mirror configuration file schema',
'type': 'object',
'additionalProperties': False,
'patternProperties': {
r'mirrors:?': {
'type': 'object',
'default': {},
'additionalProperties': False,
'patternProperties': {
r'\w[\w-]*': {
'type': 'string'},},},},},
'repos': {
'$schema': 'http://json-schema.org/schema#',
'title': 'Spack repository configuration file schema',
'type': 'object',
'additionalProperties': False,
'patternProperties': {
r'repos:?': {
'type': 'array',
'default': [],
'items': {
'type': 'string'},},},},
'packages': {
'$schema': 'http://json-schema.org/schema#',
'title': 'Spack package configuration file schema',
'type': 'object',
'additionalProperties': False,
'patternProperties': {
r'packages:?': {
'type': 'object',
'default': {},
'additionalProperties': False,
'patternProperties': {
r'\w[\w-]*': { # package name
'type': 'object',
'default': {},
'additionalProperties': False,
'properties': {
'version': {
'type' : 'array',
'default' : [],
'items' : { 'anyOf' : [ { 'type' : 'string' },
{ 'type' : 'number'}]}}, #version strings
'compiler': {
'type' : 'array',
'default' : [],
'items' : { 'type' : 'string' } }, #compiler specs
'buildable': {
'type': 'boolean',
'default': True,
},
'modules': {
'type' : 'object',
'default' : {},
},
'providers': {
'type': 'object',
'default': {},
'additionalProperties': False,
'patternProperties': {
r'\w[\w-]*': {
'type' : 'array',
'default' : [],
'items' : { 'type' : 'string' },},},},
'paths': {
'type' : 'object',
'default' : {},
}
},},},},},},
'modules': {
'$schema': 'http://json-schema.org/schema#',
'title': 'Spack module file configuration file schema',
'type': 'object',
'additionalProperties': False,
'definitions': {
'array_of_strings': {
'type': 'array',
'default': [],
'items': {
'type': 'string'
}
},
'dictionary_of_strings': {
'type': 'object',
'patternProperties': {
r'\w[\w-]*': { # key
'type': 'string'
}
}
},
'dependency_selection': {
'type': 'string',
'enum': ['none', 'direct', 'all']
},
'module_file_configuration': {
'type': 'object',
'default': {},
'additionalProperties': False,
'properties': {
'filter': {
'type': 'object',
'default': {},
'additionalProperties': False,
'properties': {
'environment_blacklist': {
'type': 'array',
'default': [],
'items': {
'type': 'string'
}
}
}
},
'autoload': {'$ref': '#/definitions/dependency_selection'},
'prerequisites': {'$ref': '#/definitions/dependency_selection'},
'conflict': {'$ref': '#/definitions/array_of_strings'},
'load': {'$ref': '#/definitions/array_of_strings'},
'suffixes': {'$ref': '#/definitions/dictionary_of_strings'},
'environment': {
'type': 'object',
'default': {},
'additionalProperties': False,
'properties': {
'set': {'$ref': '#/definitions/dictionary_of_strings'},
'unset': {'$ref': '#/definitions/array_of_strings'},
'prepend_path': {'$ref': '#/definitions/dictionary_of_strings'},
'append_path': {'$ref': '#/definitions/dictionary_of_strings'}
}
}
}
},
'module_type_configuration': {
'type': 'object',
'default': {},
'anyOf': [
{
'properties': {
'hash_length': {
'type': 'integer',
'minimum': 0,
'default': 7
},
'whitelist': {'$ref': '#/definitions/array_of_strings'},
'blacklist': {'$ref': '#/definitions/array_of_strings'},
'naming_scheme': {
'type': 'string' # Can we be more specific here?
}
}
},
{
'patternProperties': {r'\w[\w-]*': {'$ref': '#/definitions/module_file_configuration'}}
}
]
}
},
'patternProperties': {
r'modules:?': {
'type': 'object',
'default': {},
'additionalProperties': False,
'properties': {
'prefix_inspections': {
'type': 'object',
'patternProperties': {
r'\w[\w-]*': { # path to be inspected for existence (relative to prefix)
'$ref': '#/definitions/array_of_strings'
}
}
},
'enable': {
'type': 'array',
'default': [],
'items': {
'type': 'string',
'enum': ['tcl', 'dotkit']
}
},
'tcl': {
'allOf': [
{'$ref': '#/definitions/module_type_configuration'}, # Base configuration
{} # Specific tcl extensions
]
},
'dotkit': {
'allOf': [
{'$ref': '#/definitions/module_type_configuration'}, # Base configuration
{} # Specific dotkit extensions
]
},
}
},
},
},
}
"""OrderedDict of config scopes keyed by name. """OrderedDict of config scopes keyed by name.
@ -400,7 +163,7 @@ def validate_section_name(section):
def extend_with_default(validator_class): def extend_with_default(validator_class):
"""Add support for the 'default' attribute for properties and patternProperties. """Add support for the 'default' attr for properties and patternProperties.
jsonschema does not handle this out of the box -- it only jsonschema does not handle this out of the box -- it only
validates. This allows us to set default values for configs validates. This allows us to set default values for configs
@ -409,13 +172,15 @@ def extend_with_default(validator_class):
""" """
validate_properties = validator_class.VALIDATORS["properties"] validate_properties = validator_class.VALIDATORS["properties"]
validate_pattern_properties = validator_class.VALIDATORS["patternProperties"] validate_pattern_properties = validator_class.VALIDATORS[
"patternProperties"]
def set_defaults(validator, properties, instance, schema): def set_defaults(validator, properties, instance, schema):
for property, subschema in properties.iteritems(): for property, subschema in properties.iteritems():
if "default" in subschema: if "default" in subschema:
instance.setdefault(property, subschema["default"]) instance.setdefault(property, subschema["default"])
for err in validate_properties(validator, properties, instance, schema): for err in validate_properties(
validator, properties, instance, schema):
yield err yield err
def set_pp_defaults(validator, properties, instance, schema): def set_pp_defaults(validator, properties, instance, schema):
@ -426,7 +191,8 @@ def set_pp_defaults(validator, properties, instance, schema):
if re.match(property, key) and val is None: if re.match(property, key) and val is None:
instance[key] = subschema["default"] instance[key] = subschema["default"]
for err in validate_pattern_properties(validator, properties, instance, schema): for err in validate_pattern_properties(
validator, properties, instance, schema):
yield err yield err
return validators.extend(validator_class, { return validators.extend(validator_class, {
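
For context, a minimal demonstration of what the defaults-extended validator does, assuming only the Draft4Validator import shown above and extend_with_default as defined here:

DefaultingValidator = extend_with_default(Draft4Validator)

schema = {'type': 'object',
          'properties': {'enable': {'type': 'array', 'default': []}}}
data = {}
DefaultingValidator(schema).validate(data)
print(data)   # {'enable': []} -- the default was filled in during validation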
@@ -491,7 +257,8 @@ def write_section(self, section):
        except jsonschema.ValidationError as e:
            raise ConfigSanityError(e, data)
        except (yaml.YAMLError, IOError) as e:
            raise ConfigFileError(
                "Error writing to config file: '%s'" % str(e))

    def clear(self):
        """Empty cached config information."""

@@ -506,7 +273,7 @@ def clear(self):
ConfigScope('site', os.path.join(spack.etc_path, 'spack'))

"""User configuration can override both spack defaults and site config."""
-ConfigScope('user', os.path.expanduser('~/.spack'))
+ConfigScope('user', spack.user_config_path)


def highest_precedence_scope():

@@ -689,7 +456,7 @@ def print_section(section):
        data = syaml.syaml_dict()
        data[section] = get_config(section)
        syaml.dump(data, stream=sys.stdout, default_flow_style=False)
-   except (yaml.YAMLError, IOError) as e:
+   except (yaml.YAMLError, IOError):
        raise ConfigError("Error reading configuration: %s" % section)

@@ -720,7 +487,8 @@ def spec_externals(spec):
            path = get_path_from_module(module)

        external_spec = spack.spec.Spec(
            external_spec, external=path, external_module=module)
        if external_spec.satisfies(spec):
            external_specs.append(external_spec)

@@ -754,6 +522,7 @@ def get_path(path, data):
class ConfigFormatError(ConfigError):
    """Raised when a configuration format does not match its schema."""

    def __init__(self, validation_error, data):
        # Try to get line number from erroneous instance and its parent
        instance_mark = getattr(validation_error.instance, '_start_mark', None)

View file

@@ -119,6 +119,7 @@ def from_dict(cls, spec, dictionary):
class Database(object):

    def __init__(self, root, db_dir=None):
        """Create a Database for Spack installations under ``root``.

@@ -165,11 +166,11 @@ def __init__(self, root, db_dir=None):
    def write_transaction(self, timeout=_db_lock_timeout):
        """Get a write lock context manager for use in a `with` block."""
-       return WriteTransaction(self, self._read, self._write, timeout)
+       return WriteTransaction(self.lock, self._read, self._write, timeout)

    def read_transaction(self, timeout=_db_lock_timeout):
        """Get a read lock context manager for use in a `with` block."""
-       return ReadTransaction(self, self._read, None, timeout)
+       return ReadTransaction(self.lock, self._read, timeout=timeout)
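
A sketch of how client code is meant to use these context managers; the root path and the query call are illustrative only:

db = Database('/path/to/spack/install_tree')   # illustrative root
with db.read_transaction():
    # the lock is held and _read() has refreshed the in-memory index here
    results = db.query()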
    def _write_to_yaml(self, stream):
        """Write out the database to a YAML file.

@@ -352,12 +353,22 @@ def _check_ref_counts(self):
                "Invalid ref_count: %s: %d (expected %d), in DB %s" %
                (key, found, expected, self._index_path))

-   def _write(self):
+   def _write(self, type, value, traceback):
        """Write the in-memory database index to its file path.

-       Does no locking.
+       This is a helper function called by the WriteTransaction context
+       manager. If there is an exception while the write lock is active,
+       nothing will be written to the database file, but the in-memory
+       database *may* be left in an inconsistent state. It will be
+       consistent after the start of the next transaction, when it is
+       read from disk again.
+
+       This routine does no locking.
        """
        # Do not write if exceptions were raised
        if type is not None:
            return

        temp_file = self._index_path + (
            '.%s.%s.temp' % (socket.getfqdn(), os.getpid()))
@@ -589,50 +600,8 @@ def missing(self, spec):
        return key in self._data and not self._data[key].installed
class _Transaction(object):
"""Simple nested transaction context manager that uses a file lock.
This class can trigger actions when the lock is acquired for the
first time and released for the last.
Timeout for lock is customizable.
"""
def __init__(self, db,
acquire_fn=None,
release_fn=None,
timeout=_db_lock_timeout):
self._db = db
self._timeout = timeout
self._acquire_fn = acquire_fn
self._release_fn = release_fn
def __enter__(self):
if self._enter() and self._acquire_fn:
self._acquire_fn()
def __exit__(self, type, value, traceback):
if self._exit() and self._release_fn:
self._release_fn()
class ReadTransaction(_Transaction):
def _enter(self):
return self._db.lock.acquire_read(self._timeout)
def _exit(self):
return self._db.lock.release_read()
class WriteTransaction(_Transaction):
def _enter(self):
return self._db.lock.acquire_write(self._timeout)
def _exit(self):
return self._db.lock.release_write()
class CorruptDatabaseError(SpackError):
    def __init__(self, path, msg=''):
        super(CorruptDatabaseError, self).__init__(
            "Spack database is corrupt: %s. %s." % (path, msg),

@@ -640,6 +609,7 @@ def __init__(self, path, msg=''):
class InvalidDatabaseVersionError(SpackError):

    def __init__(self, expected, found):
        super(InvalidDatabaseVersionError, self).__init__(
            "Expected database version %s but found version %s."

View file

@@ -189,7 +189,7 @@ def _depends_on(pkg, spec, when=None, type=None):
        type = ('build', 'link')

    if isinstance(type, str):
-       type = (type,)
+       type = spack.spec.special_types.get(type, (type,))

    for deptype in type:
        if deptype not in spack.spec.alldeps:
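
The new lookup lets a named group of dependency types expand to its tuple, while any other string still becomes a one-element tuple; a standalone sketch (the group contents here are illustrative):

special_types = {'alldeps': ('build', 'link', 'run')}   # illustrative mapping

def normalize_deptype(type):
    if isinstance(type, str):
        type = special_types.get(type, (type,))
    return type

print(normalize_deptype('alldeps'))   # ('build', 'link', 'run')
print(normalize_deptype('run'))       # ('run',)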
@@ -349,9 +349,10 @@ def __init__(self, directive, package):
class UnknownDependencyTypeError(DirectiveError):
    """This is raised when a dependency is of an unknown type."""

    def __init__(self, directive, package, deptype):
        super(UnknownDependencyTypeError, self).__init__(
            directive,
            "Package '%s' cannot depend on a package via %s."
            % (package, deptype))
        self.package = package

View file

@@ -22,16 +22,13 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import re
import os
import exceptions
-import hashlib
import shutil
import glob
import tempfile
import yaml

-import llnl.util.tty as tty
from llnl.util.filesystem import join_path, mkdirp

import spack
@@ -51,10 +48,10 @@ class DirectoryLayout(object):
    install, and they can use this to customize the nesting structure of
    spack installs.
    """

    def __init__(self, root):
        self.root = root

    @property
    def hidden_file_paths(self):
        """Return a list of hidden files used by the directory layout.

@@ -67,25 +64,21 @@ def hidden_file_paths(self):
        """
        raise NotImplementedError()

    def all_specs(self):
        """To be implemented by subclasses to traverse all specs for which there is
        a directory within the root.
        """
        raise NotImplementedError()

    def relative_path_for_spec(self, spec):
        """Implemented by subclasses to return a relative path from the install
        root to a unique location for the provided spec."""
        raise NotImplementedError()

    def create_install_directory(self, spec):
        """Creates the installation directory for a spec."""
        raise NotImplementedError()

    def check_installed(self, spec):
        """Checks whether a spec is installed.

@@ -95,7 +88,6 @@ def check_installed(self, spec):
        """
        raise NotImplementedError()

    def extension_map(self, spec):
        """Get a dict of currently installed extension packages for a spec.

@@ -104,7 +96,6 @@ def extension_map(self, spec):
        """
        raise NotImplementedError()

    def check_extension_conflict(self, spec, ext_spec):
        """Ensure that ext_spec can be activated in spec.

@@ -113,7 +104,6 @@ def check_extension_conflict(self, spec, ext_spec):
        """
        raise NotImplementedError()

    def check_activated(self, spec, ext_spec):
        """Ensure that ext_spec can be removed from spec.

@@ -121,26 +111,22 @@ def check_activated(self, spec, ext_spec):
        """
        raise NotImplementedError()

    def add_extension(self, spec, ext_spec):
        """Add to the list of currently installed extensions."""
        raise NotImplementedError()

    def remove_extension(self, spec, ext_spec):
        """Remove from the list of currently installed extensions."""
        raise NotImplementedError()

    def path_for_spec(self, spec):
        """Return absolute path from the root to a directory for the spec."""
        _check_concrete(spec)

        path = self.relative_path_for_spec(spec)
        assert(not path.startswith(self.root))
        return os.path.join(self.root, path)

    def remove_install_directory(self, spec):
        """Removes a prefix and any empty parent directories from the root.
        Raises RemoveFailedError if something goes wrong.

@@ -177,6 +163,7 @@ class YamlDirectoryLayout(DirectoryLayout):
    only enabled variants are included in the install path.
    Disabled variants are omitted.
    """

    def __init__(self, root, **kwargs):
        super(YamlDirectoryLayout, self).__init__(root)
        self.metadata_dir = kwargs.get('metadata_dir', '.spack')

@@ -191,12 +178,10 @@ def __init__(self, root, **kwargs):
        # Cache of already written/read extension maps.
        self._extension_maps = {}

    @property
    def hidden_file_paths(self):
        return (self.metadata_dir,)

    def relative_path_for_spec(self, spec):
        _check_concrete(spec)

@@ -208,20 +193,19 @@ def relative_path_for_spec(self, spec):
            spec.version,
            spec.dag_hash(self.hash_len))

        path = join_path(
            spec.architecture,
            "%s-%s" % (spec.compiler.name, spec.compiler.version),
            dir_name)
        return path

    def write_spec(self, spec, path):
        """Write a spec out to a file."""
        _check_concrete(spec)
        with open(path, 'w') as f:
            spec.to_yaml(f)

    def read_spec(self, path):
        """Read the contents of a file and parse them as a spec"""
        try:

@@ -237,32 +221,26 @@ def read_spec(self, path):
        spec._mark_concrete()
        return spec

    def spec_file_path(self, spec):
        """Gets full path to spec file"""
        _check_concrete(spec)
        return join_path(self.metadata_path(spec), self.spec_file_name)

    def metadata_path(self, spec):
        return join_path(self.path_for_spec(spec), self.metadata_dir)

    def build_log_path(self, spec):
        return join_path(self.path_for_spec(spec), self.metadata_dir,
                         self.build_log_name)

    def build_env_path(self, spec):
        return join_path(self.path_for_spec(spec), self.metadata_dir,
                         self.build_env_name)

    def build_packages_path(self, spec):
        return join_path(self.path_for_spec(spec), self.metadata_dir,
                         self.packages_dir)

    def create_install_directory(self, spec):
        _check_concrete(spec)

@@ -273,7 +251,6 @@ def create_install_directory(self, spec):
        mkdirp(self.metadata_path(spec))
        self.write_spec(spec, self.spec_file_path(spec))

    def check_installed(self, spec):
        _check_concrete(spec)
        path = self.path_for_spec(spec)

@@ -284,7 +261,7 @@ def check_installed(self, spec):
        if not os.path.isfile(spec_file_path):
            raise InconsistentInstallDirectoryError(
                'Install prefix exists but contains no spec.yaml:',
                " " + path)

        installed_spec = self.read_spec(spec_file_path)

@@ -297,7 +274,6 @@ def check_installed(self, spec):
        raise InconsistentInstallDirectoryError(
            'Spec file in %s does not match hash!' % spec_file_path)

    def all_specs(self):
        if not os.path.isdir(self.root):
            return []

@@ -307,20 +283,17 @@ def all_specs(self):
        spec_files = glob.glob(pattern)
        return [self.read_spec(s) for s in spec_files]

    def specs_by_hash(self):
        by_hash = {}
        for spec in self.all_specs():
            by_hash[spec.dag_hash()] = spec
        return by_hash

    def extension_file_path(self, spec):
        """Gets full path to an installed package's extension file"""
        _check_concrete(spec)
        return join_path(self.metadata_path(spec), self.extension_file_name)

    def _write_extensions(self, spec, extensions):
        path = self.extension_file_path(spec)

@@ -332,23 +305,22 @@ def _write_extensions(self, spec, extensions):
        # write tmp file
        with tmp:
            yaml.dump({
                'extensions': [
                    {ext.name: {
                        'hash': ext.dag_hash(),
                        'path': str(ext.prefix)
                    }} for ext in sorted(extensions.values())]
            }, tmp, default_flow_style=False)

        # Atomic update by moving tmpfile on top of old one.
        os.rename(tmp.name, path)

    def _extension_map(self, spec):
        """Get a dict<name -> spec> for all extensions currently
           installed for this package."""
        _check_concrete(spec)

        if spec not in self._extension_maps:
            path = self.extension_file_path(spec)
            if not os.path.exists(path):
                self._extension_maps[spec] = {}

@@ -363,14 +335,14 @@ def _extension_map(self, spec):
                    dag_hash = entry[name]['hash']
                    prefix = entry[name]['path']

                    if dag_hash not in by_hash:
                        raise InvalidExtensionSpecError(
                            "Spec %s not found in %s" % (dag_hash, prefix))

                    ext_spec = by_hash[dag_hash]
                    if prefix != ext_spec.prefix:
                        raise InvalidExtensionSpecError(
                            "Prefix %s does not match spec hash %s: %s"
                            % (prefix, dag_hash, ext_spec))

                    exts[ext_spec.name] = ext_spec

@@ -378,13 +350,11 @@ def _extension_map(self, spec):
        return self._extension_maps[spec]

    def extension_map(self, spec):
        """Defensive copying version of _extension_map() for external API."""
        _check_concrete(spec)
        return self._extension_map(spec).copy()

    def check_extension_conflict(self, spec, ext_spec):
        exts = self._extension_map(spec)
        if ext_spec.name in exts:

@@ -394,13 +364,11 @@ def check_extension_conflict(self, spec, ext_spec):
        else:
            raise ExtensionConflictError(spec, ext_spec, installed_spec)

    def check_activated(self, spec, ext_spec):
        exts = self._extension_map(spec)
        if (ext_spec.name not in exts) or (ext_spec != exts[ext_spec.name]):
            raise NoSuchExtensionError(spec, ext_spec)

    def add_extension(self, spec, ext_spec):
        _check_concrete(spec)
        _check_concrete(ext_spec)

@@ -413,7 +381,6 @@ def add_extension(self, spec, ext_spec):
        exts[ext_spec.name] = ext_spec
        self._write_extensions(spec, exts)

    def remove_extension(self, spec, ext_spec):
        _check_concrete(spec)
        _check_concrete(ext_spec)

@@ -429,12 +396,14 @@ def remove_extension(self, spec, ext_spec):
class DirectoryLayoutError(SpackError):
    """Superclass for directory layout errors."""

    def __init__(self, message, long_msg=None):
        super(DirectoryLayoutError, self).__init__(message, long_msg)


class SpecHashCollisionError(DirectoryLayoutError):
    """Raised when there is a hash collision in an install layout."""

    def __init__(self, installed_spec, new_spec):
        super(SpecHashCollisionError, self).__init__(
            'Specs %s and %s have the same SHA-1 prefix!'

@@ -443,6 +412,7 @@ def __init__(self, installed_spec, new_spec):
class RemoveFailedError(DirectoryLayoutError):
    """Raised when a DirectoryLayout cannot remove an install prefix."""

    def __init__(self, installed_spec, prefix, error):
        super(RemoveFailedError, self).__init__(
            'Could not remove prefix %s for %s : %s'

@@ -452,12 +422,15 @@ def __init__(self, installed_spec, prefix, error):
class InconsistentInstallDirectoryError(DirectoryLayoutError):
    """Raised when a package seems to be installed to the wrong place."""

    def __init__(self, message, long_msg=None):
        super(InconsistentInstallDirectoryError, self).__init__(
            message, long_msg)


class InstallDirectoryAlreadyExistsError(DirectoryLayoutError):
    """Raised when create_install_directory is called unnecessarily."""

    def __init__(self, path):
        super(InstallDirectoryAlreadyExistsError, self).__init__(
            "Install path %s already exists!")

@@ -473,22 +446,26 @@ class InvalidExtensionSpecError(DirectoryLayoutError):
class ExtensionAlreadyInstalledError(DirectoryLayoutError):
    """Raised when an extension is added to a package that already has it."""

    def __init__(self, spec, ext_spec):
        super(ExtensionAlreadyInstalledError, self).__init__(
            "%s is already installed in %s"
            % (ext_spec.short_spec, spec.short_spec))


class ExtensionConflictError(DirectoryLayoutError):
    """Raised when an extension is added to a package that already has it."""

    def __init__(self, spec, ext_spec, conflict):
        super(ExtensionConflictError, self).__init__(
            "%s cannot be installed in %s because it conflicts with %s"
            % (ext_spec.short_spec, spec.short_spec, conflict.short_spec))


class NoSuchExtensionError(DirectoryLayoutError):
    """Raised when an extension isn't there on deactivate."""

    def __init__(self, spec, ext_spec):
        super(NoSuchExtensionError, self).__init__(
            "%s cannot be removed from %s because it's not activated."
            % (ext_spec.short_spec, spec.short_spec))

View file

@@ -1,4 +1,4 @@
-#
+##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#

@@ -21,7 +21,7 @@
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-#
+##############################################################################
import collections
import inspect
import json
@@ -37,6 +37,10 @@ def __init__(self, name, **kwargs):
        self.args = {'name': name}
        self.args.update(kwargs)

+   def update_args(self, **kwargs):
+       self.__dict__.update(kwargs)
+       self.args.update(kwargs)


class NameValueModifier(object):

@@ -44,7 +48,11 @@ def __init__(self, name, value, **kwargs):
        self.name = name
        self.value = value
        self.separator = kwargs.get('separator', ':')
-       self.args = {'name': name, 'value': value, 'delim': self.separator}
+       self.args = {'name': name, 'value': value, 'separator': self.separator}
        self.args.update(kwargs)

+   def update_args(self, **kwargs):
+       self.__dict__.update(kwargs)
+       self.args.update(kwargs)

@@ -279,7 +287,10 @@ def from_sourcing_files(*args, **kwargs):
    shell = '{shell}'.format(**info)
    shell_options = '{shell_options}'.format(**info)
    source_file = '{source_command} {file} {concatenate_on_success}'
-   dump_environment = 'python -c "import os, json; print json.dumps(dict(os.environ))"'  # NOQA: ignore=E501
+
+   dump_cmd = "import os, json; print json.dumps(dict(os.environ))"
+   dump_environment = 'python -c "%s"' % dump_cmd
+
    # Construct the command that will be executed
    command = [source_file.format(file=file, **info) for file in args]
    command.append(dump_environment)
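
A sketch of how the assembled pipeline is consumed: the sourcing commands and the dump command run in one shell, and the JSON printed to stdout becomes the post-source environment (the file name and shell here are illustrative):

import json
import subprocess

dump_cmd = "import os, json; print json.dumps(dict(os.environ))"
command = 'source ./setup.sh && python -c "%s"' % dump_cmd
output = subprocess.check_output(command, shell=True,
                                 executable='/bin/bash')
after_source_env = json.loads(output)   # environment after sourcing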
@@ -318,8 +329,10 @@ def from_sourcing_files(*args, **kwargs):
    for x in unset_variables:
        env.unset(x)
    # Variables that have been modified
-   common_variables = set(this_environment).intersection(set(after_source_env))  # NOQA: ignore=E501
-   modified_variables = [x for x in common_variables if this_environment[x] != after_source_env[x]]  # NOQA: ignore=E501
+   common_variables = set(
+       this_environment).intersection(set(after_source_env))
+   modified_variables = [x for x in common_variables
+                         if this_environment[x] != after_source_env[x]]

    def return_separator_if_any(first_value, second_value):
        separators = ':', ';'

@@ -397,7 +410,7 @@ def set_or_unset_not_first(variable, changes, errstream):
    if indexes:
        good = '\t \t{context} at {filename}:{lineno}'
        nogood = '\t--->\t{context} at {filename}:{lineno}'
-       message = 'Suspicious requests to set or unset the variable \'{var}\' found'  # NOQA: ignore=E501
+       message = "Suspicious requests to set or unset '{var}' found"
        errstream(message.format(var=variable))
        for ii, item in enumerate(changes):
            print_format = nogood if ii in indexes else good

View file

@@ -27,21 +27,21 @@
import llnl.util.tty as tty
import spack


class SpackError(Exception):
    """This is the superclass for all Spack errors.
       Subclasses can be found in the modules they have to do with.
    """

    def __init__(self, message, long_message=None):
        super(SpackError, self).__init__()
        self.message = message
        self._long_message = long_message

    @property
    def long_message(self):
        return self._long_message

    def die(self):
        if spack.debug:
            sys.excepthook(*sys.exc_info())

@@ -52,21 +52,23 @@ def die(self):
            print self.long_message
        os._exit(1)

    def __str__(self):
        msg = self.message
        if self._long_message:
            msg += "\n    %s" % self._long_message
        return msg


class UnsupportedPlatformError(SpackError):
    """Raised by packages when a platform is not supported"""

    def __init__(self, message):
        super(UnsupportedPlatformError, self).__init__(message)


class NoNetworkConnectionError(SpackError):
    """Raised when an operation needs an internet connection."""

    def __init__(self, message, url):
        super(NoNetworkConnectionError, self).__init__(
            "No network connection: " + str(message),
View file

@@ -356,6 +356,7 @@ def __str__(self):
class CacheURLFetchStrategy(URLFetchStrategy):
    """The resource associated with a cache URL may be out of date."""

    def __init__(self, *args, **kwargs):
        super(CacheURLFetchStrategy, self).__init__(*args, **kwargs)

@@ -836,6 +837,7 @@ def for_package_version(pkg, version):
class FsCache(object):

    def __init__(self, root):
        self.root = os.path.abspath(root)

View file

@ -0,0 +1,185 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import shutil
from llnl.util.filesystem import *
from llnl.util.lock import *
from spack.error import SpackError
class FileCache(object):
"""This class manages cached data in the filesystem.
- Cache files are fetched and stored by unique keys. Keys can be relative
paths, so that there can be some hierarchy in the cache.
- The FileCache handles locking cache files for reading and writing, so
client code need not manage locks for cache entries.
"""
def __init__(self, root):
"""Create a file cache object.
This will create the cache directory if it does not exist yet.
"""
self.root = root.rstrip(os.path.sep)
if not os.path.exists(self.root):
mkdirp(self.root)
self._locks = {}
def destroy(self):
"""Remove all files under the cache root."""
for f in os.listdir(self.root):
path = join_path(self.root, f)
if os.path.isdir(path):
shutil.rmtree(path, True)
else:
os.remove(path)
def cache_path(self, key):
"""Path to the file in the cache for a particular key."""
return join_path(self.root, key)
def _lock_path(self, key):
"""Path to the file in the cache for a particular key."""
keyfile = os.path.basename(key)
keydir = os.path.dirname(key)
return join_path(self.root, keydir, '.' + keyfile + '.lock')
def _get_lock(self, key):
"""Create a lock for a key, if necessary, and return a lock object."""
if key not in self._locks:
lock_file = self._lock_path(key)
if not os.path.exists(lock_file):
touch(lock_file)
self._locks[key] = Lock(lock_file)
return self._locks[key]
def init_entry(self, key):
"""Ensure we can access a cache file. Create a lock for it if needed.
Return whether the cache file exists yet or not.
"""
cache_path = self.cache_path(key)
exists = os.path.exists(cache_path)
if exists:
if not os.path.isfile(cache_path):
raise CacheError("Cache file is not a file: %s" % cache_path)
if not os.access(cache_path, os.R_OK | os.W_OK):
raise CacheError("Cannot access cache file: %s" % cache_path)
else:
# if the file is hierarchical, make parent directories
parent = os.path.dirname(cache_path)
if parent.rstrip(os.path.sep) != self.root:
mkdirp(parent)
if not os.access(parent, os.R_OK | os.W_OK):
raise CacheError("Cannot access cache directory: %s" % parent)
# ensure lock is created for this key
self._get_lock(key)
return exists
def read_transaction(self, key):
"""Get a read transaction on a file cache item.
Returns a ReadTransaction context manager and opens the cache file for
reading. You can use it like this:
with spack.user_cache.read_transaction(key) as cache_file:
cache_file.read()
"""
return ReadTransaction(
self._get_lock(key), lambda: open(self.cache_path(key)))
def write_transaction(self, key):
"""Get a write transaction on a file cache item.
Returns a WriteTransaction context manager that opens a temporary file
for writing. Once the context manager finishes, if nothing went wrong,
moves the file into place on top of the old file atomically.
"""
class WriteContextManager(object):
def __enter__(cm):
cm.orig_filename = self.cache_path(key)
cm.orig_file = None
if os.path.exists(cm.orig_filename):
cm.orig_file = open(cm.orig_filename, 'r')
cm.tmp_filename = self.cache_path(key) + '.tmp'
cm.tmp_file = open(cm.tmp_filename, 'w')
return cm.orig_file, cm.tmp_file
def __exit__(cm, type, value, traceback):
if cm.orig_file:
cm.orig_file.close()
cm.tmp_file.close()
if value:
# remove tmp on exception & raise it
shutil.rmtree(cm.tmp_filename, True)
raise value
else:
os.rename(cm.tmp_filename, cm.orig_filename)
return WriteTransaction(self._get_lock(key), WriteContextManager)
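
A hypothetical round trip through the cache built on the two transactions above; 'repo/index' and the cache root are just example names:

cache = FileCache('/tmp/spack-file-cache')          # illustrative root
with cache.write_transaction('repo/index') as (old, new):
    # old is a handle to the previous version, or None on the first write
    new.write('fresh contents\n')
with cache.read_transaction('repo/index') as f:
    print(f.read())                                 # 'fresh contents'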
def mtime(self, key):
"""Return modification time of cache file, or 0 if it does not exist.
Time is in units returned by os.stat in the mtime field, which is
platform-dependent.
"""
if not self.init_entry(key):
return 0
else:
sinfo = os.stat(self.cache_path(key))
return sinfo.st_mtime
def remove(self, key):
lock = self._get_lock(key)
try:
lock.acquire_write()
os.unlink(self.cache_path(key))
finally:
lock.release_write()
os.unlink(self._lock_path(key))
class CacheError(SpackError):
pass

View file

@@ -61,7 +61,6 @@
can take a number of specs as input.
"""
-__all__ = ['topological_sort', 'graph_ascii', 'AsciiGraph', 'graph_dot']

from heapq import *

@@ -71,6 +70,8 @@
import spack
from spack.spec import Spec

+__all__ = ['topological_sort', 'graph_ascii', 'AsciiGraph', 'graph_dot']
+

def topological_sort(spec, **kwargs):
    """Topological sort for specs.

@@ -94,6 +95,7 @@ def topological_sort(spec, **kwargs):
    nodes = spec.index()

    topo_order = []
+   par = dict((name, parents(nodes[name])) for name in nodes.keys())
    remaining = [name for name in nodes.keys() if not parents(nodes[name])]
    heapify(remaining)

@@ -102,12 +104,12 @@ def topological_sort(spec, **kwargs):
        topo_order.append(name)

        node = nodes[name]
-       for dep in children(node).values():
-           del parents(dep)[node.name]
-           if not parents(dep):
+       for dep in children(node):
+           par[dep.name].remove(node)
+           if not par[dep.name]:
                heappush(remaining, dep.name)

-   if any(parents(s) for s in spec.traverse()):
+   if any(par.get(s.name, []) for s in spec.traverse()):
        raise ValueError("Spec has cycles!")
    else:
        return topo_order
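
The fix above keeps its own parent bookkeeping in par instead of mutating the specs; the same Kahn-style sort on a plain adjacency dict (children per node), independent of Spack's Spec class:

from heapq import heapify, heappush, heappop

def toposort(children):
    parents = dict((n, set()) for n in children)
    for n, deps in children.items():
        for d in deps:
            parents[d].add(n)
    ready = [n for n in children if not parents[n]]
    heapify(ready)
    order = []
    while ready:
        n = heappop(ready)
        order.append(n)
        for d in children[n]:
            parents[d].remove(n)
            if not parents[d]:
                heappush(ready, d)
    if any(parents.values()):
        raise ValueError("Graph has cycles!")
    return order

print(toposort({'mpileaks': ['mpich'], 'mpich': []}))  # ['mpileaks', 'mpich']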
@@ -132,7 +134,9 @@ def find(seq, predicate):
 states = ('node', 'collapse', 'merge-right', 'expand-right', 'back-edge')
 NODE, COLLAPSE, MERGE_RIGHT, EXPAND_RIGHT, BACK_EDGE = states


 class AsciiGraph(object):

     def __init__(self):
         # These can be set after initialization or after a call to
         # graph() to change behavior.

@@ -153,18 +157,15 @@ def __init__(self):
         self._prev_state = None  # State of previous line
         self._prev_index = None  # Index of expansion point of prev line

     def _indent(self):
         self._out.write(self.indent * ' ')

     def _write_edge(self, string, index, sub=0):
         """Write a colored edge to the output stream."""
         name = self._frontier[index][sub]
         edge = "@%s{%s}" % (self._name_to_color[name], string)
         self._out.write(edge)

     def _connect_deps(self, i, deps, label=None):
         """Connect dependencies to existing edges in the frontier.

@@ -199,7 +200,8 @@ def _connect_deps(self, i, deps, label=None):
             collapse = True
             if self._prev_state == EXPAND_RIGHT:
                 # Special case where previous line expanded and i is off by 1.
-                self._back_edge_line([], j, i+1, True, label + "-1.5 " + str((i+1,j)))
+                self._back_edge_line([], j, i + 1, True,
+                                     label + "-1.5 " + str((i + 1, j)))
                 collapse = False

             else:

@@ -207,19 +209,20 @@ def _connect_deps(self, i, deps, label=None):
                 if self._prev_state == NODE and self._prev_index < i:
                     i += 1

-                if i-j > 1:
+                if i - j > 1:
                     # We need two lines to connect if distance > 1
-                    self._back_edge_line([], j, i, True, label + "-1 " + str((i,j)))
+                    self._back_edge_line([], j, i, True,
+                                         label + "-1 " + str((i, j)))
                     collapse = False

-            self._back_edge_line([j], -1, -1, collapse, label + "-2 " + str((i,j)))
+            self._back_edge_line([j], -1, -1, collapse,
+                                 label + "-2 " + str((i, j)))
             return True

         elif deps:
             self._frontier.insert(i, deps)
             return False

     def _set_state(self, state, index, label=None):
         if state not in states:
             raise ValueError("Invalid graph state!")

@@ -233,7 +236,6 @@ def _set_state(self, state, index, label=None):
             self._out.write("%-20s" % (str(label) if label else ''))
             self._out.write("%s" % self._frontier)

     def _back_edge_line(self, prev_ends, end, start, collapse, label=None):
         """Write part of a backwards edge in the graph.

@@ -287,27 +289,26 @@ def advance(to_pos, edges):
         self._indent()
         for p in prev_ends:
-            advance(p, lambda: [("| ", self._pos)] )
-            advance(p+1, lambda: [("|/", self._pos)] )
+            advance(p, lambda: [("| ", self._pos)])
+            advance(p + 1, lambda: [("|/", self._pos)])

         if end >= 0:
-            advance(end + 1, lambda: [("| ", self._pos)] )
-            advance(start - 1, lambda: [("|", self._pos), ("_", end)] )
+            advance(end + 1, lambda: [("| ", self._pos)])
+            advance(start - 1, lambda: [("|", self._pos), ("_", end)])
         else:
-            advance(start - 1, lambda: [("| ", self._pos)] )
+            advance(start - 1, lambda: [("| ", self._pos)])

         if start >= 0:
-            advance(start, lambda: [("|", self._pos), ("/", end)] )
+            advance(start, lambda: [("|", self._pos), ("/", end)])

         if collapse:
-            advance(flen, lambda: [(" /", self._pos)] )
+            advance(flen, lambda: [(" /", self._pos)])
         else:
-            advance(flen, lambda: [("| ", self._pos)] )
+            advance(flen, lambda: [("| ", self._pos)])

         self._set_state(BACK_EDGE, end, label)
         self._out.write("\n")

     def _node_line(self, index, name):
         """Writes a line with a node at index."""
         self._indent()

@@ -316,14 +317,13 @@ def _node_line(self, index, name):
         self._out.write("%s " % self.node_character)
-        for c in range(index+1, len(self._frontier)):
+        for c in range(index + 1, len(self._frontier)):
             self._write_edge("| ", c)

         self._out.write(" %s" % name)
         self._set_state(NODE, index)
         self._out.write("\n")

     def _collapse_line(self, index):
         """Write a collapsing line after a node was added at index."""
         self._indent()

@@ -335,36 +335,33 @@ def _collapse_line(self, index):
         self._set_state(COLLAPSE, index)
         self._out.write("\n")

     def _merge_right_line(self, index):
         """Edge at index is same as edge to right. Merge directly with '\'"""
         self._indent()
         for c in range(index):
             self._write_edge("| ", c)
         self._write_edge("|", index)
-        self._write_edge("\\", index+1)
-        for c in range(index+1, len(self._frontier)):
-            self._write_edge("| ", c )
+        self._write_edge("\\", index + 1)
+        for c in range(index + 1, len(self._frontier)):
+            self._write_edge("| ", c)

         self._set_state(MERGE_RIGHT, index)
         self._out.write("\n")

     def _expand_right_line(self, index):
         self._indent()
         for c in range(index):
             self._write_edge("| ", c)
         self._write_edge("|", index)
-        self._write_edge("\\", index+1)
-        for c in range(index+2, len(self._frontier)):
+        self._write_edge("\\", index + 1)
+        for c in range(index + 2, len(self._frontier)):
             self._write_edge(" \\", c)

         self._set_state(EXPAND_RIGHT, index)
         self._out.write("\n")

     def write(self, spec, **kwargs):
         """Write out an ascii graph of the provided spec.

@@ -398,7 +395,7 @@ def write(self, spec, **kwargs):
         # Colors associated with each node in the DAG.
         # Edges are colored by the node they point to.
         self._name_to_color = dict((name, self.colors[i % len(self.colors)])
                                    for i, name in enumerate(topo_order))

         # Frontier tracks open edges of the graph as it's written out.
         self._frontier = [[spec.name]]

@@ -407,7 +404,8 @@ def write(self, spec, **kwargs):
             i = find(self._frontier, lambda f: len(f) > 1)
             if i >= 0:
-                # Expand frontier until there are enough columns for all children.
+                # Expand frontier until there are enough columns for all
+                # children.

                 # Figure out how many back connections there are and
                 # sort them so we do them in order

@@ -424,8 +422,9 @@ def write(self, spec, **kwargs):
                 prev_ends = []
                 for j, (b, d) in enumerate(back):
                     self._frontier[i].remove(d)
-                    if i-b > 1:
-                        self._back_edge_line(prev_ends, b, i, False, 'left-1')
+                    if i - b > 1:
+                        self._back_edge_line(prev_ends, b, i, False,
+                                             'left-1')
                         del prev_ends[:]
                     prev_ends.append(b)

@@ -439,12 +438,13 @@ def write(self, spec, **kwargs):
             elif len(self._frontier[i]) > 1:
                 # Expand forward after doing all back connections
-                if (i+1 < len(self._frontier) and len(self._frontier[i+1]) == 1
-                        and self._frontier[i+1][0] in self._frontier[i]):
+                if (i + 1 < len(self._frontier) and
+                        len(self._frontier[i + 1]) == 1 and
+                        self._frontier[i + 1][0] in self._frontier[i]):

                     # We need to connect to the element to the right.
                     # Keep lines straight by connecting directly and
                     # avoiding unnecessary expand/contract.
-                    name = self._frontier[i+1][0]
+                    name = self._frontier[i + 1][0]
                     self._frontier[i].remove(name)
                     self._merge_right_line(i)

@@ -458,9 +458,8 @@ def write(self, spec, **kwargs):
                     self._frontier.pop(i)
                     self._connect_deps(i, deps, "post-expand")

                 # Handle any remaining back edges to the right
-                j = i+1
+                j = i + 1
                 while j < len(self._frontier):
                     deps = self._frontier.pop(j)
                     if not self._connect_deps(j, deps, "back-from-right"):

@@ -477,9 +476,10 @@ def write(self, spec, **kwargs):
                 # Replace node with its dependencies
                 self._frontier.pop(i)
-                if node.dependencies:
-                    deps = sorted((d for d in node.dependencies), reverse=True)
-                    self._connect_deps(i, deps, "new-deps")  # anywhere.
+                if node.dependencies():
+                    deps = sorted((d.name for d in node.dependencies()),
+                                  reverse=True)
+                    self._connect_deps(i, deps, "new-deps")  # anywhere.

                 elif self._frontier:
                     self._collapse_line(i)

@@ -501,7 +501,6 @@ def graph_ascii(spec, **kwargs):
     graph.write(spec, color=color, out=out)


 def graph_dot(*specs, **kwargs):
     """Generate a graph in dot format of all provided specs.
==== next file ====
@@ -45,6 +45,7 @@
 from llnl.util.filesystem import join_path
 import spack


 @memoized
 def all_hook_modules():
     modules = []

@@ -58,6 +59,7 @@ def all_hook_modules():

 class HookRunner(object):

     def __init__(self, hook_name):
         self.hook_name = hook_name
==== next file ====
@@ -23,8 +23,6 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-import spack
-

 def pre_uninstall(pkg):
     assert(pkg.spec.concrete)
==== next file ====
@@ -23,6 +23,7 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 import os
+import stat
 import re

 import llnl.util.tty as tty

@@ -62,10 +63,21 @@ def filter_shebang(path):
     if re.search(r'^#!(/[^/]*)*lua\b', original):
         original = re.sub(r'^#', '--', original)

+    # Change non-writable files to be writable if needed.
+    saved_mode = None
+    if not os.access(path, os.W_OK):
+        st = os.stat(path)
+        saved_mode = st.st_mode
+        os.chmod(path, saved_mode | stat.S_IWRITE)
+
     with open(path, 'w') as new_file:
         new_file.write(new_sbang_line)
         new_file.write(original)

+    # Restore original permissions.
+    if saved_mode is not None:
+        os.chmod(path, saved_mode)
+
     tty.warn("Patched overlong shebang in %s" % path)
==== next file ====
@@ -40,9 +40,8 @@
 import spack.url as url
 import spack.fetch_strategy as fs
 from spack.spec import Spec
-from spack.stage import Stage
 from spack.version import *
-from spack.util.compression import extension, allowed_archive
+from spack.util.compression import allowed_archive


 def mirror_archive_filename(spec, fetcher):

@@ -52,10 +51,10 @@ def mirror_archive_filename(spec, fetcher):
     if isinstance(fetcher, fs.URLFetchStrategy):
         if fetcher.expand_archive:
-            # If we fetch this version with a URLFetchStrategy, use URL's archive type
+            # If we fetch with a URLFetchStrategy, use URL's archive type
             ext = url.downloaded_file_extension(fetcher.url)
         else:
-            # If the archive shouldn't be expanded, don't check for its extension.
+            # If the archive shouldn't be expanded, don't check extension.
             ext = None
     else:
         # Otherwise we'll make a .tar.gz ourselves

@@ -106,7 +105,9 @@ def get_matching_versions(specs, **kwargs):

 def suggest_archive_basename(resource):
     """
-    Return a tentative basename for an archive. Raise an exception if the name is among the allowed archive types.
+    Return a tentative basename for an archive.
+
+    Raises an exception if the name is not an allowed archive type.

     :param fetcher:
     :return:

@@ -170,7 +171,7 @@ def create(path, specs, **kwargs):
         'error': []
     }

-    # Iterate through packages and download all the safe tarballs for each of them
+    # Iterate through packages and download all safe tarballs for each
     for spec in version_specs:
         add_single_spec(spec, mirror_root, categories, **kwargs)

@@ -190,12 +191,15 @@ def add_single_spec(spec, mirror_root, categories, **kwargs):
                 fetcher = stage.fetcher
                 if ii == 0:
                     # create a subdirectory for the current package@version
-                    archive_path = os.path.abspath(join_path(mirror_root, mirror_archive_path(spec, fetcher)))
+                    archive_path = os.path.abspath(join_path(
+                        mirror_root, mirror_archive_path(spec, fetcher)))
                     name = spec.format("$_$@")
                 else:
                     resource = stage.resource
-                    archive_path = join_path(subdir, suggest_archive_basename(resource))
-                    name = "{resource} ({pkg}).".format(resource=resource.name, pkg=spec.format("$_$@"))
+                    archive_path = join_path(
+                        subdir, suggest_archive_basename(resource))
+                    name = "{resource} ({pkg}).".format(
+                        resource=resource.name, pkg=spec.format("$_$@"))
                 subdir = os.path.dirname(archive_path)
                 mkdirp(subdir)

@@ -217,15 +221,18 @@ def add_single_spec(spec, mirror_root, categories, **kwargs):
             categories['present'].append(spec)
         else:
             categories['mirrored'].append(spec)

     except Exception as e:
         if spack.debug:
             sys.excepthook(*sys.exc_info())
         else:
-            tty.warn("Error while fetching %s" % spec.format('$_$@'), e.message)
+            tty.warn("Error while fetching %s"
+                     % spec.format('$_$@'), e.message)
         categories['error'].append(spec)


 class MirrorError(spack.error.SpackError):
     """Superclass of all mirror-creation related errors."""

     def __init__(self, msg, long_msg=None):
         super(MirrorError, self).__init__(msg, long_msg)
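The extension logic in mirror_archive_filename reduces to three cases: trust the upstream URL's suffix when the fetcher will expand the archive, store unexpanded files without guessing a suffix, and default to .tar.gz for checkouts Spack archives itself. A rough sketch under those assumptions (archive_extension and the suffix list are hypothetical, not the real helpers):

    def archive_extension(url=None, expand=True):
        """Pick a mirror filename extension for a fetched source."""
        known = ('.tar.gz', '.tgz', '.tar.bz2', '.tar.xz', '.zip')
        if url is not None:
            if not expand:
                return None            # store the raw file as-is
            for ext in known:
                if url.endswith(ext):
                    return ext
            return None
        return '.tar.gz'               # VCS checkout: tarred up by Spack

    # archive_extension('http://example.com/foo-1.0.tar.bz2') -> '.tar.bz2'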
==== next file ====
@@ -272,13 +272,25 @@ def naming_scheme(self):

     @property
     def tokens(self):
+        """Tokens that can be substituted in environment variable values
+        and naming schemes
+        """
         tokens = {
             'name': self.spec.name,
             'version': self.spec.version,
-            'compiler': self.spec.compiler
+            'compiler': self.spec.compiler,
+            'prefix': self.spec.package.prefix
         }
         return tokens

+    @property
+    def upper_tokens(self):
+        """Tokens that can be substituted in environment variable names"""
+        upper_tokens = {
+            'name': self.spec.name.replace('-', '_').upper()
+        }
+        return upper_tokens
+
     @property
     def use_name(self):
         """

@@ -438,11 +450,17 @@ def prerequisite(self, spec):

     def process_environment_command(self, env):
         for command in env:
+            # Token expansion from configuration file
+            name = command.args.get('name', '').format(**self.upper_tokens)
+            value = str(command.args.get('value', '')).format(**self.tokens)
+            command.update_args(name=name, value=value)
+            # Format the line in the module file
             try:
                 yield self.environment_modifications_formats[type(
                     command)].format(**command.args)
             except KeyError:
-                message = 'Cannot handle command of type {command} : skipping request'  # NOQA: ignore=E501
+                message = ('Cannot handle command of type {command}: '
+                           'skipping request')
                 details = '{context} at {filename}:{lineno}'
                 tty.warn(message.format(command=type(command)))
                 tty.warn(details.format(**command.args))
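The two new format calls are plain str.format substitution: tokens supplies spec-derived values ({name}, {version}, {compiler}, {prefix}) for environment variable values, while upper_tokens supplies an uppercased, underscore-safe {name} for variable names. For a hypothetical spec and config entry, the expansion behaves like:

    # Hypothetical values, expanded the same way as in the code above:
    upper_tokens = {'name': 'py-numpy'.replace('-', '_').upper()}  # 'PY_NUMPY'
    tokens = {'name': 'py-numpy', 'version': '1.11.1'}

    env_name = '{name}_VERSION'.format(**upper_tokens)   # 'PY_NUMPY_VERSION'
    env_value = '{version}'.format(**tokens)             # '1.11.1'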
@@ -471,12 +489,14 @@ class Dotkit(EnvModule):
     path = join_path(spack.share_path, 'dotkit')
     environment_modifications_formats = {
         PrependPath: 'dk_alter {name} {value}\n',
+        RemovePath: 'dk_unalter {name} {value}\n',
         SetEnv: 'dk_setenv {name} {value}\n'
     }

     autoload_format = 'dk_op {module_file}\n'

-    default_naming_format = '{name}-{version}-{compiler.name}-{compiler.version}'  # NOQA: ignore=E501
+    default_naming_format = \
+        '{name}-{version}-{compiler.name}-{compiler.version}'

     @property
     def file_name(self):

@@ -502,7 +522,8 @@ def header(self):

     def prerequisite(self, spec):
         tty.warn('prerequisites: not supported by dotkit module files')
-        tty.warn('\tYou may want to check ~/.spack/modules.yaml')
+        tty.warn('\tYou may want to check %s/modules.yaml'
+                 % spack.user_config_path)
         return ''

@@ -510,9 +531,9 @@ class TclModule(EnvModule):
     name = 'tcl'
     path = join_path(spack.share_path, "modules")
     environment_modifications_formats = {
-        PrependPath: 'prepend-path --delim "{delim}" {name} \"{value}\"\n',
-        AppendPath: 'append-path --delim "{delim}" {name} \"{value}\"\n',
-        RemovePath: 'remove-path --delim "{delim}" {name} \"{value}\"\n',
+        PrependPath: 'prepend-path --delim "{separator}" {name} \"{value}\"\n',
+        AppendPath: 'append-path --delim "{separator}" {name} \"{value}\"\n',
+        RemovePath: 'remove-path --delim "{separator}" {name} \"{value}\"\n',
         SetEnv: 'setenv {name} \"{value}\"\n',
         UnsetEnv: 'unsetenv {name}\n'
     }

@@ -524,7 +545,8 @@ class TclModule(EnvModule):
     prerequisite_format = 'prereq {module_file}\n'

-    default_naming_format = '{name}-{version}-{compiler.name}-{compiler.version}'  # NOQA: ignore=E501
+    default_naming_format = \
+        '{name}-{version}-{compiler.name}-{compiler.version}'

     @property
     def file_name(self):

@@ -535,7 +557,7 @@ def header(self):
         timestamp = datetime.datetime.now()
         # TCL Modulefile header
         header = '#%Module1.0\n'
-        header += '## Module file created by spack (https://github.com/LLNL/spack) on %s\n' % timestamp  # NOQA: ignore=E501
+        header += '## Module file created by spack (https://github.com/LLNL/spack) on %s\n' % timestamp
         header += '##\n'
         header += '## %s\n' % self.spec.short_spec
         header += '##\n'

@@ -565,10 +587,12 @@ def module_specific_content(self, configuration):
             for naming_dir, conflict_dir in zip(
                     self.naming_scheme.split('/'), item.split('/')):
                 if naming_dir != conflict_dir:
-                    message = 'conflict scheme does not match naming scheme [{spec}]\n\n'  # NOQA: ignore=E501
+                    message = 'conflict scheme does not match naming '
+                    message += 'scheme [{spec}]\n\n'
                     message += 'naming scheme : "{nformat}"\n'
                     message += 'conflict scheme : "{cformat}"\n\n'
-                    message += '** You may want to check your `modules.yaml` configuration file **\n'  # NOQA: ignore=E501
+                    message += '** You may want to check your '
+                    message += '`modules.yaml` configuration file **\n'
                     tty.error(message.format(spec=self.spec,
                                              nformat=self.naming_scheme,
                                              cformat=item))
==== next file ====
@@ -43,15 +43,13 @@
 depending on the scenario, regular old conditionals might be clearer,
 so package authors should use their judgement.
 """
-import sys
 import functools
-import collections

 from llnl.util.lang import *

 import spack.architecture
 import spack.error
-from spack.spec import parse_anonymous_spec, Spec
+from spack.spec import parse_anonymous_spec


 class SpecMultiMethod(object):

@@ -89,13 +87,13 @@ class SpecMultiMethod(object):
     See the docs for decorators below for more details.
     """

     def __init__(self, default=None):
         self.method_list = []
         self.default = default
         if default:
             functools.update_wrapper(self, default)

     def register(self, spec, method):
         """Register a version of a method for a particular sys_type."""
         self.method_list.append((spec, method))

@@ -105,12 +103,10 @@ def register(self, spec, method):
         else:
             assert(self.__name__ == method.__name__)

     def __get__(self, obj, objtype):
         """This makes __call__ support instance methods."""
         return functools.partial(self.__call__, obj)

     def __call__(self, package_self, *args, **kwargs):
         """Find the first method with a spec that matches the
            package's spec. If none is found, call the default

@@ -127,7 +123,6 @@ def __call__(self, package_self, *args, **kwargs):
             type(package_self), self.__name__, spec,
             [m[0] for m in self.method_list])

     def __str__(self):
         return "SpecMultiMethod {\n\tdefault: %s,\n\tspecs: %s\n}" % (
             self.default, self.method_list)

@@ -195,11 +190,13 @@ def install(self, prefix):
        platform-specific versions. There's not much we can do to get
        around this because of the way decorators work.
     """

     def __init__(self, spec):
         pkg = get_calling_module_name()
         if spec is True:
             spec = pkg
-        self.spec = parse_anonymous_spec(spec, pkg) if spec is not False else None
+        self.spec = (parse_anonymous_spec(spec, pkg)
+                     if spec is not False else None)

     def __call__(self, method):
         # Get the first definition of the method in the calling scope

@@ -218,12 +215,14 @@ def __call__(self, method):

 class MultiMethodError(spack.error.SpackError):
     """Superclass for multimethod dispatch errors"""

     def __init__(self, message):
         super(MultiMethodError, self).__init__(message)


 class NoSuchMethodError(spack.error.SpackError):
     """Raised when we can't find a version of a multi-method."""

     def __init__(self, cls, method_name, spec, possible_specs):
         super(NoSuchMethodError, self).__init__(
             "Package %s does not support %s called with %s. Options are: %s"
==== next file ====
@@ -7,6 +7,7 @@
 from spack.util.multiproc import parmap
 import spack.compilers


 class Cnl(OperatingSystem):
     """ Compute Node Linux (CNL) is the operating system used for the Cray XC
     series super computers. It is a very stripped down version of GNU/Linux.

@@ -14,22 +15,25 @@ class Cnl(OperatingSystem):
     modules. If updated, user must make sure that version and name are
     updated to indicate that OS has been upgraded (or downgraded)
     """

     def __init__(self):
         name = 'CNL'
         version = '10'
         super(Cnl, self).__init__(name, version)

+    def __str__(self):
+        return self.name
+
     def find_compilers(self, *paths):
         types = spack.compilers.all_compiler_types()
-        compiler_lists = parmap(lambda cmp_cls: self.find_compiler(cmp_cls, *paths), types)
+        compiler_lists = parmap(
+            lambda cmp_cls: self.find_compiler(cmp_cls, *paths), types)

         # ensure all the version calls we made are cached in the parent
         # process, as well. This speeds up Spack a lot.
-        clist = reduce(lambda x,y: x+y, compiler_lists)
+        clist = reduce(lambda x, y: x + y, compiler_lists)
         return clist

     def find_compiler(self, cmp_cls, *paths):
         compilers = []
         if cmp_cls.PrgEnv:

@@ -46,12 +50,15 @@ def find_compiler(self, cmp_cls, *paths):
             module_paths = ':' + ':'.join(p for p in paths)
             os.environ['MODULEPATH'] = module_paths

-            output = modulecmd('avail', cmp_cls.PrgEnv_compiler, output=str, error=str)
-            matches = re.findall(r'(%s)/([\d\.]+[\d])' % cmp_cls.PrgEnv_compiler, output)
+            output = modulecmd(
+                'avail', cmp_cls.PrgEnv_compiler, output=str, error=str)
+            matches = re.findall(
+                r'(%s)/([\d\.]+[\d])' % cmp_cls.PrgEnv_compiler, output)

             for name, version in matches:
                 v = version
-                comp = cmp_cls(spack.spec.CompilerSpec(name + '@' + v), self,
-                               ['cc', 'CC', 'ftn'], [cmp_cls.PrgEnv, name +'/' + v])
+                comp = cmp_cls(
+                    spack.spec.CompilerSpec(name + '@' + v), self,
+                    ['cc', 'CC', 'ftn'], [cmp_cls.PrgEnv, name + '/' + v])

                 compilers.append(comp)
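The reformatted re.findall parses `module avail` output: it captures the PrgEnv compiler name and a dotted version from entries such as gcc/5.3.0. For example, with a made-up module listing:

    import re

    output = "gcc/4.9.3 gcc/5.3.0(default) intel/16.0.2"
    matches = re.findall(r'(gcc)/([\d\.]+[\d])', output)
    # -> [('gcc', '4.9.3'), ('gcc', '5.3.0')]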
==== next file ====
@@ -2,6 +2,7 @@
 import platform as py_platform
 from spack.architecture import OperatingSystem


 class LinuxDistro(OperatingSystem):
     """ This class will represent the autodetected operating system
         for a Linux System. Since there are many different flavors of

@@ -9,6 +10,7 @@ class LinuxDistro(OperatingSystem):
         autodetection using the python module platform and the method
         platform.dist()
     """

     def __init__(self):
         distname, version, _ = py_platform.linux_distribution(
             full_distribution_name=False)
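platform.linux_distribution here is the Python 2 stdlib call (deprecated in 3.5 and removed in 3.8); with full_distribution_name=False it returns a short (distname, version, id) tuple, so detection amounts to something like:

    import platform as py_platform

    distname, version, _ = py_platform.linux_distribution(
        full_distribution_name=False)
    # e.g. distname='ubuntu', version='14.04' -- values vary by distribution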
==== next file ====
@@ -1,6 +1,7 @@
 import platform as py_platform
 from spack.architecture import OperatingSystem


 class MacOs(OperatingSystem):
     """This class represents the macOS operating system. This will be
     auto detected using the python platform.mac_ver. The macOS
==== next file ====
@@ -39,6 +39,7 @@
 import os
 import platform
 import re
+import sys
 import textwrap
 import time
 from StringIO import StringIO

@@ -64,7 +65,7 @@
 from spack.stage import Stage, ResourceStage, StageComposite
 from spack.util.compression import allowed_archive
 from spack.util.environment import dump_environment
-from spack.util.executable import ProcessError, which
+from spack.util.executable import ProcessError
 from spack.version import *

 """Allowed URL schemes for spack packages."""

@@ -330,12 +331,10 @@ def install(self, spec, prefix):
     Most software comes in nicely packaged tarballs, like this one:

         http://www.cmake.org/files/v2.8/cmake-2.8.10.2.tar.gz

     Taking a page from homebrew, spack deduces pretty much everything it
     needs to know from the URL above. If you simply type this:

         spack create http://www.cmake.org/files/v2.8/cmake-2.8.10.2.tar.gz

     Spack will download the tarball, generate an md5 hash, figure out the
     version and the name of the package from the URL, and create a new
     package file for you with all the names and attributes set correctly.

@@ -785,50 +784,12 @@ def activated(self):
         exts = spack.install_layout.extension_map(self.extendee_spec)
         return (self.name in exts) and (exts[self.name] == self.spec)

-    def preorder_traversal(self, visited=None, **kwargs):
-        """This does a preorder traversal of the package's dependence DAG."""
-        virtual = kwargs.get("virtual", False)
-
-        if visited is None:
-            visited = set()
-
-        if self.name in visited:
-            return
-        visited.add(self.name)
-
-        if not virtual:
-            yield self
-
-        for name in sorted(self.dependencies.keys()):
-            dep_spec = self.get_dependency(name)
-            spec = dep_spec.spec
-
-            # Currently, we do not descend into virtual dependencies, as this
-            # makes doing a sensible traversal much harder. We just assume
-            # that ANY of the virtual deps will work, which might not be true
-            # (due to conflicts or unsatisfiable specs). For now this is ok,
-            # but we might want to reinvestigate if we start using a lot of
-            # complicated virtual dependencies
-            # TODO: reinvestigate this.
-            if spec.virtual:
-                if virtual:
-                    yield spec
-                continue
-
-            for pkg in spack.repo.get(name).preorder_traversal(visited,
-                                                               **kwargs):
-                yield pkg
-
     def provides(self, vpkg_name):
         """
         True if this package provides a virtual package with the specified name
         """
         return any(s.name == vpkg_name for s in self.provided)

-    def virtual_dependencies(self, visited=None):
-        for spec in sorted(set(self.preorder_traversal(virtual=True))):
-            yield spec
-
     @property
     def installed(self):
         return os.path.isdir(self.prefix)

@@ -898,13 +859,13 @@ def do_fetch(self, mirror_only=False):
                 # Ask the user whether to skip the checksum if we're
                 # interactive, but just fail if non-interactive.
-                checksum_msg = "Add a checksum or use --no-checksum to skip this check."  # NOQA: ignore=E501
+                ck_msg = "Add a checksum or use --no-checksum to skip this check."
                 ignore_checksum = False
                 if sys.stdout.isatty():
                     ignore_checksum = tty.get_yes_or_no("  Fetch anyway?",
                                                         default=False)
                     if ignore_checksum:
-                        tty.msg("Fetching with no checksum.", checksum_msg)
+                        tty.msg("Fetching with no checksum.", ck_msg)

                 if not ignore_checksum:
                     raise FetchError("Will not fetch %s" %

@@ -1396,7 +1357,15 @@ def setup_dependent_package(self, module, dependent_spec):
     def do_uninstall(self, force=False):
         if not self.installed:
-            raise InstallError(str(self.spec) + " is not installed.")
+            # prefix may not exist, but DB may be inconsistent. Try to fix by
+            # removing, but omit hooks.
+            specs = spack.installed_db.query(self.spec, installed=True)
+            if specs:
+                spack.installed_db.remove(specs[0])
+                tty.msg("Removed stale DB entry for %s" % self.spec.short_spec)
+                return
+            else:
+                raise InstallError(str(self.spec) + " is not installed.")

         if not force:
             dependents = self.installed_dependents

@@ -1495,9 +1464,10 @@ def do_deactivate(self, **kwargs):
                 continue
             for dep in aspec.traverse(deptype='run'):
                 if self.spec == dep:
+                    msg = ("Cannot deactivate %s because %s is activated "
+                           "and depends on it.")
                     raise ActivationError(
-                        "Cannot deactivate %s because %s is activated and depends on it."  # NOQA: ignore=E501
-                        % (self.spec.short_spec, aspec.short_spec))
+                        msg % (self.spec.short_spec, aspec.short_spec))

         self.extendee_spec.package.deactivate(self, **self.extendee_args)

@@ -1726,6 +1696,7 @@ def use_cray_compiler_names():
     os.environ['FC'] = 'ftn'
     os.environ['F77'] = 'ftn'


 def flatten_dependencies(spec, flat_dir):
     """Make each dependency of spec present in dir via symlink."""
     for dep in spec.traverse(root=False):

@@ -1890,12 +1861,14 @@ class ExtensionError(PackageError):

 class ExtensionConflictError(ExtensionError):

     def __init__(self, path):
         super(ExtensionConflictError, self).__init__(
             "Extension blocked by file: %s" % path)


 class ActivationError(ExtensionError):

     def __init__(self, msg, long_msg=None):
         super(ActivationError, self).__init__(msg, long_msg)
==== next file ====
@@ -29,6 +29,7 @@

 class Token:
     """Represents tokens; generated from input by lexer and fed to parse()."""

     def __init__(self, type, value='', start=0, end=0):
         self.type = type
         self.value = value

@@ -51,11 +52,13 @@ def __cmp__(self, other):

 class Lexer(object):
     """Base class for Lexers that keep track of line numbers."""

     def __init__(self, lexicon):
         self.scanner = re.Scanner(lexicon)

     def token(self, type, value=''):
-        return Token(type, value, self.scanner.match.start(0), self.scanner.match.end(0))
+        return Token(type, value,
+                     self.scanner.match.start(0), self.scanner.match.end(0))

     def lex(self, text):
         tokens, remainder = self.scanner.scan(text)

@@ -66,10 +69,11 @@ def lex(self, text):

 class Parser(object):
     """Base class for simple recursive descent parsers."""

     def __init__(self, lexer):
-        self.tokens = iter([])  # iterators over tokens, handled in order. Starts empty.
-        self.token = Token(None)  # last accepted token starts at beginning of file
+        self.tokens = iter([])    # iterators over tokens, handled in order.
+        self.token = Token(None)  # last accepted token
         self.next = None          # next token
         self.lexer = lexer
         self.text = None

@@ -82,11 +86,12 @@ def gettok(self):

     def push_tokens(self, iterable):
         """Adds all tokens in some iterable to the token stream."""
-        self.tokens = itertools.chain(iter(iterable), iter([self.next]), self.tokens)
+        self.tokens = itertools.chain(
+            iter(iterable), iter([self.next]), self.tokens)
         self.gettok()

     def accept(self, id):
-        """Puts the next symbol in self.token if we like it. Then calls gettok()"""
+        """Put the next symbol in self.token if accepted, then call gettok()"""
         if self.next and self.next.is_a(id):
             self.token = self.next
             self.gettok()

@@ -124,9 +129,9 @@ def parse(self, text):
         return self.do_parse()


 class ParseError(spack.error.SpackError):
     """Raised when we don't hit an error while parsing."""

     def __init__(self, message, string, pos):
         super(ParseError, self).__init__(message)
         self.string = string

@@ -135,5 +140,6 @@ def __init__(self, message, string, pos):

 class LexError(ParseError):
     """Raised when we don't know how to lex something."""

     def __init__(self, message, string, pos):
         super(LexError, self).__init__(message, string, pos)
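The Lexer/Parser pair is a textbook recursive descent setup: accept() consumes the next token only when it matches, and push_tokens() lets the parser splice extra lookahead back into the stream. A minimal sketch of the consuming side (TinyParser and expect are illustrative, not Spack's classes):

    class TinyParser(object):
        """Recursive descent over a pre-lexed stream of (kind, value) tokens."""

        def __init__(self, tokens):
            self.tokens = iter(tokens)
            self.token = None   # last accepted token
            self.next = None    # one-token lookahead
            self.gettok()

        def gettok(self):
            self.next = next(self.tokens, None)

        def accept(self, kind):
            # Consume the next token only if it has the expected kind.
            if self.next and self.next[0] == kind:
                self.token = self.next
                self.gettok()
                return True
            return False

        def expect(self, kind):
            # Like accept, but a mismatch is a parse error.
            if not self.accept(kind):
                raise SyntaxError("expected %s, got %r" % (kind, self.next))

    # p = TinyParser([('ID', 'mpich'), ('AT', '@'), ('VER', '3.0.4')])
    # p.expect('ID'); p.accept('AT'); p.expect('VER')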
==== next file ====
@@ -24,7 +24,6 @@
 ##############################################################################
 import os

-import llnl.util.tty as tty
 from llnl.util.filesystem import join_path

 import spack

@@ -59,7 +58,6 @@ def __init__(self, pkg, path_or_url, level):
         if not os.path.isfile(self.path):
             raise NoSuchPatchFileError(pkg_name, self.path)

     def apply(self, stage):
         """Fetch this patch, if necessary, and apply it to the source
         code in the supplied stage.

@@ -84,9 +82,9 @@ def apply(self, stage):
         patch_stage.destroy()


 class NoSuchPatchFileError(spack.error.SpackError):
     """Raised when user specifies a patch file that doesn't exist."""

     def __init__(self, package, path):
         super(NoSuchPatchFileError, self).__init__(
             "No such patch file for package %s: %s" % (package, path))
(Some files were not shown because too many files have changed in this diff.)