Merge branch 'develop' of https://github.com/LLNL/spack into features/install_with_phases

Conflicts:
	lib/spack/spack/package.py
This commit is contained in:
alalazo 2016-07-20 21:02:19 +02:00
commit 40cb314638
58 changed files with 1566 additions and 737 deletions

5
.gitignore vendored
View file

@ -1,11 +1,12 @@
/var/spack/stage /var/spack/stage
/var/spack/cache /var/spack/cache
*.pyc *.pyc
/opt/ /opt
*~ *~
.DS_Store .DS_Store
.idea .idea
/etc/spack/* /etc/spack/licenses
/etc/spack/*.yaml
/etc/spackconfig /etc/spackconfig
/share/spack/dotkit /share/spack/dotkit
/share/spack/modules /share/spack/modules

View file

@ -1,8 +1,17 @@
# ------------------------------------------------------------------------- # -------------------------------------------------------------------------
# This is the default spack module files generation configuration. # This is the default configuration for Spack's module file generation.
# #
# Changes to this file will affect all users of this spack install, # Settings here are versioned with Spack and are intended to provide
# although users can override these settings in their ~/.spack/modules.yaml. # sensible defaults out of the box. Spack maintainers should edit this
# file to keep it current.
#
# Users can override these settings by editing the following files.
#
# Per-spack-instance settings (overrides defaults):
# $SPACK_ROOT/etc/spack/modules.yaml
#
# Per-user settings (overrides default and site settings):
# ~/.spack/modules.yaml
# ------------------------------------------------------------------------- # -------------------------------------------------------------------------
modules: modules:
enable: enable:

View file

@ -0,0 +1,21 @@
# -------------------------------------------------------------------------
# This file controls default concretization preferences for Spack.
#
# Settings here are versioned with Spack and are intended to provide
# sensible defaults out of the box. Spack maintainers should edit this
# file to keep it current.
#
# Users can override these settings by editing the following files.
#
# Per-spack-instance settings (overrides defaults):
# $SPACK_ROOT/etc/spack/packages.yaml
#
# Per-user settings (overrides default and site settings):
# ~/.spack/packages.yaml
# -------------------------------------------------------------------------
packages:
all:
providers:
mpi: [openmpi, mpich]
blas: [openblas]
lapack: [openblas]

View file

@ -0,0 +1,14 @@
# -------------------------------------------------------------------------
# This is the default spack repository configuration. It includes the
# builtin spack package repository.
#
# Users can override these settings by editing the following files.
#
# Per-spack-instance settings (overrides defaults):
# $SPACK_ROOT/etc/spack/repos.yaml
#
# Per-user settings (overrides default and site settings):
# ~/.spack/repos.yaml
# -------------------------------------------------------------------------
repos:
- $spack/var/spack/repos/builtin

View file

@ -1,8 +0,0 @@
# -------------------------------------------------------------------------
# This is the default spack repository configuration.
#
# Changes to this file will affect all users of this spack install,
# although users can override these settings in their ~/.spack/repos.yaml.
# -------------------------------------------------------------------------
repos:
- $spack/var/spack/repos/builtin

4
lib/spack/env/cc vendored
View file

@ -110,13 +110,13 @@ case "$command" in
comp="CXX" comp="CXX"
lang_flags=CXX lang_flags=CXX
;; ;;
f90|fc|f95|gfortran|ifort|pgfortran|xlf90|nagfor) ftn|f90|fc|f95|gfortran|ifort|pgfortran|xlf90|nagfor)
command="$SPACK_FC" command="$SPACK_FC"
language="Fortran 90" language="Fortran 90"
comp="FC" comp="FC"
lang_flags=F lang_flags=F
;; ;;
f77|gfortran|ifort|pgfortran|xlf|nagfor) f77|gfortran|ifort|pgfortran|xlf|nagfor|ftn)
command="$SPACK_F77" command="$SPACK_F77"
language="Fortran 77" language="Fortran 77"
comp="F77" comp="F77"

1
lib/spack/env/craype/CC vendored Symbolic link
View file

@ -0,0 +1 @@
../cc

1
lib/spack/env/craype/cc vendored Symbolic link
View file

@ -0,0 +1 @@
../cc

1
lib/spack/env/craype/ftn vendored Symbolic link
View file

@ -0,0 +1 @@
../cc

View file

@ -42,7 +42,7 @@
'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink', 'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink',
'set_executable', 'copy_mode', 'unset_executable_mode', 'set_executable', 'copy_mode', 'unset_executable_mode',
'remove_dead_links', 'remove_linked_tree', 'find_library_path', 'remove_dead_links', 'remove_linked_tree', 'find_library_path',
'fix_darwin_install_name', 'to_link_flags'] 'fix_darwin_install_name', 'to_link_flags', 'to_lib_name']
def filter_file(regex, repl, *filenames, **kwargs): def filter_file(regex, repl, *filenames, **kwargs):
@ -431,6 +431,13 @@ def fix_darwin_install_name(path):
break break
def to_lib_name(library):
"""Transforms a path to the library /path/to/lib<name>.xyz into <name>
"""
# Assume libXYZ.suffix
return os.path.basename(library)[3:].split(".")[0]
def to_link_flags(library): def to_link_flags(library):
"""Transforms a path to a <library> into linking flags -L<dir> -l<name>. """Transforms a path to a <library> into linking flags -L<dir> -l<name>.
@ -438,8 +445,7 @@ def to_link_flags(library):
A string of linking flags. A string of linking flags.
""" """
dir = os.path.dirname(library) dir = os.path.dirname(library)
# Assume libXYZ.suffix name = to_lib_name(library)
name = os.path.basename(library)[3:].split(".")[0]
res = '-L%s -l%s' % (dir, name) res = '-L%s -l%s' % (dir, name)
return res return res

View file

@ -27,16 +27,18 @@
import sys import sys
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.lang import attr_setdefault
import spack import spack
import spack.spec
import spack.config import spack.config
import spack.spec
from llnl.util.lang import *
from llnl.util.tty.colify import *
from llnl.util.tty.color import *
# #
# Settings for commands that modify configuration # Settings for commands that modify configuration
# #
# Commands that modify confguration By default modify the *highest* priority scope. # Commands that modify confguration By default modify the *highest*
# priority scope.
default_modify_scope = spack.config.highest_precedence_scope().name default_modify_scope = spack.config.highest_precedence_scope().name
# Commands that list confguration list *all* scopes by default. # Commands that list confguration list *all* scopes by default.
default_list_scope = None default_list_scope = None
@ -107,11 +109,11 @@ def parse_specs(args, **kwargs):
return specs return specs
except spack.parse.ParseError, e: except spack.parse.ParseError as e:
tty.error(e.message, e.string, e.pos * " " + "^") tty.error(e.message, e.string, e.pos * " " + "^")
sys.exit(1) sys.exit(1)
except spack.spec.SpecError, e: except spack.spec.SpecError as e:
tty.error(e.message) tty.error(e.message)
sys.exit(1) sys.exit(1)
@ -145,3 +147,97 @@ def disambiguate_spec(spec):
tty.die(*args) tty.die(*args)
return matching_specs[0] return matching_specs[0]
def ask_for_confirmation(message):
while True:
tty.msg(message + '[y/n]')
choice = raw_input().lower()
if choice == 'y':
break
elif choice == 'n':
raise SystemExit('Operation aborted')
tty.warn('Please reply either "y" or "n"')
def gray_hash(spec, length):
return colorize('@K{%s}' % spec.dag_hash(length))
def display_specs(specs, **kwargs):
mode = kwargs.get('mode', 'short')
hashes = kwargs.get('long', False)
namespace = kwargs.get('namespace', False)
flags = kwargs.get('show_flags', False)
variants = kwargs.get('variants', False)
hlen = 7
if kwargs.get('very_long', False):
hashes = True
hlen = None
nfmt = '.' if namespace else '_'
ffmt = '$%+' if flags else ''
vfmt = '$+' if variants else ''
format_string = '$%s$@%s%s' % (nfmt, ffmt, vfmt)
# Make a dict with specs keyed by architecture and compiler.
index = index_by(specs, ('architecture', 'compiler'))
# Traverse the index and print out each package
for i, (architecture, compiler) in enumerate(sorted(index)):
if i > 0:
print
header = "%s{%s} / %s{%s}" % (spack.spec.architecture_color,
architecture, spack.spec.compiler_color,
compiler)
tty.hline(colorize(header), char='-')
specs = index[(architecture, compiler)]
specs.sort()
abbreviated = [s.format(format_string, color=True) for s in specs]
if mode == 'paths':
# Print one spec per line along with prefix path
width = max(len(s) for s in abbreviated)
width += 2
format = " %%-%ds%%s" % width
for abbrv, spec in zip(abbreviated, specs):
if hashes:
print(gray_hash(spec, hlen), )
print(format % (abbrv, spec.prefix))
elif mode == 'deps':
for spec in specs:
print(spec.tree(
format=format_string,
color=True,
indent=4,
prefix=(lambda s: gray_hash(s, hlen)) if hashes else None))
elif mode == 'short':
# Print columns of output if not printing flags
if not flags:
def fmt(s):
string = ""
if hashes:
string += gray_hash(s, hlen) + ' '
string += s.format('$-%s$@%s' % (nfmt, vfmt), color=True)
return string
colify(fmt(s) for s in specs)
# Print one entry per line if including flags
else:
for spec in specs:
# Print the hash if necessary
hsh = gray_hash(spec, hlen) + ' ' if hashes else ''
print(hsh + spec.format(format_string, color=True) + '\n')
else:
raise ValueError(
"Invalid mode for display_specs: %s. Must be one of (paths,"
"deps, short)." % mode) # NOQA: ignore=E501

View file

@ -23,7 +23,6 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################## ##############################################################################
import os import os
from subprocess import check_call
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.filesystem import join_path, mkdirp from llnl.util.filesystem import join_path, mkdirp
@ -31,26 +30,49 @@
import spack import spack
from spack.util.executable import which from spack.util.executable import which
_SPACK_UPSTREAM = 'https://github.com/llnl/spack'
description = "Create a new installation of spack in another prefix" description = "Create a new installation of spack in another prefix"
def setup_parser(subparser): def setup_parser(subparser):
subparser.add_argument('prefix', help="names of prefix where we should install spack") subparser.add_argument(
'-r', '--remote', action='store', dest='remote',
help="name of the remote to bootstrap from", default='origin')
subparser.add_argument(
'prefix',
help="names of prefix where we should install spack")
def get_origin_url(): def get_origin_info(remote):
git_dir = join_path(spack.prefix, '.git') git_dir = join_path(spack.prefix, '.git')
git = which('git', required=True) git = which('git', required=True)
try:
branch = git('symbolic-ref', '--short', 'HEAD', output=str)
except ProcessError:
branch = 'develop'
tty.warn('No branch found; using default branch: %s' % branch)
if remote == 'origin' and \
branch not in ('master', 'develop'):
branch = 'develop'
tty.warn('Unknown branch found; using default branch: %s' % branch)
try:
origin_url = git( origin_url = git(
'--git-dir=%s' % git_dir, 'config', '--get', 'remote.origin.url', '--git-dir=%s' % git_dir,
'config', '--get', 'remote.%s.url' % remote,
output=str) output=str)
return origin_url.strip() except ProcessError:
origin_url = _SPACK_UPSTREAM
tty.warn('No git repository found; '
'using default upstream URL: %s' % origin_url)
return (origin_url.strip(), branch.strip())
def bootstrap(parser, args): def bootstrap(parser, args):
origin_url = get_origin_url() origin_url, branch = get_origin_info(args.remote)
prefix = args.prefix prefix = args.prefix
tty.msg("Fetching spack from origin: %s" % origin_url) tty.msg("Fetching spack from '%s': %s" % (args.remote, origin_url))
if os.path.isfile(prefix): if os.path.isfile(prefix):
tty.die("There is already a file at %s" % prefix) tty.die("There is already a file at %s" % prefix)
@ -62,7 +84,8 @@ def bootstrap(parser, args):
files_in_the_way = os.listdir(prefix) files_in_the_way = os.listdir(prefix)
if files_in_the_way: if files_in_the_way:
tty.die("There are already files there! Delete these files before boostrapping spack.", tty.die("There are already files there! "
"Delete these files before boostrapping spack.",
*files_in_the_way) *files_in_the_way)
tty.msg("Installing:", tty.msg("Installing:",
@ -73,8 +96,10 @@ def bootstrap(parser, args):
git = which('git', required=True) git = which('git', required=True)
git('init', '--shared', '-q') git('init', '--shared', '-q')
git('remote', 'add', 'origin', origin_url) git('remote', 'add', 'origin', origin_url)
git('fetch', 'origin', 'master:refs/remotes/origin/master', '-n', '-q') git('fetch', 'origin', '%s:refs/remotes/origin/%s' % (branch, branch),
git('reset', '--hard', 'origin/master', '-q') '-n', '-q')
git('reset', '--hard', 'origin/%s' % branch, '-q')
git('checkout', '-B', branch, 'origin/%s' % branch, '-q')
tty.msg("Successfully created a new spack in %s" % prefix, tty.msg("Successfully created a new spack in %s" % prefix,
"Run %s/bin/spack to use this installation." % prefix) "Run %s/bin/spack to use this installation." % prefix)

View file

@ -42,7 +42,8 @@ def setup_parser(subparser):
'--keep-stage', action='store_true', dest='keep_stage', '--keep-stage', action='store_true', dest='keep_stage',
help="Don't clean up staging area when command completes.") help="Don't clean up staging area when command completes.")
subparser.add_argument( subparser.add_argument(
'versions', nargs=argparse.REMAINDER, help='Versions to generate checksums for') 'versions', nargs=argparse.REMAINDER,
help='Versions to generate checksums for')
def get_checksums(versions, urls, **kwargs): def get_checksums(versions, urls, **kwargs):
@ -59,10 +60,10 @@ def get_checksums(versions, urls, **kwargs):
with Stage(url, keep=keep_stage) as stage: with Stage(url, keep=keep_stage) as stage:
stage.fetch() stage.fetch()
if i == 0 and first_stage_function: if i == 0 and first_stage_function:
first_stage_function(stage) first_stage_function(stage, url)
hashes.append((version, hashes.append((version, spack.util.crypto.checksum(
spack.util.crypto.checksum(hashlib.md5, stage.archive_file))) hashlib.md5, stage.archive_file)))
i += 1 i += 1
except FailedDownloadError as e: except FailedDownloadError as e:
tty.msg("Failed to fetch %s" % url) tty.msg("Failed to fetch %s" % url)
@ -79,12 +80,12 @@ def checksum(parser, args):
# If the user asked for specific versions, use those. # If the user asked for specific versions, use those.
if args.versions: if args.versions:
versions = {} versions = {}
for v in args.versions: for version in args.versions:
v = ver(v) version = ver(version)
if not isinstance(v, Version): if not isinstance(version, Version):
tty.die("Cannot generate checksums for version lists or " + tty.die("Cannot generate checksums for version lists or " +
"version ranges. Use unambiguous versions.") "version ranges. Use unambiguous versions.")
versions[v] = pkg.url_for_version(v) versions[version] = pkg.url_for_version(version)
else: else:
versions = pkg.fetch_remote_versions() versions = pkg.fetch_remote_versions()
if not versions: if not versions:
@ -111,5 +112,7 @@ def checksum(parser, args):
if not version_hashes: if not version_hashes:
tty.die("Could not fetch any versions for %s" % pkg.name) tty.die("Could not fetch any versions for %s" % pkg.name)
version_lines = [" version('%s', '%s')" % (v, h) for v, h in version_hashes] version_lines = [
" version('%s', '%s')" % (v, h) for v, h in version_hashes
]
tty.msg("Checksummed new versions of %s:" % pkg.name, *version_lines) tty.msg("Checksummed new versions of %s:" % pkg.name, *version_lines)

View file

@ -0,0 +1,24 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################

View file

@ -0,0 +1,96 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import argparse
import spack.modules
from spack.util.pattern import Bunch
__all__ = ['add_common_arguments']
_arguments = {}
def add_common_arguments(parser, list_of_arguments):
for argument in list_of_arguments:
if argument not in _arguments:
message = 'Trying to add the non existing argument "{0}" to a command' # NOQA: ignore=E501
raise KeyError(message.format(argument))
x = _arguments[argument]
parser.add_argument(*x.flags, **x.kwargs)
class ConstraintAction(argparse.Action):
"""Constructs a list of specs based on a constraint given on the command line
An instance of this class is supposed to be used as an argument action
in a parser. It will read a constraint and will attach a list of matching
specs to the namespace
"""
qualifiers = {}
def __call__(self, parser, namespace, values, option_string=None):
# Query specs from command line
d = self.qualifiers.get(namespace.subparser_name, {})
specs = [s for s in spack.installed_db.query(**d)]
values = ' '.join(values)
if values:
specs = [x for x in specs if x.satisfies(values, strict=True)]
namespace.specs = specs
parms = Bunch(
flags=('constraint',),
kwargs={
'nargs': '*',
'help': 'Constraint to select a subset of installed packages',
'action': ConstraintAction
})
_arguments['constraint'] = parms
parms = Bunch(
flags=('-m', '--module-type'),
kwargs={
'help': 'Type of module files',
'default': 'tcl',
'choices': spack.modules.module_types
})
_arguments['module_type'] = parms
parms = Bunch(
flags=('-y', '--yes-to-all'),
kwargs={
'action': 'store_true',
'dest': 'yes_to_all',
'help': 'Assume "yes" is the answer to every confirmation asked to the user.' # NOQA: ignore=E501
})
_arguments['yes_to_all'] = parms
parms = Bunch(
flags=('-r', '--dependencies'),
kwargs={
'action': 'store_true',
'dest': 'recurse_dependencies',
'help': 'Recursively traverse spec dependencies'
})
_arguments['recurse_dependencies'] = parms

View file

@ -103,54 +103,19 @@ def install(self, spec, prefix):
${install} ${install}
""") """)
def make_version_calls(ver_hash_tuples):
"""Adds a version() call to the package for each version found."""
max_len = max(len(str(v)) for v, h in ver_hash_tuples)
format = " version(%%-%ds, '%%s')" % (max_len + 2)
return '\n'.join(format % ("'%s'" % v, h) for v, h in ver_hash_tuples)
def setup_parser(subparser):
subparser.add_argument('url', nargs='?', help="url of package archive")
subparser.add_argument(
'--keep-stage', action='store_true',
help="Don't clean up staging area when command completes.")
subparser.add_argument(
'-n', '--name', dest='alternate_name', default=None, metavar='NAME',
help="Override the autodetected name for the created package.")
subparser.add_argument(
'-r', '--repo', default=None,
help="Path to a repository where the package should be created.")
subparser.add_argument(
'-N', '--namespace',
help="Specify a namespace for the package. Must be the namespace of "
"a repository registered with Spack.")
subparser.add_argument(
'-f', '--force', action='store_true', dest='force',
help="Overwrite any existing package file with the same name.")
setup_parser.subparser = subparser
class ConfigureGuesser(object):
def __call__(self, stage):
"""Try to guess the type of build system used by the project.
Set any necessary build dependencies or extensions.
Set the appropriate default installation instructions."""
# Build dependencies and extensions # Build dependencies and extensions
dependenciesDict = { dependencies_dict = {
'autotools': "# depends_on('foo')", 'autotools': "# depends_on('foo')",
'cmake': "depends_on('cmake', type='build')", 'cmake': "depends_on('cmake')",
'scons': "depends_on('scons', type='build')", 'scons': "depends_on('scons')",
'python': "extends('python', type=nolink)", 'python': "extends('python')",
'R': "extends('R')", 'R': "extends('R')",
'octave': "extends('octave')",
'unknown': "# depends_on('foo')" 'unknown': "# depends_on('foo')"
} }
# Default installation instructions # Default installation instructions
installDict = { install_dict = {
'autotools': """\ 'autotools': """\
# FIXME: Modify the configure line to suit your build system here. # FIXME: Modify the configure line to suit your build system here.
configure('--prefix={0}'.format(prefix)) configure('--prefix={0}'.format(prefix))
@ -182,12 +147,62 @@ def __call__(self, stage):
R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir),
self.stage.source_path)""", self.stage.source_path)""",
'octave': """\
# FIXME: Add logic to build and install here.
octave('--quiet', '--norc',
'--built-in-docstrings-file=/dev/null',
'--texi-macros-file=/dev/null',
'--eval', 'pkg prefix {0}; pkg install {1}'.format(
prefix, self.stage.archive_file))""",
'unknown': """\ 'unknown': """\
# FIXME: Unknown build system # FIXME: Unknown build system
make() make()
make('install')""" make('install')"""
} }
def make_version_calls(ver_hash_tuples):
"""Adds a version() call to the package for each version found."""
max_len = max(len(str(v)) for v, h in ver_hash_tuples)
format = " version(%%-%ds, '%%s')" % (max_len + 2)
return '\n'.join(format % ("'%s'" % v, h) for v, h in ver_hash_tuples)
def setup_parser(subparser):
subparser.add_argument('url', nargs='?', help="url of package archive")
subparser.add_argument(
'--keep-stage', action='store_true',
help="Don't clean up staging area when command completes.")
subparser.add_argument(
'-n', '--name', dest='alternate_name', default=None, metavar='NAME',
help="Override the autodetected name for the created package.")
subparser.add_argument(
'-r', '--repo', default=None,
help="Path to a repository where the package should be created.")
subparser.add_argument(
'-N', '--namespace',
help="Specify a namespace for the package. Must be the namespace of "
"a repository registered with Spack.")
subparser.add_argument(
'-f', '--force', action='store_true', dest='force',
help="Overwrite any existing package file with the same name.")
setup_parser.subparser = subparser
class BuildSystemGuesser(object):
def __call__(self, stage, url):
"""Try to guess the type of build system used by a project based on
the contents of its archive or the URL it was downloaded from."""
# Most octave extensions are hosted on Octave-Forge:
# http://octave.sourceforge.net/index.html
# They all have the same base URL.
if 'downloads.sourceforge.net/octave/' in url:
self.build_system = 'octave'
return
# A list of clues that give us an idea of the build system a package # A list of clues that give us an idea of the build system a package
# uses. If the regular expression matches a file contained in the # uses. If the regular expression matches a file contained in the
# archive, the corresponding build system is assumed. # archive, the corresponding build system is assumed.
@ -224,12 +239,6 @@ def __call__(self, stage):
self.build_system = build_system self.build_system = build_system
# Set any necessary build dependencies or extensions.
self.dependencies = dependenciesDict[build_system]
# Set the appropriate default installation instructions
self.install = installDict[build_system]
def guess_name_and_version(url, args): def guess_name_and_version(url, args):
# Try to deduce name and version of the new package from the URL # Try to deduce name and version of the new package from the URL
@ -334,8 +343,8 @@ def create(parser, args):
# Fetch tarballs (prompting user if necessary) # Fetch tarballs (prompting user if necessary)
versions, urls = fetch_tarballs(url, name, version) versions, urls = fetch_tarballs(url, name, version)
# Try to guess what configure system is used. # Try to guess what build system is used.
guesser = ConfigureGuesser() guesser = BuildSystemGuesser()
ver_hash_tuples = spack.cmd.checksum.get_checksums( ver_hash_tuples = spack.cmd.checksum.get_checksums(
versions, urls, versions, urls,
first_stage_function=guesser, first_stage_function=guesser,
@ -344,13 +353,13 @@ def create(parser, args):
if not ver_hash_tuples: if not ver_hash_tuples:
tty.die("Could not fetch any tarballs for %s" % name) tty.die("Could not fetch any tarballs for %s" % name)
# Prepend 'py-' to python package names, by convention. # Add prefix to package name if it is an extension.
if guesser.build_system == 'python': if guesser.build_system == 'python':
name = 'py-%s' % name name = 'py-{0}'.format(name)
# Prepend 'r-' to R package names, by convention.
if guesser.build_system == 'R': if guesser.build_system == 'R':
name = 'r-%s' % name name = 'r-{0}'.format(name)
if guesser.build_system == 'octave':
name = 'octave-{0}'.format(name)
# Create a directory for the new package. # Create a directory for the new package.
pkg_path = repo.filename_for_package_name(name) pkg_path = repo.filename_for_package_name(name)
@ -367,8 +376,8 @@ def create(parser, args):
class_name=mod_to_class(name), class_name=mod_to_class(name),
url=url, url=url,
versions=make_version_calls(ver_hash_tuples), versions=make_version_calls(ver_hash_tuples),
dependencies=guesser.dependencies, dependencies=dependencies_dict[guesser.build_system],
install=guesser.install)) install=install_dict[guesser.build_system]))
# If everything checks out, go ahead and edit. # If everything checks out, go ahead and edit.
spack.editor(pkg_path) spack.editor(pkg_path)

View file

@ -31,7 +31,7 @@
from llnl.util.lang import * from llnl.util.lang import *
from llnl.util.tty.colify import * from llnl.util.tty.colify import *
from llnl.util.tty.color import * from llnl.util.tty.color import *
from llnl.util.lang import * from spack.cmd import display_specs
description = "Find installed spack packages" description = "Find installed spack packages"
@ -104,89 +104,6 @@ def setup_parser(subparser):
help='optional specs to filter results') help='optional specs to filter results')
def gray_hash(spec, length):
return colorize('@K{%s}' % spec.dag_hash(length))
def display_specs(specs, **kwargs):
mode = kwargs.get('mode', 'short')
hashes = kwargs.get('long', False)
namespace = kwargs.get('namespace', False)
flags = kwargs.get('show_flags', False)
variants = kwargs.get('variants', False)
hlen = 7
if kwargs.get('very_long', False):
hashes = True
hlen = None
nfmt = '.' if namespace else '_'
ffmt = '$%+' if flags else ''
vfmt = '$+' if variants else ''
format_string = '$%s$@%s%s' % (nfmt, ffmt, vfmt)
# Make a dict with specs keyed by architecture and compiler.
index = index_by(specs, ('architecture', 'compiler'))
# Traverse the index and print out each package
for i, (architecture, compiler) in enumerate(sorted(index)):
if i > 0:
print
header = "%s{%s} / %s{%s}" % (spack.spec.architecture_color,
architecture, spack.spec.compiler_color,
compiler)
tty.hline(colorize(header), char='-')
specs = index[(architecture, compiler)]
specs.sort()
abbreviated = [s.format(format_string, color=True) for s in specs]
if mode == 'paths':
# Print one spec per line along with prefix path
width = max(len(s) for s in abbreviated)
width += 2
format = " %%-%ds%%s" % width
for abbrv, spec in zip(abbreviated, specs):
if hashes:
print(gray_hash(spec, hlen), )
print(format % (abbrv, spec.prefix))
elif mode == 'deps':
for spec in specs:
print(spec.tree(
format=format_string,
color=True,
indent=4,
prefix=(lambda s: gray_hash(s, hlen)) if hashes else None))
elif mode == 'short':
# Print columns of output if not printing flags
if not flags:
def fmt(s):
string = ""
if hashes:
string += gray_hash(s, hlen) + ' '
string += s.format('$-%s$@%s' % (nfmt, vfmt), color=True)
return string
colify(fmt(s) for s in specs)
# Print one entry per line if including flags
else:
for spec in specs:
# Print the hash if necessary
hsh = gray_hash(spec, hlen) + ' ' if hashes else ''
print(hsh + spec.format(format_string, color=True) + '\n')
else:
raise ValueError(
"Invalid mode for display_specs: %s. Must be one of (paths,"
"deps, short)." % mode) # NOQA: ignore=E501
def query_arguments(args): def query_arguments(args):
# Check arguments # Check arguments
if args.explicit and args.implicit: if args.explicit and args.implicit:

View file

@ -29,9 +29,11 @@
description = "Get detailed information on a particular package" description = "Get detailed information on a particular package"
def padder(str_list, extra=0): def padder(str_list, extra=0):
"""Return a function to pad elements of a list.""" """Return a function to pad elements of a list."""
length = max(len(str(s)) for s in str_list) + extra length = max(len(str(s)) for s in str_list) + extra
def pad(string): def pad(string):
string = str(string) string = str(string)
padding = max(0, length - len(string)) padding = max(0, length - len(string))
@ -40,7 +42,8 @@ def pad(string):
def setup_parser(subparser): def setup_parser(subparser):
subparser.add_argument('name', metavar="PACKAGE", help="Name of package to get info for.") subparser.add_argument(
'name', metavar="PACKAGE", help="Name of package to get info for.")
def print_text_info(pkg): def print_text_info(pkg):
@ -84,7 +87,7 @@ def print_text_info(pkg):
for deptype in ('build', 'link', 'run'): for deptype in ('build', 'link', 'run'):
print print
print "%s Dependencies:" % deptype.capitalize() print "%s Dependencies:" % deptype.capitalize()
deps = pkg.dependencies(deptype) deps = pkg.dependencies_of_type(deptype)
if deps: if deps:
colify(deps, indent=4) colify(deps, indent=4)
else: else:

View file

@ -23,135 +23,233 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################## ##############################################################################
from __future__ import print_function from __future__ import print_function
import collections
import os import os
import shutil import shutil
import sys import sys
import llnl.util.tty as tty import llnl.util.tty as tty
import spack.cmd import spack.cmd
from llnl.util.filesystem import mkdirp import spack.cmd.common.arguments as arguments
import llnl.util.filesystem as filesystem
from spack.modules import module_types from spack.modules import module_types
from spack.util.string import *
description = "Manipulate modules and dotkits." description = "Manipulate module files"
# Dictionary that will be populated with the list of sub-commands
# Each sub-command must be callable and accept 3 arguments :
# - mtype : the type of the module file
# - specs : the list of specs to be processed
# - args : namespace containing the parsed command line arguments
callbacks = {}
def subcommand(subparser_name):
"""Registers a function in the callbacks dictionary"""
def decorator(callback):
callbacks[subparser_name] = callback
return callback
return decorator
def setup_parser(subparser): def setup_parser(subparser):
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='module_command') sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='subparser_name')
sp.add_parser('refresh', help='Regenerate all module files.') # spack module refresh
refresh_parser = sp.add_parser('refresh', help='Regenerate module files')
refresh_parser.add_argument(
'--delete-tree',
help='Delete the module file tree before refresh',
action='store_true'
)
arguments.add_common_arguments(
refresh_parser, ['constraint', 'module_type', 'yes_to_all']
)
find_parser = sp.add_parser('find', help='Find module files for packages.') # spack module find
find_parser = sp.add_parser('find', help='Find module files for packages')
arguments.add_common_arguments(find_parser, ['constraint', 'module_type'])
find_parser.add_argument( # spack module rm
'module_type', rm_parser = sp.add_parser('rm', help='Remove module files')
help="Type of module to find file for. [" + arguments.add_common_arguments(
'|'.join(module_types) + "]") rm_parser, ['constraint', 'module_type', 'yes_to_all']
)
find_parser.add_argument( # spack module loads
'-r', '--dependencies', action='store_true', loads_parser = sp.add_parser(
dest='recurse_dependencies', 'loads',
help='Recursively traverse dependencies for modules to load.') help='Prompt the list of modules associated with a constraint'
)
find_parser.add_argument( loads_parser.add_argument(
'-s', '--shell', action='store_true', dest='shell', '--input-only', action='store_false', dest='shell',
help='Generate shell script (instead of input for module command)') help='Generate input for module command (instead of a shell script)'
)
find_parser.add_argument( loads_parser.add_argument(
'-p', '--prefix', dest='prefix', '-p', '--prefix', dest='prefix', default='',
help='Prepend to module names when issuing module load commands') help='Prepend to module names when issuing module load commands'
)
find_parser.add_argument( arguments.add_common_arguments(
'spec', nargs='+', loads_parser, ['constraint', 'module_type', 'recurse_dependencies']
help='spec to find a module file for.') )
def module_find(mtype, flags, spec_array): class MultipleMatches(Exception):
"""Look at all installed packages and see if the spec provided pass
class NoMatch(Exception):
pass
@subcommand('loads')
def loads(mtype, specs, args):
"""Prompt the list of modules associated with a list of specs"""
# Get a comprehensive list of specs
if args.recurse_dependencies:
specs_from_user_constraint = specs[:]
specs = []
# FIXME : during module file creation nodes seem to be visited
# FIXME : multiple times even if cover='nodes' is given. This
# FIXME : work around permits to get a unique list of spec anyhow.
# FIXME : (same problem as in spack/modules.py)
seen = set()
seen_add = seen.add
for spec in specs_from_user_constraint:
specs.extend(
[item for item in spec.traverse(order='post', cover='nodes') if not (item in seen or seen_add(item))] # NOQA: ignore=E501
)
module_cls = module_types[mtype]
modules = [(spec, module_cls(spec).use_name)
for spec in specs if os.path.exists(module_cls(spec).file_name)]
module_commands = {
'tcl': 'module load ',
'dotkit': 'dotkit use '
}
d = {
'command': '' if not args.shell else module_commands[mtype],
'prefix': args.prefix
}
prompt_template = '{comment}{command}{prefix}{name}'
for spec, mod in modules:
d['comment'] = '' if not args.shell else '# {0}\n'.format(
spec.format())
d['name'] = mod
print(prompt_template.format(**d))
@subcommand('find')
def find(mtype, specs, args):
"""
Look at all installed packages and see if the spec provided
matches any. If it does, check whether there is a module file matches any. If it does, check whether there is a module file
of type <mtype> there, and print out the name that the user of type <mtype> there, and print out the name that the user
should type to use that package's module. should type to use that package's module.
prefix:
Prepend this to module names when issuing "module load" commands.
Some systems seem to need it.
""" """
if mtype not in module_types: if len(specs) == 0:
tty.die("Invalid module type: '%s'. Options are %s" % raise NoMatch()
(mtype, comma_or(module_types)))
# -------------------------------------- if len(specs) > 1:
def _find_modules(spec, modules_list): raise MultipleMatches()
"""Finds all modules and sub-modules for a spec"""
if str(spec.version) == 'system':
# No Spack module for system-installed packages
return
if flags.recurse_dependencies:
for dep in spec.dependencies():
_find_modules(dep, modules_list)
spec = specs.pop()
mod = module_types[mtype](spec) mod = module_types[mtype](spec)
if not os.path.isfile(mod.file_name): if not os.path.isfile(mod.file_name):
tty.die("No %s module is installed for %s" % (mtype, spec)) tty.die("No %s module is installed for %s" % (mtype, spec))
modules_list.append((spec, mod))
# --------------------------------------
raw_specs = spack.cmd.parse_specs(spec_array)
modules = set() # Modules we will load
seen = set()
for raw_spec in raw_specs:
# ----------- Make sure the spec only resolves to ONE thing
specs = spack.installed_db.query(raw_spec)
if len(specs) == 0:
tty.die("No installed packages match spec %s" % raw_spec)
if len(specs) > 1:
tty.error("Multiple matches for spec %s. Choose one:" % raw_spec)
for s in specs:
sys.stderr.write(s.tree(color=True))
sys.exit(1)
spec = specs[0]
# ----------- Chase down modules for it and all its dependencies
modules_dups = list()
_find_modules(spec, modules_dups)
# Remove duplicates while keeping order
modules_unique = list()
for spec,mod in modules_dups:
if mod.use_name not in seen:
modules_unique.append((spec,mod))
seen.add(mod.use_name)
# Output...
if flags.shell:
module_cmd = {'tcl': 'module load', 'dotkit': 'dotkit use'}[mtype]
for spec,mod in modules_unique:
if flags.shell:
print('# %s' % spec.format())
print('%s %s%s' % (module_cmd, flags.prefix, mod.use_name))
else:
print(mod.use_name) print(mod.use_name)
def module_refresh():
"""Regenerate all module files for installed packages known to
spack (some packages may no longer exist)."""
specs = [s for s in spack.installed_db.query(installed=True, known=True)]
for name, cls in module_types.items(): @subcommand('rm')
tty.msg("Regenerating %s module files." % name) def rm(mtype, specs, args):
if os.path.isdir(cls.path): """Deletes module files associated with items in specs"""
module_cls = module_types[mtype]
specs_with_modules = [
spec for spec in specs if os.path.exists(module_cls(spec).file_name)]
modules = [module_cls(spec) for spec in specs_with_modules]
if not modules:
tty.msg('No module file matches your query')
raise SystemExit(1)
# Ask for confirmation
if not args.yes_to_all:
tty.msg('You are about to remove {0} module files the following specs:\n'.format(mtype)) # NOQA: ignore=E501
spack.cmd.display_specs(specs_with_modules, long=True)
print('')
spack.cmd.ask_for_confirmation('Do you want to proceed ? ')
# Remove the module files
for s in modules:
s.remove()
@subcommand('refresh')
def refresh(mtype, specs, args):
"""Regenerate module files for item in specs"""
# Prompt a message to the user about what is going to change
if not specs:
tty.msg('No package matches your query')
return
if not args.yes_to_all:
tty.msg('You are about to regenerate {name} module files for the following specs:\n'.format(name=mtype)) # NOQA: ignore=E501
spack.cmd.display_specs(specs, long=True)
print('')
spack.cmd.ask_for_confirmation('Do you want to proceed ? ')
cls = module_types[mtype]
# Detect name clashes
writers = [cls(spec) for spec in specs]
file2writer = collections.defaultdict(list)
for item in writers:
file2writer[item.file_name].append(item)
if len(file2writer) != len(writers):
message = 'Name clashes detected in module files:\n'
for filename, writer_list in file2writer.items():
if len(writer_list) > 1:
message += '\nfile : {0}\n'.format(filename)
for x in writer_list:
message += 'spec : {0}\n'.format(x.spec.format(color=True))
tty.error(message)
tty.error('Operation aborted')
raise SystemExit(1)
# Proceed regenerating module files
tty.msg('Regenerating {name} module files'.format(name=mtype))
if os.path.isdir(cls.path) and args.delete_tree:
shutil.rmtree(cls.path, ignore_errors=False) shutil.rmtree(cls.path, ignore_errors=False)
mkdirp(cls.path) filesystem.mkdirp(cls.path)
for spec in specs: for x in writers:
cls(spec).write() x.write(overwrite=True)
def module(parser, args): def module(parser, args):
if args.module_command == 'refresh': # Qualifiers to be used when querying the db for specs
module_refresh() constraint_qualifiers = {
'refresh': {
'installed': True,
'known': True
},
}
arguments.ConstraintAction.qualifiers.update(constraint_qualifiers)
elif args.module_command == 'find': module_type = args.module_type
module_find(args.module_type, args, args.spec) constraint = args.constraint
try:
callbacks[args.subparser_name](module_type, args.specs, args)
except MultipleMatches:
message = 'the constraint \'{query}\' matches multiple packages, and this is not allowed in this context' # NOQA: ignore=E501
tty.error(message.format(query=constraint))
for s in args.specs:
sys.stderr.write(s.format(color=True) + '\n')
raise SystemExit(1)
except NoMatch:
message = 'the constraint \'{query}\' match no package, and this is not allowed in this context' # NOQA: ignore=E501
tty.die(message.format(query=constraint))

View file

@ -22,10 +22,8 @@
# License along with this program; if not, write to the Free Software # License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################## ##############################################################################
import re
import cgi import cgi
from StringIO import StringIO from StringIO import StringIO
import llnl.util.tty as tty
from llnl.util.tty.colify import * from llnl.util.tty.colify import *
import spack import spack
@ -34,8 +32,8 @@
def github_url(pkg): def github_url(pkg):
"""Link to a package file on github.""" """Link to a package file on github."""
return ("https://github.com/llnl/spack/blob/master/var/spack/packages/%s/package.py" % url = "https://github.com/llnl/spack/blob/master/var/spack/packages/%s/package.py" # NOQA: ignore=E501
pkg.name) return (url % pkg.name)
def rst_table(elts): def rst_table(elts):
@ -49,6 +47,7 @@ def rst_table(elts):
def print_rst_package_list(): def print_rst_package_list():
"""Print out information on all packages in restructured text.""" """Print out information on all packages in restructured text."""
pkgs = sorted(spack.repo.all_packages(), key=lambda s: s.name.lower()) pkgs = sorted(spack.repo.all_packages(), key=lambda s: s.name.lower())
pkg_names = [p.name for p in pkgs]
print ".. _package-list:" print ".. _package-list:"
print print
@ -62,7 +61,7 @@ def print_rst_package_list():
print "Spack currently has %d mainline packages:" % len(pkgs) print "Spack currently has %d mainline packages:" % len(pkgs)
print print
print rst_table("`%s`_" % p.name for p in pkgs) print rst_table("`%s`_" % p for p in pkg_names)
print print
print "-----" print "-----"
@ -79,14 +78,15 @@ def print_rst_package_list():
print print
if pkg.versions: if pkg.versions:
print "Versions:" print "Versions:"
print " " + ", ".join(str(v) for v in reversed(sorted(pkg.versions))) print " " + ", ".join(str(v) for v in
reversed(sorted(pkg.versions)))
for deptype in ('build', 'link', 'run'): for deptype in spack.alldeps:
deps = pkg.dependencies(deptype) deps = pkg.dependencies_of_type(deptype)
if deps: if deps:
print "%s Dependencies" % deptype.capitalize() print "%s Dependencies" % deptype.capitalize()
print " " + ", ".join("`%s`_" % d if d != "mpi" else d print " " + ", ".join("%s_" % d if d in pkg_names
for d in build_deps) else d for d in deps)
print print
print "Description:" print "Description:"

View file

@ -30,7 +30,6 @@
import spack import spack
import spack.cmd import spack.cmd
import spack.repository import spack.repository
from spack.cmd.find import display_specs
description = "Remove an installed package" description = "Remove an installed package"
@ -47,17 +46,6 @@
} }
def ask_for_confirmation(message):
while True:
tty.msg(message + '[y/n]')
choice = raw_input().lower()
if choice == 'y':
break
elif choice == 'n':
raise SystemExit('Operation aborted')
tty.warn('Please reply either "y" or "n"')
def setup_parser(subparser): def setup_parser(subparser):
subparser.add_argument( subparser.add_argument(
'-f', '--force', action='store_true', dest='force', '-f', '--force', action='store_true', dest='force',
@ -65,32 +53,37 @@ def setup_parser(subparser):
subparser.add_argument( subparser.add_argument(
'-a', '--all', action='store_true', dest='all', '-a', '--all', action='store_true', dest='all',
help="USE CAREFULLY. Remove ALL installed packages that match each " + help="USE CAREFULLY. Remove ALL installed packages that match each " +
"supplied spec. i.e., if you say uninstall libelf, ALL versions of " + "supplied spec. i.e., if you say uninstall libelf, ALL versions of " + # NOQA: ignore=E501
"libelf are uninstalled. This is both useful and dangerous, like rm -r.") "libelf are uninstalled. This is both useful and dangerous, like rm -r.") # NOQA: ignore=E501
subparser.add_argument( subparser.add_argument(
'-d', '--dependents', action='store_true', dest='dependents', '-d', '--dependents', action='store_true', dest='dependents',
help='Also uninstall any packages that depend on the ones given via command line.' help='Also uninstall any packages that depend on the ones given via command line.' # NOQA: ignore=E501
) )
subparser.add_argument( subparser.add_argument(
'-y', '--yes-to-all', action='store_true', dest='yes_to_all', '-y', '--yes-to-all', action='store_true', dest='yes_to_all',
help='Assume "yes" is the answer to every confirmation asked to the user.' help='Assume "yes" is the answer to every confirmation asked to the user.' # NOQA: ignore=E501
) )
subparser.add_argument('packages', nargs=argparse.REMAINDER, help="specs of packages to uninstall") subparser.add_argument(
'packages',
nargs=argparse.REMAINDER,
help="specs of packages to uninstall"
)
def concretize_specs(specs, allow_multiple_matches=False, force=False): def concretize_specs(specs, allow_multiple_matches=False, force=False):
""" """Returns a list of specs matching the non necessarily
Returns a list of specs matching the non necessarily concretized specs given from cli concretized specs given from cli
Args: Args:
specs: list of specs to be matched against installed packages specs: list of specs to be matched against installed packages
allow_multiple_matches : boolean (if True multiple matches for each item in specs are admitted) allow_multiple_matches : if True multiple matches are admitted
Return: Return:
list of specs list of specs
""" """
specs_from_cli = [] # List of specs that match expressions given via command line # List of specs that match expressions given via command line
specs_from_cli = []
has_errors = False has_errors = False
for spec in specs: for spec in specs:
matching = spack.installed_db.query(spec) matching = spack.installed_db.query(spec)
@ -99,7 +92,7 @@ def concretize_specs(specs, allow_multiple_matches=False, force=False):
if not allow_multiple_matches and len(matching) > 1: if not allow_multiple_matches and len(matching) > 1:
tty.error("%s matches multiple packages:" % spec) tty.error("%s matches multiple packages:" % spec)
print() print()
display_specs(matching, **display_args) spack.cmd.display_specs(matching, **display_args)
print() print()
has_errors = True has_errors = True
@ -116,8 +109,8 @@ def concretize_specs(specs, allow_multiple_matches=False, force=False):
def installed_dependents(specs): def installed_dependents(specs):
""" """Returns a dictionary that maps a spec with a list of its
Returns a dictionary that maps a spec with a list of its installed dependents installed dependents
Args: Args:
specs: list of specs to be checked for dependents specs: list of specs to be checked for dependents
@ -147,7 +140,7 @@ def do_uninstall(specs, force):
try: try:
# should work if package is known to spack # should work if package is known to spack
packages.append(item.package) packages.append(item.package)
except spack.repository.UnknownPackageError as e: except spack.repository.UnknownPackageError:
# The package.py file has gone away -- but still # The package.py file has gone away -- but still
# want to uninstall. # want to uninstall.
spack.Package(item).do_uninstall(force=True) spack.Package(item).do_uninstall(force=True)
@ -169,17 +162,20 @@ def uninstall(parser, args):
with spack.installed_db.write_transaction(): with spack.installed_db.write_transaction():
specs = spack.cmd.parse_specs(args.packages) specs = spack.cmd.parse_specs(args.packages)
# Gets the list of installed specs that match the ones give via cli # Gets the list of installed specs that match the ones give via cli
uninstall_list = concretize_specs(specs, args.all, args.force) # takes care of '-a' is given in the cli # takes care of '-a' is given in the cli
dependent_list = installed_dependents(uninstall_list) # takes care of '-d' uninstall_list = concretize_specs(specs, args.all, args.force)
dependent_list = installed_dependents(
uninstall_list) # takes care of '-d'
# Process dependent_list and update uninstall_list # Process dependent_list and update uninstall_list
has_error = False has_error = False
if dependent_list and not args.dependents and not args.force: if dependent_list and not args.dependents and not args.force:
for spec, lst in dependent_list.items(): for spec, lst in dependent_list.items():
tty.error("Will not uninstall %s" % spec.format("$_$@$%@$#", color=True)) tty.error("Will not uninstall %s" %
spec.format("$_$@$%@$#", color=True))
print('') print('')
print("The following packages depend on it:") print("The following packages depend on it:")
display_specs(lst, **display_args) spack.cmd.display_specs(lst, **display_args)
print('') print('')
has_error = True has_error = True
elif args.dependents: elif args.dependents:
@ -188,14 +184,14 @@ def uninstall(parser, args):
uninstall_list = list(set(uninstall_list)) uninstall_list = list(set(uninstall_list))
if has_error: if has_error:
tty.die('You can use spack uninstall --dependents to uninstall these dependencies as well') tty.die('You can use spack uninstall --dependents to uninstall these dependencies as well') # NOQA: ignore=E501
if not args.yes_to_all: if not args.yes_to_all:
tty.msg("The following packages will be uninstalled : ") tty.msg("The following packages will be uninstalled : ")
print('') print('')
display_specs(uninstall_list, **display_args) spack.cmd.display_specs(uninstall_list, **display_args)
print('') print('')
ask_for_confirmation('Do you want to proceed ? ') spack.cmd.ask_for_confirmation('Do you want to proceed ? ')
# Uninstall everything on the list # Uninstall everything on the list
do_uninstall(uninstall_list, args.force) do_uninstall(uninstall_list, args.force)

View file

@ -328,6 +328,11 @@
'anyOf': [ 'anyOf': [
{ {
'properties': { 'properties': {
'hash_length': {
'type': 'integer',
'minimum': 0,
'default': 7
},
'whitelist': {'$ref': '#/definitions/array_of_strings'}, 'whitelist': {'$ref': '#/definitions/array_of_strings'},
'blacklist': {'$ref': '#/definitions/array_of_strings'}, 'blacklist': {'$ref': '#/definitions/array_of_strings'},
'naming_scheme': { 'naming_scheme': {
@ -492,8 +497,15 @@ def clear(self):
"""Empty cached config information.""" """Empty cached config information."""
self.sections = {} self.sections = {}
"""Default configuration scope is the lowest-level scope. These are
versioned with Spack and can be overridden by sites or users."""
ConfigScope('defaults', os.path.join(spack.etc_path, 'spack', 'defaults'))
ConfigScope('site', os.path.join(spack.etc_path, 'spack')), """Site configuration is per spack instance, for sites or projects.
No site-level configs should be checked into spack by default."""
ConfigScope('site', os.path.join(spack.etc_path, 'spack'))
"""User configuration can override both spack defaults and site config."""
ConfigScope('user', os.path.expanduser('~/.spack')) ConfigScope('user', os.path.expanduser('~/.spack'))

View file

@ -60,7 +60,7 @@
_db_dirname = '.spack-db' _db_dirname = '.spack-db'
# DB version. This is stuck in the DB file to track changes in format. # DB version. This is stuck in the DB file to track changes in format.
_db_version = Version('0.9.1') _db_version = Version('0.9.2')
# Default timeout for spack database locks is 5 min. # Default timeout for spack database locks is 5 min.
_db_lock_timeout = 60 _db_lock_timeout = 60
@ -215,14 +215,10 @@ def _read_spec_from_yaml(self, hash_key, installs, parent_key=None):
# Add dependencies from other records in the install DB to # Add dependencies from other records in the install DB to
# form a full spec. # form a full spec.
if 'dependencies' in spec_dict[spec.name]: if 'dependencies' in spec_dict[spec.name]:
for dep in spec_dict[spec.name]['dependencies'].values(): yaml_deps = spec_dict[spec.name]['dependencies']
if type(dep) == tuple: for dname, dhash, dtypes in Spec.read_yaml_dep_specs(yaml_deps):
dep_hash, deptypes = dep child = self._read_spec_from_yaml(dhash, installs, hash_key)
else: spec._add_dependency(child, dtypes)
dep_hash = dep
deptypes = spack.alldeps
child = self._read_spec_from_yaml(dep_hash, installs, hash_key)
spec._add_dependency(child, deptypes)
# Specs from the database need to be marked concrete because # Specs from the database need to be marked concrete because
# they represent actual installations. # they represent actual installations.
@ -639,13 +635,14 @@ def _exit(self):
class CorruptDatabaseError(SpackError): class CorruptDatabaseError(SpackError):
def __init__(self, path, msg=''): def __init__(self, path, msg=''):
super(CorruptDatabaseError, self).__init__( super(CorruptDatabaseError, self).__init__(
"Spack database is corrupt: %s. %s." + \ "Spack database is corrupt: %s. %s." % (path, msg),
"Try running `spack reindex` to fix." % (path, msg)) "Try running `spack reindex` to fix.")
class InvalidDatabaseVersionError(SpackError): class InvalidDatabaseVersionError(SpackError):
def __init__(self, expected, found): def __init__(self, expected, found):
super(InvalidDatabaseVersionError, self).__init__( super(InvalidDatabaseVersionError, self).__init__(
"Expected database version %s but found version %s." + \ "Expected database version %s but found version %s."
"Try running `spack reindex` to fix." % % (expected, found),
(expected, found)) "`spack reindex` may fix this, or you may need a newer "
"Spack version.")

View file

@ -34,6 +34,7 @@
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.filesystem import join_path, mkdirp from llnl.util.filesystem import join_path, mkdirp
import spack
from spack.spec import Spec from spack.spec import Spec
from spack.error import SpackError from spack.error import SpackError
@ -223,8 +224,14 @@ def write_spec(self, spec, path):
def read_spec(self, path): def read_spec(self, path):
"""Read the contents of a file and parse them as a spec""" """Read the contents of a file and parse them as a spec"""
try:
with open(path) as f: with open(path) as f:
spec = Spec.from_yaml(f) spec = Spec.from_yaml(f)
except Exception as e:
if spack.debug:
raise
raise SpecReadError(
'Unable to read file: %s' % path, 'Cause: ' + str(e))
# Specs read from actual installations are always concrete # Specs read from actual installations are always concrete
spec._mark_concrete() spec._mark_concrete()
@ -456,10 +463,12 @@ def __init__(self, path):
"Install path %s already exists!") "Install path %s already exists!")
class SpecReadError(DirectoryLayoutError):
"""Raised when directory layout can't read a spec."""
class InvalidExtensionSpecError(DirectoryLayoutError): class InvalidExtensionSpecError(DirectoryLayoutError):
"""Raised when an extension file has a bad spec in it.""" """Raised when an extension file has a bad spec in it."""
def __init__(self, message):
super(InvalidExtensionSpecError, self).__init__(message)
class ExtensionAlreadyInstalledError(DirectoryLayoutError): class ExtensionAlreadyInstalledError(DirectoryLayoutError):

View file

@ -188,6 +188,8 @@ def parse_config_options(module_generator):
##### #####
# Automatic loading loads # Automatic loading loads
module_file_actions['hash_length'] = module_configuration.get(
'hash_length', 7)
module_file_actions['autoload'] = dependencies( module_file_actions['autoload'] = dependencies(
module_generator.spec, module_file_actions.get('autoload', 'none')) module_generator.spec, module_file_actions.get('autoload', 'none'))
# Prerequisites # Prerequisites
@ -237,6 +239,7 @@ class EnvModule(object):
formats = {} formats = {}
class __metaclass__(type): class __metaclass__(type):
def __init__(cls, name, bases, dict): def __init__(cls, name, bases, dict):
type.__init__(cls, name, bases, dict) type.__init__(cls, name, bases, dict)
if cls.name != 'env_module' and cls.name in CONFIGURATION[ if cls.name != 'env_module' and cls.name in CONFIGURATION[
@ -295,7 +298,9 @@ def use_name(self):
if constraint in self.spec: if constraint in self.spec:
suffixes.append(suffix) suffixes.append(suffix)
# Always append the hash to make the module file unique # Always append the hash to make the module file unique
suffixes.append(self.spec.dag_hash()) hash_length = configuration.pop('hash_length', 7)
if hash_length != 0:
suffixes.append(self.spec.dag_hash(length=hash_length))
name = '-'.join(suffixes) name = '-'.join(suffixes)
return name return name
@ -338,7 +343,7 @@ def blacklisted(self):
return False return False
def write(self): def write(self, overwrite=False):
""" """
Writes out a module file for this object. Writes out a module file for this object.
@ -399,6 +404,15 @@ def write(self):
for line in self.module_specific_content(module_configuration): for line in self.module_specific_content(module_configuration):
module_file_content += line module_file_content += line
# Print a warning in case I am accidentally overwriting
# a module file that is already there (name clash)
if not overwrite and os.path.exists(self.file_name):
message = 'Module file already exists : skipping creation\n'
message += 'file : {0.file_name}\n'
message += 'spec : {0.spec}'
tty.warn(message.format(self))
return
# Dump to file # Dump to file
with open(self.file_name, 'w') as f: with open(self.file_name, 'w') as f:
f.write(module_file_content) f.write(module_file_content)
@ -454,7 +468,7 @@ def remove(self):
class Dotkit(EnvModule): class Dotkit(EnvModule):
name = 'dotkit' name = 'dotkit'
path = join_path(spack.share_path, 'dotkit')
environment_modifications_formats = { environment_modifications_formats = {
PrependPath: 'dk_alter {name} {value}\n', PrependPath: 'dk_alter {name} {value}\n',
SetEnv: 'dk_setenv {name} {value}\n' SetEnv: 'dk_setenv {name} {value}\n'
@ -466,7 +480,7 @@ class Dotkit(EnvModule):
@property @property
def file_name(self): def file_name(self):
return join_path(spack.share_path, "dotkit", self.spec.architecture, return join_path(self.path, self.spec.architecture,
'%s.dk' % self.use_name) '%s.dk' % self.use_name)
@property @property
@ -494,7 +508,7 @@ def prerequisite(self, spec):
class TclModule(EnvModule): class TclModule(EnvModule):
name = 'tcl' name = 'tcl'
path = join_path(spack.share_path, "modules")
environment_modifications_formats = { environment_modifications_formats = {
PrependPath: 'prepend-path --delim "{delim}" {name} \"{value}\"\n', PrependPath: 'prepend-path --delim "{delim}" {name} \"{value}\"\n',
AppendPath: 'append-path --delim "{delim}" {name} \"{value}\"\n', AppendPath: 'append-path --delim "{delim}" {name} \"{value}\"\n',
@ -514,7 +528,7 @@ class TclModule(EnvModule):
@property @property
def file_name(self): def file_name(self):
return join_path(spack.share_path, "modules", self.spec.architecture, self.use_name) return join_path(self.path, self.spec.architecture, self.use_name)
@property @property
def header(self): def header(self):

View file

@ -64,7 +64,7 @@
from spack.stage import Stage, ResourceStage, StageComposite from spack.stage import Stage, ResourceStage, StageComposite
from spack.util.compression import allowed_archive from spack.util.compression import allowed_archive
from spack.util.environment import dump_environment from spack.util.environment import dump_environment
from spack.util.executable import ProcessError from spack.util.executable import ProcessError, which
from spack.version import * from spack.version import *
"""Allowed URL schemes for spack packages.""" """Allowed URL schemes for spack packages."""
@ -718,6 +718,11 @@ def fetcher(self):
def fetcher(self, f): def fetcher(self, f):
self._fetcher = f self._fetcher = f
def dependencies_of_type(self, *deptypes):
"""Get subset of the dependencies with certain types."""
return dict((name, conds) for name, conds in self.dependencies.items()
if any(d in self._deptypes[name] for d in deptypes))
@property @property
def extendee_spec(self): def extendee_spec(self):
""" """
@ -1063,7 +1068,8 @@ def do_install(self,
run_tests -- Run tests within the package's install() run_tests -- Run tests within the package's install()
""" """
if not self.spec.concrete: if not self.spec.concrete:
raise ValueError("Can only install concrete packages: %s." % self.spec.name) raise ValueError("Can only install concrete packages: %s."
% self.spec.name)
# No installation needed if package is external # No installation needed if package is external
if self.spec.external: if self.spec.external:
@ -1713,6 +1719,13 @@ def install_dependency_symlinks(pkg, spec, prefix):
flatten_dependencies(spec, prefix) flatten_dependencies(spec, prefix)
def use_cray_compiler_names():
"""Compiler names for builds that rely on cray compiler names."""
os.environ['CC'] = 'cc'
os.environ['CXX'] = 'CC'
os.environ['FC'] = 'ftn'
os.environ['F77'] = 'ftn'
def flatten_dependencies(spec, flat_dir): def flatten_dependencies(spec, flat_dir):
"""Make each dependency of spec present in dir via symlink.""" """Make each dependency of spec present in dir via symlink."""
for dep in spec.traverse(root=False): for dep in spec.traverse(root=False):

View file

@ -2,6 +2,8 @@
from spack.architecture import Platform, Target from spack.architecture import Platform, Target
from spack.operating_systems.linux_distro import LinuxDistro from spack.operating_systems.linux_distro import LinuxDistro
from spack.operating_systems.cnl import Cnl from spack.operating_systems.cnl import Cnl
from spack.util.executable import which
class CrayXc(Platform): class CrayXc(Platform):
priority = 20 priority = 20
@ -9,7 +11,6 @@ class CrayXc(Platform):
back_end = 'ivybridge' back_end = 'ivybridge'
default = 'ivybridge' default = 'ivybridge'
front_os = "SuSE11"
back_os = "CNL10" back_os = "CNL10"
default_os = "CNL10" default_os = "CNL10"
@ -37,10 +38,22 @@ def __init__(self):
self.add_target('haswell', self.add_target('haswell',
Target('haswell', 'craype-haswell')) Target('haswell', 'craype-haswell'))
self.add_operating_system('SuSE11', LinuxDistro()) # Front end of the cray platform is a linux distro.
linux_dist = LinuxDistro()
self.front_os = str(linux_dist)
self.add_operating_system(str(linux_dist), linux_dist)
self.add_operating_system('CNL10', Cnl()) self.add_operating_system('CNL10', Cnl())
@classmethod @classmethod
def detect(self): def detect(self):
return os.path.exists('/opt/cray/craype') try:
cc_verbose = which('ftn')
text = cc_verbose('-craype-verbose',
output=str, error=str,
ignore_errors=True).split()
if '-D__CRAYXC' in text:
return True
else:
return False
except:
return False

View file

@ -26,8 +26,10 @@
import spack import spack
from spack.version import * from spack.version import *
class PreferredPackages(object): class PreferredPackages(object):
_default_order = {'compiler' : [ 'gcc', 'intel', 'clang', 'pgi', 'xlc' ] } # Arbitrary, but consistent # Arbitrary, but consistent
_default_order = {'compiler': ['gcc', 'intel', 'clang', 'pgi', 'xlc']}
def __init__(self): def __init__(self):
self.preferred = spack.config.get_config('packages') self.preferred = spack.config.get_config('packages')
@ -35,24 +37,25 @@ def __init__(self):
# Given a package name, sort component (e.g, version, compiler, ...), and # Given a package name, sort component (e.g, version, compiler, ...), and
# a second_key (used by providers), return the list # a second_key (used by providers), return the list
def _order_for_package(self, pkgname, component, second_key, test_all=True): def _order_for_package(self, pkgname, component, second_key,
test_all=True):
pkglist = [pkgname] pkglist = [pkgname]
if test_all: if test_all:
pkglist.append('all') pkglist.append('all')
for pkg in pkglist: for pkg in pkglist:
order = self.preferred.get(pkg, {}).get(component, {}) order = self.preferred.get(pkg, {}).get(component, {})
if type(order) is dict: if isinstance(order, dict) and second_key:
order = order.get(second_key, {}) order = order.get(second_key, {})
if not order: if not order:
continue continue
return [str(s).strip() for s in order] return [str(s).strip() for s in order]
return [] return []
# A generic sorting function. Given a package name and sort # A generic sorting function. Given a package name and sort
# component, return less-than-0, 0, or greater-than-0 if # component, return less-than-0, 0, or greater-than-0 if
# a is respectively less-than, equal to, or greater than b. # a is respectively less-than, equal to, or greater than b.
def _component_compare(self, pkgname, component, a, b, reverse_natural_compare, second_key): def _component_compare(self, pkgname, component, a, b,
reverse_natural_compare, second_key):
if a is None: if a is None:
return -1 return -1
if b is None: if b is None:
@ -84,92 +87,102 @@ def _component_compare(self, pkgname, component, a, b, reverse_natural_compare,
else: else:
return 0 return 0
# A sorting function for specs. Similar to component_compare, but # A sorting function for specs. Similar to component_compare, but
# a and b are considered to match entries in the sorting list if they # a and b are considered to match entries in the sorting list if they
# satisfy the list component. # satisfy the list component.
def _spec_compare(self, pkgname, component, a, b, reverse_natural_compare, second_key): def _spec_compare(self, pkgname, component, a, b,
if not a or not a.concrete: reverse_natural_compare, second_key):
if not a or (not a.concrete and not second_key):
return -1 return -1
if not b or not b.concrete: if not b or (not b.concrete and not second_key):
return 1 return 1
specs = self._spec_for_pkgname(pkgname, component, second_key) specs = self._spec_for_pkgname(pkgname, component, second_key)
a_index = None a_index = None
b_index = None b_index = None
reverse = -1 if reverse_natural_compare else 1 reverse = -1 if reverse_natural_compare else 1
for i, cspec in enumerate(specs): for i, cspec in enumerate(specs):
if a_index == None and (cspec.satisfies(a) or a.satisfies(cspec)): if a_index is None and (cspec.satisfies(a) or a.satisfies(cspec)):
a_index = i a_index = i
if b_index: if b_index:
break break
if b_index == None and (cspec.satisfies(b) or b.satisfies(cspec)): if b_index is None and (cspec.satisfies(b) or b.satisfies(cspec)):
b_index = i b_index = i
if a_index: if a_index:
break break
if a_index != None and b_index == None: return -1 if a_index is not None and b_index is None:
elif a_index == None and b_index != None: return 1 return -1
elif a_index != None and b_index == a_index: return -1 * cmp(a, b) elif a_index is None and b_index is not None:
elif a_index != None and b_index != None and a_index != b_index: return cmp(a_index, b_index) return 1
else: return cmp(a, b) * reverse elif a_index is not None and b_index == a_index:
return -1 * cmp(a, b)
elif (a_index is not None and b_index is not None and
a_index != b_index):
return cmp(a_index, b_index)
else:
return cmp(a, b) * reverse
# Given a sort order specified by the pkgname/component/second_key, return # Given a sort order specified by the pkgname/component/second_key, return
# a list of CompilerSpecs, VersionLists, or Specs for that sorting list. # a list of CompilerSpecs, VersionLists, or Specs for that sorting list.
def _spec_for_pkgname(self, pkgname, component, second_key): def _spec_for_pkgname(self, pkgname, component, second_key):
key = (pkgname, component, second_key) key = (pkgname, component, second_key)
if not key in self._spec_for_pkgname_cache: if key not in self._spec_for_pkgname_cache:
pkglist = self._order_for_package(pkgname, component, second_key) pkglist = self._order_for_package(pkgname, component, second_key)
if not pkglist: if not pkglist:
if component in self._default_order: if component in self._default_order:
pkglist = self._default_order[component] pkglist = self._default_order[component]
if component == 'compiler': if component == 'compiler':
self._spec_for_pkgname_cache[key] = [spack.spec.CompilerSpec(s) for s in pkglist] self._spec_for_pkgname_cache[key] = \
[spack.spec.CompilerSpec(s) for s in pkglist]
elif component == 'version': elif component == 'version':
self._spec_for_pkgname_cache[key] = [VersionList(s) for s in pkglist] self._spec_for_pkgname_cache[key] = \
[VersionList(s) for s in pkglist]
else: else:
self._spec_for_pkgname_cache[key] = [spack.spec.Spec(s) for s in pkglist] self._spec_for_pkgname_cache[key] = \
[spack.spec.Spec(s) for s in pkglist]
return self._spec_for_pkgname_cache[key] return self._spec_for_pkgname_cache[key]
def provider_compare(self, pkgname, provider_str, a, b): def provider_compare(self, pkgname, provider_str, a, b):
"""Return less-than-0, 0, or greater than 0 if a is respecively less-than, equal-to, or """Return less-than-0, 0, or greater than 0 if a is respecively
greater-than b. A and b are possible implementations of provider_str. less-than, equal-to, or greater-than b. A and b are possible
One provider is less-than another if it is preferred over the other. implementations of provider_str. One provider is less-than another
For example, provider_compare('scorep', 'mpi', 'mvapich', 'openmpi') would return -1 if if it is preferred over the other. For example,
mvapich should be preferred over openmpi for scorep.""" provider_compare('scorep', 'mpi', 'mvapich', 'openmpi') would
return self._spec_compare(pkgname, 'providers', a, b, False, provider_str) return -1 if mvapich should be preferred over openmpi for scorep."""
return self._spec_compare(pkgname, 'providers', a, b, False,
provider_str)
def spec_has_preferred_provider(self, pkgname, provider_str): def spec_has_preferred_provider(self, pkgname, provider_str):
"""Return True iff the named package has a list of preferred provider""" """Return True iff the named package has a list of preferred
return bool(self._order_for_package(pkgname, 'providers', provider_str, False)) providers"""
return bool(self._order_for_package(pkgname, 'providers',
provider_str, False))
def version_compare(self, pkgname, a, b): def version_compare(self, pkgname, a, b):
"""Return less-than-0, 0, or greater than 0 if version a of pkgname is """Return less-than-0, 0, or greater than 0 if version a of pkgname is
respecively less-than, equal-to, or greater-than version b of pkgname. respectively less-than, equal-to, or greater-than version b of
One version is less-than another if it is preferred over the other.""" pkgname. One version is less-than another if it is preferred over
the other."""
return self._spec_compare(pkgname, 'version', a, b, True, None) return self._spec_compare(pkgname, 'version', a, b, True, None)
def variant_compare(self, pkgname, a, b): def variant_compare(self, pkgname, a, b):
"""Return less-than-0, 0, or greater than 0 if variant a of pkgname is """Return less-than-0, 0, or greater than 0 if variant a of pkgname is
respecively less-than, equal-to, or greater-than variant b of pkgname. respectively less-than, equal-to, or greater-than variant b of
One variant is less-than another if it is preferred over the other.""" pkgname. One variant is less-than another if it is preferred over
the other."""
return self._component_compare(pkgname, 'variant', a, b, False, None) return self._component_compare(pkgname, 'variant', a, b, False, None)
def architecture_compare(self, pkgname, a, b): def architecture_compare(self, pkgname, a, b):
"""Return less-than-0, 0, or greater than 0 if architecture a of pkgname is """Return less-than-0, 0, or greater than 0 if architecture a of pkgname
respecively less-than, equal-to, or greater-than architecture b of pkgname. is respectively less-than, equal-to, or greater-than architecture b
One architecture is less-than another if it is preferred over the other.""" of pkgname. One architecture is less-than another if it is preferred
return self._component_compare(pkgname, 'architecture', a, b, False, None) over the other."""
return self._component_compare(pkgname, 'architecture', a, b,
False, None)
def compiler_compare(self, pkgname, a, b): def compiler_compare(self, pkgname, a, b):
"""Return less-than-0, 0, or greater than 0 if compiler a of pkgname is """Return less-than-0, 0, or greater than 0 if compiler a of pkgname is
respecively less-than, equal-to, or greater-than compiler b of pkgname. respecively less-than, equal-to, or greater-than compiler b of
One compiler is less-than another if it is preferred over the other.""" pkgname. One compiler is less-than another if it is preferred over
the other."""
return self._spec_compare(pkgname, 'compiler', a, b, False, None) return self._spec_compare(pkgname, 'compiler', a, b, False, None)

View file

@ -96,7 +96,6 @@
expansion when it is the first character in an id typed on the command line. expansion when it is the first character in an id typed on the command line.
""" """
import sys import sys
import itertools
import hashlib import hashlib
import base64 import base64
import imp import imp
@ -116,8 +115,6 @@
import spack.error import spack.error
import spack.compilers as compilers import spack.compilers as compilers
# TODO: move display_specs to some other location.
from spack.cmd.find import display_specs
from spack.version import * from spack.version import *
from spack.util.string import * from spack.util.string import *
from spack.util.prefix import Prefix from spack.util.prefix import Prefix
@ -155,6 +152,7 @@
every time we call str()""" every time we call str()"""
_any_version = VersionList([':']) _any_version = VersionList([':'])
# Special types of dependencies.
alldeps = ('build', 'link', 'run') alldeps = ('build', 'link', 'run')
nolink = ('build', 'run') nolink = ('build', 'run')
@ -296,10 +294,15 @@ def __repr__(self):
@key_ordering @key_ordering
class DependencySpec(object): class DependencySpec(object):
""" """Dependencies can be one (or more) of several types:
Dependencies have conditions in which they apply.
This stores both what is depended on and why it is a dependency. - build: needs to be in the PATH at build time.
- link: is linked to and added to compiler flags.
- run: needs to be in the PATH for the package to run.
Fields:
- spec: the spack.spec.Spec description of a dependency.
- deptypes: strings representing the type of dependency this is.
""" """
def __init__(self, spec, deptypes): def __init__(self, spec, deptypes):
self.spec = spec self.spec = spec
@ -558,15 +561,15 @@ def dependents(self, deptype=None):
def _find_deps_dict(self, where, deptype): def _find_deps_dict(self, where, deptype):
deptype = self._deptype_norm(deptype) deptype = self._deptype_norm(deptype)
return [(dep.spec.name, dep) return dict((dep.spec.name, dep)
for dep in where.values() for dep in where.values()
if deptype and any(d in deptype for d in dep.deptypes)] if deptype and any(d in deptype for d in dep.deptypes))
def dependencies_dict(self, deptype=None): def dependencies_dict(self, deptype=None):
return dict(self._find_deps_dict(self._dependencies, deptype)) return self._find_deps_dict(self._dependencies, deptype)
def dependents_dict(self, deptype=None): def dependents_dict(self, deptype=None):
return dict(self._find_deps_dict(self._dependents, deptype)) return self._find_deps_dict(self._dependents, deptype)
# #
# Private routines here are called by the parser when building a spec. # Private routines here are called by the parser when building a spec.
@ -644,7 +647,8 @@ def _set_platform(self, value):
mod = imp.load_source(mod_name, path) mod = imp.load_source(mod_name, path)
class_name = mod_to_class(value) class_name = mod_to_class(value)
if not hasattr(mod, class_name): if not hasattr(mod, class_name):
tty.die('No class %s defined in %s' % (class_name, mod_name)) tty.die(
'No class %s defined in %s' % (class_name, mod_name))
cls = getattr(mod, class_name) cls = getattr(mod, class_name)
if not inspect.isclass(cls): if not inspect.isclass(cls):
tty.die('%s.%s is not a class' % (mod_name, class_name)) tty.die('%s.%s is not a class' % (mod_name, class_name))
@ -667,13 +671,15 @@ def _set_platform(self, value):
def _set_os(self, value): def _set_os(self, value):
"""Called by the parser to set the architecture operating system""" """Called by the parser to set the architecture operating system"""
if self.architecture.platform: arch = self.architecture
self.architecture.platform_os = self.architecture.platform.operating_system(value) if arch.platform:
arch.platform_os = arch.platform.operating_system(value)
def _set_target(self, value): def _set_target(self, value):
"""Called by the parser to set the architecture target""" """Called by the parser to set the architecture target"""
if self.architecture.platform: arch = self.architecture
self.architecture.target = self.architecture.platform.target(value) if arch.platform:
arch.target = arch.platform.target(value)
def _add_dependency(self, spec, deptypes): def _add_dependency(self, spec, deptypes):
"""Called by the parser to add another spec as a dependency.""" """Called by the parser to add another spec as a dependency."""
@ -688,7 +694,8 @@ def _add_dependency(self, spec, deptypes):
# #
@property @property
def fullname(self): def fullname(self):
return (('%s.%s' % (self.namespace, self.name)) if self.namespace else return (
('%s.%s' % (self.namespace, self.name)) if self.namespace else
(self.name if self.name else '')) (self.name if self.name else ''))
@property @property
@ -745,15 +752,15 @@ def concrete(self):
if self._concrete: if self._concrete:
return True return True
self._concrete = bool(not self.virtual self._concrete = bool(not self.virtual and
and self.namespace is not None self.namespace is not None and
and self.versions.concrete self.versions.concrete and
and self.variants.concrete self.variants.concrete and
and self.architecture self.architecture and
and self.architecture.concrete self.architecture.concrete and
and self.compiler and self.compiler.concrete self.compiler and self.compiler.concrete and
and self.compiler_flags.concrete self.compiler_flags.concrete and
and self._dependencies.concrete) self._dependencies.concrete)
return self._concrete return self._concrete
def traverse(self, visited=None, deptype=None, **kwargs): def traverse(self, visited=None, deptype=None, **kwargs):
@ -914,9 +921,11 @@ def to_node_dict(self):
d = { d = {
'parameters': params, 'parameters': params,
'arch': self.architecture, 'arch': self.architecture,
'dependencies': dict((d, (deps[d].spec.dag_hash(), 'dependencies': dict(
deps[d].deptypes)) (name, {
for d in sorted(deps.keys())) 'hash': dspec.spec.dag_hash(),
'type': [str(s) for s in dspec.deptypes]})
for name, dspec in deps.items())
} }
# Older concrete specs do not have a namespace. Omit for # Older concrete specs do not have a namespace. Omit for
@ -982,13 +991,33 @@ def from_node_dict(node):
raise SpackRecordError( raise SpackRecordError(
"Did not find a valid format for variants in YAML file") "Did not find a valid format for variants in YAML file")
# XXX(deptypes): why are dependencies not meant to be read here? # Don't read dependencies here; from_node_dict() is used by
#for name, dep_info in node['dependencies'].items(): # from_yaml() to read the root *and* each dependency spec.
# (dag_hash, deptypes) = dep_info
# spec._dependencies[name] = DependencySpec(dag_hash, deptypes)
return spec return spec
@staticmethod
def read_yaml_dep_specs(dependency_dict):
"""Read the DependencySpec portion of a YAML-formatted Spec.
This needs to be backward-compatible with older spack spec
formats so that reindex will work on old specs/databases.
"""
for dep_name, elt in dependency_dict.items():
if isinstance(elt, basestring):
# original format, elt is just the dependency hash.
dag_hash, deptypes = elt, ['build', 'link']
elif isinstance(elt, tuple):
# original deptypes format: (used tuples, not future-proof)
dag_hash, deptypes = elt
elif isinstance(elt, dict):
# new format: elements of dependency spec are keyed.
dag_hash, deptypes = elt['hash'], elt['type']
else:
raise SpecError("Couldn't parse dependency types in spec.")
yield dep_name, dag_hash, list(deptypes)
@staticmethod @staticmethod
def from_yaml(stream): def from_yaml(stream):
"""Construct a spec from YAML. """Construct a spec from YAML.
@ -1000,27 +1029,30 @@ def from_yaml(stream):
represent more than the DAG does. represent more than the DAG does.
""" """
deps = {}
spec = None
try: try:
yfile = yaml.load(stream) yfile = yaml.load(stream)
except MarkedYAMLError, e: except MarkedYAMLError, e:
raise SpackYAMLError("error parsing YAML spec:", str(e)) raise SpackYAMLError("error parsing YAML spec:", str(e))
for node in yfile['spec']: nodes = yfile['spec']
name = next(iter(node))
dep = Spec.from_node_dict(node)
if not spec:
spec = dep
deps[dep.name] = dep
for node in yfile['spec']: # Read nodes out of list. Root spec is the first element;
# dependencies are the following elements.
dep_list = [Spec.from_node_dict(node) for node in nodes]
if not dep_list:
raise SpecError("YAML spec contains no nodes.")
deps = dict((spec.name, spec) for spec in dep_list)
spec = dep_list[0]
for node in nodes:
# get dependency dict from the node.
name = next(iter(node)) name = next(iter(node))
for dep_name, (dep, deptypes) in \ yaml_deps = node[name]['dependencies']
node[name]['dependencies'].items(): for dname, dhash, dtypes in Spec.read_yaml_dep_specs(yaml_deps):
deps[name]._dependencies[dep_name] = \ # Fill in dependencies by looking them up by name in deps dict
DependencySpec(deps[dep_name], deptypes) deps[name]._dependencies[dname] = DependencySpec(
deps[dname], set(dtypes))
return spec return spec
def _concretize_helper(self, presets=None, visited=None): def _concretize_helper(self, presets=None, visited=None):
@ -1171,14 +1203,16 @@ def _expand_virtual_packages(self):
def feq(cfield, sfield): def feq(cfield, sfield):
return (not cfield) or (cfield == sfield) return (not cfield) or (cfield == sfield)
if replacement is spec or (feq(replacement.name, spec.name) and if replacement is spec or (
feq(replacement.name, spec.name) and
feq(replacement.versions, spec.versions) and feq(replacement.versions, spec.versions) and
feq(replacement.compiler, spec.compiler) and feq(replacement.compiler, spec.compiler) and
feq(replacement.architecture, spec.architecture) and feq(replacement.architecture, spec.architecture) and
feq(replacement._dependencies, spec._dependencies) and feq(replacement._dependencies, spec._dependencies) and
feq(replacement.variants, spec.variants) and feq(replacement.variants, spec.variants) and
feq(replacement.external, spec.external) and feq(replacement.external, spec.external) and
feq(replacement.external_module, spec.external_module)): feq(replacement.external_module,
spec.external_module)):
continue continue
# Refine this spec to the candidate. This uses # Refine this spec to the candidate. This uses
# replace_with AND dup so that it can work in # replace_with AND dup so that it can work in
@ -1235,10 +1269,10 @@ def concretize(self):
if s.namespace is None: if s.namespace is None:
s.namespace = spack.repo.repo_for_pkg(s.name).namespace s.namespace = spack.repo.repo_for_pkg(s.name).namespace
for s in self.traverse(root=False): for s in self.traverse(root=False):
if s.external_module: if s.external_module:
compiler = spack.compilers.compiler_for_spec(s.compiler, s.architecture) compiler = spack.compilers.compiler_for_spec(
s.compiler, s.architecture)
for mod in compiler.modules: for mod in compiler.modules:
load_module(mod) load_module(mod)
@ -1505,13 +1539,13 @@ def normalize(self, force=False):
# Ensure first that all packages & compilers in the DAG exist. # Ensure first that all packages & compilers in the DAG exist.
self.validate_names() self.validate_names()
# Get all the dependencies into one DependencyMap # Get all the dependencies into one DependencyMap
spec_deps = self.flat_dependencies_with_deptype(copy=False, spec_deps = self.flat_dependencies_with_deptype(
deptype_query=alldeps) copy=False, deptype_query=alldeps)
# Initialize index of virtual dependency providers if # Initialize index of virtual dependency providers if
# concretize didn't pass us one already # concretize didn't pass us one already
provider_index = ProviderIndex([s.spec for s in spec_deps.values()], provider_index = ProviderIndex(
restrict=True) [s.spec for s in spec_deps.values()], restrict=True)
# traverse the package DAG and fill out dependencies according # traverse the package DAG and fill out dependencies according
# to package files & their 'when' specs # to package files & their 'when' specs
@ -1584,20 +1618,17 @@ def constrain(self, other, deps=True):
other.variants[v]) other.variants[v])
# TODO: Check out the logic here # TODO: Check out the logic here
if self.architecture is not None and other.architecture is not None: sarch, oarch = self.architecture, other.architecture
if self.architecture.platform is not None and other.architecture.platform is not None: if sarch is not None and oarch is not None:
if self.architecture.platform != other.architecture.platform: if sarch.platform is not None and oarch.platform is not None:
raise UnsatisfiableArchitectureSpecError(self.architecture, if sarch.platform != oarch.platform:
other.architecture) raise UnsatisfiableArchitectureSpecError(sarch, oarch)
if self.architecture.platform_os is not None and other.architecture.platform_os is not None: if sarch.platform_os is not None and oarch.platform_os is not None:
if self.architecture.platform_os != other.architecture.platform_os: if sarch.platform_os != oarch.platform_os:
raise UnsatisfiableArchitectureSpecError(self.architecture, raise UnsatisfiableArchitectureSpecError(sarch, oarch)
other.architecture) if sarch.target is not None and oarch.target is not None:
if self.architecture.target is not None and other.architecture.target is not None: if sarch.target != oarch.target:
if self.architecture.target != other.architecture.target: raise UnsatisfiableArchitectureSpecError(sarch, oarch)
raise UnsatisfiableArchitectureSpecError(self.architecture,
other.architecture)
changed = False changed = False
if self.compiler is not None and other.compiler is not None: if self.compiler is not None and other.compiler is not None:
@ -1612,15 +1643,16 @@ def constrain(self, other, deps=True):
changed |= self.compiler_flags.constrain(other.compiler_flags) changed |= self.compiler_flags.constrain(other.compiler_flags)
old = str(self.architecture) old = str(self.architecture)
if self.architecture is None or other.architecture is None: sarch, oarch = self.architecture, other.architecture
self.architecture = self.architecture or other.architecture if sarch is None or other.architecture is None:
self.architecture = sarch or oarch
else: else:
if self.architecture.platform is None or other.architecture.platform is None: if sarch.platform is None or oarch.platform is None:
self.architecture.platform = self.architecture.platform or other.architecture.platform self.architecture.platform = sarch.platform or oarch.platform
if self.architecture.platform_os is None or other.architecture.platform_os is None: if sarch.platform_os is None or oarch.platform_os is None:
self.architecture.platform_os = self.architecture.platform_os or other.architecture.platform_os sarch.platform_os = sarch.platform_os or oarch.platform_os
if self.architecture.target is None or other.architecture.target is None: if sarch.target is None or oarch.target is None:
self.architecture.target = self.architecture.target or other.architecture.target sarch.target = sarch.target or oarch.target
changed |= (str(self.architecture) != old) changed |= (str(self.architecture) != old)
if deps: if deps:
@ -1751,15 +1783,25 @@ def satisfies(self, other, deps=True, strict=False):
# Architecture satisfaction is currently just string equality. # Architecture satisfaction is currently just string equality.
# If not strict, None means unconstrained. # If not strict, None means unconstrained.
if self.architecture and other.architecture: sarch, oarch = self.architecture, other.architecture
if ((self.architecture.platform and other.architecture.platform and self.architecture.platform != other.architecture.platform) or if sarch and oarch:
(self.architecture.platform_os and other.architecture.platform_os and self.architecture.platform_os != other.architecture.platform_os) or if ((sarch.platform and
(self.architecture.target and other.architecture.target and self.architecture.target != other.architecture.target)): oarch.platform and
sarch.platform != oarch.platform) or
(sarch.platform_os and
oarch.platform_os and
sarch.platform_os != oarch.platform_os) or
(sarch.target and
oarch.target and
sarch.target != oarch.target)):
return False return False
elif strict and ((other.architecture and not self.architecture) or
(other.architecture.platform and not self.architecture.platform) or elif strict and ((oarch and not sarch) or
(other.architecture.platform_os and not self.architecture.platform_os) or (oarch.platform and not sarch.platform) or
(other.architecture.target and not self.architecture.target)): (oarch.platform_os and not sarch.platform_os) or
(oarch.target and not sarch.target)):
return False return False
if not self.compiler_flags.satisfies( if not self.compiler_flags.satisfies(
@ -1841,11 +1883,16 @@ def _dup(self, other, **kwargs):
# We don't count dependencies as changes here # We don't count dependencies as changes here
changed = True changed = True
if hasattr(self, 'name'): if hasattr(self, 'name'):
changed = (self.name != other.name and self.versions != other.versions and \ changed = (self.name != other.name and
self.architecture != other.architecture and self.compiler != other.compiler and \ self.versions != other.versions and
self.variants != other.variants and self._normal != other._normal and \ self.architecture != other.architecture and
self.concrete != other.concrete and self.external != other.external and \ self.compiler != other.compiler and
self.external_module != other.external_module and self.compiler_flags != other.compiler_flags) self.variants != other.variants and
self._normal != other._normal and
self.concrete != other.concrete and
self.external != other.external and
self.external_module != other.external_module and
self.compiler_flags != other.compiler_flags)
# Local node attributes get copied first. # Local node attributes get copied first.
self.name = other.name self.name = other.name
@ -2000,7 +2047,6 @@ def _cmp_node(self):
self.compiler, self.compiler,
self.compiler_flags) self.compiler_flags)
def eq_node(self, other): def eq_node(self, other):
"""Equality with another spec, not including dependencies.""" """Equality with another spec, not including dependencies."""
return self._cmp_node() == other._cmp_node() return self._cmp_node() == other._cmp_node()
@ -2196,7 +2242,6 @@ def write(s, c):
def dep_string(self): def dep_string(self):
return ''.join("^" + dep.format() for dep in self.sorted_deps()) return ''.join("^" + dep.format() for dep in self.sorted_deps())
def __cmp__(self, other): def __cmp__(self, other):
# Package name sort order is not configurable, always goes alphabetical # Package name sort order is not configurable, always goes alphabetical
if self.name != other.name: if self.name != other.name:
@ -2205,23 +2250,23 @@ def __cmp__(self, other):
# Package version is second in compare order # Package version is second in compare order
pkgname = self.name pkgname = self.name
if self.versions != other.versions: if self.versions != other.versions:
return spack.pkgsort.version_compare(pkgname, return spack.pkgsort.version_compare(
self.versions, other.versions) pkgname, self.versions, other.versions)
# Compiler is third # Compiler is third
if self.compiler != other.compiler: if self.compiler != other.compiler:
return spack.pkgsort.compiler_compare(pkgname, return spack.pkgsort.compiler_compare(
self.compiler, other.compiler) pkgname, self.compiler, other.compiler)
# Variants # Variants
if self.variants != other.variants: if self.variants != other.variants:
return spack.pkgsort.variant_compare(pkgname, return spack.pkgsort.variant_compare(
self.variants, other.variants) pkgname, self.variants, other.variants)
# Target # Target
if self.architecture != other.architecture: if self.architecture != other.architecture:
return spack.pkgsort.architecture_compare(pkgname, return spack.pkgsort.architecture_compare(
self.architecture, other.architecture) pkgname, self.architecture, other.architecture)
# Dependency is not configurable # Dependency is not configurable
if self._dependencies != other._dependencies: if self._dependencies != other._dependencies:
@ -2230,7 +2275,6 @@ def __cmp__(self, other):
# Equal specs # Equal specs
return 0 return 0
def __str__(self): def __str__(self):
return self.format() + self.dep_string() return self.format() + self.dep_string()
@ -2244,12 +2288,14 @@ def tree(self, **kwargs):
indent = kwargs.pop('indent', 0) indent = kwargs.pop('indent', 0)
fmt = kwargs.pop('format', '$_$@$%@+$+$=') fmt = kwargs.pop('format', '$_$@$%@+$+$=')
prefix = kwargs.pop('prefix', None) prefix = kwargs.pop('prefix', None)
deptypes = kwargs.pop('deptypes', ('build', 'link'))
check_kwargs(kwargs, self.tree) check_kwargs(kwargs, self.tree)
out = "" out = ""
cur_id = 0 cur_id = 0
ids = {} ids = {}
for d, node in self.traverse(order='pre', cover=cover, depth=True): for d, node in self.traverse(
order='pre', cover=cover, depth=True, deptypes=deptypes):
if prefix is not None: if prefix is not None:
out += prefix(node) out += prefix(node)
out += " " * indent out += " " * indent
@ -2303,8 +2349,8 @@ def __init__(self):
# Lexer is always the same for every parser. # Lexer is always the same for every parser.
_lexer = SpecLexer() _lexer = SpecLexer()
class SpecParser(spack.parse.Parser):
class SpecParser(spack.parse.Parser):
def __init__(self): def __init__(self):
super(SpecParser, self).__init__(_lexer) super(SpecParser, self).__init__(_lexer)
self.previous = None self.previous = None
@ -2357,8 +2403,8 @@ def do_parse(self):
except spack.parse.ParseError, e: except spack.parse.ParseError, e:
raise SpecParseError(e) raise SpecParseError(e)
# If the spec has an os or a target and no platform, give it
# If the spec has an os or a target and no platform, give it the default platform # the default platform
for spec in specs: for spec in specs:
for s in spec.traverse(): for s in spec.traverse():
if s.architecture.os_string or s.architecture.target_string: if s.architecture.os_string or s.architecture.target_string:

View file

@ -32,15 +32,17 @@
from spack.test.tally_plugin import Tally from spack.test.tally_plugin import Tally
"""Names of tests to be included in Spack's test suite""" """Names of tests to be included in Spack's test suite"""
test_names = ['architecture', 'versions', 'url_parse', 'url_substitution', 'packages', 'stage', test_names = [
'spec_syntax', 'spec_semantics', 'spec_dag', 'concretize', 'architecture', 'versions', 'url_parse', 'url_substitution', 'packages',
'multimethod', 'install', 'package_sanity', 'config', 'stage', 'spec_syntax', 'spec_semantics', 'spec_dag', 'concretize',
'directory_layout', 'pattern', 'python_version', 'git_fetch', 'multimethod', 'install', 'package_sanity', 'config', 'directory_layout',
'svn_fetch', 'hg_fetch', 'mirror', 'modules', 'url_extrapolate', 'pattern', 'python_version', 'git_fetch', 'svn_fetch', 'hg_fetch',
'cc', 'link_tree', 'spec_yaml', 'optional_deps', 'mirror', 'modules', 'url_extrapolate', 'cc', 'link_tree', 'spec_yaml',
'make_executable', 'configure_guess', 'lock', 'database', 'optional_deps', 'make_executable', 'build_system_guess', 'lock',
'namespace_trie', 'yaml', 'sbang', 'environment', 'cmd.find', 'database', 'namespace_trie', 'yaml', 'sbang', 'environment', 'cmd.find',
'cmd.uninstall', 'cmd.test_install', 'cmd.test_compiler_cmd'] 'cmd.uninstall', 'cmd.test_install', 'cmd.test_compiler_cmd',
'cmd.module'
]
def list_tests(): def list_tests():

View file

@ -1,7 +1,31 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
""" Test checks if the architecture class is created correctly and also that """ Test checks if the architecture class is created correctly and also that
the functions are looking for the correct architecture name the functions are looking for the correct architecture name
""" """
import unittest import itertools
import os import os
import platform as py_platform import platform as py_platform
import spack import spack
@ -14,9 +38,8 @@
from spack.test.mock_packages_test import * from spack.test.mock_packages_test import *
#class ArchitectureTest(unittest.TestCase):
class ArchitectureTest(MockPackagesTest):
class ArchitectureTest(MockPackagesTest):
def setUp(self): def setUp(self):
super(ArchitectureTest, self).setUp() super(ArchitectureTest, self).setUp()
self.platform = spack.architecture.platform() self.platform = spack.architecture.platform()
@ -50,10 +73,8 @@ def test_dict_functions_for_architecture(self):
self.assertTrue(isinstance(new_arch.target, self.assertTrue(isinstance(new_arch.target,
spack.architecture.Target)) spack.architecture.Target))
def test_platform(self): def test_platform(self):
output_platform_class = spack.architecture.platform() output_platform_class = spack.architecture.platform()
my_arch_class = None
if os.path.exists('/opt/cray/craype'): if os.path.exists('/opt/cray/craype'):
my_platform_class = CrayXc() my_platform_class = CrayXc()
elif os.path.exists('/bgsys'): elif os.path.exists('/bgsys'):
@ -110,7 +131,8 @@ def test_user_input_combination(self):
o, t = arch o, t = arch
spec = Spec("libelf os=%s target=%s" % (o, t)) spec = Spec("libelf os=%s target=%s" % (o, t))
spec.concretize() spec.concretize()
results.append(spec.architecture.platform_os == self.platform.operating_system(o)) results.append(spec.architecture.platform_os ==
self.platform.operating_system(o))
results.append(spec.architecture.target == self.platform.target(t)) results.append(spec.architecture.target == self.platform.target(t))
res = all(results) res = all(results)

View file

@ -28,14 +28,14 @@
import unittest import unittest
from llnl.util.filesystem import * from llnl.util.filesystem import *
from spack.cmd.create import ConfigureGuesser from spack.cmd.create import BuildSystemGuesser
from spack.stage import Stage from spack.stage import Stage
from spack.test.mock_packages_test import * from spack.test.mock_packages_test import *
from spack.util.executable import which from spack.util.executable import which
class InstallTest(unittest.TestCase): class InstallTest(unittest.TestCase):
"""Tests the configure guesser in spack create""" """Tests the build system guesser in spack create"""
def setUp(self): def setUp(self):
self.tar = which('tar') self.tar = which('tar')
@ -44,12 +44,10 @@ def setUp(self):
os.chdir(self.tmpdir) os.chdir(self.tmpdir)
self.stage = None self.stage = None
def tearDown(self): def tearDown(self):
shutil.rmtree(self.tmpdir, ignore_errors=True) shutil.rmtree(self.tmpdir, ignore_errors=True)
os.chdir(self.orig_dir) os.chdir(self.orig_dir)
def check_archive(self, filename, system): def check_archive(self, filename, system):
mkdirp('archive') mkdirp('archive')
touch(join_path('archive', filename)) touch(join_path('archive', filename))
@ -60,24 +58,24 @@ def check_archive(self, filename, system):
with Stage(url) as stage: with Stage(url) as stage:
stage.fetch() stage.fetch()
guesser = ConfigureGuesser() guesser = BuildSystemGuesser()
guesser(stage) guesser(stage, url)
self.assertEqual(system, guesser.build_system) self.assertEqual(system, guesser.build_system)
def test_python(self):
self.check_archive('setup.py', 'python')
def test_autotools(self): def test_autotools(self):
self.check_archive('configure', 'autotools') self.check_archive('configure', 'autotools')
def test_cmake(self): def test_cmake(self):
self.check_archive('CMakeLists.txt', 'cmake') self.check_archive('CMakeLists.txt', 'cmake')
def test_scons(self):
self.check_archive('SConstruct', 'scons')
def test_python(self):
self.check_archive('setup.py', 'python')
def test_R(self):
self.check_archive('NAMESPACE', 'R')
def test_unknown(self): def test_unknown(self):
self.check_archive('foobar', 'unknown') self.check_archive('foobar', 'unknown')

View file

@ -27,11 +27,7 @@
import spack.cmd.find import spack.cmd.find
import unittest import unittest
from spack.util.pattern import Bunch
class Bunch(object):
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
class FindTest(unittest.TestCase): class FindTest(unittest.TestCase):

View file

@ -0,0 +1,83 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import argparse
import os.path
import spack.cmd.module as module
import spack.modules as modules
import spack.test.mock_database
class TestModule(spack.test.mock_database.MockDatabase):
def _get_module_files(self, args):
return [
modules.module_types[args.module_type](spec).file_name for spec in args.specs # NOQA: ignore=E501
]
def test_module_common_operations(self):
parser = argparse.ArgumentParser()
module.setup_parser(parser)
# Try to remove a non existing module [tcl]
args = parser.parse_args(['rm', 'doesnotexist'])
self.assertRaises(SystemExit, module.module, parser, args)
# Remove existing modules [tcl]
args = parser.parse_args(['rm', '-y', 'mpileaks'])
module_files = self._get_module_files(args)
for item in module_files:
self.assertTrue(os.path.exists(item))
module.module(parser, args)
for item in module_files:
self.assertFalse(os.path.exists(item))
# Add them back [tcl]
args = parser.parse_args(['refresh', '-y', 'mpileaks'])
module.module(parser, args)
for item in module_files:
self.assertTrue(os.path.exists(item))
# TODO : test the --delete-tree option
# TODO : this requires having a separate directory for test modules
# Try to find a module with multiple matches
args = parser.parse_args(['find', 'mpileaks'])
self.assertRaises(SystemExit, module.module, parser, args)
# Try to find a module with no matches
args = parser.parse_args(['find', 'doesnotexist'])
self.assertRaises(SystemExit, module.module, parser, args)
# Try to find a module
args = parser.parse_args(['find', 'libelf'])
module.module(parser, args)
# Remove existing modules [dotkit]
args = parser.parse_args(['rm', '-y', '-m', 'dotkit', 'mpileaks'])
module_files = self._get_module_files(args)
for item in module_files:
self.assertTrue(os.path.exists(item))
module.module(parser, args)
for item in module_files:
self.assertFalse(os.path.exists(item))
# Add them back [dotkit]
args = parser.parse_args(['refresh', '-y', '-m', 'dotkit', 'mpileaks'])
module.module(parser, args)
for item in module_files:
self.assertTrue(os.path.exists(item))
# TODO : add tests for loads and find to check the prompt format

View file

@ -27,7 +27,6 @@
import StringIO import StringIO
import spack.modules import spack.modules
import unittest
from spack.test.mock_packages_test import MockPackagesTest from spack.test.mock_packages_test import MockPackagesTest
FILE_REGISTRY = collections.defaultdict(StringIO.StringIO) FILE_REGISTRY = collections.defaultdict(StringIO.StringIO)
@ -266,7 +265,7 @@ def test_alter_environment(self):
def test_blacklist(self): def test_blacklist(self):
spack.modules.CONFIGURATION = configuration_blacklist spack.modules.CONFIGURATION = configuration_blacklist
spec = spack.spec.Spec('mpileaks') spec = spack.spec.Spec('mpileaks ^zmpi')
content = self.get_modulefile_content(spec) content = self.get_modulefile_content(spec)
self.assertEqual(len([x for x in content if 'is-loaded' in x]), 1) self.assertEqual(len([x for x in content if 'is-loaded' in x]), 1)
self.assertEqual(len([x for x in content if 'module load ' in x]), 1) self.assertEqual(len([x for x in content if 'module load ' in x]), 1)

View file

@ -28,42 +28,50 @@
def composite(interface=None, method_list=None, container=list): def composite(interface=None, method_list=None, container=list):
""" """Returns a class decorator that patches a class adding all the methods
Returns a class decorator that patches a class adding all the methods it needs to be a composite for a given it needs to be a composite for a given interface.
interface.
:param interface: class exposing the interface to which the composite object must conform. Only non-private and :param interface: class exposing the interface to which the composite
non-special methods will be taken into account object must conform. Only non-private and non-special methods will be
taken into account
:param method_list: names of methods that should be part of the composite :param method_list: names of methods that should be part of the composite
:param container: container for the composite object (default = list). Must fulfill the MutableSequence contract. :param container: container for the composite object (default = list).
The composite class will expose the container API to manage object composition Must fulfill the MutableSequence contract. The composite class will expose
the container API to manage object composition
:return: class decorator :return: class decorator
""" """
# Check if container fulfills the MutableSequence contract and raise an exception if it doesn't # Check if container fulfills the MutableSequence contract and raise an
# The patched class returned by the decorator will inherit from the container class to expose the # exception if it doesn't. The patched class returned by the decorator will
# interface needed to manage objects composition # inherit from the container class to expose the interface needed to manage
# objects composition
if not issubclass(container, collections.MutableSequence): if not issubclass(container, collections.MutableSequence):
raise TypeError("Container must fulfill the MutableSequence contract") raise TypeError("Container must fulfill the MutableSequence contract")
# Check if at least one of the 'interface' or the 'method_list' arguments are defined # Check if at least one of the 'interface' or the 'method_list' arguments
# are defined
if interface is None and method_list is None: if interface is None and method_list is None:
raise TypeError("Either 'interface' or 'method_list' must be defined on a call to composite") raise TypeError("Either 'interface' or 'method_list' must be defined on a call to composite") # NOQA : ignore=E501
def cls_decorator(cls): def cls_decorator(cls):
# Retrieve the base class of the composite. Inspect its methods and decide which ones will be overridden # Retrieve the base class of the composite. Inspect its methods and
# decide which ones will be overridden
def no_special_no_private(x): def no_special_no_private(x):
return inspect.ismethod(x) and not x.__name__.startswith('_') return inspect.ismethod(x) and not x.__name__.startswith('_')
# Patch the behavior of each of the methods in the previous list. This is done associating an instance of the # Patch the behavior of each of the methods in the previous list.
# descriptor below to any method that needs to be patched. # This is done associating an instance of the descriptor below to
# any method that needs to be patched.
class IterateOver(object): class IterateOver(object):
"""Decorator used to patch methods in a composite.
It iterates over all the items in the instance containing the
associated attribute and calls for each of them an attribute
with the same name
""" """
Decorator used to patch methods in a composite. It iterates over all the items in the instance containing the
associated attribute and calls for each of them an attribute with the same name
"""
def __init__(self, name, func=None): def __init__(self, name, func=None):
self.name = name self.name = name
self.func = func self.func = func
@ -72,8 +80,9 @@ def __get__(self, instance, owner):
def getter(*args, **kwargs): def getter(*args, **kwargs):
for item in instance: for item in instance:
getattr(item, self.name)(*args, **kwargs) getattr(item, self.name)(*args, **kwargs)
# If we are using this descriptor to wrap a method from an interface, then we must conditionally # If we are using this descriptor to wrap a method from an
# use the `functools.wraps` decorator to set the appropriate fields. # interface, then we must conditionally use the
# `functools.wraps` decorator to set the appropriate fields
if self.func is not None: if self.func is not None:
getter = functools.wraps(self.func)(getter) getter = functools.wraps(self.func)(getter)
return getter return getter
@ -81,7 +90,8 @@ def getter(*args, **kwargs):
dictionary_for_type_call = {} dictionary_for_type_call = {}
# Construct a dictionary with the methods explicitly passed as name # Construct a dictionary with the methods explicitly passed as name
if method_list is not None: if method_list is not None:
# python@2.7: method_list_dict = {name: IterateOver(name) for name in method_list} # python@2.7: method_list_dict = {name: IterateOver(name) for name
# in method_list}
method_list_dict = {} method_list_dict = {}
for name in method_list: for name in method_list:
method_list_dict[name] = IterateOver(name) method_list_dict[name] = IterateOver(name)
@ -89,28 +99,40 @@ def getter(*args, **kwargs):
# Construct a dictionary with the methods inspected from the interface # Construct a dictionary with the methods inspected from the interface
if interface is not None: if interface is not None:
########## ##########
# python@2.7: interface_methods = {name: method for name, method in inspect.getmembers(interface, predicate=no_special_no_private)} # python@2.7: interface_methods = {name: method for name, method in
# inspect.getmembers(interface, predicate=no_special_no_private)}
interface_methods = {} interface_methods = {}
for name, method in inspect.getmembers(interface, predicate=no_special_no_private): for name, method in inspect.getmembers(interface, predicate=no_special_no_private): # NOQA: ignore=E501
interface_methods[name] = method interface_methods[name] = method
########## ##########
# python@2.7: interface_methods_dict = {name: IterateOver(name, method) for name, method in interface_methods.iteritems()} # python@2.7: interface_methods_dict = {name: IterateOver(name,
# method) for name, method in interface_methods.iteritems()}
interface_methods_dict = {} interface_methods_dict = {}
for name, method in interface_methods.iteritems(): for name, method in interface_methods.iteritems():
interface_methods_dict[name] = IterateOver(name, method) interface_methods_dict[name] = IterateOver(name, method)
########## ##########
dictionary_for_type_call.update(interface_methods_dict) dictionary_for_type_call.update(interface_methods_dict)
# Get the methods that are defined in the scope of the composite class and override any previous definition # Get the methods that are defined in the scope of the composite
# class and override any previous definition
########## ##########
# python@2.7: cls_method = {name: method for name, method in inspect.getmembers(cls, predicate=inspect.ismethod)} # python@2.7: cls_method = {name: method for name, method in
# inspect.getmembers(cls, predicate=inspect.ismethod)}
cls_method = {} cls_method = {}
for name, method in inspect.getmembers(cls, predicate=inspect.ismethod): for name, method in inspect.getmembers(cls, predicate=inspect.ismethod): # NOQA: ignore=E501
cls_method[name] = method cls_method[name] = method
########## ##########
dictionary_for_type_call.update(cls_method) dictionary_for_type_call.update(cls_method)
# Generate the new class on the fly and return it # Generate the new class on the fly and return it
# FIXME : inherit from interface if we start to use ABC classes? # FIXME : inherit from interface if we start to use ABC classes?
wrapper_class = type(cls.__name__, (cls, container), dictionary_for_type_call) wrapper_class = type(cls.__name__, (cls, container),
dictionary_for_type_call)
return wrapper_class return wrapper_class
return cls_decorator return cls_decorator
class Bunch(object):
"""Carries a bunch of named attributes (from Alex Martelli bunch)"""
def __init__(self, **kwargs):
self.__dict__.update(kwargs)

View file

@ -74,25 +74,25 @@ case unload:
# tool's commands to add/remove the result from the environment. # tool's commands to add/remove the result from the environment.
switch ($_sp_subcommand) switch ($_sp_subcommand)
case "use": case "use":
set _sp_full_spec = ( "`\spack $_sp_flags module find dotkit $_sp_spec`" ) set _sp_full_spec = ( "`\spack $_sp_flags module find --module-type dotkit $_sp_spec`" )
if ( $? == 0 ) then if ( $? == 0 ) then
use $_sp_module_args $_sp_full_spec use $_sp_module_args $_sp_full_spec
endif endif
breaksw breaksw
case "unuse": case "unuse":
set _sp_full_spec = ( "`\spack $_sp_flags module find dotkit $_sp_spec`" ) set _sp_full_spec = ( "`\spack $_sp_flags module find --module-type dotkit $_sp_spec`" )
if ( $? == 0 ) then if ( $? == 0 ) then
unuse $_sp_module_args $_sp_full_spec unuse $_sp_module_args $_sp_full_spec
endif endif
breaksw breaksw
case "load": case "load":
set _sp_full_spec = ( "`\spack $_sp_flags module find tcl $_sp_spec`" ) set _sp_full_spec = ( "`\spack $_sp_flags module find --module-type tcl $_sp_spec`" )
if ( $? == 0 ) then if ( $? == 0 ) then
module load $_sp_module_args $_sp_full_spec module load $_sp_module_args $_sp_full_spec
endif endif
breaksw breaksw
case "unload": case "unload":
set _sp_full_spec = ( "`\spack $_sp_flags module find tcl $_sp_spec`" ) set _sp_full_spec = ( "`\spack $_sp_flags module find --module-type tcl $_sp_spec`" )
if ( $? == 0 ) then if ( $? == 0 ) then
module unload $_sp_module_args $_sp_full_spec module unload $_sp_module_args $_sp_full_spec
endif endif

View file

@ -117,19 +117,19 @@ function spack {
# If spack module command comes back with an error, do nothing. # If spack module command comes back with an error, do nothing.
case $_sp_subcommand in case $_sp_subcommand in
"use") "use")
if _sp_full_spec=$(command spack $_sp_flags module find $_sp_subcommand_args dotkit $_sp_spec); then if _sp_full_spec=$(command spack $_sp_flags module find $_sp_subcommand_args --module-type dotkit $_sp_spec); then
use $_sp_module_args $_sp_full_spec use $_sp_module_args $_sp_full_spec
fi ;; fi ;;
"unuse") "unuse")
if _sp_full_spec=$(command spack $_sp_flags module find $_sp_subcommand_args dotkit $_sp_spec); then if _sp_full_spec=$(command spack $_sp_flags module find $_sp_subcommand_args --module-type dotkit $_sp_spec); then
unuse $_sp_module_args $_sp_full_spec unuse $_sp_module_args $_sp_full_spec
fi ;; fi ;;
"load") "load")
if _sp_full_spec=$(command spack $_sp_flags module find $_sp_subcommand_args tcl $_sp_spec); then if _sp_full_spec=$(command spack $_sp_flags module find $_sp_subcommand_args --module-type tcl $_sp_spec); then
module load $_sp_module_args $_sp_full_spec module load $_sp_module_args $_sp_full_spec
fi ;; fi ;;
"unload") "unload")
if _sp_full_spec=$(command spack $_sp_flags module find $_sp_subcommand_args tcl $_sp_spec); then if _sp_full_spec=$(command spack $_sp_flags module find $_sp_subcommand_args --module-type tcl $_sp_spec); then
module unload $_sp_module_args $_sp_full_spec module unload $_sp_module_args $_sp_full_spec
fi ;; fi ;;
esac esac

View file

@ -12,9 +12,9 @@ class Adios(Package):
""" """
homepage = "http://www.olcf.ornl.gov/center-projects/adios/" homepage = "http://www.olcf.ornl.gov/center-projects/adios/"
url = "http://users.nccs.gov/~pnorbert/adios-1.9.0.tar.gz" url = "https://github.com/ornladios/ADIOS/archive/v1.9.0.tar.gz"
version('1.9.0', 'dbf5cb10e32add2f04c9b4052b7ffa76') version('1.9.0', '310ff02388bbaa2b1c1710ee970b5678')
# Lots of setting up here for this package # Lots of setting up here for this package
# module swap PrgEnv-intel PrgEnv-$COMP # module swap PrgEnv-intel PrgEnv-$COMP

View file

@ -0,0 +1,42 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Cdo(Package):
"""CDO is a collection of command line Operators to manipulate and analyse
Climate and NWP model Data. """
homepage = "https://code.zmaw.de/projects/cdo"
url = "https://code.zmaw.de/attachments/download/10198/cdo-1.6.9.tar.gz"
version('1.6.9', 'bf0997bf20e812f35e10188a930e24e2')
depends_on('netcdf')
def install(self, spec, prefix):
configure('--prefix={0}'.format(prefix))
make()
make('install')

View file

@ -30,6 +30,7 @@ class Cmake(Package):
homepage = 'https://www.cmake.org' homepage = 'https://www.cmake.org'
url = 'https://cmake.org/files/v3.4/cmake-3.4.3.tar.gz' url = 'https://cmake.org/files/v3.4/cmake-3.4.3.tar.gz'
version('3.6.0', 'aa40fbecf49d99c083415c2411d12db9')
version('3.5.2', '701386a1b5ec95f8d1075ecf96383e02') version('3.5.2', '701386a1b5ec95f8d1075ecf96383e02')
version('3.5.1', 'ca051f4a66375c89d1a524e726da0296') version('3.5.1', 'ca051f4a66375c89d1a524e726da0296')
version('3.5.0', '33c5d09d4c33d4ffcc63578a6ba8777e') version('3.5.0', '33c5d09d4c33d4ffcc63578a6ba8777e')

View file

@ -35,6 +35,7 @@ class Flex(Package):
version('2.5.39', 'e133e9ead8ec0a58d81166b461244fde') version('2.5.39', 'e133e9ead8ec0a58d81166b461244fde')
depends_on("bison", type='build') depends_on("bison", type='build')
depends_on("m4", type='build')
def install(self, spec, prefix): def install(self, spec, prefix):
configure("--prefix=%s" % prefix) configure("--prefix=%s" % prefix)

View file

@ -23,7 +23,9 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################## ##############################################################################
from spack import * from spack import *
import os, sys import os
import sys
class Hypre(Package): class Hypre(Package):
"""Hypre is a library of high performance preconditioners that """Hypre is a library of high performance preconditioners that
@ -37,7 +39,7 @@ class Hypre(Package):
version('2.10.0b', '768be38793a35bb5d055905b271f5b8e') version('2.10.0b', '768be38793a35bb5d055905b271f5b8e')
# hypre does not know how to build shared libraries on Darwin # hypre does not know how to build shared libraries on Darwin
variant('shared', default=sys.platform!='darwin', description="Build shared library version (disables static library)") variant('shared', default=(sys.platform != 'darwin'), description="Build shared library version (disables static library)")
# SuperluDist have conflicting headers with those in Hypre # SuperluDist have conflicting headers with those in Hypre
variant('internal-superlu', default=True, description="Use internal Superlu routines") variant('internal-superlu', default=True, description="Use internal Superlu routines")
@ -46,21 +48,26 @@ class Hypre(Package):
depends_on("lapack") depends_on("lapack")
def install(self, spec, prefix): def install(self, spec, prefix):
blas_dir = spec['blas'].prefix os.environ['CC'] = spec['mpi'].mpicc
lapack_dir = spec['lapack'].prefix os.environ['CXX'] = spec['mpi'].mpicxx
mpi_dir = spec['mpi'].prefix os.environ['F77'] = spec['mpi'].mpif77
os.environ['CC'] = os.path.join(mpi_dir, 'bin', 'mpicc')
os.environ['CXX'] = os.path.join(mpi_dir, 'bin', 'mpicxx')
os.environ['F77'] = os.path.join(mpi_dir, 'bin', 'mpif77')
# Since +shared does not build on macOS and also Atlas does not have
# a single static lib to build against, link against shared libs with
# a hope that --whole-archive linker option (or alike) was used
# to command the linker to include whole static libs' content into the
# shared lib
# Note: --with-(lapack|blas)_libs= needs space separated list of names
configure_args = [ configure_args = [
"--prefix=%s" % prefix, '--prefix=%s' % prefix,
"--with-lapack-libs=lapack", '--with-lapack-libs=%s' % to_lib_name(
"--with-lapack-lib-dirs=%s/lib" % lapack_dir, spec['lapack'].lapack_shared_lib),
"--with-blas-libs=blas", '--with-lapack-lib-dirs=%s/lib' % spec['lapack'].prefix,
"--with-blas-lib-dirs=%s/lib" % blas_dir] '--with-blas-libs=%s' % to_lib_name(
spec['blas'].blas_shared_lib),
'--with-blas-lib-dirs=%s/lib' % spec['blas'].prefix
]
if '+shared' in self.spec: if '+shared' in self.spec:
configure_args.append("--enable-shared") configure_args.append("--enable-shared")
@ -76,4 +83,12 @@ def install(self, spec, prefix):
configure(*configure_args) configure(*configure_args)
make() make()
if self.run_tests:
make("check")
make("test")
Executable(join_path('test', 'ij'))()
sstruct = Executable(join_path('test', 'struct'))
sstruct()
sstruct('-in', 'test/sstruct.in.default', '-solver', '40',
'-rhsone')
make("install") make("install")

View file

@ -0,0 +1,25 @@
diff --git a/src/libjasper/jpc/jpc_dec.c b/src/libjasper/jpc/jpc_dec.c
index fa72a0e..1f4845f 100644
--- a/src/libjasper/jpc/jpc_dec.c
+++ b/src/libjasper/jpc/jpc_dec.c
@@ -1069,12 +1069,18 @@ static int jpc_dec_tiledecode(jpc_dec_t *dec, jpc_dec_tile_t *tile)
/* Apply an inverse intercomponent transform if necessary. */
switch (tile->cp->mctid) {
case JPC_MCT_RCT:
- assert(dec->numcomps == 3);
+ if (dec->numcomps != 3 && dec->numcomps != 4) {
+ jas_eprintf("bad number of components (%d)\n", dec->numcomps);
+ return -1;
+ }
jpc_irct(tile->tcomps[0].data, tile->tcomps[1].data,
tile->tcomps[2].data);
break;
case JPC_MCT_ICT:
- assert(dec->numcomps == 3);
+ if (dec->numcomps != 3 && dec->numcomps != 4) {
+ jas_eprintf("bad number of components (%d)\n", dec->numcomps);
+ return -1;
+ }
jpc_iict(tile->tcomps[0].data, tile->tcomps[1].data,
tile->tcomps[2].data);
break;

View file

@ -0,0 +1,63 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Jasper(Package):
    """JasPer: a codec library and utilities for manipulating
    JPEG-2000 (Part 1) images."""

    homepage = "https://www.ece.uvic.ca/~frodo/jasper/"
    url = "https://www.ece.uvic.ca/~frodo/jasper/software/jasper-1.900.1.zip"

    version('1.900.1', 'a342b2b4495b3e1394e161eb5d85d754')

    variant('shared', default=True,
            description='Builds shared versions of the libraries')
    variant('debug', default=False,
            description='Builds debug versions of the libraries')

    depends_on('libjpeg-turbo')

    # Upstream (still unfixed as of v1.900.1) trips an assertion when
    # decoding certain JPEG-2000 files that carry an alpha channel.
    # see: https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=469786
    patch('fix_alpha_channel_assert_fail.patch')

    def install(self, spec, prefix):
        # Collect configure arguments first, then run the usual
        # autotools sequence: configure / make / make install.
        args = [
            '--prefix={0}'.format(prefix),
            '--mandir={0}'.format(spec.prefix.man),
        ]

        if '+shared' in spec:
            args.append('--enable-shared')

        if '+debug' not in spec:
            args.append('--disable-debug')

        configure(*args)
        make()
        make('install')

View file

@ -9,7 +9,13 @@ class Mkl(IntelInstaller):
Note: You will have to add the download file to a Note: You will have to add the download file to a
mirror so that Spack can find it. For instructions on how to set up a mirror so that Spack can find it. For instructions on how to set up a
mirror, see http://software.llnl.gov/spack/mirrors.html""" mirror, see http://software.llnl.gov/spack/mirrors.html.
To set the threading layer at run time set MKL_THREADING_LAYER
variable to one of the following values: INTEL, SEQUENTIAL, PGI.
To set interface layer at run time, use set the MKL_INTERFACE_LAYER
variable to LP64 or ILP64.
"""
homepage = "https://software.intel.com/en-us/intel-mkl" homepage = "https://software.intel.com/en-us/intel-mkl"
@ -18,6 +24,11 @@ class Mkl(IntelInstaller):
version('11.3.3.210', 'f72546df27f5ebb0941b5d21fd804e34', version('11.3.3.210', 'f72546df27f5ebb0941b5d21fd804e34',
url="file://%s/l_mkl_11.3.3.210.tgz" % os.getcwd()) url="file://%s/l_mkl_11.3.3.210.tgz" % os.getcwd())
# virtual dependency
provides('blas')
provides('lapack')
# TODO: MKL also provides implementation of Scalapack.
def install(self, spec, prefix): def install(self, spec, prefix):
self.intel_prefix = os.path.join(prefix, "pkg") self.intel_prefix = os.path.join(prefix, "pkg")
@ -26,3 +37,28 @@ def install(self, spec, prefix):
mkl_dir = os.path.join(self.intel_prefix, "mkl") mkl_dir = os.path.join(self.intel_prefix, "mkl")
for f in os.listdir(mkl_dir): for f in os.listdir(mkl_dir):
os.symlink(os.path.join(mkl_dir, f), os.path.join(self.prefix, f)) os.symlink(os.path.join(mkl_dir, f), os.path.join(self.prefix, f))
def setup_dependent_package(self, module, dspec):
# For now use Single Dynamic Library:
# To set the threading layer at run time, use the
# mkl_set_threading_layer function or set MKL_THREADING_LAYER
# variable to one of the following values: INTEL, SEQUENTIAL, PGI.
# To set interface layer at run time, use the mkl_set_interface_layer
# function or set the MKL_INTERFACE_LAYER variable to LP64 or ILP64.
# Otherwise one would need to specify several libraries
# (e.g. mkl_intel_lp64;mkl_sequential;mkl_core), which reflect
# different interface and threading layers.
name = 'libmkl_rt.%s' % dso_suffix
libdir = find_library_path(name, self.prefix.lib64, self.prefix.lib)
self.spec.blas_shared_lib = join_path(libdir, name)
self.spec.lapack_shared_lib = self.spec.blas_shared_lib
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
# set up MKLROOT for everyone using MKL package
spack_env.set('MKLROOT', self.prefix)
def setup_environment(self, spack_env, env):
env.set('MKLROOT', self.prefix)

View file

@ -23,7 +23,10 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################## ##############################################################################
from spack import * from spack import *
import os, sys, glob import os
import sys
import glob
class Mumps(Package): class Mumps(Package):
"""MUMPS: a MUltifrontal Massively Parallel sparse direct Solver""" """MUMPS: a MUltifrontal Massively Parallel sparse direct Solver"""
@ -44,13 +47,11 @@ class Mumps(Package):
variant('idx64', default=False, description='Use int64_t/integer*8 as default index type') variant('idx64', default=False, description='Use int64_t/integer*8 as default index type')
variant('shared', default=True, description='Build shared libraries') variant('shared', default=True, description='Build shared libraries')
depends_on('scotch + esmumps', when='~ptscotch+scotch') depends_on('scotch + esmumps', when='~ptscotch+scotch')
depends_on('scotch + esmumps + mpi', when='+ptscotch') depends_on('scotch + esmumps + mpi', when='+ptscotch')
depends_on('metis@5:', when='+metis') depends_on('metis@5:', when='+metis')
depends_on('parmetis', when="+parmetis") depends_on('parmetis', when="+parmetis")
depends_on('blas') depends_on('blas')
depends_on('lapack')
depends_on('scalapack', when='+mpi') depends_on('scalapack', when='+mpi')
depends_on('mpi', when='+mpi') depends_on('mpi', when='+mpi')
@ -60,42 +61,52 @@ class Mumps(Package):
# end before install # end before install
# def patch(self): # def patch(self):
def write_makefile_inc(self): def write_makefile_inc(self):
if ('+parmetis' in self.spec or '+ptscotch' in self.spec) and '+mpi' not in self.spec: if ('+parmetis' in self.spec or '+ptscotch' in self.spec) and '+mpi' not in self.spec: # NOQA: ignore=E501
raise RuntimeError('You cannot use the variants parmetis or ptscotch without mpi') raise RuntimeError('You cannot use the variants parmetis or ptscotch without mpi') # NOQA: ignore=E501
makefile_conf = ["LIBBLAS = -L%s -lblas" % self.spec['blas'].prefix.lib] makefile_conf = ["LIBBLAS = %s" % to_link_flags(
self.spec['blas'].blas_shared_lib)
]
orderings = ['-Dpord'] orderings = ['-Dpord']
if '+ptscotch' in self.spec or '+scotch' in self.spec: if '+ptscotch' in self.spec or '+scotch' in self.spec:
join_lib = ' -l%s' % ('pt' if '+ptscotch' in self.spec else '') join_lib = ' -l%s' % ('pt' if '+ptscotch' in self.spec else '')
makefile_conf.extend( makefile_conf.extend([
["ISCOTCH = -I%s" % self.spec['scotch'].prefix.include, "ISCOTCH = -I%s" % self.spec['scotch'].prefix.include,
"LSCOTCH = -L%s %s%s" % (self.spec['scotch'].prefix.lib, "LSCOTCH = -L%s %s%s" % (self.spec['scotch'].prefix.lib,
join_lib, join_lib,
join_lib.join(['esmumps', 'scotch', 'scotcherr']))]) join_lib.join(['esmumps',
'scotch',
'scotcherr']))
])
orderings.append('-Dscotch') orderings.append('-Dscotch')
if '+ptscotch' in self.spec: if '+ptscotch' in self.spec:
orderings.append('-Dptscotch') orderings.append('-Dptscotch')
if '+parmetis' in self.spec and '+metis' in self.spec: if '+parmetis' in self.spec and '+metis' in self.spec:
libname = 'parmetis' if '+parmetis' in self.spec else 'metis' makefile_conf.extend([
makefile_conf.extend( "IMETIS = -I%s" % self.spec['parmetis'].prefix.include,
["IMETIS = -I%s" % self.spec['parmetis'].prefix.include, "LMETIS = -L%s -l%s -L%s -l%s" % (
"LMETIS = -L%s -l%s -L%s -l%s" % (self.spec['parmetis'].prefix.lib, 'parmetis',self.spec['metis'].prefix.lib, 'metis')]) self.spec['parmetis'].prefix.lib, 'parmetis',
self.spec['metis'].prefix.lib, 'metis')
])
orderings.append('-Dparmetis') orderings.append('-Dparmetis')
elif '+metis' in self.spec: elif '+metis' in self.spec:
makefile_conf.extend( makefile_conf.extend([
["IMETIS = -I%s" % self.spec['metis'].prefix.include, "IMETIS = -I%s" % self.spec['metis'].prefix.include,
"LMETIS = -L%s -l%s" % (self.spec['metis'].prefix.lib, 'metis')]) "LMETIS = -L%s -l%s" % (self.spec['metis'].prefix.lib, 'metis')
])
orderings.append('-Dmetis') orderings.append('-Dmetis')
makefile_conf.append("ORDERINGSF = %s" % (' '.join(orderings))) makefile_conf.append("ORDERINGSF = %s" % (' '.join(orderings)))
# when building shared libs need -fPIC, otherwise # when building shared libs need -fPIC, otherwise
# /usr/bin/ld: graph.o: relocation R_X86_64_32 against `.rodata.str1.1' can not be used when making a shared object; recompile with -fPIC # /usr/bin/ld: graph.o: relocation R_X86_64_32 against `.rodata.str1.1'
# can not be used when making a shared object; recompile with -fPIC
fpic = '-fPIC' if '+shared' in self.spec else '' fpic = '-fPIC' if '+shared' in self.spec else ''
# TODO: test this part, it needs a full blas, scalapack and # TODO: test this part, it needs a full blas, scalapack and
# partitionning environment with 64bit integers # partitionning environment with 64bit integers
@ -104,7 +115,7 @@ def write_makefile_inc(self):
# the fortran compilation flags most probably are # the fortran compilation flags most probably are
# working only for intel and gnu compilers this is # working only for intel and gnu compilers this is
# perhaps something the compiler should provide # perhaps something the compiler should provide
['OPTF = %s -O -DALLOW_NON_INIT %s' % (fpic,'-fdefault-integer-8' if self.compiler.name == "gcc" else '-i8'), ['OPTF = %s -O -DALLOW_NON_INIT %s' % (fpic, '-fdefault-integer-8' if self.compiler.name == "gcc" else '-i8'), # NOQA: ignore=E501
'OPTL = %s -O ' % fpic, 'OPTL = %s -O ' % fpic,
'OPTC = %s -O -DINTSIZE64' % fpic]) 'OPTC = %s -O -DINTSIZE64' % fpic])
else: else:
@ -113,7 +124,6 @@ def write_makefile_inc(self):
'OPTL = %s -O ' % fpic, 'OPTL = %s -O ' % fpic,
'OPTC = %s -O ' % fpic]) 'OPTC = %s -O ' % fpic])
if '+mpi' in self.spec: if '+mpi' in self.spec:
makefile_conf.extend( makefile_conf.extend(
["CC = %s" % join_path(self.spec['mpi'].prefix.bin, 'mpicc'), ["CC = %s" % join_path(self.spec['mpi'].prefix.bin, 'mpicc'),
@ -134,16 +144,17 @@ def write_makefile_inc(self):
if '+shared' in self.spec: if '+shared' in self.spec:
if sys.platform == 'darwin': if sys.platform == 'darwin':
# Building dylibs with mpif90 causes segfaults on 10.8 and 10.10. Use gfortran. (Homebrew) # Building dylibs with mpif90 causes segfaults on 10.8 and
# 10.10. Use gfortran. (Homebrew)
makefile_conf.extend([ makefile_conf.extend([
'LIBEXT=.dylib', 'LIBEXT=.dylib',
'AR=%s -dynamiclib -Wl,-install_name -Wl,%s/$(notdir $@) -undefined dynamic_lookup -o ' % (os.environ['FC'],prefix.lib), 'AR=%s -dynamiclib -Wl,-install_name -Wl,%s/$(notdir $@) -undefined dynamic_lookup -o ' % (os.environ['FC'], prefix.lib), # NOQA: ignore=E501
'RANLIB=echo' 'RANLIB=echo'
]) ])
else: else:
makefile_conf.extend([ makefile_conf.extend([
'LIBEXT=.so', 'LIBEXT=.so',
'AR=$(FL) -shared -Wl,-soname -Wl,%s/$(notdir $@) -o' % prefix.lib, 'AR=$(FL) -shared -Wl,-soname -Wl,%s/$(notdir $@) -o' % prefix.lib, # NOQA: ignore=E501
'RANLIB=echo' 'RANLIB=echo'
]) ])
else: else:
@ -153,9 +164,8 @@ def write_makefile_inc(self):
'RANLIB = ranlib' 'RANLIB = ranlib'
]) ])
makefile_inc_template = join_path(
makefile_inc_template = join_path(os.path.dirname(self.module.__file__), os.path.dirname(self.module.__file__), 'Makefile.inc')
'Makefile.inc')
with open(makefile_inc_template, "r") as fh: with open(makefile_inc_template, "r") as fh:
makefile_conf.extend(fh.read().split('\n')) makefile_conf.extend(fh.read().split('\n'))
@ -164,8 +174,6 @@ def write_makefile_inc(self):
makefile_inc = '\n'.join(makefile_conf) makefile_inc = '\n'.join(makefile_conf)
fh.write(makefile_inc) fh.write(makefile_inc)
def install(self, spec, prefix): def install(self, spec, prefix):
make_libs = [] make_libs = []
@ -196,8 +204,8 @@ def install(self, spec, prefix):
for f in glob.glob(join_path('libseq', '*.h')): for f in glob.glob(join_path('libseq', '*.h')):
install(f, prefix.include) install(f, prefix.include)
# FIXME: extend the tests to mpirun -np 2 (or alike) when build with MPI # FIXME: extend the tests to mpirun -np 2 when build with MPI
# FIXME: use something like numdiff to compare blessed output with the current # FIXME: use something like numdiff to compare output files
with working_dir('examples'): with working_dir('examples'):
if '+float' in spec: if '+float' in spec:
os.system('./ssimpletest < input_simpletest_real') os.system('./ssimpletest < input_simpletest_real')

View file

@ -33,6 +33,7 @@ class Netcdf(Package):
homepage = "http://www.unidata.ucar.edu/software/netcdf" homepage = "http://www.unidata.ucar.edu/software/netcdf"
url = "ftp://ftp.unidata.ucar.edu/pub/netcdf/netcdf-4.3.3.tar.gz" url = "ftp://ftp.unidata.ucar.edu/pub/netcdf/netcdf-4.3.3.tar.gz"
version('4.4.1', '7843e35b661c99e1d49e60791d5072d8')
version('4.4.0', 'cffda0cbd97fdb3a06e9274f7aef438e') version('4.4.0', 'cffda0cbd97fdb3a06e9274f7aef438e')
version('4.3.3', '5fbd0e108a54bd82cb5702a73f56d2ae') version('4.3.3', '5fbd0e108a54bd82cb5702a73f56d2ae')
@ -47,8 +48,10 @@ class Netcdf(Package):
# Required for NetCDF-4 support # Required for NetCDF-4 support
depends_on("zlib") depends_on("zlib")
depends_on("hdf5+mpi", when='+mpi') depends_on('hdf5@:1.8+mpi', when='@:4.4.0+mpi')
depends_on("hdf5~mpi", when='~mpi') depends_on('hdf5+mpi', when='@4.4.1:+mpi')
depends_on('hdf5@:1.8~mpi', when='@:4.4.0~mpi')
depends_on('hdf5~mpi', when='@4.4.1:~mpi')
def install(self, spec, prefix): def install(self, spec, prefix):
# Environment variables # Environment variables

View file

@ -25,8 +25,10 @@
from spack import * from spack import *
import sys import sys
class NetlibScalapack(Package): class NetlibScalapack(Package):
"""ScaLAPACK is a library of high-performance linear algebra routines for parallel distributed memory machines""" """ScaLAPACK is a library of high-performance linear algebra routines for
parallel distributed memory machines"""
homepage = "http://www.netlib.org/scalapack/" homepage = "http://www.netlib.org/scalapack/"
url = "http://www.netlib.org/scalapack/scalapack-2.0.2.tgz" url = "http://www.netlib.org/scalapack/scalapack-2.0.2.tgz"
@ -48,11 +50,23 @@ class NetlibScalapack(Package):
def install(self, spec, prefix): def install(self, spec, prefix):
options = [ options = [
"-DBUILD_SHARED_LIBS:BOOL=%s" % ('ON' if '+shared' in spec else 'OFF'), "-DBUILD_SHARED_LIBS:BOOL=%s" % ('ON' if '+shared' in spec else
"-DBUILD_STATIC_LIBS:BOOL=%s" % ('OFF' if '+shared' in spec else 'ON'), 'OFF'),
"-DUSE_OPTIMIZED_LAPACK_BLAS:BOOL=ON", # forces scalapack to use find_package(LAPACK) "-DBUILD_STATIC_LIBS:BOOL=%s" % ('OFF' if '+shared' in spec else
'ON'),
# forces scalapack to use find_package(LAPACK):
"-DUSE_OPTIMIZED_LAPACK_BLAS:BOOL=ON",
] ]
# Make sure we use Spack's Lapack:
options.extend([
'-DLAPACK_FOUND=true',
'-DLAPACK_INCLUDE_DIRS=%s' % spec['lapack'].prefix.include,
'-DLAPACK_LIBRARIES=%s' % (
spec['lapack'].lapack_shared_lib if '+shared' in spec else
spec['lapack'].lapack_static_lib),
])
if '+fpic' in spec: if '+fpic' in spec:
options.extend([ options.extend([
"-DCMAKE_C_FLAGS=-fPIC", "-DCMAKE_C_FLAGS=-fPIC",
@ -66,16 +80,15 @@ def install(self, spec, prefix):
make() make()
make("install") make("install")
# The shared libraries are not installed correctly on Darwin; correct this # The shared libraries are not installed correctly on Darwin:
if (sys.platform == 'darwin') and ('+shared' in spec): if (sys.platform == 'darwin') and ('+shared' in spec):
fix_darwin_install_name(prefix.lib) fix_darwin_install_name(prefix.lib)
def setup_dependent_package(self, module, dependent_spec): def setup_dependent_package(self, module, dependent_spec):
spec = self.spec spec = self.spec
lib_dsuffix = '.dylib' if sys.platform == 'darwin' else '.so' lib_suffix = dso_suffix if '+shared' in spec else 'a'
lib_suffix = lib_dsuffix if '+shared' in spec else '.a'
spec.fc_link = '-L%s -lscalapack' % spec.prefix.lib spec.fc_link = '-L%s -lscalapack' % spec.prefix.lib
spec.cc_link = spec.fc_link spec.cc_link = spec.fc_link
spec.libraries = [join_path(spec.prefix.lib, 'libscalapack%s' % lib_suffix)] spec.libraries = [join_path(spec.prefix.lib,
'libscalapack.%s' % lib_suffix)]

View file

@ -46,6 +46,7 @@ class PyH5py(Package):
depends_on('hdf5@1.8.4:') depends_on('hdf5@1.8.4:')
depends_on('hdf5+mpi', when='+mpi') depends_on('hdf5+mpi', when='+mpi')
depends_on('mpi', when='+mpi') depends_on('mpi', when='+mpi')
depends_on('py-mpi4py', when='+mpi')
# Build and runtime dependencies # Build and runtime dependencies
depends_on('py-numpy@1.6.1:', type=nolink) depends_on('py-numpy@1.6.1:', type=nolink)

View file

@ -44,6 +44,7 @@ class PyNumpy(Package):
extends('python') extends('python')
depends_on('py-nose', type='build') depends_on('py-nose', type='build')
depends_on('py-setuptools', type='build')
depends_on('blas', when='+blas') depends_on('blas', when='+blas')
depends_on('lapack', when='+lapack') depends_on('lapack', when='+lapack')

View file

@ -53,6 +53,7 @@ class Python(Package):
extendable = True extendable = True
variant('tk', default=False, description='Provide support for Tkinter')
variant('ucs4', default=False, description='Enable UCS4 (wide) unicode strings') variant('ucs4', default=False, description='Enable UCS4 (wide) unicode strings')
# From https://docs.python.org/2/c-api/unicode.html: Python's default # From https://docs.python.org/2/c-api/unicode.html: Python's default
# builds use a 16-bit type for Py_UNICODE and store Unicode values # builds use a 16-bit type for Py_UNICODE and store Unicode values
@ -68,6 +69,8 @@ class Python(Package):
depends_on("ncurses") depends_on("ncurses")
depends_on("sqlite") depends_on("sqlite")
depends_on("zlib") depends_on("zlib")
depends_on("tk", when="+tk")
depends_on("tcl", when="+tk")
def install(self, spec, prefix): def install(self, spec, prefix):
# Need this to allow python build to find the Python installation. # Need this to allow python build to find the Python installation.
@ -77,24 +80,32 @@ def install(self, spec, prefix):
# Rest of install is pretty standard except setup.py needs to # Rest of install is pretty standard except setup.py needs to
# be able to read the CPPFLAGS and LDFLAGS as it scans for the # be able to read the CPPFLAGS and LDFLAGS as it scans for the
# library and headers to build # library and headers to build
cppflags = ' -I'.join([ include_dirs = [
spec['openssl'].prefix.include, spec['bzip2'].prefix.include, spec['openssl'].prefix.include, spec['bzip2'].prefix.include,
spec['readline'].prefix.include, spec['ncurses'].prefix.include, spec['readline'].prefix.include, spec['ncurses'].prefix.include,
spec['sqlite'].prefix.include, spec['zlib'].prefix.include spec['sqlite'].prefix.include, spec['zlib'].prefix.include
]) ]
ldflags = ' -L'.join([ library_dirs = [
spec['openssl'].prefix.lib, spec['bzip2'].prefix.lib, spec['openssl'].prefix.lib, spec['bzip2'].prefix.lib,
spec['readline'].prefix.lib, spec['ncurses'].prefix.lib, spec['readline'].prefix.lib, spec['ncurses'].prefix.lib,
spec['sqlite'].prefix.lib, spec['zlib'].prefix.lib spec['sqlite'].prefix.lib, spec['zlib'].prefix.lib
]
if '+tk' in spec:
include_dirs.extend([
spec['tk'].prefix.include, spec['tcl'].prefix.include
])
library_dirs.extend([
spec['tk'].prefix.lib, spec['tcl'].prefix.lib
]) ])
config_args = [ config_args = [
"--prefix={0}".format(prefix), "--prefix={0}".format(prefix),
"--with-threads", "--with-threads",
"--enable-shared", "--enable-shared",
"CPPFLAGS=-I{0}".format(cppflags), "CPPFLAGS=-I{0}".format(" -I".join(include_dirs)),
"LDFLAGS=-L{0}".format(ldflags) "LDFLAGS=-L{0}".format(" -L".join(library_dirs))
] ]
if '+ucs4' in spec: if '+ucs4' in spec:
@ -116,6 +127,25 @@ def install(self, spec, prefix):
self.filter_compilers(spec, prefix) self.filter_compilers(spec, prefix)
# TODO: Once better testing support is integrated, add the following tests
# https://wiki.python.org/moin/TkInter
#
# if '+tk' in spec:
# env['TK_LIBRARY'] = join_path(spec['tk'].prefix.lib,
# 'tk{0}'.format(spec['tk'].version.up_to(2)))
# env['TCL_LIBRARY'] = join_path(spec['tcl'].prefix.lib,
# 'tcl{0}'.format(spec['tcl'].version.up_to(2)))
#
# $ python
# >>> import _tkinter
#
# if spec.satisfies('@3:')
# >>> import tkinter
# >>> tkinter._test()
# else:
# >>> import Tkinter
# >>> Tkinter._test()
def filter_compilers(self, spec, prefix): def filter_compilers(self, spec, prefix):
"""Run after install to tell the configuration files and Makefiles """Run after install to tell the configuration files and Makefiles
to use the compilers that Spack built the package with. to use the compilers that Spack built the package with.

View file

@ -32,21 +32,20 @@ class SuiteSparse(Package):
homepage = 'http://faculty.cse.tamu.edu/davis/suitesparse.html' homepage = 'http://faculty.cse.tamu.edu/davis/suitesparse.html'
url = 'http://faculty.cse.tamu.edu/davis/SuiteSparse/SuiteSparse-4.5.1.tar.gz' url = 'http://faculty.cse.tamu.edu/davis/SuiteSparse/SuiteSparse-4.5.1.tar.gz'
version('4.5.1', 'f0ea9aad8d2d1ffec66a5b6bfeff5319')
version('4.5.3', '8ec57324585df3c6483ad7f556afccbd') version('4.5.3', '8ec57324585df3c6483ad7f556afccbd')
version('4.5.1', 'f0ea9aad8d2d1ffec66a5b6bfeff5319')
# FIXME: (see below) variant('tbb', default=True, description='Build with Intel TBB')
# variant('tbb', default=True, description='Build with Intel TBB')
depends_on('blas') depends_on('blas')
depends_on('lapack') depends_on('lapack')
depends_on('metis@5.1.0', when='@4.5.1:') depends_on('metis@5.1.0', when='@4.5.1:')
# FIXME:
# in @4.5.1. TBB support in SPQR seems to be broken as TBB-related linkng # in @4.5.1. TBB support in SPQR seems to be broken as TBB-related linkng
# flags does not seem to be used, which leads to linking errors on Linux. # flags does not seem to be used, which leads to linking errors on Linux.
# Try re-enabling in future versions. depends_on('tbb', when='@4.5.3:+tbb')
# depends_on('tbb', when='+tbb')
patch('tbb_453.patch', when='@4.5.3')
def install(self, spec, prefix): def install(self, spec, prefix):
# The build system of SuiteSparse is quite old-fashioned. # The build system of SuiteSparse is quite old-fashioned.
@ -73,20 +72,24 @@ def install(self, spec, prefix):
]) ])
# Intel TBB in SuiteSparseQR # Intel TBB in SuiteSparseQR
if '+tbb' in spec: if 'tbb' in spec:
make_args.extend([ make_args.extend([
'SPQR_CONFIG=-DHAVE_TBB', 'SPQR_CONFIG=-DHAVE_TBB',
'TBB=-L%s -ltbb' % spec['tbb'].prefix.lib, 'TBB=-L%s -ltbb' % spec['tbb'].prefix.lib,
]) ])
# BLAS arguments require path to libraries # Make sure Spack's Blas/Lapack is used. Otherwise System's
# FIXME: (blas/lapack always provide libblas and liblapack as aliases) # Blas/Lapack might be picked up.
blas = to_link_flags(spec['blas'].blas_shared_lib)
lapack = to_link_flags(spec['lapack'].lapack_shared_lib)
if '@4.5.1' in spec: if '@4.5.1' in spec:
# adding -lstdc++ is clearly an ugly way to do this, but it follows # adding -lstdc++ is clearly an ugly way to do this, but it follows
# with the TCOV path of SparseSuite 4.5.1's Suitesparse_config.mk # with the TCOV path of SparseSuite 4.5.1's Suitesparse_config.mk
blas += ' -lstdc++'
make_args.extend([ make_args.extend([
'BLAS=-lblas -lstdc++', 'BLAS=%s' % blas,
'LAPACK=-llapack' 'LAPACK=%s' % lapack
]) ])
make('install', *make_args) make('install', *make_args)

View file

@ -0,0 +1,13 @@
diff --git a/SPQR/Lib/Makefile b/SPQR/Lib/Makefile
index eaade58..d0de852 100644
--- a/SPQR/Lib/Makefile
+++ b/SPQR/Lib/Makefile
@@ -13,7 +13,7 @@ ccode: all
include ../../SuiteSparse_config/SuiteSparse_config.mk
# SPQR depends on CHOLMOD, AMD, COLAMD, LAPACK, the BLAS and SuiteSparse_config
-LDLIBS += -lamd -lcolamd -lcholmod -lsuitesparseconfig $(LAPACK) $(BLAS)
+LDLIBS += -lamd -lcolamd -lcholmod -lsuitesparseconfig $(TBB) $(LAPACK) $(BLAS)
# compile and install in SuiteSparse/lib
library:

View file

@ -24,6 +24,7 @@
############################################################################## ##############################################################################
from spack import * from spack import *
class Tcl(Package): class Tcl(Package):
"""Tcl (Tool Command Language) is a very powerful but easy to """Tcl (Tool Command Language) is a very powerful but easy to
learn dynamic programming language, suitable for a very wide learn dynamic programming language, suitable for a very wide
@ -34,9 +35,6 @@ class Tcl(Package):
extensible.""" extensible."""
homepage = "http://www.tcl.tk" homepage = "http://www.tcl.tk"
def url_for_version(self, version):
return 'http://prdownloads.sourceforge.net/tcl/tcl%s-src.tar.gz' % version
version('8.6.5', '0e6426a4ca9401825fbc6ecf3d89a326') version('8.6.5', '0e6426a4ca9401825fbc6ecf3d89a326')
version('8.6.4', 'd7cbb91f1ded1919370a30edd1534304') version('8.6.4', 'd7cbb91f1ded1919370a30edd1534304')
version('8.6.3', 'db382feca91754b7f93da16dc4cdad1f') version('8.6.3', 'db382feca91754b7f93da16dc4cdad1f')
@ -44,8 +42,18 @@ def url_for_version(self, version):
depends_on('zlib') depends_on('zlib')
def url_for_version(self, version):
base_url = 'http://prdownloads.sourceforge.net/tcl'
return '{0}/tcl{1}-src.tar.gz'.format(base_url, version)
def setup_environment(self, spack_env, env):
# When using Tkinter from within spack provided python+tk, python
# will not be able to find Tcl/Tk unless TCL_LIBRARY is set.
env.set('TCL_LIBRARY', join_path(self.prefix.lib, 'tcl{0}'.format(
self.spec.version.up_to(2))))
def install(self, spec, prefix): def install(self, spec, prefix):
with working_dir('unix'): with working_dir('unix'):
configure("--prefix=%s" % prefix) configure("--prefix={0}".format(prefix))
make() make()
make("install") make("install")

View file

@ -24,11 +24,13 @@
############################################################################## ##############################################################################
from spack import * from spack import *
class TheSilverSearcher(Package): class TheSilverSearcher(Package):
"""Fast recursive grep alternative""" """Fast recursive grep alternative"""
homepage = "http://geoff.greer.fm/ag/" homepage = "http://geoff.greer.fm/ag/"
url = "http://geoff.greer.fm/ag/releases/the_silver_searcher-0.30.0.tar.gz" url = "http://geoff.greer.fm/ag/releases/the_silver_searcher-0.32.0.tar.gz"
version('0.32.0', '3fdfd5836924246073d5344257a06823')
version('0.30.0', '95e2e7859fab1156c835aff7413481db') version('0.30.0', '95e2e7859fab1156c835aff7413481db')
depends_on('pcre') depends_on('pcre')

View file

@ -24,6 +24,7 @@
############################################################################## ##############################################################################
from spack import * from spack import *
class Tk(Package): class Tk(Package):
"""Tk is a graphical user interface toolkit that takes developing """Tk is a graphical user interface toolkit that takes developing
desktop applications to a higher level than conventional desktop applications to a higher level than conventional
@ -33,16 +34,24 @@ class Tk(Package):
and more.""" and more."""
homepage = "http://www.tcl.tk" homepage = "http://www.tcl.tk"
def url_for_version(self, version): version('8.6.5', '11dbbd425c3e0201f20d6a51482ce6c4')
return "http://prdownloads.sourceforge.net/tcl/tk%s-src.tar.gz" % version
version('8.6.3', '85ca4dbf4dcc19777fd456f6ee5d0221') version('8.6.3', '85ca4dbf4dcc19777fd456f6ee5d0221')
depends_on("tcl") depends_on("tcl")
def url_for_version(self, version):
base_url = "http://prdownloads.sourceforge.net/tcl"
return "{0}/tk{1}-src.tar.gz".format(base_url, version)
def setup_environment(self, spack_env, env):
# When using Tkinter from within spack provided python+tk, python
# will not be able to find Tcl/Tk unless TK_LIBRARY is set.
env.set('TK_LIBRARY', join_path(self.prefix.lib, 'tk{0}'.format(
self.spec.version.up_to(2))))
def install(self, spec, prefix): def install(self, spec, prefix):
with working_dir('unix'): with working_dir('unix'):
configure("--prefix=%s" % prefix, configure("--prefix={0}".format(prefix),
"--with-tcl=%s" % spec['tcl'].prefix.lib) "--with-tcl={0}".format(spec['tcl'].prefix.lib))
make() make()
make("install") make("install")

View file

@ -118,6 +118,7 @@ def install(self, spec, prefix):
options.extend(std_cmake_args) options.extend(std_cmake_args)
mpi_bin = spec['mpi'].prefix.bin mpi_bin = spec['mpi'].prefix.bin
# Note: -DXYZ_LIBRARY_NAMES= needs semicolon separated list of names
options.extend([ options.extend([
'-DTrilinos_ENABLE_ALL_PACKAGES:BOOL=ON', '-DTrilinos_ENABLE_ALL_PACKAGES:BOOL=ON',
'-DTrilinos_ENABLE_ALL_OPTIONAL_PACKAGES:BOOL=ON', '-DTrilinos_ENABLE_ALL_OPTIONAL_PACKAGES:BOOL=ON',
@ -131,10 +132,12 @@ def install(self, spec, prefix):
'-DTPL_ENABLE_MPI:BOOL=ON', '-DTPL_ENABLE_MPI:BOOL=ON',
'-DMPI_BASE_DIR:PATH=%s' % spec['mpi'].prefix, '-DMPI_BASE_DIR:PATH=%s' % spec['mpi'].prefix,
'-DTPL_ENABLE_BLAS=ON', '-DTPL_ENABLE_BLAS=ON',
'-DBLAS_LIBRARY_NAMES=blas', # FIXME: don't hardcode names '-DBLAS_LIBRARY_NAMES=%s' % to_lib_name(
spec['blas'].blas_shared_lib),
'-DBLAS_LIBRARY_DIRS=%s' % spec['blas'].prefix.lib, '-DBLAS_LIBRARY_DIRS=%s' % spec['blas'].prefix.lib,
'-DTPL_ENABLE_LAPACK=ON', '-DTPL_ENABLE_LAPACK=ON',
'-DLAPACK_LIBRARY_NAMES=lapack', '-DLAPACK_LIBRARY_NAMES=%s' % to_lib_name(
spec['lapack'].lapack_shared_lib),
'-DLAPACK_LIBRARY_DIRS=%s' % spec['lapack'].prefix, '-DLAPACK_LIBRARY_DIRS=%s' % spec['lapack'].prefix,
'-DTrilinos_ENABLE_EXPLICIT_INSTANTIATION:BOOL=ON', '-DTrilinos_ENABLE_EXPLICIT_INSTANTIATION:BOOL=ON',
'-DTrilinos_ENABLE_CXX11:BOOL=ON', '-DTrilinos_ENABLE_CXX11:BOOL=ON',