diff --git a/.gitignore b/.gitignore
index 040df3eafd..960b5b0035 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,11 +1,12 @@
 /var/spack/stage
 /var/spack/cache
 *.pyc
-/opt/
+/opt
 *~
 .DS_Store
 .idea
-/etc/spack/*
+/etc/spack/licenses
+/etc/spack/*.yaml
 /etc/spackconfig
 /share/spack/dotkit
 /share/spack/modules
diff --git a/etc/spack/modules.yaml b/etc/spack/defaults/modules.yaml
similarity index 53%
rename from etc/spack/modules.yaml
rename to etc/spack/defaults/modules.yaml
index 9ae54a2d09..8864a76547 100644
--- a/etc/spack/modules.yaml
+++ b/etc/spack/defaults/modules.yaml
@@ -1,8 +1,17 @@
 # -------------------------------------------------------------------------
-# This is the default spack module files generation configuration.
+# This is the default configuration for Spack's module file generation.
 #
-# Changes to this file will affect all users of this spack install,
-# although users can override these settings in their ~/.spack/modules.yaml.
+# Settings here are versioned with Spack and are intended to provide
+# sensible defaults out of the box. Spack maintainers should edit this
+# file to keep it current.
+#
+# Users can override these settings by editing the following files.
+#
+# Per-spack-instance settings (overrides defaults):
+#   $SPACK_ROOT/etc/spack/modules.yaml
+#
+# Per-user settings (overrides default and site settings):
+#   ~/.spack/modules.yaml
 # -------------------------------------------------------------------------
 modules:
   enable:
diff --git a/etc/spack/defaults/packages.yaml b/etc/spack/defaults/packages.yaml
new file mode 100644
index 0000000000..83f9eb7ece
--- /dev/null
+++ b/etc/spack/defaults/packages.yaml
@@ -0,0 +1,21 @@
+# -------------------------------------------------------------------------
+# This file controls default concretization preferences for Spack.
+#
+# Settings here are versioned with Spack and are intended to provide
+# sensible defaults out of the box. Spack maintainers should edit this
+# file to keep it current.
+#
+# Users can override these settings by editing the following files.
+#
+# Per-spack-instance settings (overrides defaults):
+#   $SPACK_ROOT/etc/spack/packages.yaml
+#
+# Per-user settings (overrides default and site settings):
+#   ~/.spack/packages.yaml
+# -------------------------------------------------------------------------
+packages:
+  all:
+    providers:
+      mpi: [openmpi, mpich]
+      blas: [openblas]
+      lapack: [openblas]
diff --git a/etc/spack/defaults/repos.yaml b/etc/spack/defaults/repos.yaml
new file mode 100644
index 0000000000..f3e00653eb
--- /dev/null
+++ b/etc/spack/defaults/repos.yaml
@@ -0,0 +1,14 @@
+# -------------------------------------------------------------------------
+# This is the default spack repository configuration. It includes the
+# builtin spack package repository.
+#
+# Users can override these settings by editing the following files.
+#
+# Per-spack-instance settings (overrides defaults):
+#   $SPACK_ROOT/etc/spack/repos.yaml
+#
+# Per-user settings (overrides default and site settings):
+#   ~/.spack/repos.yaml
+# -------------------------------------------------------------------------
+repos:
+  - $spack/var/spack/repos/builtin
diff --git a/etc/spack/repos.yaml b/etc/spack/repos.yaml
deleted file mode 100644
index 2d4ff54ce6..0000000000
--- a/etc/spack/repos.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
-# -------------------------------------------------------------------------
-# This is the default spack repository configuration.
-# -# Changes to this file will affect all users of this spack install, -# although users can override these settings in their ~/.spack/repos.yaml. -# ------------------------------------------------------------------------- -repos: - - $spack/var/spack/repos/builtin diff --git a/lib/spack/env/cc b/lib/spack/env/cc index bf98b4c354..c6bb50d261 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -110,13 +110,13 @@ case "$command" in comp="CXX" lang_flags=CXX ;; - f90|fc|f95|gfortran|ifort|pgfortran|xlf90|nagfor) + ftn|f90|fc|f95|gfortran|ifort|pgfortran|xlf90|nagfor) command="$SPACK_FC" language="Fortran 90" comp="FC" lang_flags=F ;; - f77|gfortran|ifort|pgfortran|xlf|nagfor) + f77|gfortran|ifort|pgfortran|xlf|nagfor|ftn) command="$SPACK_F77" language="Fortran 77" comp="F77" diff --git a/lib/spack/env/craype/CC b/lib/spack/env/craype/CC new file mode 120000 index 0000000000..82c2b8e90a --- /dev/null +++ b/lib/spack/env/craype/CC @@ -0,0 +1 @@ +../cc \ No newline at end of file diff --git a/lib/spack/env/craype/cc b/lib/spack/env/craype/cc new file mode 120000 index 0000000000..82c2b8e90a --- /dev/null +++ b/lib/spack/env/craype/cc @@ -0,0 +1 @@ +../cc \ No newline at end of file diff --git a/lib/spack/env/craype/ftn b/lib/spack/env/craype/ftn new file mode 120000 index 0000000000..82c2b8e90a --- /dev/null +++ b/lib/spack/env/craype/ftn @@ -0,0 +1 @@ +../cc \ No newline at end of file diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index e800c6717a..6e4cd338fe 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -42,7 +42,7 @@ 'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink', 'set_executable', 'copy_mode', 'unset_executable_mode', 'remove_dead_links', 'remove_linked_tree', 'find_library_path', - 'fix_darwin_install_name', 'to_link_flags'] + 'fix_darwin_install_name', 'to_link_flags', 'to_lib_name'] def filter_file(regex, repl, *filenames, **kwargs): @@ -431,6 +431,13 @@ def fix_darwin_install_name(path): break +def to_lib_name(library): + """Transforms a path to the library /path/to/lib.xyz into + """ + # Assume libXYZ.suffix + return os.path.basename(library)[3:].split(".")[0] + + def to_link_flags(library): """Transforms a path to a into linking flags -L -l. @@ -438,8 +445,7 @@ def to_link_flags(library): A string of linking flags. """ dir = os.path.dirname(library) - # Assume libXYZ.suffix - name = os.path.basename(library)[3:].split(".")[0] + name = to_lib_name(library) res = '-L%s -l%s' % (dir, name) return res diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py index 672999159c..230115df50 100644 --- a/lib/spack/spack/cmd/__init__.py +++ b/lib/spack/spack/cmd/__init__.py @@ -27,16 +27,18 @@ import sys import llnl.util.tty as tty -from llnl.util.lang import attr_setdefault - import spack -import spack.spec import spack.config +import spack.spec +from llnl.util.lang import * +from llnl.util.tty.colify import * +from llnl.util.tty.color import * # # Settings for commands that modify configuration # -# Commands that modify confguration By default modify the *highest* priority scope. +# Commands that modify confguration By default modify the *highest* +# priority scope. default_modify_scope = spack.config.highest_precedence_scope().name # Commands that list confguration list *all* scopes by default. 
default_list_scope = None @@ -48,7 +50,7 @@ ignore_files = r'^\.|^__init__.py$|^#' SETUP_PARSER = "setup_parser" -DESCRIPTION = "description" +DESCRIPTION = "description" command_path = os.path.join(spack.lib_path, "spack", "cmd") @@ -71,7 +73,7 @@ def get_module(name): module_name, fromlist=[name, SETUP_PARSER, DESCRIPTION], level=0) - attr_setdefault(module, SETUP_PARSER, lambda *args: None) # null-op + attr_setdefault(module, SETUP_PARSER, lambda *args: None) # null-op attr_setdefault(module, DESCRIPTION, "") fn_name = get_cmd_function_name(name) @@ -101,17 +103,17 @@ def parse_specs(args, **kwargs): specs = spack.spec.parse(args) for spec in specs: if concretize: - spec.concretize() # implies normalize + spec.concretize() # implies normalize elif normalize: spec.normalize() return specs - except spack.parse.ParseError, e: + except spack.parse.ParseError as e: tty.error(e.message, e.string, e.pos * " " + "^") sys.exit(1) - except spack.spec.SpecError, e: + except spack.spec.SpecError as e: tty.error(e.message) sys.exit(1) @@ -127,7 +129,7 @@ def elide_list(line_list, max_num=10): [1, 2, 3, '...', 6] """ if len(line_list) > max_num: - return line_list[:max_num-1] + ['...'] + line_list[-1:] + return line_list[:max_num - 1] + ['...'] + line_list[-1:] else: return line_list @@ -138,10 +140,104 @@ def disambiguate_spec(spec): tty.die("Spec '%s' matches no installed packages." % spec) elif len(matching_specs) > 1: - args = ["%s matches multiple packages." % spec, - "Matching packages:"] + args = ["%s matches multiple packages." % spec, + "Matching packages:"] args += [" " + str(s) for s in matching_specs] args += ["Use a more specific spec."] tty.die(*args) return matching_specs[0] + + +def ask_for_confirmation(message): + while True: + tty.msg(message + '[y/n]') + choice = raw_input().lower() + if choice == 'y': + break + elif choice == 'n': + raise SystemExit('Operation aborted') + tty.warn('Please reply either "y" or "n"') + + +def gray_hash(spec, length): + return colorize('@K{%s}' % spec.dag_hash(length)) + + +def display_specs(specs, **kwargs): + mode = kwargs.get('mode', 'short') + hashes = kwargs.get('long', False) + namespace = kwargs.get('namespace', False) + flags = kwargs.get('show_flags', False) + variants = kwargs.get('variants', False) + + hlen = 7 + if kwargs.get('very_long', False): + hashes = True + hlen = None + + nfmt = '.' if namespace else '_' + ffmt = '$%+' if flags else '' + vfmt = '$+' if variants else '' + format_string = '$%s$@%s%s' % (nfmt, ffmt, vfmt) + + # Make a dict with specs keyed by architecture and compiler. 
+ index = index_by(specs, ('architecture', 'compiler')) + + # Traverse the index and print out each package + for i, (architecture, compiler) in enumerate(sorted(index)): + if i > 0: + print + + header = "%s{%s} / %s{%s}" % (spack.spec.architecture_color, + architecture, spack.spec.compiler_color, + compiler) + tty.hline(colorize(header), char='-') + + specs = index[(architecture, compiler)] + specs.sort() + + abbreviated = [s.format(format_string, color=True) for s in specs] + if mode == 'paths': + # Print one spec per line along with prefix path + width = max(len(s) for s in abbreviated) + width += 2 + format = " %%-%ds%%s" % width + + for abbrv, spec in zip(abbreviated, specs): + if hashes: + print(gray_hash(spec, hlen), ) + print(format % (abbrv, spec.prefix)) + + elif mode == 'deps': + for spec in specs: + print(spec.tree( + format=format_string, + color=True, + indent=4, + prefix=(lambda s: gray_hash(s, hlen)) if hashes else None)) + + elif mode == 'short': + # Print columns of output if not printing flags + if not flags: + + def fmt(s): + string = "" + if hashes: + string += gray_hash(s, hlen) + ' ' + string += s.format('$-%s$@%s' % (nfmt, vfmt), color=True) + + return string + + colify(fmt(s) for s in specs) + # Print one entry per line if including flags + else: + for spec in specs: + # Print the hash if necessary + hsh = gray_hash(spec, hlen) + ' ' if hashes else '' + print(hsh + spec.format(format_string, color=True) + '\n') + + else: + raise ValueError( + "Invalid mode for display_specs: %s. Must be one of (paths," + "deps, short)." % mode) # NOQA: ignore=E501 diff --git a/lib/spack/spack/cmd/bootstrap.py b/lib/spack/spack/cmd/bootstrap.py index bec11439b5..60e2bd3a11 100644 --- a/lib/spack/spack/cmd/bootstrap.py +++ b/lib/spack/spack/cmd/bootstrap.py @@ -23,7 +23,6 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os -from subprocess import check_call import llnl.util.tty as tty from llnl.util.filesystem import join_path, mkdirp @@ -31,26 +30,49 @@ import spack from spack.util.executable import which +_SPACK_UPSTREAM = 'https://github.com/llnl/spack' + description = "Create a new installation of spack in another prefix" + def setup_parser(subparser): - subparser.add_argument('prefix', help="names of prefix where we should install spack") + subparser.add_argument( + '-r', '--remote', action='store', dest='remote', + help="name of the remote to bootstrap from", default='origin') + subparser.add_argument( + 'prefix', + help="names of prefix where we should install spack") -def get_origin_url(): +def get_origin_info(remote): git_dir = join_path(spack.prefix, '.git') git = which('git', required=True) - origin_url = git( - '--git-dir=%s' % git_dir, 'config', '--get', 'remote.origin.url', - output=str) - return origin_url.strip() + try: + branch = git('symbolic-ref', '--short', 'HEAD', output=str) + except ProcessError: + branch = 'develop' + tty.warn('No branch found; using default branch: %s' % branch) + if remote == 'origin' and \ + branch not in ('master', 'develop'): + branch = 'develop' + tty.warn('Unknown branch found; using default branch: %s' % branch) + try: + origin_url = git( + '--git-dir=%s' % git_dir, + 'config', '--get', 'remote.%s.url' % remote, + output=str) + except ProcessError: + origin_url = _SPACK_UPSTREAM + tty.warn('No git repository found; ' + 'using default upstream URL: %s' % origin_url) + return (origin_url.strip(), branch.strip()) def 
bootstrap(parser, args): - origin_url = get_origin_url() + origin_url, branch = get_origin_info(args.remote) prefix = args.prefix - tty.msg("Fetching spack from origin: %s" % origin_url) + tty.msg("Fetching spack from '%s': %s" % (args.remote, origin_url)) if os.path.isfile(prefix): tty.die("There is already a file at %s" % prefix) @@ -62,7 +84,8 @@ def bootstrap(parser, args): files_in_the_way = os.listdir(prefix) if files_in_the_way: - tty.die("There are already files there! Delete these files before boostrapping spack.", + tty.die("There are already files there! " + "Delete these files before boostrapping spack.", *files_in_the_way) tty.msg("Installing:", @@ -73,8 +96,10 @@ def bootstrap(parser, args): git = which('git', required=True) git('init', '--shared', '-q') git('remote', 'add', 'origin', origin_url) - git('fetch', 'origin', 'master:refs/remotes/origin/master', '-n', '-q') - git('reset', '--hard', 'origin/master', '-q') + git('fetch', 'origin', '%s:refs/remotes/origin/%s' % (branch, branch), + '-n', '-q') + git('reset', '--hard', 'origin/%s' % branch, '-q') + git('checkout', '-B', branch, 'origin/%s' % branch, '-q') tty.msg("Successfully created a new spack in %s" % prefix, "Run %s/bin/spack to use this installation." % prefix) diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py index 95bd4771ed..aedb0fd99c 100644 --- a/lib/spack/spack/cmd/checksum.py +++ b/lib/spack/spack/cmd/checksum.py @@ -42,7 +42,8 @@ def setup_parser(subparser): '--keep-stage', action='store_true', dest='keep_stage', help="Don't clean up staging area when command completes.") subparser.add_argument( - 'versions', nargs=argparse.REMAINDER, help='Versions to generate checksums for') + 'versions', nargs=argparse.REMAINDER, + help='Versions to generate checksums for') def get_checksums(versions, urls, **kwargs): @@ -59,10 +60,10 @@ def get_checksums(versions, urls, **kwargs): with Stage(url, keep=keep_stage) as stage: stage.fetch() if i == 0 and first_stage_function: - first_stage_function(stage) + first_stage_function(stage, url) - hashes.append((version, - spack.util.crypto.checksum(hashlib.md5, stage.archive_file))) + hashes.append((version, spack.util.crypto.checksum( + hashlib.md5, stage.archive_file))) i += 1 except FailedDownloadError as e: tty.msg("Failed to fetch %s" % url) @@ -79,12 +80,12 @@ def checksum(parser, args): # If the user asked for specific versions, use those. if args.versions: versions = {} - for v in args.versions: - v = ver(v) - if not isinstance(v, Version): + for version in args.versions: + version = ver(version) + if not isinstance(version, Version): tty.die("Cannot generate checksums for version lists or " + "version ranges. 
Use unambiguous versions.") - versions[v] = pkg.url_for_version(v) + versions[version] = pkg.url_for_version(version) else: versions = pkg.fetch_remote_versions() if not versions: @@ -111,5 +112,7 @@ def checksum(parser, args): if not version_hashes: tty.die("Could not fetch any versions for %s" % pkg.name) - version_lines = [" version('%s', '%s')" % (v, h) for v, h in version_hashes] + version_lines = [ + " version('%s', '%s')" % (v, h) for v, h in version_hashes + ] tty.msg("Checksummed new versions of %s:" % pkg.name, *version_lines) diff --git a/lib/spack/spack/cmd/common/__init__.py b/lib/spack/spack/cmd/common/__init__.py new file mode 100644 index 0000000000..ed1ec23bca --- /dev/null +++ b/lib/spack/spack/cmd/common/__init__.py @@ -0,0 +1,24 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## diff --git a/lib/spack/spack/cmd/common/arguments.py b/lib/spack/spack/cmd/common/arguments.py new file mode 100644 index 0000000000..af04170824 --- /dev/null +++ b/lib/spack/spack/cmd/common/arguments.py @@ -0,0 +1,96 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## + +import argparse + +import spack.modules +from spack.util.pattern import Bunch +__all__ = ['add_common_arguments'] + +_arguments = {} + + +def add_common_arguments(parser, list_of_arguments): + for argument in list_of_arguments: + if argument not in _arguments: + message = 'Trying to add the non existing argument "{0}" to a command' # NOQA: ignore=E501 + raise KeyError(message.format(argument)) + x = _arguments[argument] + parser.add_argument(*x.flags, **x.kwargs) + + +class ConstraintAction(argparse.Action): + """Constructs a list of specs based on a constraint given on the command line + + An instance of this class is supposed to be used as an argument action + in a parser. It will read a constraint and will attach a list of matching + specs to the namespace + """ + qualifiers = {} + + def __call__(self, parser, namespace, values, option_string=None): + # Query specs from command line + d = self.qualifiers.get(namespace.subparser_name, {}) + specs = [s for s in spack.installed_db.query(**d)] + values = ' '.join(values) + if values: + specs = [x for x in specs if x.satisfies(values, strict=True)] + namespace.specs = specs + +parms = Bunch( + flags=('constraint',), + kwargs={ + 'nargs': '*', + 'help': 'Constraint to select a subset of installed packages', + 'action': ConstraintAction + }) +_arguments['constraint'] = parms + +parms = Bunch( + flags=('-m', '--module-type'), + kwargs={ + 'help': 'Type of module files', + 'default': 'tcl', + 'choices': spack.modules.module_types + }) +_arguments['module_type'] = parms + +parms = Bunch( + flags=('-y', '--yes-to-all'), + kwargs={ + 'action': 'store_true', + 'dest': 'yes_to_all', + 'help': 'Assume "yes" is the answer to every confirmation asked to the user.' # NOQA: ignore=E501 + }) +_arguments['yes_to_all'] = parms + +parms = Bunch( + flags=('-r', '--dependencies'), + kwargs={ + 'action': 'store_true', + 'dest': 'recurse_dependencies', + 'help': 'Recursively traverse spec dependencies' + }) +_arguments['recurse_dependencies'] = parms diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index c9fa687b74..2c440096d1 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -103,6 +103,64 @@ def install(self, spec, prefix): ${install} """) +# Build dependencies and extensions +dependencies_dict = { + 'autotools': "# depends_on('foo')", + 'cmake': "depends_on('cmake')", + 'scons': "depends_on('scons')", + 'python': "extends('python')", + 'R': "extends('R')", + 'octave': "extends('octave')", + 'unknown': "# depends_on('foo')" +} + +# Default installation instructions +install_dict = { + 'autotools': """\ + # FIXME: Modify the configure line to suit your build system here. + configure('--prefix={0}'.format(prefix)) + + # FIXME: Add logic to build and install here. + make() + make('install')""", + + 'cmake': """\ + with working_dir('spack-build', create=True): + # FIXME: Modify the cmake line to suit your build system here. + cmake('..', *std_cmake_args) + + # FIXME: Add logic to build and install here. + make() + make('install')""", + + 'scons': """\ + # FIXME: Add logic to build and install here. 
+ scons('prefix={0}'.format(prefix)) + scons('install')""", + + 'python': """\ + # FIXME: Add logic to build and install here. + python('setup.py', 'install', '--prefix={0}'.format(prefix))""", + + 'R': """\ + # FIXME: Add logic to build and install here. + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path)""", + + 'octave': """\ + # FIXME: Add logic to build and install here. + octave('--quiet', '--norc', + '--built-in-docstrings-file=/dev/null', + '--texi-macros-file=/dev/null', + '--eval', 'pkg prefix {0}; pkg install {1}'.format( + prefix, self.stage.archive_file))""", + + 'unknown': """\ + # FIXME: Unknown build system + make() + make('install')""" +} + def make_version_calls(ver_hash_tuples): """Adds a version() call to the package for each version found.""" @@ -133,60 +191,17 @@ def setup_parser(subparser): setup_parser.subparser = subparser -class ConfigureGuesser(object): - def __call__(self, stage): - """Try to guess the type of build system used by the project. - Set any necessary build dependencies or extensions. - Set the appropriate default installation instructions.""" +class BuildSystemGuesser(object): + def __call__(self, stage, url): + """Try to guess the type of build system used by a project based on + the contents of its archive or the URL it was downloaded from.""" - # Build dependencies and extensions - dependenciesDict = { - 'autotools': "# depends_on('foo')", - 'cmake': "depends_on('cmake', type='build')", - 'scons': "depends_on('scons', type='build')", - 'python': "extends('python', type=nolink)", - 'R': "extends('R')", - 'unknown': "# depends_on('foo')" - } - - # Default installation instructions - installDict = { - 'autotools': """\ - # FIXME: Modify the configure line to suit your build system here. - configure('--prefix={0}'.format(prefix)) - - # FIXME: Add logic to build and install here. - make() - make('install')""", - - 'cmake': """\ - with working_dir('spack-build', create=True): - # FIXME: Modify the cmake line to suit your build system here. - cmake('..', *std_cmake_args) - - # FIXME: Add logic to build and install here. - make() - make('install')""", - - 'scons': """\ - # FIXME: Add logic to build and install here. - scons('prefix={0}'.format(prefix)) - scons('install')""", - - 'python': """\ - # FIXME: Add logic to build and install here. - python('setup.py', 'install', '--prefix={0}'.format(prefix))""", - - 'R': """\ - # FIXME: Add logic to build and install here. - R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), - self.stage.source_path)""", - - 'unknown': """\ - # FIXME: Unknown build system - make() - make('install')""" - } + # Most octave extensions are hosted on Octave-Forge: + # http://octave.sourceforge.net/index.html + # They all have the same base URL. + if 'downloads.sourceforge.net/octave/' in url: + self.build_system = 'octave' + return # A list of clues that give us an idea of the build system a package # uses. If the regular expression matches a file contained in the @@ -224,12 +239,6 @@ def __call__(self, stage): self.build_system = build_system - # Set any necessary build dependencies or extensions. 
- self.dependencies = dependenciesDict[build_system] - - # Set the appropriate default installation instructions - self.install = installDict[build_system] - def guess_name_and_version(url, args): # Try to deduce name and version of the new package from the URL @@ -334,8 +343,8 @@ def create(parser, args): # Fetch tarballs (prompting user if necessary) versions, urls = fetch_tarballs(url, name, version) - # Try to guess what configure system is used. - guesser = ConfigureGuesser() + # Try to guess what build system is used. + guesser = BuildSystemGuesser() ver_hash_tuples = spack.cmd.checksum.get_checksums( versions, urls, first_stage_function=guesser, @@ -344,13 +353,13 @@ def create(parser, args): if not ver_hash_tuples: tty.die("Could not fetch any tarballs for %s" % name) - # Prepend 'py-' to python package names, by convention. + # Add prefix to package name if it is an extension. if guesser.build_system == 'python': - name = 'py-%s' % name - - # Prepend 'r-' to R package names, by convention. + name = 'py-{0}'.format(name) if guesser.build_system == 'R': - name = 'r-%s' % name + name = 'r-{0}'.format(name) + if guesser.build_system == 'octave': + name = 'octave-{0}'.format(name) # Create a directory for the new package. pkg_path = repo.filename_for_package_name(name) @@ -367,8 +376,8 @@ def create(parser, args): class_name=mod_to_class(name), url=url, versions=make_version_calls(ver_hash_tuples), - dependencies=guesser.dependencies, - install=guesser.install)) + dependencies=dependencies_dict[guesser.build_system], + install=install_dict[guesser.build_system])) # If everything checks out, go ahead and edit. spack.editor(pkg_path) diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py index 3ec671f93f..d3ea38c573 100644 --- a/lib/spack/spack/cmd/find.py +++ b/lib/spack/spack/cmd/find.py @@ -31,7 +31,7 @@ from llnl.util.lang import * from llnl.util.tty.colify import * from llnl.util.tty.color import * -from llnl.util.lang import * +from spack.cmd import display_specs description = "Find installed spack packages" @@ -104,89 +104,6 @@ def setup_parser(subparser): help='optional specs to filter results') -def gray_hash(spec, length): - return colorize('@K{%s}' % spec.dag_hash(length)) - - -def display_specs(specs, **kwargs): - mode = kwargs.get('mode', 'short') - hashes = kwargs.get('long', False) - namespace = kwargs.get('namespace', False) - flags = kwargs.get('show_flags', False) - variants = kwargs.get('variants', False) - - hlen = 7 - if kwargs.get('very_long', False): - hashes = True - hlen = None - - nfmt = '.' if namespace else '_' - ffmt = '$%+' if flags else '' - vfmt = '$+' if variants else '' - format_string = '$%s$@%s%s' % (nfmt, ffmt, vfmt) - - # Make a dict with specs keyed by architecture and compiler. 
- index = index_by(specs, ('architecture', 'compiler')) - - # Traverse the index and print out each package - for i, (architecture, compiler) in enumerate(sorted(index)): - if i > 0: - print - - header = "%s{%s} / %s{%s}" % (spack.spec.architecture_color, - architecture, spack.spec.compiler_color, - compiler) - tty.hline(colorize(header), char='-') - - specs = index[(architecture, compiler)] - specs.sort() - - abbreviated = [s.format(format_string, color=True) for s in specs] - if mode == 'paths': - # Print one spec per line along with prefix path - width = max(len(s) for s in abbreviated) - width += 2 - format = " %%-%ds%%s" % width - - for abbrv, spec in zip(abbreviated, specs): - if hashes: - print(gray_hash(spec, hlen), ) - print(format % (abbrv, spec.prefix)) - - elif mode == 'deps': - for spec in specs: - print(spec.tree( - format=format_string, - color=True, - indent=4, - prefix=(lambda s: gray_hash(s, hlen)) if hashes else None)) - - elif mode == 'short': - # Print columns of output if not printing flags - if not flags: - - def fmt(s): - string = "" - if hashes: - string += gray_hash(s, hlen) + ' ' - string += s.format('$-%s$@%s' % (nfmt, vfmt), color=True) - - return string - - colify(fmt(s) for s in specs) - # Print one entry per line if including flags - else: - for spec in specs: - # Print the hash if necessary - hsh = gray_hash(spec, hlen) + ' ' if hashes else '' - print(hsh + spec.format(format_string, color=True) + '\n') - - else: - raise ValueError( - "Invalid mode for display_specs: %s. Must be one of (paths," - "deps, short)." % mode) # NOQA: ignore=E501 - - def query_arguments(args): # Check arguments if args.explicit and args.implicit: diff --git a/lib/spack/spack/cmd/info.py b/lib/spack/spack/cmd/info.py index 5774034062..498518057b 100644 --- a/lib/spack/spack/cmd/info.py +++ b/lib/spack/spack/cmd/info.py @@ -29,9 +29,11 @@ description = "Get detailed information on a particular package" + def padder(str_list, extra=0): """Return a function to pad elements of a list.""" length = max(len(str(s)) for s in str_list) + extra + def pad(string): string = str(string) padding = max(0, length - len(string)) @@ -40,7 +42,8 @@ def pad(string): def setup_parser(subparser): - subparser.add_argument('name', metavar="PACKAGE", help="Name of package to get info for.") + subparser.add_argument( + 'name', metavar="PACKAGE", help="Name of package to get info for.") def print_text_info(pkg): @@ -84,7 +87,7 @@ def print_text_info(pkg): for deptype in ('build', 'link', 'run'): print print "%s Dependencies:" % deptype.capitalize() - deps = pkg.dependencies(deptype) + deps = pkg.dependencies_of_type(deptype) if deps: colify(deps, indent=4) else: diff --git a/lib/spack/spack/cmd/module.py b/lib/spack/spack/cmd/module.py index 55826d133c..a10e36e077 100644 --- a/lib/spack/spack/cmd/module.py +++ b/lib/spack/spack/cmd/module.py @@ -23,135 +23,233 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from __future__ import print_function + +import collections import os import shutil import sys import llnl.util.tty as tty import spack.cmd -from llnl.util.filesystem import mkdirp +import spack.cmd.common.arguments as arguments +import llnl.util.filesystem as filesystem from spack.modules import module_types -from spack.util.string import * -description = "Manipulate modules and dotkits." 
+description = "Manipulate module files" + +# Dictionary that will be populated with the list of sub-commands +# Each sub-command must be callable and accept 3 arguments : +# - mtype : the type of the module file +# - specs : the list of specs to be processed +# - args : namespace containing the parsed command line arguments +callbacks = {} + + +def subcommand(subparser_name): + """Registers a function in the callbacks dictionary""" + def decorator(callback): + callbacks[subparser_name] = callback + return callback + return decorator def setup_parser(subparser): - sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='module_command') + sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='subparser_name') - sp.add_parser('refresh', help='Regenerate all module files.') + # spack module refresh + refresh_parser = sp.add_parser('refresh', help='Regenerate module files') + refresh_parser.add_argument( + '--delete-tree', + help='Delete the module file tree before refresh', + action='store_true' + ) + arguments.add_common_arguments( + refresh_parser, ['constraint', 'module_type', 'yes_to_all'] + ) - find_parser = sp.add_parser('find', help='Find module files for packages.') + # spack module find + find_parser = sp.add_parser('find', help='Find module files for packages') + arguments.add_common_arguments(find_parser, ['constraint', 'module_type']) - find_parser.add_argument( - 'module_type', - help="Type of module to find file for. [" + - '|'.join(module_types) + "]") + # spack module rm + rm_parser = sp.add_parser('rm', help='Remove module files') + arguments.add_common_arguments( + rm_parser, ['constraint', 'module_type', 'yes_to_all'] + ) - find_parser.add_argument( - '-r', '--dependencies', action='store_true', - dest='recurse_dependencies', - help='Recursively traverse dependencies for modules to load.') - - find_parser.add_argument( - '-s', '--shell', action='store_true', dest='shell', - help='Generate shell script (instead of input for module command)') - - find_parser.add_argument( - '-p', '--prefix', dest='prefix', - help='Prepend to module names when issuing module load commands') - - find_parser.add_argument( - 'spec', nargs='+', - help='spec to find a module file for.') + # spack module loads + loads_parser = sp.add_parser( + 'loads', + help='Prompt the list of modules associated with a constraint' + ) + loads_parser.add_argument( + '--input-only', action='store_false', dest='shell', + help='Generate input for module command (instead of a shell script)' + ) + loads_parser.add_argument( + '-p', '--prefix', dest='prefix', default='', + help='Prepend to module names when issuing module load commands' + ) + arguments.add_common_arguments( + loads_parser, ['constraint', 'module_type', 'recurse_dependencies'] + ) -def module_find(mtype, flags, spec_array): - """Look at all installed packages and see if the spec provided - matches any. If it does, check whether there is a module file - of type there, and print out the name that the user - should type to use that package's module. - prefix: - Prepend this to module names when issuing "module load" commands. - Some systems seem to need it. 
+class MultipleMatches(Exception): + pass + + +class NoMatch(Exception): + pass + + +@subcommand('loads') +def loads(mtype, specs, args): + """Prompt the list of modules associated with a list of specs""" + # Get a comprehensive list of specs + if args.recurse_dependencies: + specs_from_user_constraint = specs[:] + specs = [] + # FIXME : during module file creation nodes seem to be visited + # FIXME : multiple times even if cover='nodes' is given. This + # FIXME : work around permits to get a unique list of spec anyhow. + # FIXME : (same problem as in spack/modules.py) + seen = set() + seen_add = seen.add + for spec in specs_from_user_constraint: + specs.extend( + [item for item in spec.traverse(order='post', cover='nodes') if not (item in seen or seen_add(item))] # NOQA: ignore=E501 + ) + + module_cls = module_types[mtype] + modules = [(spec, module_cls(spec).use_name) + for spec in specs if os.path.exists(module_cls(spec).file_name)] + + module_commands = { + 'tcl': 'module load ', + 'dotkit': 'dotkit use ' + } + + d = { + 'command': '' if not args.shell else module_commands[mtype], + 'prefix': args.prefix + } + + prompt_template = '{comment}{command}{prefix}{name}' + for spec, mod in modules: + d['comment'] = '' if not args.shell else '# {0}\n'.format( + spec.format()) + d['name'] = mod + print(prompt_template.format(**d)) + + +@subcommand('find') +def find(mtype, specs, args): """ - if mtype not in module_types: - tty.die("Invalid module type: '%s'. Options are %s" % - (mtype, comma_or(module_types))) + Look at all installed packages and see if the spec provided + matches any. If it does, check whether there is a module file + of type there, and print out the name that the user + should type to use that package's module. + """ + if len(specs) == 0: + raise NoMatch() - # -------------------------------------- - def _find_modules(spec, modules_list): - """Finds all modules and sub-modules for a spec""" - if str(spec.version) == 'system': - # No Spack module for system-installed packages - return + if len(specs) > 1: + raise MultipleMatches() - if flags.recurse_dependencies: - for dep in spec.dependencies(): - _find_modules(dep, modules_list) - - mod = module_types[mtype](spec) - if not os.path.isfile(mod.file_name): - tty.die("No %s module is installed for %s" % (mtype, spec)) - modules_list.append((spec, mod)) + spec = specs.pop() + mod = module_types[mtype](spec) + if not os.path.isfile(mod.file_name): + tty.die("No %s module is installed for %s" % (mtype, spec)) + print(mod.use_name) - # -------------------------------------- - raw_specs = spack.cmd.parse_specs(spec_array) - modules = set() # Modules we will load - seen = set() - for raw_spec in raw_specs: +@subcommand('rm') +def rm(mtype, specs, args): + """Deletes module files associated with items in specs""" + module_cls = module_types[mtype] + specs_with_modules = [ + spec for spec in specs if os.path.exists(module_cls(spec).file_name)] + modules = [module_cls(spec) for spec in specs_with_modules] - # ----------- Make sure the spec only resolves to ONE thing - specs = spack.installed_db.query(raw_spec) - if len(specs) == 0: - tty.die("No installed packages match spec %s" % raw_spec) + if not modules: + tty.msg('No module file matches your query') + raise SystemExit(1) - if len(specs) > 1: - tty.error("Multiple matches for spec %s. 
Choose one:" % raw_spec) - for s in specs: - sys.stderr.write(s.tree(color=True)) - sys.exit(1) - spec = specs[0] + # Ask for confirmation + if not args.yes_to_all: + tty.msg('You are about to remove {0} module files the following specs:\n'.format(mtype)) # NOQA: ignore=E501 + spack.cmd.display_specs(specs_with_modules, long=True) + print('') + spack.cmd.ask_for_confirmation('Do you want to proceed ? ') - # ----------- Chase down modules for it and all its dependencies - modules_dups = list() - _find_modules(spec, modules_dups) + # Remove the module files + for s in modules: + s.remove() - # Remove duplicates while keeping order - modules_unique = list() - for spec,mod in modules_dups: - if mod.use_name not in seen: - modules_unique.append((spec,mod)) - seen.add(mod.use_name) - # Output... - if flags.shell: - module_cmd = {'tcl': 'module load', 'dotkit': 'dotkit use'}[mtype] - for spec,mod in modules_unique: - if flags.shell: - print('# %s' % spec.format()) - print('%s %s%s' % (module_cmd, flags.prefix, mod.use_name)) - else: - print(mod.use_name) +@subcommand('refresh') +def refresh(mtype, specs, args): + """Regenerate module files for item in specs""" + # Prompt a message to the user about what is going to change + if not specs: + tty.msg('No package matches your query') + return -def module_refresh(): - """Regenerate all module files for installed packages known to - spack (some packages may no longer exist).""" - specs = [s for s in spack.installed_db.query(installed=True, known=True)] + if not args.yes_to_all: + tty.msg('You are about to regenerate {name} module files for the following specs:\n'.format(name=mtype)) # NOQA: ignore=E501 + spack.cmd.display_specs(specs, long=True) + print('') + spack.cmd.ask_for_confirmation('Do you want to proceed ? ') - for name, cls in module_types.items(): - tty.msg("Regenerating %s module files." 
% name) - if os.path.isdir(cls.path): - shutil.rmtree(cls.path, ignore_errors=False) - mkdirp(cls.path) - for spec in specs: - cls(spec).write() + cls = module_types[mtype] + + # Detect name clashes + writers = [cls(spec) for spec in specs] + file2writer = collections.defaultdict(list) + for item in writers: + file2writer[item.file_name].append(item) + + if len(file2writer) != len(writers): + message = 'Name clashes detected in module files:\n' + for filename, writer_list in file2writer.items(): + if len(writer_list) > 1: + message += '\nfile : {0}\n'.format(filename) + for x in writer_list: + message += 'spec : {0}\n'.format(x.spec.format(color=True)) + tty.error(message) + tty.error('Operation aborted') + raise SystemExit(1) + + # Proceed regenerating module files + tty.msg('Regenerating {name} module files'.format(name=mtype)) + if os.path.isdir(cls.path) and args.delete_tree: + shutil.rmtree(cls.path, ignore_errors=False) + filesystem.mkdirp(cls.path) + for x in writers: + x.write(overwrite=True) def module(parser, args): - if args.module_command == 'refresh': - module_refresh() + # Qualifiers to be used when querying the db for specs + constraint_qualifiers = { + 'refresh': { + 'installed': True, + 'known': True + }, + } + arguments.ConstraintAction.qualifiers.update(constraint_qualifiers) - elif args.module_command == 'find': - module_find(args.module_type, args, args.spec) + module_type = args.module_type + constraint = args.constraint + try: + callbacks[args.subparser_name](module_type, args.specs, args) + except MultipleMatches: + message = 'the constraint \'{query}\' matches multiple packages, and this is not allowed in this context' # NOQA: ignore=E501 + tty.error(message.format(query=constraint)) + for s in args.specs: + sys.stderr.write(s.format(color=True) + '\n') + raise SystemExit(1) + except NoMatch: + message = 'the constraint \'{query}\' match no package, and this is not allowed in this context' # NOQA: ignore=E501 + tty.die(message.format(query=constraint)) diff --git a/lib/spack/spack/cmd/package-list.py b/lib/spack/spack/cmd/package-list.py index bc64c77eab..a27502d30e 100644 --- a/lib/spack/spack/cmd/package-list.py +++ b/lib/spack/spack/cmd/package-list.py @@ -22,10 +22,8 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import re import cgi from StringIO import StringIO -import llnl.util.tty as tty from llnl.util.tty.colify import * import spack @@ -34,21 +32,22 @@ def github_url(pkg): """Link to a package file on github.""" - return ("https://github.com/llnl/spack/blob/master/var/spack/packages/%s/package.py" % - pkg.name) + url = "https://github.com/llnl/spack/blob/master/var/spack/packages/%s/package.py" # NOQA: ignore=E501 + return (url % pkg.name) def rst_table(elts): """Print out a RST-style table.""" cols = StringIO() ncol, widths = colify(elts, output=cols, tty=True) - header = " ".join("=" * (w-1) for w in widths) + header = " ".join("=" * (w - 1) for w in widths) return "%s\n%s%s" % (header, cols.getvalue(), header) def print_rst_package_list(): """Print out information on all packages in restructured text.""" - pkgs = sorted(spack.repo.all_packages(), key=lambda s:s.name.lower()) + pkgs = sorted(spack.repo.all_packages(), key=lambda s: s.name.lower()) + pkg_names = [p.name for p in pkgs] print ".. 
_package-list:" print @@ -62,7 +61,7 @@ def print_rst_package_list(): print "Spack currently has %d mainline packages:" % len(pkgs) print - print rst_table("`%s`_" % p.name for p in pkgs) + print rst_table("`%s`_" % p for p in pkg_names) print print "-----" @@ -79,14 +78,15 @@ def print_rst_package_list(): print if pkg.versions: print "Versions:" - print " " + ", ".join(str(v) for v in reversed(sorted(pkg.versions))) + print " " + ", ".join(str(v) for v in + reversed(sorted(pkg.versions))) - for deptype in ('build', 'link', 'run'): - deps = pkg.dependencies(deptype) + for deptype in spack.alldeps: + deps = pkg.dependencies_of_type(deptype) if deps: print "%s Dependencies" % deptype.capitalize() - print " " + ", ".join("`%s`_" % d if d != "mpi" else d - for d in build_deps) + print " " + ", ".join("%s_" % d if d in pkg_names + else d for d in deps) print print "Description:" diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py index a6f08d09ed..a17b7c685c 100644 --- a/lib/spack/spack/cmd/uninstall.py +++ b/lib/spack/spack/cmd/uninstall.py @@ -30,7 +30,6 @@ import spack import spack.cmd import spack.repository -from spack.cmd.find import display_specs description = "Remove an installed package" @@ -43,21 +42,10 @@ display_args = { 'long': True, 'show_flags': True, - 'variants':True + 'variants': True } -def ask_for_confirmation(message): - while True: - tty.msg(message + '[y/n]') - choice = raw_input().lower() - if choice == 'y': - break - elif choice == 'n': - raise SystemExit('Operation aborted') - tty.warn('Please reply either "y" or "n"') - - def setup_parser(subparser): subparser.add_argument( '-f', '--force', action='store_true', dest='force', @@ -65,32 +53,37 @@ def setup_parser(subparser): subparser.add_argument( '-a', '--all', action='store_true', dest='all', help="USE CAREFULLY. Remove ALL installed packages that match each " + - "supplied spec. i.e., if you say uninstall libelf, ALL versions of " + - "libelf are uninstalled. This is both useful and dangerous, like rm -r.") + "supplied spec. i.e., if you say uninstall libelf, ALL versions of " + # NOQA: ignore=E501 + "libelf are uninstalled. This is both useful and dangerous, like rm -r.") # NOQA: ignore=E501 subparser.add_argument( '-d', '--dependents', action='store_true', dest='dependents', - help='Also uninstall any packages that depend on the ones given via command line.' + help='Also uninstall any packages that depend on the ones given via command line.' # NOQA: ignore=E501 ) subparser.add_argument( '-y', '--yes-to-all', action='store_true', dest='yes_to_all', - help='Assume "yes" is the answer to every confirmation asked to the user.' + help='Assume "yes" is the answer to every confirmation asked to the user.' 
# NOQA: ignore=E501 ) - subparser.add_argument('packages', nargs=argparse.REMAINDER, help="specs of packages to uninstall") + subparser.add_argument( + 'packages', + nargs=argparse.REMAINDER, + help="specs of packages to uninstall" + ) def concretize_specs(specs, allow_multiple_matches=False, force=False): - """ - Returns a list of specs matching the non necessarily concretized specs given from cli + """Returns a list of specs matching the non necessarily + concretized specs given from cli Args: specs: list of specs to be matched against installed packages - allow_multiple_matches : boolean (if True multiple matches for each item in specs are admitted) + allow_multiple_matches : if True multiple matches are admitted Return: list of specs """ - specs_from_cli = [] # List of specs that match expressions given via command line + # List of specs that match expressions given via command line + specs_from_cli = [] has_errors = False for spec in specs: matching = spack.installed_db.query(spec) @@ -99,7 +92,7 @@ def concretize_specs(specs, allow_multiple_matches=False, force=False): if not allow_multiple_matches and len(matching) > 1: tty.error("%s matches multiple packages:" % spec) print() - display_specs(matching, **display_args) + spack.cmd.display_specs(matching, **display_args) print() has_errors = True @@ -116,8 +109,8 @@ def concretize_specs(specs, allow_multiple_matches=False, force=False): def installed_dependents(specs): - """ - Returns a dictionary that maps a spec with a list of its installed dependents + """Returns a dictionary that maps a spec with a list of its + installed dependents Args: specs: list of specs to be checked for dependents @@ -147,7 +140,7 @@ def do_uninstall(specs, force): try: # should work if package is known to spack packages.append(item.package) - except spack.repository.UnknownPackageError as e: + except spack.repository.UnknownPackageError: # The package.py file has gone away -- but still # want to uninstall. 
spack.Package(item).do_uninstall(force=True) @@ -169,17 +162,20 @@ def uninstall(parser, args): with spack.installed_db.write_transaction(): specs = spack.cmd.parse_specs(args.packages) # Gets the list of installed specs that match the ones give via cli - uninstall_list = concretize_specs(specs, args.all, args.force) # takes care of '-a' is given in the cli - dependent_list = installed_dependents(uninstall_list) # takes care of '-d' + # takes care of '-a' is given in the cli + uninstall_list = concretize_specs(specs, args.all, args.force) + dependent_list = installed_dependents( + uninstall_list) # takes care of '-d' # Process dependent_list and update uninstall_list has_error = False if dependent_list and not args.dependents and not args.force: for spec, lst in dependent_list.items(): - tty.error("Will not uninstall %s" % spec.format("$_$@$%@$#", color=True)) + tty.error("Will not uninstall %s" % + spec.format("$_$@$%@$#", color=True)) print('') print("The following packages depend on it:") - display_specs(lst, **display_args) + spack.cmd.display_specs(lst, **display_args) print('') has_error = True elif args.dependents: @@ -188,14 +184,14 @@ def uninstall(parser, args): uninstall_list = list(set(uninstall_list)) if has_error: - tty.die('You can use spack uninstall --dependents to uninstall these dependencies as well') + tty.die('You can use spack uninstall --dependents to uninstall these dependencies as well') # NOQA: ignore=E501 if not args.yes_to_all: tty.msg("The following packages will be uninstalled : ") print('') - display_specs(uninstall_list, **display_args) + spack.cmd.display_specs(uninstall_list, **display_args) print('') - ask_for_confirmation('Do you want to proceed ? ') + spack.cmd.ask_for_confirmation('Do you want to proceed ? ') # Uninstall everything on the list do_uninstall(uninstall_list, args.force) diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index 84179e1469..8b5e96f97d 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -328,6 +328,11 @@ 'anyOf': [ { 'properties': { + 'hash_length': { + 'type': 'integer', + 'minimum': 0, + 'default': 7 + }, 'whitelist': {'$ref': '#/definitions/array_of_strings'}, 'blacklist': {'$ref': '#/definitions/array_of_strings'}, 'naming_scheme': { @@ -492,8 +497,15 @@ def clear(self): """Empty cached config information.""" self.sections = {} +"""Default configuration scope is the lowest-level scope. These are + versioned with Spack and can be overridden by sites or users.""" +ConfigScope('defaults', os.path.join(spack.etc_path, 'spack', 'defaults')) -ConfigScope('site', os.path.join(spack.etc_path, 'spack')), +"""Site configuration is per spack instance, for sites or projects. + No site-level configs should be checked into spack by default.""" +ConfigScope('site', os.path.join(spack.etc_path, 'spack')) + +"""User configuration can override both spack defaults and site config.""" ConfigScope('user', os.path.expanduser('~/.spack')) diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index c95abd7423..317b0d5784 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -60,7 +60,7 @@ _db_dirname = '.spack-db' # DB version. This is stuck in the DB file to track changes in format. -_db_version = Version('0.9.1') +_db_version = Version('0.9.2') # Default timeout for spack database locks is 5 min. 
_db_lock_timeout = 60 @@ -215,14 +215,10 @@ def _read_spec_from_yaml(self, hash_key, installs, parent_key=None): # Add dependencies from other records in the install DB to # form a full spec. if 'dependencies' in spec_dict[spec.name]: - for dep in spec_dict[spec.name]['dependencies'].values(): - if type(dep) == tuple: - dep_hash, deptypes = dep - else: - dep_hash = dep - deptypes = spack.alldeps - child = self._read_spec_from_yaml(dep_hash, installs, hash_key) - spec._add_dependency(child, deptypes) + yaml_deps = spec_dict[spec.name]['dependencies'] + for dname, dhash, dtypes in Spec.read_yaml_dep_specs(yaml_deps): + child = self._read_spec_from_yaml(dhash, installs, hash_key) + spec._add_dependency(child, dtypes) # Specs from the database need to be marked concrete because # they represent actual installations. @@ -639,13 +635,14 @@ def _exit(self): class CorruptDatabaseError(SpackError): def __init__(self, path, msg=''): super(CorruptDatabaseError, self).__init__( - "Spack database is corrupt: %s. %s." + \ - "Try running `spack reindex` to fix." % (path, msg)) + "Spack database is corrupt: %s. %s." % (path, msg), + "Try running `spack reindex` to fix.") class InvalidDatabaseVersionError(SpackError): def __init__(self, expected, found): super(InvalidDatabaseVersionError, self).__init__( - "Expected database version %s but found version %s." + \ - "Try running `spack reindex` to fix." % - (expected, found)) + "Expected database version %s but found version %s." + % (expected, found), + "`spack reindex` may fix this, or you may need a newer " + "Spack version.") diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index a5e76043ad..8150a6da2b 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -34,6 +34,7 @@ import llnl.util.tty as tty from llnl.util.filesystem import join_path, mkdirp +import spack from spack.spec import Spec from spack.error import SpackError @@ -223,8 +224,14 @@ def write_spec(self, spec, path): def read_spec(self, path): """Read the contents of a file and parse them as a spec""" - with open(path) as f: - spec = Spec.from_yaml(f) + try: + with open(path) as f: + spec = Spec.from_yaml(f) + except Exception as e: + if spack.debug: + raise + raise SpecReadError( + 'Unable to read file: %s' % path, 'Cause: ' + str(e)) # Specs read from actual installations are always concrete spec._mark_concrete() @@ -456,10 +463,12 @@ def __init__(self, path): "Install path %s already exists!") +class SpecReadError(DirectoryLayoutError): + """Raised when directory layout can't read a spec.""" + + class InvalidExtensionSpecError(DirectoryLayoutError): """Raised when an extension file has a bad spec in it.""" - def __init__(self, message): - super(InvalidExtensionSpecError, self).__init__(message) class ExtensionAlreadyInstalledError(DirectoryLayoutError): diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py index a2e528d295..72656b8ae0 100644 --- a/lib/spack/spack/modules.py +++ b/lib/spack/spack/modules.py @@ -188,6 +188,8 @@ def parse_config_options(module_generator): ##### # Automatic loading loads + module_file_actions['hash_length'] = module_configuration.get( + 'hash_length', 7) module_file_actions['autoload'] = dependencies( module_generator.spec, module_file_actions.get('autoload', 'none')) # Prerequisites @@ -237,6 +239,7 @@ class EnvModule(object): formats = {} class __metaclass__(type): + def __init__(cls, name, bases, dict): type.__init__(cls, name, bases, dict) if cls.name != 'env_module' 
and cls.name in CONFIGURATION[ @@ -295,7 +298,9 @@ def use_name(self): if constraint in self.spec: suffixes.append(suffix) # Always append the hash to make the module file unique - suffixes.append(self.spec.dag_hash()) + hash_length = configuration.pop('hash_length', 7) + if hash_length != 0: + suffixes.append(self.spec.dag_hash(length=hash_length)) name = '-'.join(suffixes) return name @@ -338,7 +343,7 @@ def blacklisted(self): return False - def write(self): + def write(self, overwrite=False): """ Writes out a module file for this object. @@ -399,6 +404,15 @@ def write(self): for line in self.module_specific_content(module_configuration): module_file_content += line + # Print a warning in case I am accidentally overwriting + # a module file that is already there (name clash) + if not overwrite and os.path.exists(self.file_name): + message = 'Module file already exists : skipping creation\n' + message += 'file : {0.file_name}\n' + message += 'spec : {0.spec}' + tty.warn(message.format(self)) + return + # Dump to file with open(self.file_name, 'w') as f: f.write(module_file_content) @@ -454,7 +468,7 @@ def remove(self): class Dotkit(EnvModule): name = 'dotkit' - + path = join_path(spack.share_path, 'dotkit') environment_modifications_formats = { PrependPath: 'dk_alter {name} {value}\n', SetEnv: 'dk_setenv {name} {value}\n' @@ -466,7 +480,7 @@ class Dotkit(EnvModule): @property def file_name(self): - return join_path(spack.share_path, "dotkit", self.spec.architecture, + return join_path(self.path, self.spec.architecture, '%s.dk' % self.use_name) @property @@ -494,7 +508,7 @@ def prerequisite(self, spec): class TclModule(EnvModule): name = 'tcl' - + path = join_path(spack.share_path, "modules") environment_modifications_formats = { PrependPath: 'prepend-path --delim "{delim}" {name} \"{value}\"\n', AppendPath: 'append-path --delim "{delim}" {name} \"{value}\"\n', @@ -514,7 +528,7 @@ class TclModule(EnvModule): @property def file_name(self): - return join_path(spack.share_path, "modules", self.spec.architecture, self.use_name) + return join_path(self.path, self.spec.architecture, self.use_name) @property def header(self): diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index c41bd0206e..68360ec532 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -64,7 +64,7 @@ from spack.stage import Stage, ResourceStage, StageComposite from spack.util.compression import allowed_archive from spack.util.environment import dump_environment -from spack.util.executable import ProcessError +from spack.util.executable import ProcessError, which from spack.version import * """Allowed URL schemes for spack packages.""" @@ -718,6 +718,11 @@ def fetcher(self): def fetcher(self, f): self._fetcher = f + def dependencies_of_type(self, *deptypes): + """Get subset of the dependencies with certain types.""" + return dict((name, conds) for name, conds in self.dependencies.items() + if any(d in self._deptypes[name] for d in deptypes)) + @property def extendee_spec(self): """ @@ -840,7 +845,7 @@ def installed_dependents(self): if self.name == spec.name: continue # XXX(deptype): Should build dependencies not count here? 
- #for dep in spec.traverse(deptype=('run')): + # for dep in spec.traverse(deptype=('run')): for dep in spec.traverse(deptype=spack.alldeps): if self.spec == dep: dependents.append(spec) @@ -852,13 +857,13 @@ def prefix(self): return self.spec.prefix @property - #TODO: Change this to architecture + # TODO: Change this to architecture def compiler(self): """Get the spack.compiler.Compiler object used to build this package""" if not self.spec.concrete: raise ValueError("Can only get a compiler for a concrete package.") return spack.compilers.compiler_for_spec(self.spec.compiler, - self.spec.architecture) + self.spec.architecture) def url_version(self, version): """ @@ -1063,7 +1068,8 @@ def do_install(self, run_tests -- Run tests within the package's install() """ if not self.spec.concrete: - raise ValueError("Can only install concrete packages: %s." % self.spec.name) + raise ValueError("Can only install concrete packages: %s." + % self.spec.name) # No installation needed if package is external if self.spec.external: @@ -1713,6 +1719,13 @@ def install_dependency_symlinks(pkg, spec, prefix): flatten_dependencies(spec, prefix) +def use_cray_compiler_names(): + """Compiler names for builds that rely on cray compiler names.""" + os.environ['CC'] = 'cc' + os.environ['CXX'] = 'CC' + os.environ['FC'] = 'ftn' + os.environ['F77'] = 'ftn' + def flatten_dependencies(spec, flat_dir): """Make each dependency of spec present in dir via symlink.""" for dep in spec.traverse(root=False): diff --git a/lib/spack/spack/platforms/cray_xc.py b/lib/spack/spack/platforms/cray_xc.py index e710303e23..e3c7761a94 100644 --- a/lib/spack/spack/platforms/cray_xc.py +++ b/lib/spack/spack/platforms/cray_xc.py @@ -2,6 +2,8 @@ from spack.architecture import Platform, Target from spack.operating_systems.linux_distro import LinuxDistro from spack.operating_systems.cnl import Cnl +from spack.util.executable import which + class CrayXc(Platform): priority = 20 @@ -9,9 +11,8 @@ class CrayXc(Platform): back_end = 'ivybridge' default = 'ivybridge' - front_os = "SuSE11" back_os = "CNL10" - default_os = "CNL10" + default_os = "CNL10" def __init__(self): ''' Since cori doesn't have ivybridge as a front end it's better @@ -32,15 +33,27 @@ def __init__(self): # Could switch to use modules and fe targets for front end # Currently using compilers by path for front end. self.add_target('sandybridge', Target('sandybridge')) - self.add_target('ivybridge', + self.add_target('ivybridge', Target('ivybridge', 'craype-ivybridge')) - self.add_target('haswell', - Target('haswell','craype-haswell')) + self.add_target('haswell', + Target('haswell', 'craype-haswell')) - self.add_operating_system('SuSE11', LinuxDistro()) + # Front end of the cray platform is a linux distro. 
+ linux_dist = LinuxDistro() + self.front_os = str(linux_dist) + self.add_operating_system(str(linux_dist), linux_dist) self.add_operating_system('CNL10', Cnl()) @classmethod def detect(self): - return os.path.exists('/opt/cray/craype') - + try: + cc_verbose = which('ftn') + text = cc_verbose('-craype-verbose', + output=str, error=str, + ignore_errors=True).split() + if '-D__CRAYXC' in text: + return True + else: + return False + except: + return False diff --git a/lib/spack/spack/preferred_packages.py b/lib/spack/spack/preferred_packages.py index 4820584150..1b94f03de7 100644 --- a/lib/spack/spack/preferred_packages.py +++ b/lib/spack/spack/preferred_packages.py @@ -26,8 +26,10 @@ import spack from spack.version import * + class PreferredPackages(object): - _default_order = {'compiler' : [ 'gcc', 'intel', 'clang', 'pgi', 'xlc' ] } # Arbitrary, but consistent + # Arbitrary, but consistent + _default_order = {'compiler': ['gcc', 'intel', 'clang', 'pgi', 'xlc']} def __init__(self): self.preferred = spack.config.get_config('packages') @@ -35,24 +37,25 @@ def __init__(self): # Given a package name, sort component (e.g, version, compiler, ...), and # a second_key (used by providers), return the list - def _order_for_package(self, pkgname, component, second_key, test_all=True): + def _order_for_package(self, pkgname, component, second_key, + test_all=True): pkglist = [pkgname] if test_all: pkglist.append('all') for pkg in pkglist: order = self.preferred.get(pkg, {}).get(component, {}) - if type(order) is dict: + if isinstance(order, dict) and second_key: order = order.get(second_key, {}) if not order: continue return [str(s).strip() for s in order] return [] - # A generic sorting function. Given a package name and sort # component, return less-than-0, 0, or greater-than-0 if # a is respectively less-than, equal to, or greater than b. - def _component_compare(self, pkgname, component, a, b, reverse_natural_compare, second_key): + def _component_compare(self, pkgname, component, a, b, + reverse_natural_compare, second_key): if a is None: return -1 if b is None: @@ -84,92 +87,102 @@ def _component_compare(self, pkgname, component, a, b, reverse_natural_compare, else: return 0 - # A sorting function for specs. Similar to component_compare, but # a and b are considered to match entries in the sorting list if they # satisfy the list component. 
- def _spec_compare(self, pkgname, component, a, b, reverse_natural_compare, second_key): - if not a or not a.concrete: + def _spec_compare(self, pkgname, component, a, b, + reverse_natural_compare, second_key): + if not a or (not a.concrete and not second_key): return -1 - if not b or not b.concrete: + if not b or (not b.concrete and not second_key): return 1 specs = self._spec_for_pkgname(pkgname, component, second_key) a_index = None b_index = None reverse = -1 if reverse_natural_compare else 1 for i, cspec in enumerate(specs): - if a_index == None and (cspec.satisfies(a) or a.satisfies(cspec)): + if a_index is None and (cspec.satisfies(a) or a.satisfies(cspec)): a_index = i if b_index: break - if b_index == None and (cspec.satisfies(b) or b.satisfies(cspec)): + if b_index is None and (cspec.satisfies(b) or b.satisfies(cspec)): b_index = i if a_index: break - if a_index != None and b_index == None: return -1 - elif a_index == None and b_index != None: return 1 - elif a_index != None and b_index == a_index: return -1 * cmp(a, b) - elif a_index != None and b_index != None and a_index != b_index: return cmp(a_index, b_index) - else: return cmp(a, b) * reverse - - + if a_index is not None and b_index is None: + return -1 + elif a_index is None and b_index is not None: + return 1 + elif a_index is not None and b_index == a_index: + return -1 * cmp(a, b) + elif (a_index is not None and b_index is not None and + a_index != b_index): + return cmp(a_index, b_index) + else: + return cmp(a, b) * reverse # Given a sort order specified by the pkgname/component/second_key, return # a list of CompilerSpecs, VersionLists, or Specs for that sorting list. def _spec_for_pkgname(self, pkgname, component, second_key): key = (pkgname, component, second_key) - if not key in self._spec_for_pkgname_cache: + if key not in self._spec_for_pkgname_cache: pkglist = self._order_for_package(pkgname, component, second_key) if not pkglist: if component in self._default_order: pkglist = self._default_order[component] if component == 'compiler': - self._spec_for_pkgname_cache[key] = [spack.spec.CompilerSpec(s) for s in pkglist] + self._spec_for_pkgname_cache[key] = \ + [spack.spec.CompilerSpec(s) for s in pkglist] elif component == 'version': - self._spec_for_pkgname_cache[key] = [VersionList(s) for s in pkglist] + self._spec_for_pkgname_cache[key] = \ + [VersionList(s) for s in pkglist] else: - self._spec_for_pkgname_cache[key] = [spack.spec.Spec(s) for s in pkglist] + self._spec_for_pkgname_cache[key] = \ + [spack.spec.Spec(s) for s in pkglist] return self._spec_for_pkgname_cache[key] - def provider_compare(self, pkgname, provider_str, a, b): - """Return less-than-0, 0, or greater than 0 if a is respecively less-than, equal-to, or - greater-than b. A and b are possible implementations of provider_str. - One provider is less-than another if it is preferred over the other. - For example, provider_compare('scorep', 'mpi', 'mvapich', 'openmpi') would return -1 if - mvapich should be preferred over openmpi for scorep.""" - return self._spec_compare(pkgname, 'providers', a, b, False, provider_str) - + """Return less-than-0, 0, or greater than 0 if a is respecively + less-than, equal-to, or greater-than b. A and b are possible + implementations of provider_str. One provider is less-than another + if it is preferred over the other. 
For example, + provider_compare('scorep', 'mpi', 'mvapich', 'openmpi') would + return -1 if mvapich should be preferred over openmpi for scorep.""" + return self._spec_compare(pkgname, 'providers', a, b, False, + provider_str) def spec_has_preferred_provider(self, pkgname, provider_str): - """Return True iff the named package has a list of preferred provider""" - return bool(self._order_for_package(pkgname, 'providers', provider_str, False)) - + """Return True iff the named package has a list of preferred + providers""" + return bool(self._order_for_package(pkgname, 'providers', + provider_str, False)) def version_compare(self, pkgname, a, b): """Return less-than-0, 0, or greater than 0 if version a of pkgname is - respecively less-than, equal-to, or greater-than version b of pkgname. - One version is less-than another if it is preferred over the other.""" + respectively less-than, equal-to, or greater-than version b of + pkgname. One version is less-than another if it is preferred over + the other.""" return self._spec_compare(pkgname, 'version', a, b, True, None) - def variant_compare(self, pkgname, a, b): """Return less-than-0, 0, or greater than 0 if variant a of pkgname is - respecively less-than, equal-to, or greater-than variant b of pkgname. - One variant is less-than another if it is preferred over the other.""" + respectively less-than, equal-to, or greater-than variant b of + pkgname. One variant is less-than another if it is preferred over + the other.""" return self._component_compare(pkgname, 'variant', a, b, False, None) - def architecture_compare(self, pkgname, a, b): - """Return less-than-0, 0, or greater than 0 if architecture a of pkgname is - respecively less-than, equal-to, or greater-than architecture b of pkgname. - One architecture is less-than another if it is preferred over the other.""" - return self._component_compare(pkgname, 'architecture', a, b, False, None) - + """Return less-than-0, 0, or greater than 0 if architecture a of pkgname + is respectively less-than, equal-to, or greater-than architecture b + of pkgname. One architecture is less-than another if it is preferred + over the other.""" + return self._component_compare(pkgname, 'architecture', a, b, + False, None) def compiler_compare(self, pkgname, a, b): """Return less-than-0, 0, or greater than 0 if compiler a of pkgname is - respecively less-than, equal-to, or greater-than compiler b of pkgname. - One compiler is less-than another if it is preferred over the other.""" + respecively less-than, equal-to, or greater-than compiler b of + pkgname. One compiler is less-than another if it is preferred over + the other.""" return self._spec_compare(pkgname, 'compiler', a, b, False, None) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index d3a5f66e57..e694f2b2da 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -96,7 +96,6 @@ expansion when it is the first character in an id typed on the command line. """ import sys -import itertools import hashlib import base64 import imp @@ -116,8 +115,6 @@ import spack.error import spack.compilers as compilers -# TODO: move display_specs to some other location. -from spack.cmd.find import display_specs from spack.version import * from spack.util.string import * from spack.util.prefix import Prefix @@ -155,6 +152,7 @@ every time we call str()""" _any_version = VersionList([':']) +# Special types of dependencies. 
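# A hedged usage sketch for the cmp-style comparators above: ordering
# candidate providers with the scorep/mpi example from the docstring.
# spack.pkgsort is the PreferredPackages instance used by the Spec
# comparison code later in this diff; candidate_specs is assumed to be a
# list of provider Specs (e.g. mvapich, openmpi).
import functools

import spack

candidate_specs = []    # assumed: fill with provider Specs
ordered = sorted(
    candidate_specs,
    key=functools.cmp_to_key(
        lambda a, b: spack.pkgsort.provider_compare('scorep', 'mpi', a, b)))
# Most preferred provider comes first, since "less-than" means "preferred".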
alldeps = ('build', 'link', 'run') nolink = ('build', 'run') @@ -296,10 +294,15 @@ def __repr__(self): @key_ordering class DependencySpec(object): - """ - Dependencies have conditions in which they apply. + """Dependencies can be one (or more) of several types: - This stores both what is depended on and why it is a dependency. + - build: needs to be in the PATH at build time. + - link: is linked to and added to compiler flags. + - run: needs to be in the PATH for the package to run. + + Fields: + - spec: the spack.spec.Spec description of a dependency. + - deptypes: strings representing the type of dependency this is. """ def __init__(self, spec, deptypes): self.spec = spec @@ -558,15 +561,15 @@ def dependents(self, deptype=None): def _find_deps_dict(self, where, deptype): deptype = self._deptype_norm(deptype) - return [(dep.spec.name, dep) - for dep in where.values() - if deptype and any(d in deptype for d in dep.deptypes)] + return dict((dep.spec.name, dep) + for dep in where.values() + if deptype and any(d in deptype for d in dep.deptypes)) def dependencies_dict(self, deptype=None): - return dict(self._find_deps_dict(self._dependencies, deptype)) + return self._find_deps_dict(self._dependencies, deptype) def dependents_dict(self, deptype=None): - return dict(self._find_deps_dict(self._dependents, deptype)) + return self._find_deps_dict(self._dependents, deptype) # # Private routines here are called by the parser when building a spec. @@ -644,7 +647,8 @@ def _set_platform(self, value): mod = imp.load_source(mod_name, path) class_name = mod_to_class(value) if not hasattr(mod, class_name): - tty.die('No class %s defined in %s' % (class_name, mod_name)) + tty.die( + 'No class %s defined in %s' % (class_name, mod_name)) cls = getattr(mod, class_name) if not inspect.isclass(cls): tty.die('%s.%s is not a class' % (mod_name, class_name)) @@ -667,13 +671,15 @@ def _set_platform(self, value): def _set_os(self, value): """Called by the parser to set the architecture operating system""" - if self.architecture.platform: - self.architecture.platform_os = self.architecture.platform.operating_system(value) + arch = self.architecture + if arch.platform: + arch.platform_os = arch.platform.operating_system(value) def _set_target(self, value): """Called by the parser to set the architecture target""" - if self.architecture.platform: - self.architecture.target = self.architecture.platform.target(value) + arch = self.architecture + if arch.platform: + arch.target = arch.platform.target(value) def _add_dependency(self, spec, deptypes): """Called by the parser to add another spec as a dependency.""" @@ -688,8 +694,9 @@ def _add_dependency(self, spec, deptypes): # @property def fullname(self): - return (('%s.%s' % (self.namespace, self.name)) if self.namespace else - (self.name if self.name else '')) + return ( + ('%s.%s' % (self.namespace, self.name)) if self.namespace else + (self.name if self.name else '')) @property def root(self): @@ -745,15 +752,15 @@ def concrete(self): if self._concrete: return True - self._concrete = bool(not self.virtual - and self.namespace is not None - and self.versions.concrete - and self.variants.concrete - and self.architecture - and self.architecture.concrete - and self.compiler and self.compiler.concrete - and self.compiler_flags.concrete - and self._dependencies.concrete) + self._concrete = bool(not self.virtual and + self.namespace is not None and + self.versions.concrete and + self.variants.concrete and + self.architecture and + self.architecture.concrete and + 
self.compiler and self.compiler.concrete and + self.compiler_flags.concrete and + self._dependencies.concrete) return self._concrete def traverse(self, visited=None, deptype=None, **kwargs): @@ -864,9 +871,9 @@ def return_val(res): for name in sorted(successors): child = successors[name] children = child.spec.traverse_with_deptype( - visited, d=d + 1, deptype=deptype_query, - deptype_query=deptype_query, - _self_deptype=child.deptypes, **kwargs) + visited, d=d + 1, deptype=deptype_query, + deptype_query=deptype_query, + _self_deptype=child.deptypes, **kwargs) for elt in children: yield elt @@ -914,9 +921,11 @@ def to_node_dict(self): d = { 'parameters': params, 'arch': self.architecture, - 'dependencies': dict((d, (deps[d].spec.dag_hash(), - deps[d].deptypes)) - for d in sorted(deps.keys())) + 'dependencies': dict( + (name, { + 'hash': dspec.spec.dag_hash(), + 'type': [str(s) for s in dspec.deptypes]}) + for name, dspec in deps.items()) } # Older concrete specs do not have a namespace. Omit for @@ -982,13 +991,33 @@ def from_node_dict(node): raise SpackRecordError( "Did not find a valid format for variants in YAML file") - # XXX(deptypes): why are dependencies not meant to be read here? - #for name, dep_info in node['dependencies'].items(): - # (dag_hash, deptypes) = dep_info - # spec._dependencies[name] = DependencySpec(dag_hash, deptypes) + # Don't read dependencies here; from_node_dict() is used by + # from_yaml() to read the root *and* each dependency spec. return spec + @staticmethod + def read_yaml_dep_specs(dependency_dict): + """Read the DependencySpec portion of a YAML-formatted Spec. + + This needs to be backward-compatible with older spack spec + formats so that reindex will work on old specs/databases. + """ + for dep_name, elt in dependency_dict.items(): + if isinstance(elt, basestring): + # original format, elt is just the dependency hash. + dag_hash, deptypes = elt, ['build', 'link'] + elif isinstance(elt, tuple): + # original deptypes format: (used tuples, not future-proof) + dag_hash, deptypes = elt + elif isinstance(elt, dict): + # new format: elements of dependency spec are keyed. + dag_hash, deptypes = elt['hash'], elt['type'] + else: + raise SpecError("Couldn't parse dependency types in spec.") + + yield dep_name, dag_hash, list(deptypes) + @staticmethod def from_yaml(stream): """Construct a spec from YAML. @@ -1000,27 +1029,30 @@ def from_yaml(stream): represent more than the DAG does. """ - deps = {} - spec = None - try: yfile = yaml.load(stream) except MarkedYAMLError, e: raise SpackYAMLError("error parsing YAML spec:", str(e)) - for node in yfile['spec']: - name = next(iter(node)) - dep = Spec.from_node_dict(node) - if not spec: - spec = dep - deps[dep.name] = dep + nodes = yfile['spec'] - for node in yfile['spec']: + # Read nodes out of list. Root spec is the first element; + # dependencies are the following elements. + dep_list = [Spec.from_node_dict(node) for node in nodes] + if not dep_list: + raise SpecError("YAML spec contains no nodes.") + deps = dict((spec.name, spec) for spec in dep_list) + spec = dep_list[0] + + for node in nodes: + # get dependency dict from the node. 
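# The three on-disk shapes accepted by read_yaml_dep_specs() above, with
# made-up hashes. This restates the same normalization outside the Spec
# class (using str where the hunk uses the Python 2 basestring check).
def normalize_dep(elt):
    if isinstance(elt, str):                  # oldest format: bare hash
        return elt, ['build', 'link']
    elif isinstance(elt, tuple):              # old deptypes format
        dag_hash, deptypes = elt
        return dag_hash, list(deptypes)
    elif isinstance(elt, dict):               # new keyed format
        return elt['hash'], list(elt['type'])
    raise ValueError('unrecognized dependency entry: %r' % (elt,))

examples = {
    'mpich':    'aaaa1111',
    'callpath': ('bbbb2222', ('build', 'link')),
    'dyninst':  {'hash': 'cccc3333', 'type': ['build', 'link', 'run']},
}
for name, elt in examples.items():
    print((name,) + normalize_dep(elt))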
name = next(iter(node)) - for dep_name, (dep, deptypes) in \ - node[name]['dependencies'].items(): - deps[name]._dependencies[dep_name] = \ - DependencySpec(deps[dep_name], deptypes) + yaml_deps = node[name]['dependencies'] + for dname, dhash, dtypes in Spec.read_yaml_dep_specs(yaml_deps): + # Fill in dependencies by looking them up by name in deps dict + deps[name]._dependencies[dname] = DependencySpec( + deps[dname], set(dtypes)) + return spec def _concretize_helper(self, presets=None, visited=None): @@ -1171,14 +1203,16 @@ def _expand_virtual_packages(self): def feq(cfield, sfield): return (not cfield) or (cfield == sfield) - if replacement is spec or (feq(replacement.name, spec.name) and - feq(replacement.versions, spec.versions) and - feq(replacement.compiler, spec.compiler) and - feq(replacement.architecture, spec.architecture) and - feq(replacement._dependencies, spec._dependencies) and - feq(replacement.variants, spec.variants) and - feq(replacement.external, spec.external) and - feq(replacement.external_module, spec.external_module)): + if replacement is spec or ( + feq(replacement.name, spec.name) and + feq(replacement.versions, spec.versions) and + feq(replacement.compiler, spec.compiler) and + feq(replacement.architecture, spec.architecture) and + feq(replacement._dependencies, spec._dependencies) and + feq(replacement.variants, spec.variants) and + feq(replacement.external, spec.external) and + feq(replacement.external_module, + spec.external_module)): continue # Refine this spec to the candidate. This uses # replace_with AND dup so that it can work in @@ -1235,10 +1269,10 @@ def concretize(self): if s.namespace is None: s.namespace = spack.repo.repo_for_pkg(s.name).namespace - for s in self.traverse(root=False): if s.external_module: - compiler = spack.compilers.compiler_for_spec(s.compiler, s.architecture) + compiler = spack.compilers.compiler_for_spec( + s.compiler, s.architecture) for mod in compiler.modules: load_module(mod) @@ -1505,13 +1539,13 @@ def normalize(self, force=False): # Ensure first that all packages & compilers in the DAG exist. 
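# The nested feq() helper in _expand_virtual_packages() above encodes
# "this field is either unconstrained or already agrees", so an unset
# candidate field never blocks the replacement. A tiny restatement with
# concrete (invented) values:
def feq(cfield, sfield):
    return (not cfield) or (cfield == sfield)

assert feq(None, 'x86_64')      # unconstrained: acceptable
assert feq('gcc', 'gcc')        # constrained and matching
assert not feq('clang', 'gcc')  # constrained and conflicting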
self.validate_names() # Get all the dependencies into one DependencyMap - spec_deps = self.flat_dependencies_with_deptype(copy=False, - deptype_query=alldeps) + spec_deps = self.flat_dependencies_with_deptype( + copy=False, deptype_query=alldeps) # Initialize index of virtual dependency providers if # concretize didn't pass us one already - provider_index = ProviderIndex([s.spec for s in spec_deps.values()], - restrict=True) + provider_index = ProviderIndex( + [s.spec for s in spec_deps.values()], restrict=True) # traverse the package DAG and fill out dependencies according # to package files & their 'when' specs @@ -1584,20 +1618,17 @@ def constrain(self, other, deps=True): other.variants[v]) # TODO: Check out the logic here - if self.architecture is not None and other.architecture is not None: - if self.architecture.platform is not None and other.architecture.platform is not None: - if self.architecture.platform != other.architecture.platform: - raise UnsatisfiableArchitectureSpecError(self.architecture, - other.architecture) - if self.architecture.platform_os is not None and other.architecture.platform_os is not None: - if self.architecture.platform_os != other.architecture.platform_os: - raise UnsatisfiableArchitectureSpecError(self.architecture, - other.architecture) - if self.architecture.target is not None and other.architecture.target is not None: - if self.architecture.target != other.architecture.target: - raise UnsatisfiableArchitectureSpecError(self.architecture, - other.architecture) - + sarch, oarch = self.architecture, other.architecture + if sarch is not None and oarch is not None: + if sarch.platform is not None and oarch.platform is not None: + if sarch.platform != oarch.platform: + raise UnsatisfiableArchitectureSpecError(sarch, oarch) + if sarch.platform_os is not None and oarch.platform_os is not None: + if sarch.platform_os != oarch.platform_os: + raise UnsatisfiableArchitectureSpecError(sarch, oarch) + if sarch.target is not None and oarch.target is not None: + if sarch.target != oarch.target: + raise UnsatisfiableArchitectureSpecError(sarch, oarch) changed = False if self.compiler is not None and other.compiler is not None: @@ -1612,15 +1643,16 @@ def constrain(self, other, deps=True): changed |= self.compiler_flags.constrain(other.compiler_flags) old = str(self.architecture) - if self.architecture is None or other.architecture is None: - self.architecture = self.architecture or other.architecture + sarch, oarch = self.architecture, other.architecture + if sarch is None or other.architecture is None: + self.architecture = sarch or oarch else: - if self.architecture.platform is None or other.architecture.platform is None: - self.architecture.platform = self.architecture.platform or other.architecture.platform - if self.architecture.platform_os is None or other.architecture.platform_os is None: - self.architecture.platform_os = self.architecture.platform_os or other.architecture.platform_os - if self.architecture.target is None or other.architecture.target is None: - self.architecture.target = self.architecture.target or other.architecture.target + if sarch.platform is None or oarch.platform is None: + self.architecture.platform = sarch.platform or oarch.platform + if sarch.platform_os is None or oarch.platform_os is None: + sarch.platform_os = sarch.platform_os or oarch.platform_os + if sarch.target is None or oarch.target is None: + sarch.target = sarch.target or oarch.target changed |= (str(self.architecture) != old) if deps: @@ -1751,15 +1783,25 @@ def 
satisfies(self, other, deps=True, strict=False): # Architecture satisfaction is currently just string equality. # If not strict, None means unconstrained. - if self.architecture and other.architecture: - if ((self.architecture.platform and other.architecture.platform and self.architecture.platform != other.architecture.platform) or - (self.architecture.platform_os and other.architecture.platform_os and self.architecture.platform_os != other.architecture.platform_os) or - (self.architecture.target and other.architecture.target and self.architecture.target != other.architecture.target)): + sarch, oarch = self.architecture, other.architecture + if sarch and oarch: + if ((sarch.platform and + oarch.platform and + sarch.platform != oarch.platform) or + + (sarch.platform_os and + oarch.platform_os and + sarch.platform_os != oarch.platform_os) or + + (sarch.target and + oarch.target and + sarch.target != oarch.target)): return False - elif strict and ((other.architecture and not self.architecture) or - (other.architecture.platform and not self.architecture.platform) or - (other.architecture.platform_os and not self.architecture.platform_os) or - (other.architecture.target and not self.architecture.target)): + + elif strict and ((oarch and not sarch) or + (oarch.platform and not sarch.platform) or + (oarch.platform_os and not sarch.platform_os) or + (oarch.target and not sarch.target)): return False if not self.compiler_flags.satisfies( @@ -1841,11 +1883,16 @@ def _dup(self, other, **kwargs): # We don't count dependencies as changes here changed = True if hasattr(self, 'name'): - changed = (self.name != other.name and self.versions != other.versions and \ - self.architecture != other.architecture and self.compiler != other.compiler and \ - self.variants != other.variants and self._normal != other._normal and \ - self.concrete != other.concrete and self.external != other.external and \ - self.external_module != other.external_module and self.compiler_flags != other.compiler_flags) + changed = (self.name != other.name and + self.versions != other.versions and + self.architecture != other.architecture and + self.compiler != other.compiler and + self.variants != other.variants and + self._normal != other._normal and + self.concrete != other.concrete and + self.external != other.external and + self.external_module != other.external_module and + self.compiler_flags != other.compiler_flags) # Local node attributes get copied first. self.name = other.name @@ -1889,7 +1936,7 @@ def _dup(self, other, **kwargs): # here. if depspec.spec.name not in new_spec._dependencies: new_spec._add_dependency( - new_nodes[depspec.spec.name], depspec.deptypes) + new_nodes[depspec.spec.name], depspec.deptypes) # Since we preserved structure, we can copy _normal safely. 
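# The non-strict architecture check rewritten earlier in this hunk boils
# down to: a field only disqualifies a match when *both* specs constrain
# it and the values differ. A compact restatement, assuming duck-typed
# arch objects with the attribute names used above:
def arch_compatible(sarch, oarch):
    for field in ('platform', 'platform_os', 'target'):
        a, b = getattr(sarch, field, None), getattr(oarch, field, None)
        if a and b and a != b:
            return False
    return True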
self._normal = other._normal @@ -2000,7 +2047,6 @@ def _cmp_node(self): self.compiler, self.compiler_flags) - def eq_node(self, other): """Equality with another spec, not including dependencies.""" return self._cmp_node() == other._cmp_node() @@ -2196,41 +2242,39 @@ def write(s, c): def dep_string(self): return ''.join("^" + dep.format() for dep in self.sorted_deps()) - def __cmp__(self, other): - #Package name sort order is not configurable, always goes alphabetical + # Package name sort order is not configurable, always goes alphabetical if self.name != other.name: return cmp(self.name, other.name) - #Package version is second in compare order + # Package version is second in compare order pkgname = self.name if self.versions != other.versions: - return spack.pkgsort.version_compare(pkgname, - self.versions, other.versions) + return spack.pkgsort.version_compare( + pkgname, self.versions, other.versions) - #Compiler is third + # Compiler is third if self.compiler != other.compiler: - return spack.pkgsort.compiler_compare(pkgname, - self.compiler, other.compiler) + return spack.pkgsort.compiler_compare( + pkgname, self.compiler, other.compiler) - #Variants + # Variants if self.variants != other.variants: - return spack.pkgsort.variant_compare(pkgname, - self.variants, other.variants) + return spack.pkgsort.variant_compare( + pkgname, self.variants, other.variants) - #Target + # Target if self.architecture != other.architecture: - return spack.pkgsort.architecture_compare(pkgname, - self.architecture, other.architecture) + return spack.pkgsort.architecture_compare( + pkgname, self.architecture, other.architecture) - #Dependency is not configurable + # Dependency is not configurable if self._dependencies != other._dependencies: return -1 if self._dependencies < other._dependencies else 1 - #Equal specs + # Equal specs return 0 - def __str__(self): return self.format() + self.dep_string() @@ -2244,12 +2288,14 @@ def tree(self, **kwargs): indent = kwargs.pop('indent', 0) fmt = kwargs.pop('format', '$_$@$%@+$+$=') prefix = kwargs.pop('prefix', None) + deptypes = kwargs.pop('deptypes', ('build', 'link')) check_kwargs(kwargs, self.tree) out = "" cur_id = 0 ids = {} - for d, node in self.traverse(order='pre', cover=cover, depth=True): + for d, node in self.traverse( + order='pre', cover=cover, depth=True, deptypes=deptypes): if prefix is not None: out += prefix(node) out += " " * indent @@ -2303,8 +2349,8 @@ def __init__(self): # Lexer is always the same for every parser. 
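# Because tree() now defaults its deptypes argument to ('build', 'link'),
# run-type edges are omitted unless requested. A hedged usage sketch,
# assuming a working Spack with the mpileaks test package available:
from spack.spec import Spec

spec = Spec('mpileaks')
spec.concretize()
print(spec.tree())                                    # build + link only
print(spec.tree(deptypes=('build', 'link', 'run')))   # include run deps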
_lexer = SpecLexer() -class SpecParser(spack.parse.Parser): +class SpecParser(spack.parse.Parser): def __init__(self): super(SpecParser, self).__init__(_lexer) self.previous = None @@ -2357,8 +2403,8 @@ def do_parse(self): except spack.parse.ParseError, e: raise SpecParseError(e) - - # If the spec has an os or a target and no platform, give it the default platform + # If the spec has an os or a target and no platform, give it + # the default platform for spec in specs: for s in spec.traverse(): if s.architecture.os_string or s.architecture.target_string: diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py index fb91f24721..a849d5f350 100644 --- a/lib/spack/spack/test/__init__.py +++ b/lib/spack/spack/test/__init__.py @@ -32,15 +32,17 @@ from spack.test.tally_plugin import Tally """Names of tests to be included in Spack's test suite""" -test_names = ['architecture', 'versions', 'url_parse', 'url_substitution', 'packages', 'stage', - 'spec_syntax', 'spec_semantics', 'spec_dag', 'concretize', - 'multimethod', 'install', 'package_sanity', 'config', - 'directory_layout', 'pattern', 'python_version', 'git_fetch', - 'svn_fetch', 'hg_fetch', 'mirror', 'modules', 'url_extrapolate', - 'cc', 'link_tree', 'spec_yaml', 'optional_deps', - 'make_executable', 'configure_guess', 'lock', 'database', - 'namespace_trie', 'yaml', 'sbang', 'environment', 'cmd.find', - 'cmd.uninstall', 'cmd.test_install', 'cmd.test_compiler_cmd'] +test_names = [ + 'architecture', 'versions', 'url_parse', 'url_substitution', 'packages', + 'stage', 'spec_syntax', 'spec_semantics', 'spec_dag', 'concretize', + 'multimethod', 'install', 'package_sanity', 'config', 'directory_layout', + 'pattern', 'python_version', 'git_fetch', 'svn_fetch', 'hg_fetch', + 'mirror', 'modules', 'url_extrapolate', 'cc', 'link_tree', 'spec_yaml', + 'optional_deps', 'make_executable', 'build_system_guess', 'lock', + 'database', 'namespace_trie', 'yaml', 'sbang', 'environment', 'cmd.find', + 'cmd.uninstall', 'cmd.test_install', 'cmd.test_compiler_cmd', + 'cmd.module' +] def list_tests(): diff --git a/lib/spack/spack/test/architecture.py b/lib/spack/spack/test/architecture.py index ae3f08deed..09bdb021af 100644 --- a/lib/spack/spack/test/architecture.py +++ b/lib/spack/spack/test/architecture.py @@ -1,7 +1,31 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## """ Test checks if the architecture class is created correctly and also that the functions are looking for the correct architecture name """ -import unittest +import itertools import os import platform as py_platform import spack @@ -14,9 +38,8 @@ from spack.test.mock_packages_test import * -#class ArchitectureTest(unittest.TestCase): -class ArchitectureTest(MockPackagesTest): +class ArchitectureTest(MockPackagesTest): def setUp(self): super(ArchitectureTest, self).setUp() self.platform = spack.architecture.platform() @@ -36,24 +59,22 @@ def test_dict_functions_for_architecture(self): self.assertEqual(arch, new_arch) - self.assertTrue( isinstance(arch, spack.architecture.Arch) ) - self.assertTrue( isinstance(arch.platform, spack.architecture.Platform) ) - self.assertTrue( isinstance(arch.platform_os, - spack.architecture.OperatingSystem) ) - self.assertTrue( isinstance(arch.target, - spack.architecture.Target) ) - self.assertTrue( isinstance(new_arch, spack.architecture.Arch) ) - self.assertTrue( isinstance(new_arch.platform, - spack.architecture.Platform) ) - self.assertTrue( isinstance(new_arch.platform_os, - spack.architecture.OperatingSystem) ) - self.assertTrue( isinstance(new_arch.target, - spack.architecture.Target) ) - + self.assertTrue(isinstance(arch, spack.architecture.Arch)) + self.assertTrue(isinstance(arch.platform, spack.architecture.Platform)) + self.assertTrue(isinstance(arch.platform_os, + spack.architecture.OperatingSystem)) + self.assertTrue(isinstance(arch.target, + spack.architecture.Target)) + self.assertTrue(isinstance(new_arch, spack.architecture.Arch)) + self.assertTrue(isinstance(new_arch.platform, + spack.architecture.Platform)) + self.assertTrue(isinstance(new_arch.platform_os, + spack.architecture.OperatingSystem)) + self.assertTrue(isinstance(new_arch.target, + spack.architecture.Target)) def test_platform(self): output_platform_class = spack.architecture.platform() - my_arch_class = None if os.path.exists('/opt/cray/craype'): my_platform_class = CrayXc() elif os.path.exists('/bgsys'): @@ -91,7 +112,7 @@ def test_user_defaults(self): default_os = self.platform.operating_system("default_os") default_target = self.platform.target("default_target") - default_spec = Spec("libelf") # default is no args + default_spec = Spec("libelf") # default is no args default_spec.concretize() self.assertEqual(default_os, default_spec.architecture.platform_os) self.assertEqual(default_target, default_spec.architecture.target) @@ -107,10 +128,11 @@ def test_user_input_combination(self): combinations = itertools.product(os_list, target_list) results = [] for arch in combinations: - o,t = arch + o, t = arch spec = Spec("libelf os=%s target=%s" % (o, t)) spec.concretize() - results.append(spec.architecture.platform_os == self.platform.operating_system(o)) + results.append(spec.architecture.platform_os == + self.platform.operating_system(o)) results.append(spec.architecture.target == self.platform.target(t)) res = all(results) diff --git a/lib/spack/spack/test/configure_guess.py b/lib/spack/spack/test/build_system_guess.py similarity index 88% rename from lib/spack/spack/test/configure_guess.py rename to lib/spack/spack/test/build_system_guess.py index bad3673e7a..e728a47cf4 100644 --- 
a/lib/spack/spack/test/configure_guess.py +++ b/lib/spack/spack/test/build_system_guess.py @@ -28,14 +28,14 @@ import unittest from llnl.util.filesystem import * -from spack.cmd.create import ConfigureGuesser +from spack.cmd.create import BuildSystemGuesser from spack.stage import Stage from spack.test.mock_packages_test import * from spack.util.executable import which class InstallTest(unittest.TestCase): - """Tests the configure guesser in spack create""" + """Tests the build system guesser in spack create""" def setUp(self): self.tar = which('tar') @@ -44,12 +44,10 @@ def setUp(self): os.chdir(self.tmpdir) self.stage = None - def tearDown(self): shutil.rmtree(self.tmpdir, ignore_errors=True) os.chdir(self.orig_dir) - def check_archive(self, filename, system): mkdirp('archive') touch(join_path('archive', filename)) @@ -60,24 +58,24 @@ def check_archive(self, filename, system): with Stage(url) as stage: stage.fetch() - guesser = ConfigureGuesser() - guesser(stage) + guesser = BuildSystemGuesser() + guesser(stage, url) self.assertEqual(system, guesser.build_system) - - def test_python(self): - self.check_archive('setup.py', 'python') - - def test_autotools(self): self.check_archive('configure', 'autotools') - def test_cmake(self): self.check_archive('CMakeLists.txt', 'cmake') + def test_scons(self): + self.check_archive('SConstruct', 'scons') + + def test_python(self): + self.check_archive('setup.py', 'python') + + def test_R(self): + self.check_archive('NAMESPACE', 'R') def test_unknown(self): self.check_archive('foobar', 'unknown') - - diff --git a/lib/spack/spack/test/cmd/find.py b/lib/spack/spack/test/cmd/find.py index 371e9650e0..fa82db7733 100644 --- a/lib/spack/spack/test/cmd/find.py +++ b/lib/spack/spack/test/cmd/find.py @@ -27,11 +27,7 @@ import spack.cmd.find import unittest - -class Bunch(object): - - def __init__(self, **kwargs): - self.__dict__.update(kwargs) +from spack.util.pattern import Bunch class FindTest(unittest.TestCase): diff --git a/lib/spack/spack/test/cmd/module.py b/lib/spack/spack/test/cmd/module.py new file mode 100644 index 0000000000..36a4a73fe6 --- /dev/null +++ b/lib/spack/spack/test/cmd/module.py @@ -0,0 +1,83 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +import argparse +import os.path + +import spack.cmd.module as module +import spack.modules as modules +import spack.test.mock_database + + +class TestModule(spack.test.mock_database.MockDatabase): + + def _get_module_files(self, args): + return [ + modules.module_types[args.module_type](spec).file_name for spec in args.specs # NOQA: ignore=E501 + ] + + def test_module_common_operations(self): + parser = argparse.ArgumentParser() + module.setup_parser(parser) + # Try to remove a non existing module [tcl] + args = parser.parse_args(['rm', 'doesnotexist']) + self.assertRaises(SystemExit, module.module, parser, args) + # Remove existing modules [tcl] + args = parser.parse_args(['rm', '-y', 'mpileaks']) + module_files = self._get_module_files(args) + for item in module_files: + self.assertTrue(os.path.exists(item)) + module.module(parser, args) + for item in module_files: + self.assertFalse(os.path.exists(item)) + # Add them back [tcl] + args = parser.parse_args(['refresh', '-y', 'mpileaks']) + module.module(parser, args) + for item in module_files: + self.assertTrue(os.path.exists(item)) + # TODO : test the --delete-tree option + # TODO : this requires having a separate directory for test modules + # Try to find a module with multiple matches + args = parser.parse_args(['find', 'mpileaks']) + self.assertRaises(SystemExit, module.module, parser, args) + # Try to find a module with no matches + args = parser.parse_args(['find', 'doesnotexist']) + self.assertRaises(SystemExit, module.module, parser, args) + # Try to find a module + args = parser.parse_args(['find', 'libelf']) + module.module(parser, args) + # Remove existing modules [dotkit] + args = parser.parse_args(['rm', '-y', '-m', 'dotkit', 'mpileaks']) + module_files = self._get_module_files(args) + for item in module_files: + self.assertTrue(os.path.exists(item)) + module.module(parser, args) + for item in module_files: + self.assertFalse(os.path.exists(item)) + # Add them back [dotkit] + args = parser.parse_args(['refresh', '-y', '-m', 'dotkit', 'mpileaks']) + module.module(parser, args) + for item in module_files: + self.assertTrue(os.path.exists(item)) + # TODO : add tests for loads and find to check the prompt format diff --git a/lib/spack/spack/test/modules.py b/lib/spack/spack/test/modules.py index 6d2e3705bd..135cd028e3 100644 --- a/lib/spack/spack/test/modules.py +++ b/lib/spack/spack/test/modules.py @@ -27,7 +27,6 @@ import StringIO import spack.modules -import unittest from spack.test.mock_packages_test import MockPackagesTest FILE_REGISTRY = collections.defaultdict(StringIO.StringIO) @@ -266,7 +265,7 @@ def test_alter_environment(self): def test_blacklist(self): spack.modules.CONFIGURATION = configuration_blacklist - spec = spack.spec.Spec('mpileaks') + spec = spack.spec.Spec('mpileaks ^zmpi') content = self.get_modulefile_content(spec) self.assertEqual(len([x for x in content if 'is-loaded' in x]), 1) self.assertEqual(len([x for x in content if 'module load ' in x]), 1) diff --git a/lib/spack/spack/util/pattern.py b/lib/spack/spack/util/pattern.py index 6d4bcb1039..bc5e9d2ffe 100644 --- a/lib/spack/spack/util/pattern.py +++ b/lib/spack/spack/util/pattern.py @@ -28,42 +28,50 @@ def composite(interface=None, method_list=None, 
container=list): - """ - Returns a class decorator that patches a class adding all the methods it needs to be a composite for a given - interface. + """Returns a class decorator that patches a class adding all the methods + it needs to be a composite for a given interface. - :param interface: class exposing the interface to which the composite object must conform. Only non-private and - non-special methods will be taken into account + :param interface: class exposing the interface to which the composite + object must conform. Only non-private and non-special methods will be + taken into account :param method_list: names of methods that should be part of the composite - :param container: container for the composite object (default = list). Must fulfill the MutableSequence contract. - The composite class will expose the container API to manage object composition + :param container: container for the composite object (default = list). + Must fulfill the MutableSequence contract. The composite class will expose + the container API to manage object composition :return: class decorator """ - # Check if container fulfills the MutableSequence contract and raise an exception if it doesn't - # The patched class returned by the decorator will inherit from the container class to expose the - # interface needed to manage objects composition + # Check if container fulfills the MutableSequence contract and raise an + # exception if it doesn't. The patched class returned by the decorator will + # inherit from the container class to expose the interface needed to manage + # objects composition if not issubclass(container, collections.MutableSequence): raise TypeError("Container must fulfill the MutableSequence contract") - # Check if at least one of the 'interface' or the 'method_list' arguments are defined + # Check if at least one of the 'interface' or the 'method_list' arguments + # are defined if interface is None and method_list is None: - raise TypeError("Either 'interface' or 'method_list' must be defined on a call to composite") + raise TypeError("Either 'interface' or 'method_list' must be defined on a call to composite") # NOQA : ignore=E501 def cls_decorator(cls): - # Retrieve the base class of the composite. Inspect its methods and decide which ones will be overridden + # Retrieve the base class of the composite. Inspect its methods and + # decide which ones will be overridden def no_special_no_private(x): return inspect.ismethod(x) and not x.__name__.startswith('_') - # Patch the behavior of each of the methods in the previous list. This is done associating an instance of the - # descriptor below to any method that needs to be patched. + # Patch the behavior of each of the methods in the previous list. + # This is done associating an instance of the descriptor below to + # any method that needs to be patched. class IterateOver(object): + """Decorator used to patch methods in a composite. + + It iterates over all the items in the instance containing the + associated attribute and calls for each of them an attribute + with the same name """ - Decorator used to patch methods in a composite. 
It iterates over all the items in the instance containing the - associated attribute and calls for each of them an attribute with the same name - """ + def __init__(self, name, func=None): self.name = name self.func = func @@ -72,8 +80,9 @@ def __get__(self, instance, owner): def getter(*args, **kwargs): for item in instance: getattr(item, self.name)(*args, **kwargs) - # If we are using this descriptor to wrap a method from an interface, then we must conditionally - # use the `functools.wraps` decorator to set the appropriate fields. + # If we are using this descriptor to wrap a method from an + # interface, then we must conditionally use the + # `functools.wraps` decorator to set the appropriate fields if self.func is not None: getter = functools.wraps(self.func)(getter) return getter @@ -81,7 +90,8 @@ def getter(*args, **kwargs): dictionary_for_type_call = {} # Construct a dictionary with the methods explicitly passed as name if method_list is not None: - # python@2.7: method_list_dict = {name: IterateOver(name) for name in method_list} + # python@2.7: method_list_dict = {name: IterateOver(name) for name + # in method_list} method_list_dict = {} for name in method_list: method_list_dict[name] = IterateOver(name) @@ -89,28 +99,40 @@ def getter(*args, **kwargs): # Construct a dictionary with the methods inspected from the interface if interface is not None: ########## - # python@2.7: interface_methods = {name: method for name, method in inspect.getmembers(interface, predicate=no_special_no_private)} + # python@2.7: interface_methods = {name: method for name, method in + # inspect.getmembers(interface, predicate=no_special_no_private)} interface_methods = {} - for name, method in inspect.getmembers(interface, predicate=no_special_no_private): + for name, method in inspect.getmembers(interface, predicate=no_special_no_private): # NOQA: ignore=E501 interface_methods[name] = method ########## - # python@2.7: interface_methods_dict = {name: IterateOver(name, method) for name, method in interface_methods.iteritems()} + # python@2.7: interface_methods_dict = {name: IterateOver(name, + # method) for name, method in interface_methods.iteritems()} interface_methods_dict = {} for name, method in interface_methods.iteritems(): interface_methods_dict[name] = IterateOver(name, method) ########## dictionary_for_type_call.update(interface_methods_dict) - # Get the methods that are defined in the scope of the composite class and override any previous definition + # Get the methods that are defined in the scope of the composite + # class and override any previous definition ########## - # python@2.7: cls_method = {name: method for name, method in inspect.getmembers(cls, predicate=inspect.ismethod)} + # python@2.7: cls_method = {name: method for name, method in + # inspect.getmembers(cls, predicate=inspect.ismethod)} cls_method = {} - for name, method in inspect.getmembers(cls, predicate=inspect.ismethod): + for name, method in inspect.getmembers(cls, predicate=inspect.ismethod): # NOQA: ignore=E501 cls_method[name] = method ########## dictionary_for_type_call.update(cls_method) # Generate the new class on the fly and return it # FIXME : inherit from interface if we start to use ABC classes? 
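# A toy composite to make the decorator above concrete. It uses only the
# method_list form; the classes are invented. A single write() call fans
# out to every member, and the container API (append) comes from list.
from spack.util.pattern import composite

@composite(method_list=['write'])
class WriterGroup(object):
    pass

class Console(object):
    def write(self, msg):
        print('console: ' + msg)

class Memory(object):
    def __init__(self):
        self.lines = []

    def write(self, msg):
        self.lines.append(msg)

group = WriterGroup()
group.append(Console())
group.append(Memory())
group.write('hello')    # delegated to every member, in insertion order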
- wrapper_class = type(cls.__name__, (cls, container), dictionary_for_type_call) + wrapper_class = type(cls.__name__, (cls, container), + dictionary_for_type_call) return wrapper_class return cls_decorator + + +class Bunch(object): + """Carries a bunch of named attributes (from Alex Martelli bunch)""" + + def __init__(self, **kwargs): + self.__dict__.update(kwargs) diff --git a/share/spack/csh/spack.csh b/share/spack/csh/spack.csh index d64ce8935b..5acd190449 100644 --- a/share/spack/csh/spack.csh +++ b/share/spack/csh/spack.csh @@ -74,25 +74,25 @@ case unload: # tool's commands to add/remove the result from the environment. switch ($_sp_subcommand) case "use": - set _sp_full_spec = ( "`\spack $_sp_flags module find dotkit $_sp_spec`" ) + set _sp_full_spec = ( "`\spack $_sp_flags module find --module-type dotkit $_sp_spec`" ) if ( $? == 0 ) then use $_sp_module_args $_sp_full_spec endif breaksw case "unuse": - set _sp_full_spec = ( "`\spack $_sp_flags module find dotkit $_sp_spec`" ) + set _sp_full_spec = ( "`\spack $_sp_flags module find --module-type dotkit $_sp_spec`" ) if ( $? == 0 ) then unuse $_sp_module_args $_sp_full_spec endif breaksw case "load": - set _sp_full_spec = ( "`\spack $_sp_flags module find tcl $_sp_spec`" ) + set _sp_full_spec = ( "`\spack $_sp_flags module find --module-type tcl $_sp_spec`" ) if ( $? == 0 ) then module load $_sp_module_args $_sp_full_spec endif breaksw case "unload": - set _sp_full_spec = ( "`\spack $_sp_flags module find tcl $_sp_spec`" ) + set _sp_full_spec = ( "`\spack $_sp_flags module find --module-type tcl $_sp_spec`" ) if ( $? == 0 ) then module unload $_sp_module_args $_sp_full_spec endif diff --git a/share/spack/setup-env.sh b/share/spack/setup-env.sh index 19206255db..c6183f990d 100755 --- a/share/spack/setup-env.sh +++ b/share/spack/setup-env.sh @@ -117,19 +117,19 @@ function spack { # If spack module command comes back with an error, do nothing. 
case $_sp_subcommand in "use") - if _sp_full_spec=$(command spack $_sp_flags module find $_sp_subcommand_args dotkit $_sp_spec); then + if _sp_full_spec=$(command spack $_sp_flags module find $_sp_subcommand_args --module-type dotkit $_sp_spec); then use $_sp_module_args $_sp_full_spec fi ;; "unuse") - if _sp_full_spec=$(command spack $_sp_flags module find $_sp_subcommand_args dotkit $_sp_spec); then + if _sp_full_spec=$(command spack $_sp_flags module find $_sp_subcommand_args --module-type dotkit $_sp_spec); then unuse $_sp_module_args $_sp_full_spec fi ;; "load") - if _sp_full_spec=$(command spack $_sp_flags module find $_sp_subcommand_args tcl $_sp_spec); then + if _sp_full_spec=$(command spack $_sp_flags module find $_sp_subcommand_args --module-type tcl $_sp_spec); then module load $_sp_module_args $_sp_full_spec fi ;; "unload") - if _sp_full_spec=$(command spack $_sp_flags module find $_sp_subcommand_args tcl $_sp_spec); then + if _sp_full_spec=$(command spack $_sp_flags module find $_sp_subcommand_args --module-type tcl $_sp_spec); then module unload $_sp_module_args $_sp_full_spec fi ;; esac diff --git a/var/spack/repos/builtin/packages/adios/package.py b/var/spack/repos/builtin/packages/adios/package.py index 9e0452ba6f..59e0a451a9 100644 --- a/var/spack/repos/builtin/packages/adios/package.py +++ b/var/spack/repos/builtin/packages/adios/package.py @@ -12,9 +12,9 @@ class Adios(Package): """ homepage = "http://www.olcf.ornl.gov/center-projects/adios/" - url = "http://users.nccs.gov/~pnorbert/adios-1.9.0.tar.gz" + url = "https://github.com/ornladios/ADIOS/archive/v1.9.0.tar.gz" - version('1.9.0', 'dbf5cb10e32add2f04c9b4052b7ffa76') + version('1.9.0', '310ff02388bbaa2b1c1710ee970b5678') # Lots of setting up here for this package # module swap PrgEnv-intel PrgEnv-$COMP diff --git a/var/spack/repos/builtin/packages/cdo/package.py b/var/spack/repos/builtin/packages/cdo/package.py new file mode 100644 index 0000000000..7400c3a56c --- /dev/null +++ b/var/spack/repos/builtin/packages/cdo/package.py @@ -0,0 +1,42 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Cdo(Package): + """CDO is a collection of command line Operators to manipulate and analyse + Climate and NWP model Data. 
""" + + homepage = "https://code.zmaw.de/projects/cdo" + url = "https://code.zmaw.de/attachments/download/10198/cdo-1.6.9.tar.gz" + + version('1.6.9', 'bf0997bf20e812f35e10188a930e24e2') + + depends_on('netcdf') + + def install(self, spec, prefix): + configure('--prefix={0}'.format(prefix)) + make() + make('install') diff --git a/var/spack/repos/builtin/packages/cmake/package.py b/var/spack/repos/builtin/packages/cmake/package.py index b39b086396..bfb8764feb 100644 --- a/var/spack/repos/builtin/packages/cmake/package.py +++ b/var/spack/repos/builtin/packages/cmake/package.py @@ -30,6 +30,7 @@ class Cmake(Package): homepage = 'https://www.cmake.org' url = 'https://cmake.org/files/v3.4/cmake-3.4.3.tar.gz' + version('3.6.0', 'aa40fbecf49d99c083415c2411d12db9') version('3.5.2', '701386a1b5ec95f8d1075ecf96383e02') version('3.5.1', 'ca051f4a66375c89d1a524e726da0296') version('3.5.0', '33c5d09d4c33d4ffcc63578a6ba8777e') diff --git a/var/spack/repos/builtin/packages/flex/package.py b/var/spack/repos/builtin/packages/flex/package.py index 800e4b9d96..9b173bb0dd 100644 --- a/var/spack/repos/builtin/packages/flex/package.py +++ b/var/spack/repos/builtin/packages/flex/package.py @@ -35,6 +35,7 @@ class Flex(Package): version('2.5.39', 'e133e9ead8ec0a58d81166b461244fde') depends_on("bison", type='build') + depends_on("m4", type='build') def install(self, spec, prefix): configure("--prefix=%s" % prefix) diff --git a/var/spack/repos/builtin/packages/hypre/package.py b/var/spack/repos/builtin/packages/hypre/package.py index f87dae9f4e..65fef57559 100644 --- a/var/spack/repos/builtin/packages/hypre/package.py +++ b/var/spack/repos/builtin/packages/hypre/package.py @@ -23,7 +23,9 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * -import os, sys +import os +import sys + class Hypre(Package): """Hypre is a library of high performance preconditioners that @@ -37,7 +39,7 @@ class Hypre(Package): version('2.10.0b', '768be38793a35bb5d055905b271f5b8e') # hypre does not know how to build shared libraries on Darwin - variant('shared', default=sys.platform!='darwin', description="Build shared library version (disables static library)") + variant('shared', default=(sys.platform != 'darwin'), description="Build shared library version (disables static library)") # SuperluDist have conflicting headers with those in Hypre variant('internal-superlu', default=True, description="Use internal Superlu routines") @@ -46,21 +48,26 @@ class Hypre(Package): depends_on("lapack") def install(self, spec, prefix): - blas_dir = spec['blas'].prefix - lapack_dir = spec['lapack'].prefix - mpi_dir = spec['mpi'].prefix - - os.environ['CC'] = os.path.join(mpi_dir, 'bin', 'mpicc') - os.environ['CXX'] = os.path.join(mpi_dir, 'bin', 'mpicxx') - os.environ['F77'] = os.path.join(mpi_dir, 'bin', 'mpif77') - + os.environ['CC'] = spec['mpi'].mpicc + os.environ['CXX'] = spec['mpi'].mpicxx + os.environ['F77'] = spec['mpi'].mpif77 + # Since +shared does not build on macOS and also Atlas does not have + # a single static lib to build against, link against shared libs with + # a hope that --whole-archive linker option (or alike) was used + # to command the linker to include whole static libs' content into the + # shared lib + # Note: --with-(lapack|blas)_libs= needs space separated list of names configure_args = [ - "--prefix=%s" % prefix, - "--with-lapack-libs=lapack", - "--with-lapack-lib-dirs=%s/lib" % lapack_dir, - 
"--with-blas-libs=blas", - "--with-blas-lib-dirs=%s/lib" % blas_dir] + '--prefix=%s' % prefix, + '--with-lapack-libs=%s' % to_lib_name( + spec['lapack'].lapack_shared_lib), + '--with-lapack-lib-dirs=%s/lib' % spec['lapack'].prefix, + '--with-blas-libs=%s' % to_lib_name( + spec['blas'].blas_shared_lib), + '--with-blas-lib-dirs=%s/lib' % spec['blas'].prefix + ] + if '+shared' in self.spec: configure_args.append("--enable-shared") @@ -76,4 +83,12 @@ def install(self, spec, prefix): configure(*configure_args) make() + if self.run_tests: + make("check") + make("test") + Executable(join_path('test', 'ij'))() + sstruct = Executable(join_path('test', 'struct')) + sstruct() + sstruct('-in', 'test/sstruct.in.default', '-solver', '40', + '-rhsone') make("install") diff --git a/var/spack/repos/builtin/packages/jasper/fix_alpha_channel_assert_fail.patch b/var/spack/repos/builtin/packages/jasper/fix_alpha_channel_assert_fail.patch new file mode 100644 index 0000000000..cbf79ff971 --- /dev/null +++ b/var/spack/repos/builtin/packages/jasper/fix_alpha_channel_assert_fail.patch @@ -0,0 +1,25 @@ +diff --git a/src/libjasper/jpc/jpc_dec.c b/src/libjasper/jpc/jpc_dec.c +index fa72a0e..1f4845f 100644 +--- a/src/libjasper/jpc/jpc_dec.c ++++ b/src/libjasper/jpc/jpc_dec.c +@@ -1069,12 +1069,18 @@ static int jpc_dec_tiledecode(jpc_dec_t *dec, jpc_dec_tile_t *tile) + /* Apply an inverse intercomponent transform if necessary. */ + switch (tile->cp->mctid) { + case JPC_MCT_RCT: +- assert(dec->numcomps == 3); ++ if (dec->numcomps != 3 && dec->numcomps != 4) { ++ jas_eprintf("bad number of components (%d)\n", dec->numcomps); ++ return -1; ++ } + jpc_irct(tile->tcomps[0].data, tile->tcomps[1].data, + tile->tcomps[2].data); + break; + case JPC_MCT_ICT: +- assert(dec->numcomps == 3); ++ if (dec->numcomps != 3 && dec->numcomps != 4) { ++ jas_eprintf("bad number of components (%d)\n", dec->numcomps); ++ return -1; ++ } + jpc_iict(tile->tcomps[0].data, tile->tcomps[1].data, + tile->tcomps[2].data); + break; diff --git a/var/spack/repos/builtin/packages/jasper/package.py b/var/spack/repos/builtin/packages/jasper/package.py new file mode 100644 index 0000000000..f450c7d155 --- /dev/null +++ b/var/spack/repos/builtin/packages/jasper/package.py @@ -0,0 +1,63 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Jasper(Package): + """Library for manipulating JPEG-2000 images""" + + homepage = "https://www.ece.uvic.ca/~frodo/jasper/" + url = "https://www.ece.uvic.ca/~frodo/jasper/software/jasper-1.900.1.zip" + + version('1.900.1', 'a342b2b4495b3e1394e161eb5d85d754') + + variant('shared', default=True, + description='Builds shared versions of the libraries') + variant('debug', default=False, + description='Builds debug versions of the libraries') + + depends_on('libjpeg-turbo') + + # Fixes a bug (still in upstream as of v.1.900.1) where an assertion fails + # when certain JPEG-2000 files with an alpha channel are processed + # see: https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=469786 + patch('fix_alpha_channel_assert_fail.patch') + + def install(self, spec, prefix): + configure_options = [ + '--prefix={0}'.format(prefix), + '--mandir={0}'.format(spec.prefix.man), + ] + + if '+shared' in spec: + configure_options.append('--enable-shared') + + if '+debug' not in spec: + configure_options.append('--disable-debug') + + configure(*configure_options) + + make() + make('install') diff --git a/var/spack/repos/builtin/packages/mkl/package.py b/var/spack/repos/builtin/packages/mkl/package.py index 454e78d29c..6ea64f5313 100644 --- a/var/spack/repos/builtin/packages/mkl/package.py +++ b/var/spack/repos/builtin/packages/mkl/package.py @@ -9,7 +9,13 @@ class Mkl(IntelInstaller): Note: You will have to add the download file to a mirror so that Spack can find it. For instructions on how to set up a - mirror, see http://software.llnl.gov/spack/mirrors.html""" + mirror, see http://software.llnl.gov/spack/mirrors.html. + + To set the threading layer at run time, set the MKL_THREADING_LAYER + variable to one of the following values: INTEL, SEQUENTIAL, PGI. + To set the interface layer at run time, set the MKL_INTERFACE_LAYER + variable to LP64 or ILP64. + """ homepage = "https://software.intel.com/en-us/intel-mkl" @@ -18,6 +24,11 @@ class Mkl(IntelInstaller): version('11.3.3.210', 'f72546df27f5ebb0941b5d21fd804e34', url="file://%s/l_mkl_11.3.3.210.tgz" % os.getcwd()) + # virtual dependency + provides('blas') + provides('lapack') + # TODO: MKL also provides an implementation of ScaLAPACK. + def install(self, spec, prefix): self.intel_prefix = os.path.join(prefix, "pkg") @@ -26,3 +37,28 @@ def install(self, spec, prefix): mkl_dir = os.path.join(self.intel_prefix, "mkl") for f in os.listdir(mkl_dir): os.symlink(os.path.join(mkl_dir, f), os.path.join(self.prefix, f)) + + def setup_dependent_package(self, module, dspec): + # For now use Single Dynamic Library: + # To set the threading layer at run time, use the + # mkl_set_threading_layer function or set the MKL_THREADING_LAYER + # variable to one of the following values: INTEL, SEQUENTIAL, PGI. + # To set the interface layer at run time, use the mkl_set_interface_layer + # function or set the MKL_INTERFACE_LAYER variable to LP64 or ILP64. + + # Otherwise one would need to specify several libraries + # (e.g. mkl_intel_lp64;mkl_sequential;mkl_core), which reflect + # different interface and threading layers.
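(Illustration only, not part of the patch.) With the Single Dynamic Library approach described in the comment above, the interface and threading layers are chosen when libmkl_rt is loaded rather than at link time. A minimal sketch of launching an MKL-linked program with explicitly chosen layers; the executable path is hypothetical, and the variable names and values come from the docstring added above:

import os
import subprocess

env = dict(os.environ)
env['MKL_INTERFACE_LAYER'] = 'LP64'        # or 'ILP64'
env['MKL_THREADING_LAYER'] = 'SEQUENTIAL'  # or 'INTEL', 'PGI'
# The chosen layers take effect when the application loads libmkl_rt.
subprocess.check_call(['./my_mkl_app'], env=env)  # './my_mkl_app' is a placeholder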
+ + name = 'libmkl_rt.%s' % dso_suffix + libdir = find_library_path(name, self.prefix.lib64, self.prefix.lib) + + self.spec.blas_shared_lib = join_path(libdir, name) + self.spec.lapack_shared_lib = self.spec.blas_shared_lib + + def setup_dependent_environment(self, spack_env, run_env, dependent_spec): + # set up MKLROOT for everyone using MKL package + spack_env.set('MKLROOT', self.prefix) + + def setup_environment(self, spack_env, env): + env.set('MKLROOT', self.prefix) diff --git a/var/spack/repos/builtin/packages/mumps/package.py b/var/spack/repos/builtin/packages/mumps/package.py index 92c45c9b95..b85a6d2b94 100644 --- a/var/spack/repos/builtin/packages/mumps/package.py +++ b/var/spack/repos/builtin/packages/mumps/package.py @@ -23,7 +23,10 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * -import os, sys, glob +import os +import sys +import glob + class Mumps(Package): """MUMPS: a MUltifrontal Massively Parallel sparse direct Solver""" @@ -44,13 +47,11 @@ class Mumps(Package): variant('idx64', default=False, description='Use int64_t/integer*8 as default index type') variant('shared', default=True, description='Build shared libraries') - depends_on('scotch + esmumps', when='~ptscotch+scotch') depends_on('scotch + esmumps + mpi', when='+ptscotch') depends_on('metis@5:', when='+metis') depends_on('parmetis', when="+parmetis") depends_on('blas') - depends_on('lapack') depends_on('scalapack', when='+mpi') depends_on('mpi', when='+mpi') @@ -60,42 +61,52 @@ class Mumps(Package): # end before install # def patch(self): def write_makefile_inc(self): - if ('+parmetis' in self.spec or '+ptscotch' in self.spec) and '+mpi' not in self.spec: - raise RuntimeError('You cannot use the variants parmetis or ptscotch without mpi') + if ('+parmetis' in self.spec or '+ptscotch' in self.spec) and '+mpi' not in self.spec: # NOQA: ignore=E501 + raise RuntimeError('You cannot use the variants parmetis or ptscotch without mpi') # NOQA: ignore=E501 - makefile_conf = ["LIBBLAS = -L%s -lblas" % self.spec['blas'].prefix.lib] + makefile_conf = ["LIBBLAS = %s" % to_link_flags( + self.spec['blas'].blas_shared_lib) + ] orderings = ['-Dpord'] if '+ptscotch' in self.spec or '+scotch' in self.spec: join_lib = ' -l%s' % ('pt' if '+ptscotch' in self.spec else '') - makefile_conf.extend( - ["ISCOTCH = -I%s" % self.spec['scotch'].prefix.include, - "LSCOTCH = -L%s %s%s" % (self.spec['scotch'].prefix.lib, - join_lib, - join_lib.join(['esmumps', 'scotch', 'scotcherr']))]) + makefile_conf.extend([ + "ISCOTCH = -I%s" % self.spec['scotch'].prefix.include, + "LSCOTCH = -L%s %s%s" % (self.spec['scotch'].prefix.lib, + join_lib, + join_lib.join(['esmumps', + 'scotch', + 'scotcherr'])) + ]) + orderings.append('-Dscotch') if '+ptscotch' in self.spec: orderings.append('-Dptscotch') if '+parmetis' in self.spec and '+metis' in self.spec: - libname = 'parmetis' if '+parmetis' in self.spec else 'metis' - makefile_conf.extend( - ["IMETIS = -I%s" % self.spec['parmetis'].prefix.include, - "LMETIS = -L%s -l%s -L%s -l%s" % (self.spec['parmetis'].prefix.lib, 'parmetis',self.spec['metis'].prefix.lib, 'metis')]) + makefile_conf.extend([ + "IMETIS = -I%s" % self.spec['parmetis'].prefix.include, + "LMETIS = -L%s -l%s -L%s -l%s" % ( + self.spec['parmetis'].prefix.lib, 'parmetis', + self.spec['metis'].prefix.lib, 'metis') + ]) orderings.append('-Dparmetis') elif '+metis' in self.spec: - makefile_conf.extend( - ["IMETIS = -I%s" % 
self.spec['metis'].prefix.include, - "LMETIS = -L%s -l%s" % (self.spec['metis'].prefix.lib, 'metis')]) + makefile_conf.extend([ + "IMETIS = -I%s" % self.spec['metis'].prefix.include, + "LMETIS = -L%s -l%s" % (self.spec['metis'].prefix.lib, 'metis') + ]) orderings.append('-Dmetis') makefile_conf.append("ORDERINGSF = %s" % (' '.join(orderings))) # when building shared libs need -fPIC, otherwise - # /usr/bin/ld: graph.o: relocation R_X86_64_32 against `.rodata.str1.1' can not be used when making a shared object; recompile with -fPIC + # /usr/bin/ld: graph.o: relocation R_X86_64_32 against `.rodata.str1.1' + # can not be used when making a shared object; recompile with -fPIC fpic = '-fPIC' if '+shared' in self.spec else '' # TODO: test this part, it needs a full blas, scalapack and # partitionning environment with 64bit integers @@ -104,7 +115,7 @@ def write_makefile_inc(self): # the fortran compilation flags most probably are # working only for intel and gnu compilers this is # perhaps something the compiler should provide - ['OPTF = %s -O -DALLOW_NON_INIT %s' % (fpic,'-fdefault-integer-8' if self.compiler.name == "gcc" else '-i8'), + ['OPTF = %s -O -DALLOW_NON_INIT %s' % (fpic, '-fdefault-integer-8' if self.compiler.name == "gcc" else '-i8'), # NOQA: ignore=E501 'OPTL = %s -O ' % fpic, 'OPTC = %s -O -DINTSIZE64' % fpic]) else: @@ -113,7 +124,6 @@ def write_makefile_inc(self): 'OPTL = %s -O ' % fpic, 'OPTC = %s -O ' % fpic]) - if '+mpi' in self.spec: makefile_conf.extend( ["CC = %s" % join_path(self.spec['mpi'].prefix.bin, 'mpicc'), @@ -134,16 +144,17 @@ def write_makefile_inc(self): if '+shared' in self.spec: if sys.platform == 'darwin': - # Building dylibs with mpif90 causes segfaults on 10.8 and 10.10. Use gfortran. (Homebrew) + # Building dylibs with mpif90 causes segfaults on 10.8 and + # 10.10. Use gfortran. 
(Homebrew) makefile_conf.extend([ 'LIBEXT=.dylib', - 'AR=%s -dynamiclib -Wl,-install_name -Wl,%s/$(notdir $@) -undefined dynamic_lookup -o ' % (os.environ['FC'],prefix.lib), + 'AR=%s -dynamiclib -Wl,-install_name -Wl,%s/$(notdir $@) -undefined dynamic_lookup -o ' % (os.environ['FC'], prefix.lib), # NOQA: ignore=E501 'RANLIB=echo' ]) else: makefile_conf.extend([ 'LIBEXT=.so', - 'AR=$(FL) -shared -Wl,-soname -Wl,%s/$(notdir $@) -o' % prefix.lib, + 'AR=$(FL) -shared -Wl,-soname -Wl,%s/$(notdir $@) -o' % prefix.lib, # NOQA: ignore=E501 'RANLIB=echo' ]) else: @@ -153,9 +164,8 @@ def write_makefile_inc(self): 'RANLIB = ranlib' ]) - - makefile_inc_template = join_path(os.path.dirname(self.module.__file__), - 'Makefile.inc') + makefile_inc_template = join_path( + os.path.dirname(self.module.__file__), 'Makefile.inc') with open(makefile_inc_template, "r") as fh: makefile_conf.extend(fh.read().split('\n')) @@ -164,8 +174,6 @@ def write_makefile_inc(self): makefile_inc = '\n'.join(makefile_conf) fh.write(makefile_inc) - - def install(self, spec, prefix): make_libs = [] @@ -189,15 +197,15 @@ def install(self, spec, prefix): install_tree('lib', prefix.lib) install_tree('include', prefix.include) - if '~mpi' in spec: + if '~mpi' in spec: lib_dsuffix = '.dylib' if sys.platform == 'darwin' else '.so' lib_suffix = lib_dsuffix if '+shared' in spec else '.a' install('libseq/libmpiseq%s' % lib_suffix, prefix.lib) - for f in glob.glob(join_path('libseq','*.h')): + for f in glob.glob(join_path('libseq', '*.h')): install(f, prefix.include) - # FIXME: extend the tests to mpirun -np 2 (or alike) when build with MPI - # FIXME: use something like numdiff to compare blessed output with the current + # FIXME: extend the tests to mpirun -np 2 when built with MPI + # FIXME: use something like numdiff to compare output files with working_dir('examples'): if '+float' in spec: os.system('./ssimpletest < input_simpletest_real') diff --git a/var/spack/repos/builtin/packages/netcdf/package.py b/var/spack/repos/builtin/packages/netcdf/package.py index 063d38e4f9..ad4ee59640 100644 --- a/var/spack/repos/builtin/packages/netcdf/package.py +++ b/var/spack/repos/builtin/packages/netcdf/package.py @@ -33,6 +33,7 @@ class Netcdf(Package): homepage = "http://www.unidata.ucar.edu/software/netcdf" url = "ftp://ftp.unidata.ucar.edu/pub/netcdf/netcdf-4.3.3.tar.gz" + version('4.4.1', '7843e35b661c99e1d49e60791d5072d8') version('4.4.0', 'cffda0cbd97fdb3a06e9274f7aef438e') version('4.3.3', '5fbd0e108a54bd82cb5702a73f56d2ae') @@ -47,8 +48,10 @@ class Netcdf(Package): # Required for NetCDF-4 support depends_on("zlib") - depends_on("hdf5+mpi", when='+mpi') - depends_on("hdf5~mpi", when='~mpi') + depends_on('hdf5@:1.8+mpi', when='@:4.4.0+mpi') + depends_on('hdf5+mpi', when='@4.4.1:+mpi') + depends_on('hdf5@:1.8~mpi', when='@:4.4.0~mpi') + depends_on('hdf5~mpi', when='@4.4.1:~mpi') def install(self, spec, prefix): # Environment variables diff --git a/var/spack/repos/builtin/packages/netlib-scalapack/package.py b/var/spack/repos/builtin/packages/netlib-scalapack/package.py index a8250a38de..f7733249cf 100644 --- a/var/spack/repos/builtin/packages/netlib-scalapack/package.py +++ b/var/spack/repos/builtin/packages/netlib-scalapack/package.py @@ -25,8 +25,10 @@ from spack import * import sys + class NetlibScalapack(Package): - """ScaLAPACK is a library of high-performance linear algebra routines for parallel distributed memory machines""" + """ScaLAPACK is a library of high-performance linear algebra routines for + parallel distributed memory
machines""" homepage = "http://www.netlib.org/scalapack/" url = "http://www.netlib.org/scalapack/scalapack-2.0.2.tgz" @@ -48,10 +50,22 @@ class NetlibScalapack(Package): def install(self, spec, prefix): options = [ - "-DBUILD_SHARED_LIBS:BOOL=%s" % ('ON' if '+shared' in spec else 'OFF'), - "-DBUILD_STATIC_LIBS:BOOL=%s" % ('OFF' if '+shared' in spec else 'ON'), - "-DUSE_OPTIMIZED_LAPACK_BLAS:BOOL=ON", # forces scalapack to use find_package(LAPACK) - ] + "-DBUILD_SHARED_LIBS:BOOL=%s" % ('ON' if '+shared' in spec else + 'OFF'), + "-DBUILD_STATIC_LIBS:BOOL=%s" % ('OFF' if '+shared' in spec else + 'ON'), + # forces scalapack to use find_package(LAPACK): + "-DUSE_OPTIMIZED_LAPACK_BLAS:BOOL=ON", + ] + + # Make sure we use Spack's Lapack: + options.extend([ + '-DLAPACK_FOUND=true', + '-DLAPACK_INCLUDE_DIRS=%s' % spec['lapack'].prefix.include, + '-DLAPACK_LIBRARIES=%s' % ( + spec['lapack'].lapack_shared_lib if '+shared' in spec else + spec['lapack'].lapack_static_lib), + ]) if '+fpic' in spec: options.extend([ @@ -66,16 +80,15 @@ def install(self, spec, prefix): make() make("install") - # The shared libraries are not installed correctly on Darwin; correct this + # The shared libraries are not installed correctly on Darwin: if (sys.platform == 'darwin') and ('+shared' in spec): fix_darwin_install_name(prefix.lib) - def setup_dependent_package(self, module, dependent_spec): spec = self.spec - lib_dsuffix = '.dylib' if sys.platform == 'darwin' else '.so' - lib_suffix = lib_dsuffix if '+shared' in spec else '.a' + lib_suffix = dso_suffix if '+shared' in spec else 'a' spec.fc_link = '-L%s -lscalapack' % spec.prefix.lib spec.cc_link = spec.fc_link - spec.libraries = [join_path(spec.prefix.lib, 'libscalapack%s' % lib_suffix)] + spec.libraries = [join_path(spec.prefix.lib, + 'libscalapack.%s' % lib_suffix)] diff --git a/var/spack/repos/builtin/packages/py-h5py/package.py b/var/spack/repos/builtin/packages/py-h5py/package.py index c1950a91ac..f96cb9b4cd 100644 --- a/var/spack/repos/builtin/packages/py-h5py/package.py +++ b/var/spack/repos/builtin/packages/py-h5py/package.py @@ -46,6 +46,7 @@ class PyH5py(Package): depends_on('hdf5@1.8.4:') depends_on('hdf5+mpi', when='+mpi') depends_on('mpi', when='+mpi') + depends_on('py-mpi4py', when='+mpi') # Build and runtime dependencies depends_on('py-numpy@1.6.1:', type=nolink) diff --git a/var/spack/repos/builtin/packages/py-numpy/package.py b/var/spack/repos/builtin/packages/py-numpy/package.py index 6bc11a5e48..2febdac658 100644 --- a/var/spack/repos/builtin/packages/py-numpy/package.py +++ b/var/spack/repos/builtin/packages/py-numpy/package.py @@ -44,6 +44,7 @@ class PyNumpy(Package): extends('python') depends_on('py-nose', type='build') + depends_on('py-setuptools', type='build') depends_on('blas', when='+blas') depends_on('lapack', when='+lapack') diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py index 516b5c6cfe..bbb1e9c13a 100644 --- a/var/spack/repos/builtin/packages/python/package.py +++ b/var/spack/repos/builtin/packages/python/package.py @@ -53,6 +53,7 @@ class Python(Package): extendable = True + variant('tk', default=False, description='Provide support for Tkinter') variant('ucs4', default=False, description='Enable UCS4 (wide) unicode strings') # From https://docs.python.org/2/c-api/unicode.html: Python's default # builds use a 16-bit type for Py_UNICODE and store Unicode values @@ -68,6 +69,8 @@ class Python(Package): depends_on("ncurses") depends_on("sqlite") depends_on("zlib") + 
depends_on("tk", when="+tk") + depends_on("tcl", when="+tk") def install(self, spec, prefix): # Need this to allow python build to find the Python installation. @@ -77,24 +80,32 @@ def install(self, spec, prefix): # Rest of install is pretty standard except setup.py needs to # be able to read the CPPFLAGS and LDFLAGS as it scans for the # library and headers to build - cppflags = ' -I'.join([ + include_dirs = [ spec['openssl'].prefix.include, spec['bzip2'].prefix.include, spec['readline'].prefix.include, spec['ncurses'].prefix.include, spec['sqlite'].prefix.include, spec['zlib'].prefix.include - ]) + ] - ldflags = ' -L'.join([ + library_dirs = [ spec['openssl'].prefix.lib, spec['bzip2'].prefix.lib, spec['readline'].prefix.lib, spec['ncurses'].prefix.lib, spec['sqlite'].prefix.lib, spec['zlib'].prefix.lib - ]) + ] + + if '+tk' in spec: + include_dirs.extend([ + spec['tk'].prefix.include, spec['tcl'].prefix.include + ]) + library_dirs.extend([ + spec['tk'].prefix.lib, spec['tcl'].prefix.lib + ]) config_args = [ "--prefix={0}".format(prefix), "--with-threads", "--enable-shared", - "CPPFLAGS=-I{0}".format(cppflags), - "LDFLAGS=-L{0}".format(ldflags) + "CPPFLAGS=-I{0}".format(" -I".join(include_dirs)), + "LDFLAGS=-L{0}".format(" -L".join(library_dirs)) ] if '+ucs4' in spec: @@ -116,6 +127,25 @@ def install(self, spec, prefix): self.filter_compilers(spec, prefix) + # TODO: Once better testing support is integrated, add the following tests + # https://wiki.python.org/moin/TkInter + # + # if '+tk' in spec: + # env['TK_LIBRARY'] = join_path(spec['tk'].prefix.lib, + # 'tk{0}'.format(spec['tk'].version.up_to(2))) + # env['TCL_LIBRARY'] = join_path(spec['tcl'].prefix.lib, + # 'tcl{0}'.format(spec['tcl'].version.up_to(2))) + # + # $ python + # >>> import _tkinter + # + # if spec.satisfies('@3:') + # >>> import tkinter + # >>> tkinter._test() + # else: + # >>> import Tkinter + # >>> Tkinter._test() + def filter_compilers(self, spec, prefix): """Run after install to tell the configuration files and Makefiles to use the compilers that Spack built the package with. diff --git a/var/spack/repos/builtin/packages/suite-sparse/package.py b/var/spack/repos/builtin/packages/suite-sparse/package.py index 2cc89b843f..a71bfd8bd4 100644 --- a/var/spack/repos/builtin/packages/suite-sparse/package.py +++ b/var/spack/repos/builtin/packages/suite-sparse/package.py @@ -32,21 +32,20 @@ class SuiteSparse(Package): homepage = 'http://faculty.cse.tamu.edu/davis/suitesparse.html' url = 'http://faculty.cse.tamu.edu/davis/SuiteSparse/SuiteSparse-4.5.1.tar.gz' - version('4.5.1', 'f0ea9aad8d2d1ffec66a5b6bfeff5319') version('4.5.3', '8ec57324585df3c6483ad7f556afccbd') + version('4.5.1', 'f0ea9aad8d2d1ffec66a5b6bfeff5319') - # FIXME: (see below) - # variant('tbb', default=True, description='Build with Intel TBB') + variant('tbb', default=True, description='Build with Intel TBB') depends_on('blas') depends_on('lapack') depends_on('metis@5.1.0', when='@4.5.1:') - # FIXME: # in @4.5.1. TBB support in SPQR seems to be broken as TBB-related linkng # flags does not seem to be used, which leads to linking errors on Linux. - # Try re-enabling in future versions. - # depends_on('tbb', when='+tbb') + depends_on('tbb', when='@4.5.3:+tbb') + + patch('tbb_453.patch', when='@4.5.3') def install(self, spec, prefix): # The build system of SuiteSparse is quite old-fashioned. 
@@ -73,20 +72,24 @@ def install(self, spec, prefix): ]) # Intel TBB in SuiteSparseQR - if '+tbb' in spec: + if 'tbb' in spec: make_args.extend([ 'SPQR_CONFIG=-DHAVE_TBB', 'TBB=-L%s -ltbb' % spec['tbb'].prefix.lib, ]) - # BLAS arguments require path to libraries - # FIXME: (blas/lapack always provide libblas and liblapack as aliases) + # Make sure Spack's Blas/Lapack is used. Otherwise System's + # Blas/Lapack might be picked up. + blas = to_link_flags(spec['blas'].blas_shared_lib) + lapack = to_link_flags(spec['lapack'].lapack_shared_lib) if '@4.5.1' in spec: # adding -lstdc++ is clearly an ugly way to do this, but it follows # with the TCOV path of SparseSuite 4.5.1's Suitesparse_config.mk - make_args.extend([ - 'BLAS=-lblas -lstdc++', - 'LAPACK=-llapack' - ]) + blas += ' -lstdc++' + + make_args.extend([ + 'BLAS=%s' % blas, + 'LAPACK=%s' % lapack + ]) make('install', *make_args) diff --git a/var/spack/repos/builtin/packages/suite-sparse/tbb_453.patch b/var/spack/repos/builtin/packages/suite-sparse/tbb_453.patch new file mode 100644 index 0000000000..70241ed017 --- /dev/null +++ b/var/spack/repos/builtin/packages/suite-sparse/tbb_453.patch @@ -0,0 +1,13 @@ +diff --git a/SPQR/Lib/Makefile b/SPQR/Lib/Makefile +index eaade58..d0de852 100644 +--- a/SPQR/Lib/Makefile ++++ b/SPQR/Lib/Makefile +@@ -13,7 +13,7 @@ ccode: all + include ../../SuiteSparse_config/SuiteSparse_config.mk + + # SPQR depends on CHOLMOD, AMD, COLAMD, LAPACK, the BLAS and SuiteSparse_config +-LDLIBS += -lamd -lcolamd -lcholmod -lsuitesparseconfig $(LAPACK) $(BLAS) ++LDLIBS += -lamd -lcolamd -lcholmod -lsuitesparseconfig $(TBB) $(LAPACK) $(BLAS) + + # compile and install in SuiteSparse/lib + library: diff --git a/var/spack/repos/builtin/packages/tcl/package.py b/var/spack/repos/builtin/packages/tcl/package.py index a4d8b515bb..ef922314d8 100644 --- a/var/spack/repos/builtin/packages/tcl/package.py +++ b/var/spack/repos/builtin/packages/tcl/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Tcl(Package): """Tcl (Tool Command Language) is a very powerful but easy to learn dynamic programming language, suitable for a very wide @@ -34,9 +35,6 @@ class Tcl(Package): extensible.""" homepage = "http://www.tcl.tk" - def url_for_version(self, version): - return 'http://prdownloads.sourceforge.net/tcl/tcl%s-src.tar.gz' % version - version('8.6.5', '0e6426a4ca9401825fbc6ecf3d89a326') version('8.6.4', 'd7cbb91f1ded1919370a30edd1534304') version('8.6.3', 'db382feca91754b7f93da16dc4cdad1f') @@ -44,8 +42,18 @@ def url_for_version(self, version): depends_on('zlib') + def url_for_version(self, version): + base_url = 'http://prdownloads.sourceforge.net/tcl' + return '{0}/tcl{1}-src.tar.gz'.format(base_url, version) + + def setup_environment(self, spack_env, env): + # When using Tkinter from within spack provided python+tk, python + # will not be able to find Tcl/Tk unless TCL_LIBRARY is set. 
+ env.set('TCL_LIBRARY', join_path(self.prefix.lib, 'tcl{0}'.format( + self.spec.version.up_to(2)))) + def install(self, spec, prefix): with working_dir('unix'): - configure("--prefix=%s" % prefix) + configure("--prefix={0}".format(prefix)) make() make("install") diff --git a/var/spack/repos/builtin/packages/the_silver_searcher/package.py b/var/spack/repos/builtin/packages/the_silver_searcher/package.py index 988619df30..c98e964efa 100644 --- a/var/spack/repos/builtin/packages/the_silver_searcher/package.py +++ b/var/spack/repos/builtin/packages/the_silver_searcher/package.py @@ -24,11 +24,13 @@ ############################################################################## from spack import * + class TheSilverSearcher(Package): """Fast recursive grep alternative""" homepage = "http://geoff.greer.fm/ag/" - url = "http://geoff.greer.fm/ag/releases/the_silver_searcher-0.30.0.tar.gz" + url = "http://geoff.greer.fm/ag/releases/the_silver_searcher-0.32.0.tar.gz" + version('0.32.0', '3fdfd5836924246073d5344257a06823') version('0.30.0', '95e2e7859fab1156c835aff7413481db') depends_on('pcre') diff --git a/var/spack/repos/builtin/packages/tk/package.py b/var/spack/repos/builtin/packages/tk/package.py index 330e1c77f5..894d3af6cc 100644 --- a/var/spack/repos/builtin/packages/tk/package.py +++ b/var/spack/repos/builtin/packages/tk/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Tk(Package): """Tk is a graphical user interface toolkit that takes developing desktop applications to a higher level than conventional @@ -33,16 +34,24 @@ class Tk(Package): and more.""" homepage = "http://www.tcl.tk" - def url_for_version(self, version): - return "http://prdownloads.sourceforge.net/tcl/tk%s-src.tar.gz" % version - + version('8.6.5', '11dbbd425c3e0201f20d6a51482ce6c4') version('8.6.3', '85ca4dbf4dcc19777fd456f6ee5d0221') depends_on("tcl") + def url_for_version(self, version): + base_url = "http://prdownloads.sourceforge.net/tcl" + return "{0}/tk{1}-src.tar.gz".format(base_url, version) + + def setup_environment(self, spack_env, env): + # When using Tkinter from within spack provided python+tk, python + # will not be able to find Tcl/Tk unless TK_LIBRARY is set. 
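(Illustration only, not part of the patch.) The env.set() call that follows builds the path <prefix>/lib/tk<major>.<minor>, the directory holding Tk's script library (tk.tcl); TCL_LIBRARY in the tcl package above is constructed the same way. A sketch of the resulting value, with a hypothetical install prefix:

prefix_lib = '/spack/opt/tk-8.6.5/lib'   # hypothetical prefix.lib
version_up_to_2 = '8.6'                  # what Version('8.6.5').up_to(2) renders as
tk_library = '{0}/tk{1}'.format(prefix_lib, version_up_to_2)
# -> '/spack/opt/tk-8.6.5/lib/tk8.6', exported as TK_LIBRARY so that
#    Tkinter's _tkinter module can locate the Tk scripts at run time.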
+ env.set('TK_LIBRARY', join_path(self.prefix.lib, 'tk{0}'.format( + self.spec.version.up_to(2)))) + def install(self, spec, prefix): with working_dir('unix'): - configure("--prefix=%s" % prefix, - "--with-tcl=%s" % spec['tcl'].prefix.lib) + configure("--prefix={0}".format(prefix), + "--with-tcl={0}".format(spec['tcl'].prefix.lib)) make() make("install") diff --git a/var/spack/repos/builtin/packages/trilinos/package.py b/var/spack/repos/builtin/packages/trilinos/package.py index 56499af8d9..77589bb8f9 100644 --- a/var/spack/repos/builtin/packages/trilinos/package.py +++ b/var/spack/repos/builtin/packages/trilinos/package.py @@ -118,6 +118,7 @@ def install(self, spec, prefix): options.extend(std_cmake_args) mpi_bin = spec['mpi'].prefix.bin + # Note: -DXYZ_LIBRARY_NAMES= needs semicolon separated list of names options.extend([ '-DTrilinos_ENABLE_ALL_PACKAGES:BOOL=ON', '-DTrilinos_ENABLE_ALL_OPTIONAL_PACKAGES:BOOL=ON', @@ -131,10 +132,12 @@ def install(self, spec, prefix): '-DTPL_ENABLE_MPI:BOOL=ON', '-DMPI_BASE_DIR:PATH=%s' % spec['mpi'].prefix, '-DTPL_ENABLE_BLAS=ON', - '-DBLAS_LIBRARY_NAMES=blas', # FIXME: don't hardcode names + '-DBLAS_LIBRARY_NAMES=%s' % to_lib_name( + spec['blas'].blas_shared_lib), '-DBLAS_LIBRARY_DIRS=%s' % spec['blas'].prefix.lib, '-DTPL_ENABLE_LAPACK=ON', - '-DLAPACK_LIBRARY_NAMES=lapack', + '-DLAPACK_LIBRARY_NAMES=%s' % to_lib_name( + spec['lapack'].lapack_shared_lib), '-DLAPACK_LIBRARY_DIRS=%s' % spec['lapack'].prefix, '-DTrilinos_ENABLE_EXPLICIT_INSTANTIATION:BOOL=ON', '-DTrilinos_ENABLE_CXX11:BOOL=ON',