Merge pull request #14 in SCALE/spack from features/compilers to develop

# By Todd Gamblin
# Via Todd Gamblin
* commit '33a11f32fdaea48192ecb13a85af11f85aa6b8bf': (21 commits)
  Multi-compiler support feature-complete.  Fix SPACK-3, SPACK-4, SPACK-12.
  Executables can optionally ignore error output.
  Enable allow_no_value for config parser.
  Make tests use mock compiler configuration.
  Set default editor to vi if EDITOR is not set.
  Add working_dir, Version back into package build namespace.
  Compiler support now uses configuration files.
  Make debug and verbose output work properly.
  Minor cleanup and bug fixes.
  Add support for configuration files.  Fix SPACK-24.
  Proper exiting for forked process in do_install()
  Move globals to spack's __init__.py
  Move globals to spack's __init__.py
  Adding per-compiler python files
  Initial ability to swap compilers.
  Better satisfies: e.g., v4.7.3 now satisfies v4.7
  Add CompilerSpec class and loading capability.
  Implemented compiler concretization policy.
  Sorted out spack.compilers vs var/spack/compilers
  Make Compiler constructor behave like Spec constructor.
  ...
This commit is contained in:
George Todd Gamblin 2014-06-22 13:00:17 -07:00
commit f7fe65102f
46 changed files with 2210 additions and 417 deletions

1
.gitignore vendored
View file

@ -4,3 +4,4 @@
*~
.DS_Store
.idea
/etc/spackconfig

View file

@ -41,7 +41,7 @@ sys.path.insert(0, SPACK_LIB_PATH)
# If there is no working directory, use the spack prefix.
try:
os.getcwd()
working_dir = os.getcwd()
except OSError:
os.chdir(SPACK_PREFIX)
@ -77,8 +77,11 @@ for cmd in spack.cmd.commands:
args = parser.parse_args()
# Set up environment based on args.
spack.verbose = args.verbose
tty.set_verbose(args.verbose)
tty.set_debug(args.debug)
spack.debug = args.debug
spack.spack_working_dir = working_dir
if args.mock:
from spack.packages import PackageDB
spack.db = PackageDB(spack.mock_packages_path)

View file

@ -107,7 +107,7 @@ Package class names
The **class name** (``Libelf`` in our example) is formed by converting
words separated by `-` or ``_`` in the file name to camel case. If
the name starts with a number, we prefix the class name with
``Num_``. Here are some examples:
``_``. Here are some examples:
================= =================
Module Name Class Name
@ -115,7 +115,7 @@ the name starts with a number, we prefix the class name with
``foo_bar`` ``FooBar``
``docbook-xml`` ``DocbookXml``
``FooBar`` ``Foobar``
``3proxy`` ``Num_3proxy``
``3proxy`` ``_3proxy``
================= =================
The class name is needed by Spack to properly import a package, but

56
lib/spack/env/cc vendored
View file

@ -10,7 +10,7 @@ import argparse
from contextlib import closing
# Import spack parameters through the build environment.
spack_lib = os.environ.get("SPACK_LIB")
spack_lib = os.environ.get("SPACK_LIB")
if not spack_lib:
print "Spack compiler must be run from spack!"
sys.exit(1)
@ -20,24 +20,24 @@ sys.path.append(spack_lib)
from spack.compilation import *
import llnl.util.tty as tty
spack_prefix = get_env_var("SPACK_PREFIX")
spack_build_root = get_env_var("SPACK_BUILD_ROOT")
spack_debug = get_env_flag("SPACK_DEBUG")
spack_deps = get_path("SPACK_DEPENDENCIES")
spack_env_path = get_path("SPACK_ENV_PATH")
spack_prefix = get_env_var("SPACK_PREFIX")
spack_debug = get_env_flag("SPACK_DEBUG")
spack_deps = get_path("SPACK_DEPENDENCIES")
spack_env_path = get_path("SPACK_ENV_PATH")
spack_debug_log_dir = get_env_var("SPACK_DEBUG_LOG_DIR")
spack_spec = get_env_var("SPACK_SPEC")
compiler_spec = get_env_var("SPACK_COMPILER_SPEC")
spack_cc = get_env_var("SPACK_CC", required=False)
spack_cxx = get_env_var("SPACK_CXX", required=False)
spack_f77 = get_env_var("SPACK_F77", required=False)
spack_fc = get_env_var("SPACK_FC", required=False)
# Figure out what type of operation we're doing
command = os.path.basename(sys.argv[0])
cpp, cc, ccld, ld, version_check = range(5)
########################################################################
# TODO: this can to be removed once JIRA issue SPACK-16 is resolved
#
if command == 'CC':
command = 'c++'
########################################################################
if command == 'cpp':
mode = cpp
elif command == 'ld':
@ -49,7 +49,31 @@ elif '-c' in sys.argv:
else:
mode = ccld
if '-V' in sys.argv or '-v' in sys.argv or '--version' in sys.argv:
if command in ('cc', 'gcc', 'c89', 'c99', 'clang'):
command = spack_cc
language = "C"
elif command in ('c++', 'CC', 'g++', 'clang++'):
command = spack_cxx
language = "C++"
elif command in ('f77'):
command = spack_f77
language = "Fortran 77"
elif command in ('fc'):
command = spack_fc
language = "Fortran 90"
elif command in ('ld', 'cpp'):
pass # leave it the same. TODO: what's the right thing?
else:
raise Exception("Unknown compiler: %s" % command)
if command is None:
print "ERROR: Compiler '%s' does not support compiling %s programs." % (
compiler_spec, language)
sys.exit(1)
version_args = ['-V', '-v', '--version', '-dumpversion']
if any(arg in sys.argv for arg in version_args):
mode = version_check
# Parse out the includes, libs, etc. so we can adjust them if need be.
@ -104,8 +128,8 @@ os.environ["PATH"] = ":".join(path)
full_command = [command] + arguments
if spack_debug:
input_log = os.path.join(spack_build_root, 'spack_cc_in.log')
output_log = os.path.join(spack_build_root, 'spack_cc_out.log')
input_log = os.path.join(spack_debug_log_dir, 'spack-cc-%s.in.log' % spack_spec)
output_log = os.path.join(spack_debug_log_dir, 'spack-cc-%s.out.log' % spack_spec)
with closing(open(input_log, 'a')) as log:
args = [os.path.basename(sys.argv[0])] + sys.argv[1:]
log.write("%s\n" % " ".join(arg.replace(' ', r'\ ') for arg in args))

View file

@ -22,6 +22,9 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
__all__ = ['install', 'expand_user', 'working_dir', 'touch', 'mkdirp',
'join_path', 'ancestor', 'can_access']
import os
import re
import shutil

View file

@ -29,10 +29,20 @@
from llnl.util.tty.color import *
debug = False
verbose = False
_debug = False
_verbose = False
indent = " "
def set_debug(flag):
global _debug
_debug = flag
def set_verbose(flag):
global _verbose
_verbose = flag
def msg(message, *args):
cprint("@*b{==>} %s" % cescape(message))
for arg in args:
@ -50,13 +60,13 @@ def info(message, *args, **kwargs):
def verbose(message, *args):
if verbose:
if _verbose:
info(message, *args, format='c')
def debug(*args):
if debug:
info("Debug: " + message, *args, format='*g')
def debug(message, *args):
if _debug:
info(message, *args, format='g')
def error(message, *args):

View file

@ -22,10 +22,144 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from globals import *
from util import *
from error import *
from package import Package
from relations import depends_on, provides, patch
from multimethod import when
#
# When packages call 'from spack import *', this is what is brought in.
#
# Spack internal code calls 'import spack' and accesses other
# variables (spack.db, paths, etc.) directly.
#
# TODO: maybe this should be separated out and should go in build_environment.py?
# TODO: it's not clear where all the stuff that needs to be included in packages
# should live. This file is overloaded for spack core vs. for packages.
__all__ = ['Package', 'when', 'provides', 'depends_on',
'patch', 'Version', 'working_dir']
import os
import tempfile
from llnl.util.filesystem import *
# This lives in $prefix/lib/spack/spack/__file__
prefix = ancestor(__file__, 4)
# The spack script itself
spack_file = join_path(prefix, "bin", "spack")
# spack directory hierarchy
etc_path = join_path(prefix, "etc")
lib_path = join_path(prefix, "lib", "spack")
build_env_path = join_path(lib_path, "env")
module_path = join_path(lib_path, "spack")
compilers_path = join_path(module_path, "compilers")
test_path = join_path(module_path, "test")
var_path = join_path(prefix, "var", "spack")
stage_path = join_path(var_path, "stage")
install_path = join_path(prefix, "opt")
#
# Set up the packages database.
#
from spack.packages import PackageDB
packages_path = join_path(var_path, "packages")
db = PackageDB(packages_path)
#
# Paths to mock files for testing.
#
mock_packages_path = join_path(var_path, "mock_packages")
mock_config_path = join_path(var_path, "mock_configs")
mock_site_config = join_path(mock_config_path, "site_spackconfig")
mock_user_config = join_path(mock_config_path, "user_spackconfig")
#
# This controls how spack lays out install prefixes and
# stage directories.
#
from spack.directory_layout import SpecHashDirectoryLayout
install_layout = SpecHashDirectoryLayout(install_path, prefix_size=6)
#
# This controls how things are concretized in spack.
# Replace it with a subclass if you want different
# policies.
#
from spack.concretize import DefaultConcretizer
concretizer = DefaultConcretizer()
# Version information
from spack.version import Version
spack_version = Version("1.0")
#
# Executables used by Spack
#
from spack.util.executable import Executable, which
# User's editor from the environment
editor = Executable(os.environ.get("EDITOR", "vi"))
# Curl tool for fetching files.
curl = which("curl", required=True)
# Whether to build in tmp space or directly in the stage_path.
# If this is true, then spack will make stage directories in
# a tmp filesystem, and it will symlink them into stage_path.
use_tmp_stage = True
# Locations to use for staging and building, in order of preference
# Use a %u to add a username to the stage paths here, in case this
# is a shared filesystem. Spack will use the first of these paths
# that it can create.
tmp_dirs = []
_default_tmp = tempfile.gettempdir()
if _default_tmp != os.getcwd():
tmp_dirs.append(os.path.join(_default_tmp, 'spack-stage'))
tmp_dirs.append('/nfs/tmp2/%u/spack-stage')
# Whether spack should allow installation of unsafe versions of
# software. "Unsafe" versions are ones it doesn't have a checksum
# for.
do_checksum = True
#
# SYS_TYPE to use for the spack installation.
# Value of this determines what platform spack thinks it is by
# default. You can assign three types of values:
# 1. None
# Spack will try to determine the sys_type automatically.
#
# 2. A string
# Spack will assume that the sys_type is hardcoded to the value.
#
# 3. A function that returns a string:
# Spack will use this function to determine the sys_type.
#
sys_type = None
#
# Places to download tarballs from.
#
# TODO: move to configuration.
#
# Examples:
#
# For a local directory:
# mirrors = ['file:///Users/gamblin2/spack-mirror']
#
# For a website:
# mirrors = ['http://spackports.org/spack-mirror/']
#
# For no mirrors:
# mirrors = []
#
mirrors = []
#
# Extra imports that should be generally usable from package.py files.
#
from llnl.util.filesystem import working_dir
from spack.package import Package
from spack.relations import depends_on, provides, patch
from spack.multimethod import when
from spack.version import Version

View file

@ -34,6 +34,7 @@
from llnl.util.filesystem import *
import spack
import spack.compilers as compilers
from spack.util.executable import Executable, which
from spack.util.environment import *
@ -51,7 +52,9 @@
SPACK_ENV_PATH = 'SPACK_ENV_PATH'
SPACK_DEPENDENCIES = 'SPACK_DEPENDENCIES'
SPACK_PREFIX = 'SPACK_PREFIX'
SPACK_BUILD_ROOT = 'SPACK_BUILD_ROOT'
SPACK_DEBUG = 'SPACK_DEBUG'
SPACK_SPEC = 'SPACK_SPEC'
SPACK_DEBUG_LOG_DIR = 'SPACK_DEBUG_LOG_DIR'
class MakeExecutable(Executable):
@ -79,6 +82,29 @@ def __call__(self, *args, **kwargs):
super(MakeExecutable, self).__call__(*args, **kwargs)
def set_compiler_environment_variables(pkg):
assert(pkg.spec.concrete)
compiler = compilers.compiler_for_spec(pkg.spec.compiler)
# Set compiler variables used by CMake and autotools
os.environ['CC'] = 'cc'
os.environ['CXX'] = 'c++'
os.environ['F77'] = 'f77'
os.environ['FC'] = 'fc'
# Set SPACK compiler variables so that our wrapper knows what to call
if compiler.cc:
os.environ['SPACK_CC'] = compiler.cc
if compiler.cxx:
os.environ['SPACK_CXX'] = compiler.cxx
if compiler.f77:
os.environ['SPACK_F77'] = compiler.f77
if compiler.fc:
os.environ['SPACK_FC'] = compiler.fc
os.environ['SPACK_COMPILER_SPEC'] = str(pkg.spec.compiler)
def set_build_environment_variables(pkg):
"""This ensures a clean install environment when we build packages.
"""
@ -102,9 +128,6 @@ def set_build_environment_variables(pkg):
# Install prefix
os.environ[SPACK_PREFIX] = pkg.prefix
# Build root for logging.
os.environ[SPACK_BUILD_ROOT] = pkg.stage.expanded_archive_path
# Remove these vars from the environment during build becaus they
# can affect how some packages find libraries. We want to make
# sure that builds never pull in unintended external dependencies.
@ -114,6 +137,12 @@ def set_build_environment_variables(pkg):
bin_dirs = ['%s/bin' % prefix for prefix in dep_prefixes]
path_put_first('PATH', [bin for bin in bin_dirs if os.path.isdir(bin)])
# Working directory for the spack command itself, for debug logs.
if spack.debug:
os.environ[SPACK_DEBUG] = "TRUE"
os.environ[SPACK_SPEC] = str(pkg.spec)
os.environ[SPACK_DEBUG_LOG_DIR] = spack.spack_working_dir
def set_module_variables_for_package(pkg):
"""Populate the module scope of install() with some useful functions.

View file

@ -26,9 +26,9 @@
from subprocess import check_call, check_output
import llnl.util.tty as tty
from llnl.util.filesystem import join_path
import spack
from spack import join_path
description = "Create a new installation of spack in another prefix"

View file

@ -0,0 +1,81 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import argparse
from pprint import pprint
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
from llnl.util.lang import index_by
import spack.compilers
import spack.spec
import spack.config
from spack.compilation import get_path
description = "Manage compilers"
def setup_parser(subparser):
    """Attach the 'add', 'remove', and 'list' subcommands for 'spack compiler'."""
    subcommands = subparser.add_subparsers(
        metavar='SUBCOMMAND', dest='compiler_command')

    # spack compiler add [PATH ...]
    add_sp = subcommands.add_parser(
        'add', help='Add compilers to the Spack configuration.')
    add_sp.add_argument('add_paths', nargs=argparse.REMAINDER)

    # spack compiler remove PATH
    remove_sp = subcommands.add_parser('remove', help='remove compiler')
    remove_sp.add_argument('path')

    # spack compiler list (takes no arguments)
    subcommands.add_parser('list', help='list available compilers')
def compiler_add(args):
    """Search the supplied paths for compilers and add any found to the
    user's configuration.

    If no paths were given on the command line, fall back to searching
    the directories on the user's $PATH.
    """
    paths = args.add_paths
    if not paths:
        paths = get_path('PATH')
    # BUG FIX: previously passed args.add_paths here, which silently
    # ignored the $PATH fallback computed above ('spack compiler add'
    # with no arguments found nothing).
    compilers = spack.compilers.find_compilers(*paths)
    spack.compilers.add_compilers_to_config('user', *compilers)
def compiler_remove(args):
    # TODO: 'spack compiler remove' is not implemented yet; this is
    # deliberately a no-op so the subcommand dispatch does not fail.
    pass
def compiler_list(args):
    """Print all configured compilers, grouped under a heading per name."""
    tty.msg("Available compilers")
    by_name = index_by(spack.compilers.all_compilers(), 'name')
    for name, group in by_name.items():
        tty.hline(name, char='-', color=spack.spec.compiler_color)
        # Newest versions first within each compiler name.
        newest_first = list(reversed(sorted(group)))
        colify(newest_first, indent=4)
def compiler(parser, args):
    """Entry point for 'spack compiler': dispatch to the chosen subcommand."""
    subcommand_functions = {
        'add': compiler_add,
        'remove': compiler_remove,
        'list': compiler_list,
    }
    handler = subcommand_functions[args.compiler_command]
    handler(args)

View file

@ -24,11 +24,11 @@
##############################################################################
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
from llnl.util.lang import index_by
import spack.compilers
from spack.cmd.compiler import compiler_list
description = "List available compilers"
description = "List available compilers. Same as 'spack compiler list'."
def compilers(parser, args):
tty.msg("Supported compilers")
colify(spack.compilers.supported_compilers(), indent=4)
compiler_list(args)

View file

@ -0,0 +1,84 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
import argparse
import llnl.util.tty as tty
import spack.config
description = "Get and set configuration options."
def setup_parser(subparser):
    """Build the argument parser for 'spack config'."""
    # The two scope flags are mutually exclusive: the user can only
    # choose one of --user / --site.
    scope_group = subparser.add_mutually_exclusive_group()
    scope_group.add_argument(
        '--user', action='store_const', const='user', dest='scope',
        help="Use config file in user home directory (default).")
    scope_group.add_argument(
        '--site', action='store_const', const='site', dest='scope',
        help="Use config file in spack prefix.")

    subcommands = subparser.add_subparsers(
        metavar='SUBCOMMAND', dest='config_command')

    # spack config set KEY [VALUE]
    setter = subcommands.add_parser('set', help='Set configuration values.')
    setter.add_argument('key', help="Key to set value for.")
    setter.add_argument('value', nargs='?', default=None,
                        help="Value to associate with key")

    # spack config get KEY
    getter = subcommands.add_parser('get', help='Get configuration values.')
    getter.add_argument('key', help="Key to get value for.")

    # spack config edit (takes no arguments)
    subcommands.add_parser('edit', help='Edit configuration file.')
def config_set(args):
    """Set a key/value pair in the selected config scope and save it."""
    # Default scope for writing is 'user'.
    if not args.scope:
        args.scope = 'user'

    config = spack.config.get_config(args.scope)
    config.set_value(args.key, args.value)
    config.write()
def config_get(args):
    # Print the value stored for args.key.  Unlike config_set/config_edit,
    # args.scope may be None here; presumably get_config then reads the
    # merged/default configuration -- confirm against spack.config.
    config = spack.config.get_config(args.scope)
    print config.get_value(args.key)
def config_edit(args):
    """Open the configuration file for the selected scope in $EDITOR."""
    # Default scope for editing is 'user', same as for writing.
    scope = args.scope or 'user'
    args.scope = scope
    config_file = spack.config.get_filename(scope)
    spack.editor(config_file)
def config(parser, args):
    """Entry point for 'spack config': dispatch to the chosen subcommand."""
    handlers = {
        'set': config_set,
        'get': config_get,
        'edit': config_edit,
    }
    handlers[args.config_command](args)

View file

@ -29,7 +29,7 @@
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
from llnl.util.tty.color import *
from llnl.util.lang import partition_list
from llnl.util.lang import partition_list, index_by
import spack
import spack.spec
@ -49,9 +49,6 @@ def setup_parser(subparser):
def find(parser, args):
def hasher():
return collections.defaultdict(hasher)
# Filter out specs that don't exist.
query_specs = spack.cmd.parse_specs(args.query_specs)
query_specs, nonexisting = partition_list(
@ -64,15 +61,9 @@ def hasher():
return
# Make a dict with specs keyed by architecture and compiler.
index = hasher()
for spec in spack.db.installed_package_specs():
# Check whether this installed package matches any query.
if query_specs and not any(spec.satisfies(q) for q in query_specs):
continue
if spec.compiler not in index[spec.architecture]:
index[spec.architecture][spec.compiler] = []
index[spec.architecture][spec.compiler].append(spec)
specs = [s for s in spack.db.installed_package_specs()
if not query_specs or any(s.satisfies(q) for q in query_specs)]
index = index_by(specs, 'architecture', 'compiler')
# Traverse the index and print out each package
for architecture in index:

View file

@ -36,7 +36,10 @@ def setup_parser(subparser):
help="Do not try to install dependencies of requested packages.")
subparser.add_argument(
'--keep-prefix', action='store_true', dest='keep_prefix',
help="Don't clean up staging area when install completes.")
help="Don't remove the install prefix if installation fails.")
subparser.add_argument(
'--keep-stage', action='store_true', dest='keep_stage',
help="Don't remove the build stage if installation succeeds.")
subparser.add_argument(
'-n', '--no-checksum', action='store_true', dest='no_checksum',
help="Do not check packages against checksum")
@ -55,4 +58,5 @@ def install(parser, args):
for spec in specs:
package = spack.db.get(spec)
package.do_install(keep_prefix=args.keep_prefix,
keep_stage=args.keep_stage,
ignore_deps=args.ignore_deps)

View file

@ -35,6 +35,11 @@ def setup_parser(subparser):
subparser.add_argument(
'-f', '--force', action='store_true', dest='force',
help="Remove regardless of whether other packages depend on this one.")
subparser.add_argument(
'-a', '--all', action='store_true', dest='all',
help="USE CAREFULLY. Remove ALL installed packages that match each supplied spec. " +
"i.e., if you say uninstall libelf, ALL versions of libelf are uninstalled. " +
"This is both useful and dangerous, like rm -r.")
subparser.add_argument(
'packages', nargs=argparse.REMAINDER, help="specs of packages to uninstall")
@ -50,15 +55,20 @@ def uninstall(parser, args):
pkgs = []
for spec in specs:
matching_specs = spack.db.get_installed(spec)
if len(matching_specs) > 1:
tty.die("%s matches multiple packages. Which one did you mean?"
% spec, *matching_specs)
if not args.all and len(matching_specs) > 1:
args = ["%s matches multiple packages." % spec,
"Matching packages:"]
args += [" " + str(s) for s in matching_specs]
args += ["You can either:",
" a) Use spack uninstall -a to uninstall ALL matching specs, or",
" b) use a more specific spec."]
tty.die(*args)
elif len(matching_specs) == 0:
if len(matching_specs) == 0:
tty.die("%s does not match any installed packages." % spec)
installed_spec = matching_specs[0]
pkgs.append(spack.db.get(installed_spec))
pkgs.extend(spack.db.get(s) for s in matching_specs)
# Sort packages to be uninstalled by the number of installed dependents
# This ensures we do things in the right order

292
lib/spack/spack/compiler.py Normal file
View file

@ -0,0 +1,292 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import re
import itertools
from datetime import datetime
import llnl.util.tty as tty
from llnl.util.lang import memoized
from llnl.util.filesystem import join_path
import spack.error
import spack.spec
from spack.util.multiproc import parmap
from spack.util.executable import *
from spack.version import Version
from spack.compilation import get_path
__all__ = ['Compiler', 'get_compiler_version']
def _verify_executables(*paths):
    """Raise CompilerAccessError for any path that is not an executable
    regular file.

    BUG FIX: the original test was ``not os.path.isfile(path) and
    os.access(path, os.X_OK)``, which binds as ``(not isfile) and
    access``.  That never fires: for an existing non-executable file
    ``not isfile`` is False, and for a missing file ``os.access`` is
    False.  The intent is clearly "raise unless it is an executable
    file", so negate the whole conjunction.
    """
    for path in paths:
        if not (os.path.isfile(path) and os.access(path, os.X_OK)):
            raise CompilerAccessError(path)
# Maps a compiler executable path to its detected version string
# (None when the version could not be parsed from the output).
_version_cache = {}


def get_compiler_version(compiler_path, version_arg, regex='(.*)'):
    """Invoke ``compiler_path version_arg`` and extract a version string.

    The first group of *regex* is matched against the compiler's output;
    the default regex captures the whole output.  Results are memoized
    in ``_version_cache`` so each compiler binary is run at most once
    per process.  Returns None when *regex* does not match.
    """
    if compiler_path not in _version_cache:
        compiler = Executable(compiler_path)
        output = compiler(version_arg, return_output=True, error=None)

        match = re.search(regex, output)
        _version_cache[compiler_path] = match.group(1) if match else None

    return _version_cache[compiler_path]
def dumpversion(compiler_path):
    """Simple default dumpversion method -- this is what gcc does."""
    # Delegates to get_compiler_version with gcc's '-dumpversion' flag,
    # using the default catch-all regex.
    return get_compiler_version(compiler_path, '-dumpversion')
class Compiler(object):
    """This class encapsulates a Spack "compiler", which includes C,
    C++, and Fortran compilers.  Subclasses should implement
    support for specific compilers, their possible names, arguments,
    and how to identify the particular type of compiler."""

    # Subclasses use possible names of C compiler
    cc_names = []

    # Subclasses use possible names of C++ compiler
    cxx_names = []

    # Subclasses use possible names of Fortran 77 compiler
    f77_names = []

    # Subclasses use possible names of Fortran 90 compiler
    fc_names = []

    # Optional prefix regexes for searching for this type of compiler.
    # Prefixes are sometimes used for toolchains, e.g. 'powerpc-bgq-linux-'
    prefixes = []

    # Optional suffix regexes for searching for this type of compiler.
    # Suffixes are used by some frameworks, e.g. macports uses an '-mp-X.Y'
    # version suffix for gcc.
    suffixes = [r'-.*']

    # Names of generic arguments used by this compiler
    arg_rpath = '-Wl,-rpath,%s'

    def __init__(self, cc, cxx, f77, fc, version=None):
        # Each compiler path may be None (e.g., a toolchain without a
        # Fortran front end); non-None paths are verified to be
        # executable files by _verify_executables.
        def check(exe):
            if exe is None:
                return None
            _verify_executables(exe)
            return exe

        self.cc = check(cc)
        self.cxx = check(cxx)
        self.f77 = check(f77)
        self.fc = check(fc)

        # Allow versions to be memoized so we don't have to run
        # compilers many times.  Record them in the version cache if
        # we get them in a constructor
        #
        # TODO: what to do if compilers have different versions?
        #
        self._version = version
        self._cache_version()

    @property
    def version(self):
        # Lazily detect the version by probing each front end in turn
        # (cc, cxx, f77, fc); the first one that reports a version wins
        # and is cached on the instance.
        if not self._version:
            v = self.cc_version(self.cc)
            if v is not None:
                self._version = v
                return Version(v)

            v = self.cxx_version(self.cxx)
            if v is not None:
                self._version = v
                return Version(v)

            v = self.f77_version(self.f77)
            if v is not None:
                self._version = v
                return Version(v)

            v = self.fc_version(self.fc)
            if v is not None:
                self._version = v
                return Version(v)

            # No front end could report a version.
            raise InvalidCompilerError()

        return Version(self._version)

    def _cache_version(self):
        # Seed the module-level _version_cache so get_compiler_version
        # does not have to re-run these executables.  NOTE(review): when
        # self._version is None this caches None for each path -- confirm
        # that is intended given get_compiler_version's cache check.
        _version_cache[self.cc] = self._version
        _version_cache[self.cxx] = self._version
        _version_cache[self.f77] = self._version
        _version_cache[self.fc] = self._version

    @property
    def spec(self):
        # CompilerSpec identifying this toolchain, e.g. gcc@4.7.2.
        # NOTE(review): self.name is not defined in this class; it is
        # presumably provided by subclasses -- confirm.
        return spack.spec.CompilerSpec(self.name, self.version)

    @classmethod
    def default_version(cls, cc):
        """Override just this to override all compiler version functions."""
        return dumpversion(cc)

    @classmethod
    def cc_version(cls, cc):
        return cls.default_version(cc)

    @classmethod
    def cxx_version(cls, cxx):
        return cls.default_version(cxx)

    @classmethod
    def f77_version(cls, f77):
        return cls.default_version(f77)

    @classmethod
    def fc_version(cls, fc):
        return cls.default_version(fc)

    @classmethod
    def _find_matches_in_path(cls, compiler_names, detect_version, *path):
        """Finds compilers in the paths supplied.

        Looks for all combinations of ``compiler_names`` with the
        ``prefixes`` and ``suffixes`` defined for this compiler
        class.  If any compilers match the compiler_names,
        prefixes, or suffixes, uses ``detect_version`` to figure
        out what version the compiler is.

        This returns a dict with compilers grouped by (prefix,
        suffix, version) tuples.  This can be further organized by
        find().
        """
        if not path:
            path = get_path('PATH')

        # The empty string lets bare names (no prefix/suffix) match too.
        prefixes = [''] + cls.prefixes
        suffixes = [''] + cls.suffixes

        # First pass: collect every executable name that matches a
        # (prefix, name, suffix) combination, without running anything.
        checks = []
        for directory in path:
            files = os.listdir(directory)
            for exe in files:
                full_path = join_path(directory, exe)

                prod = itertools.product(prefixes, compiler_names, suffixes)
                for pre, name, suf in prod:
                    regex = r'^(%s)%s(%s)$' % (pre, re.escape(name), suf)

                    match = re.match(regex, exe)
                    if match:
                        key = (full_path,) + match.groups()
                        checks.append(key)

        # Second pass: run version detection for each candidate in
        # parallel via parmap; candidates that fail are dropped.
        def check(key):
            try:
                full_path, prefix, suffix = key
                version = detect_version(full_path)
                return (version, prefix, suffix, full_path)
            except ProcessError, e:
                tty.debug("Couldn't get version for compiler %s" % full_path, e)
                return None

        successful = [key for key in parmap(check, checks) if key is not None]
        return { (v, p, s) : path for v, p, s, path in successful }

    @classmethod
    def find(cls, *path):
        """Try to find this type of compiler in the user's
        environment. For each set of compilers found, this returns
        compiler objects with the cc, cxx, f77, fc paths and the
        version filled in.

        This will search for compilers with the names in cc_names,
        cxx_names, etc. and it will group them if they have common
        prefixes, suffixes, and versions.  e.g., gcc-mp-4.7 would
        be grouped with g++-mp-4.7 and gfortran-mp-4.7.

        Example return values::

            [ gcc('/usr/bin/gcc', '/usr/bin/g++',
                  '/usr/bin/gfortran', '/usr/bin/gfortran',
                  Version('4.4.5')),
              gcc('/usr/bin/gcc-mp-4.5', '/usr/bin/g++-mp-4.5',
                  '/usr/bin/gfortran-mp-4.5', '/usr/bin/gfortran-mp-4.5',
                  Version('4.7.2')) ]
        """
        # Search for each language's front ends in parallel; each dict
        # maps (version, prefix, suffix) -> path for one language.
        dicts = parmap(
            lambda t: cls._find_matches_in_path(*t),
            [(cls.cc_names, cls.cc_version) + tuple(path),
             (cls.cxx_names, cls.cxx_version) + tuple(path),
             (cls.f77_names, cls.f77_version) + tuple(path),
             (cls.fc_names, cls.fc_version) + tuple(path)])

        all_keys = set()
        for d in dicts:
            all_keys.update(d)

        # Group front ends that share a (version, prefix, suffix) key
        # into one Compiler; missing languages become None.
        compilers = []
        for k in all_keys:
            ver, pre, suf = k
            paths = tuple(pn[k] if k in pn else None for pn in dicts)
            args = paths + (ver,)
            compilers.append(cls(*args))

        return compilers

    def __repr__(self):
        """Return a string representation of the compiler toolchain."""
        return self.__str__()

    def __str__(self):
        """Return a string representation of the compiler toolchain."""
        return "%s(%s)" % (
            self.name, '\n '.join((str(s) for s in (self.cc, self.cxx, self.f77, self.fc))))
class CompilerAccessError(spack.error.SpackError):
    """Raised when a configured compiler path is not an executable file."""
    def __init__(self, path):
        super(CompilerAccessError, self).__init__(
            "'%s' is not a valid compiler." % path)
class InvalidCompilerError(spack.error.SpackError):
    """Raised when none of a Compiler's front ends can report a version."""
    def __init__(self):
        super(InvalidCompilerError, self).__init__(
            "Compiler has no executables.")

View file

@ -22,23 +22,218 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
#
# This needs to be expanded for full compiler support.
#
"""This module contains functions related to finding compilers on the
system and configuring Spack to use multiple compilers.
"""
import imp
import os
from llnl.util.lang import memoized, list_modules
from llnl.util.filesystem import join_path
import spack
import spack.compilers.gcc
import spack.error
import spack.spec
import spack.config
@memoized
def supported_compilers():
return [c for c in list_modules(spack.compilers_path)]
from spack.util.multiproc import parmap
from spack.compiler import Compiler
from spack.util.executable import which
from spack.util.naming import mod_to_class
from spack.compilation import get_path
_imported_compilers_module = 'spack.compilers'
_required_instance_vars = ['cc', 'cxx', 'f77', 'fc']
_default_order = ['gcc', 'intel', 'pgi', 'clang']
def _auto_compiler_spec(function):
    """Decorator: coerce the single argument to a CompilerSpec.

    Lets callers pass either a CompilerSpec or anything CompilerSpec's
    constructor accepts (e.g. a plain string like 'gcc@4.7.3').
    """
    # Local import keeps the module's top-level imports untouched.
    from functools import wraps

    # wraps() preserves the decorated function's __name__/__doc__, which
    # the bare closure previously clobbered (everything showed up as
    # 'converter' in tracebacks and help()).
    @wraps(function)
    def converter(cspec_like):
        if not isinstance(cspec_like, spack.spec.CompilerSpec):
            cspec_like = spack.spec.CompilerSpec(cspec_like)
        return function(cspec_like)
    return converter
# NOTE(review): dead code -- this definition is rebound later in the
# module by the @_auto_compiler_spec-decorated supported(compiler_spec),
# so this version is never callable.  Looks like a refactor leftover.
def supported(compiler):
    return compiler in supported_compilers()
def _get_config():
    """Get a Spack config, but make sure it has compiler configuration
    first.

    If no config file defines any [compiler "..."] sections yet, search
    PATH for compilers, write them to the 'user' scope, and re-read.
    """
    # If any configuration file has compilers, just stick with the
    # ones already configured.
    config = spack.config.get_config()
    existing = [spack.spec.CompilerSpec(s)
                for s in config.get_section_names('compiler')]
    if existing:
        return config

    compilers = find_compilers(*get_path('PATH'))
    # NOTE(review): `existing` is necessarily empty at this point, so this
    # filter currently keeps every found compiler -- presumably written
    # this way for symmetry/future use; confirm intent.
    new_compilers = [
        c for c in compilers if c.spec not in existing]
    add_compilers_to_config('user', *new_compilers)

    # After writing compilers to the user config, return a full config
    # from all files.
    return spack.config.get_config(refresh=True)
@memoized
def default_compiler():
    """Return the spec of the default compiler to use.

    Walks _default_order (gcc, intel, pgi, clang) and returns the newest
    configured version of the first compiler family that has any.

    Raises:
        NoCompilersError: if no configured compiler matches any family.
    """
    # Removed two stale stub lines left over from the old implementation
    # ("from spack.spec import Compiler; return Compiler('gcc', ...)"),
    # which returned immediately and made the search below unreachable.
    versions = []
    for name in _default_order:  # TODO: customize order in config file.
        versions = find(name)
        if versions:
            break

    if not versions:
        raise NoCompilersError()

    return sorted(versions)[-1]
def find_compilers(*path):
    """Return a list of compilers found in the supplied paths.

    This invokes the find() method for each Compiler class, and
    appends the compilers detected to a list.
    """
    # Make sure path elements exist, and include /bin directories
    # under prefixes.
    filtered_path = []
    for p in path:
        # Eliminate symlinks and just take the real directories.
        p = os.path.realpath(p)
        if not os.path.isdir(p):
            continue
        filtered_path.append(p)

        # Check for a bin directory, add it if it exists.
        # (renamed from 'bin', which shadowed the builtin bin())
        bin_dir = join_path(p, 'bin')
        if os.path.isdir(bin_dir):
            filtered_path.append(os.path.realpath(bin_dir))

    # Once the paths are cleaned up, do a search for each type of
    # compiler.  We can spawn a bunch of parallel searches to reduce
    # the overhead of spelunking all these directories.
    types = all_compiler_types()
    compiler_lists = parmap(lambda cls: cls.find(*filtered_path), types)

    # Ensure all the version calls we made are cached in the parent
    # process, as well.  This speeds up Spack a lot.
    # (reduce is a builtin here -- this codebase is Python 2.)
    clist = reduce(lambda x, y: x + y, compiler_lists)
    for c in clist:
        c._cache_version()
    return clist
def add_compilers_to_config(scope, *compilers):
    """Add config entries for each compiler to the config file for
    ``scope`` ('site' or 'user'), then write the file back out.
    """
    config = spack.config.get_config(scope)
    for compiler in compilers:
        add_compiler(config, compiler)
    config.write()
def add_compiler(config, compiler):
    """Record one compiler's executable paths in the config object.

    Writes each required field (cc, cxx, f77, fc) into the compiler's
    named section, using the string "None" for missing frontends.
    """
    for var in _required_instance_vars:
        exe = getattr(compiler, var)
        config.set_value('compiler', compiler.spec, var,
                         exe if exe else "None")
def supported_compilers():
    """Return a sorted list of names of compilers supported by Spack.

    See available_compilers() to get a list of all the available
    versions of supported compilers.
    """
    # sorted() accepts the iterable directly; the previous generator
    # expression wrapper (name for name in ...) was redundant.
    return sorted(list_modules(spack.compilers_path))
@_auto_compiler_spec
def supported(compiler_spec):
    """Test if a particular compiler is supported."""
    # Only the compiler family name is checked -- any version of a
    # supported family counts as supported.
    return compiler_spec.name in supported_compilers()
def all_compilers():
    """Return a list of specs for all the compiler versions currently
    available to build with.  These are instances of CompilerSpec.
    """
    # Each configured compiler appears as a named section in the config,
    # e.g. [compiler "gcc@4.7.3"]; the section names are the specs.
    configuration = _get_config()
    return [spack.spec.CompilerSpec(s)
            for s in configuration.get_section_names('compiler')]
@_auto_compiler_spec
def find(compiler_spec):
    """Return specs of available compilers that match the supplied
    compiler spec.  Return an empty list if nothing is found."""
    return [c for c in all_compilers() if c.satisfies(compiler_spec)]
@_auto_compiler_spec
def compilers_for_spec(compiler_spec):
    """Return Compiler objects for every configured compiler that
    satisfies the supplied CompilerSpec.

    Returns an empty list if none are found.

    Raises:
        InvalidCompilerConfigurationError: if a matching config section
            is missing one of the required fields (cc, cxx, f77, fc).
    """
    config = _get_config()

    def get_compiler(cspec):
        # Raw key/value pairs from the [compiler "<cspec>"] section.
        items = { k:v for k,v in config.items('compiler "%s"' % cspec) }

        if not all(n in items for n in _required_instance_vars):
            raise InvalidCompilerConfigurationError(cspec)

        cls = class_for_compiler_name(cspec.name)
        compiler_paths = []
        for c in _required_instance_vars:
            compiler_path = items[c]
            # add_compiler() writes the literal string "None" for missing
            # frontends; translate it back to a real None here.
            if compiler_path != "None":
                compiler_paths.append(compiler_path)
            else:
                compiler_paths.append(None)

        # BUG FIX: use the matched concrete spec's version (cspec), not
        # the query spec's.  compiler_spec may be abstract (e.g. just
        # 'gcc' with no version), in which case its version is not the
        # version of the configured compiler being instantiated.
        args = tuple(compiler_paths) + (cspec.version,)
        return cls(*args)

    matches = find(compiler_spec)
    return [get_compiler(cspec) for cspec in matches]
@_auto_compiler_spec
def compiler_for_spec(compiler_spec):
    """Get the single compiler that satisfies compiler_spec.

    compiler_spec must be concrete (a concrete spec matches exactly one
    configured compiler).
    """
    # NOTE(review): asserts are stripped under `python -O`; if these
    # invariants must hold in production, raise explicit errors instead.
    assert(compiler_spec.concrete)
    compilers = compilers_for_spec(compiler_spec)
    assert(len(compilers) == 1)
    return compilers[0]
def class_for_compiler_name(compiler_name):
    """Given a compiler module name, get the corresponding Compiler class."""
    assert(supported(compiler_name))

    # Load lib/spack/spack/compilers/<name>.py and pull out the class
    # whose name is the CamelCase form of the module name.
    # Note: imp.load_source re-executes the file on every call.
    file_path = join_path(spack.compilers_path, compiler_name + ".py")
    compiler_mod = imp.load_source(_imported_compilers_module, file_path)
    cls = getattr(compiler_mod, mod_to_class(compiler_name))

    # make a note of the name in the module so we can get to it easily.
    cls.name = compiler_name
    return cls
def all_compiler_types():
    """Return the Compiler subclass for every supported compiler family."""
    compiler_classes = []
    for name in supported_compilers():
        compiler_classes.append(class_for_compiler_name(name))
    return compiler_classes
class InvalidCompilerConfigurationError(spack.error.SpackError):
    """Raised when a [compiler "..."] section is missing one of the
    required compiler fields (cc, cxx, f77, fc)."""
    def __init__(self, compiler_spec):
        super(InvalidCompilerConfigurationError, self).__init__(
            "Invalid configuration for [compiler \"%s\"]: " % compiler_spec,
            "Compiler configuration must contain entries for all compilers: %s"
            % _required_instance_vars)
class NoCompilersError(spack.error.SpackError):
    """Raised when Spack cannot find any compilers at all."""
    def __init__(self):
        super(NoCompilersError, self).__init__("Spack could not find any compilers!")

View file

@ -0,0 +1,52 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack.compiler import Compiler
class Clang(Compiler):
    """Compiler subclass describing how to detect clang toolchains."""

    # Subclasses use possible names of C compiler
    cc_names = ['clang']

    # Subclasses use possible names of C++ compiler
    cxx_names = ['clang++']

    # Subclasses use possible names of Fortran 77 compiler
    # (clang has no Fortran frontend)
    f77_names = []

    # Subclasses use possible names of Fortran 90 compiler
    fc_names = []

    @classmethod
    def default_version(cls, comp):
        """The '--version' option works for clang compilers.
        Output looks like this::

            clang version 3.1 (trunk 149096)
            Target: x86_64-unknown-linux-gnu
            Thread model: posix
        """
        # FIX: first parameter of a @classmethod renamed self -> cls for
        # correctness and consistency with the other compiler modules.
        # NOTE(review): only `Compiler` is imported at the top of this
        # file; get_compiler_version presumably also comes from
        # spack.compiler -- confirm the import brings it into scope.
        return get_compiler_version(
            comp, '--version', r'clang version ([^ ]+)')

View file

@ -22,18 +22,32 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
#
# This is a stub module. It should be expanded when we implement full
# compiler support.
#
from spack.compiler import *
import subprocess
from spack.version import Version
class Gcc(Compiler):
    """Compiler subclass describing how to detect GNU toolchains."""

    # Subclasses use possible names of C compiler
    cc_names = ['gcc']

    # Subclasses use possible names of C++ compiler
    cxx_names = ['g++']

    # Subclasses use possible names of Fortran 77 compiler
    f77_names = ['gfortran']

    # Subclasses use possible names of Fortran 90 compiler
    fc_names = ['gfortran']

    # MacPorts builds gcc versions with prefixes and -mp-X.Y suffixes.
    suffixes = [r'-mp-\d\.\d']

    # Removed stale pre-refactor stub remnants that were interleaved here
    # (class attrs cc/cxx/fortran and a get_version() using subprocess on
    # an undefined free variable `cc`) -- they would shadow the new
    # detection API and get_version() would raise NameError if called.

    @classmethod
    def fc_version(cls, fc):
        """Version of a gfortran executable, via -dumpversion."""
        return get_compiler_version(
            fc, '-dumpversion',
            # older gfortran versions don't have simple dumpversion output.
            r'(?:GNU Fortran \(GCC\))?(\d+\.\d+\.\d+)')

    @classmethod
    def f77_version(cls, f77):
        """gfortran serves as both the F77 and F90 frontend."""
        return cls.fc_version(f77)

View file

@ -22,18 +22,36 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
#
# This is a stub module. It should be expanded when we implement full
# compiler support.
#
from spack.compiler import *
import subprocess
from spack.version import Version
class Intel(Compiler):
    """Compiler subclass describing how to detect Intel toolchains."""

    # Subclasses use possible names of C compiler
    cc_names = ['icc']

    # Subclasses use possible names of C++ compiler
    cxx_names = ['icpc']

    # Subclasses use possible names of Fortran 77 compiler
    f77_names = ['ifort']

    # Subclasses use possible names of Fortran 90 compiler
    fc_names = ['ifort']

    @classmethod
    def default_version(cls, comp):
        """The '--version' option seems to be the most consistent one
        for intel compilers.  Output looks like this::

            icpc (ICC) 12.1.5 20120612
            Copyright (C) 1985-2012 Intel Corporation.  All rights reserved.

        or::

            ifort (IFORT) 12.1.5 20120612
            Copyright (C) 1985-2012 Intel Corporation.  All rights reserved.
        """
        return get_compiler_version(
            comp, '--version', r'\((?:IFORT|ICC)\) ([^ ]+)')

    # Removed stale pre-refactor stub remnants that trailed the class
    # (cc/cxx/fortran string attrs and a get_version() using subprocess
    # on an undefined free variable `cc`) -- dead code from the old stub
    # module, unusable after the refactor.

View file

@ -0,0 +1,51 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack.compiler import *
class Pgi(Compiler):
    """Compiler subclass describing how to detect PGI toolchains."""

    # Subclasses use possible names of C compiler
    cc_names = ['pgcc']

    # Subclasses use possible names of C++ compiler
    cxx_names = ['pgCC']

    # Subclasses use possible names of Fortran 77 compiler
    f77_names = ['pgf77']

    # Subclasses use possible names of Fortran 90 compiler
    fc_names = ['pgf95', 'pgf90']

    @classmethod
    def default_version(cls, comp):
        """The '-V' option works for all the PGI compilers.
        Output looks like this::

            pgf95 10.2-0 64-bit target on x86-64 Linux -tp nehalem-64
            Copyright 1989-2000, The Portland Group, Inc.  All Rights Reserved.
            Copyright 2000-2010, STMicroelectronics, Inc.  All Rights Reserved.
        """
        version_regex = r'pg[^ ]* ([^ ]+) \d\d\d?-bit target'
        return get_compiler_version(comp, '-V', version_regex)

View file

@ -33,9 +33,10 @@
TODO: make this customizable and allow users to configure
concretization policies.
"""
import spack.architecture
import spack.compilers
import spack.spec
import spack.compilers
import spack.architecture
import spack.error
from spack.version import *
@ -50,6 +51,15 @@ def concretize_version(self, spec):
"""If the spec is already concrete, return. Otherwise take
the most recent available version, and default to the package's
version if there are no available versions.
TODO: In many cases we probably want to look for installed
versions of each package and use an installed version
if we can link to it. The policy implemented here will
tend to rebuild a lot of stuff because it will prefer
a compiler in the spec to any compiler already-
installed things were built with. There is likely
some better policy that finds some middle ground
between these two extremes.
"""
# return if already concrete.
if spec.versions.concrete:
@ -89,26 +99,42 @@ def concretize_architecture(self, spec):
def concretize_compiler(self, spec):
"""Currently just sets the compiler to gcc or throws an exception
if the compiler is set to something else.
TODO: implement below description.
If the spec already has a compiler, we're done. If not, then
take the compiler used for the nearest ancestor with a concrete
compiler, or use the system default if there is no ancestor
with a compiler.
"""If the spec already has a compiler, we're done. If not, then take
the compiler used for the nearest ancestor with a compiler
spec and use that. If the ancestor's compiler is not
concrete, then give it a valid version. If there is no
ancestor with a compiler, use the system default compiler.
Intuition: Use the system default if no package that depends on
this one has a strict compiler requirement. Otherwise, try to
build with the compiler that will be used by libraries that
link to this one, to maximize compatibility.
"""
if spec.compiler and spec.compiler.concrete:
if spec.compiler != spack.compilers.default_compiler():
raise spack.spec.UnknownCompilerError(str(spec.compiler))
else:
spec.compiler = spack.compilers.default_compiler()
all_compilers = spack.compilers.all_compilers()
if (spec.compiler and
spec.compiler.concrete and
spec.compiler in all_compilers):
return
try:
nearest = next(p for p in spec.preorder_traversal(direction='parents')
if p.compiler is not None).compiler
if not nearest in all_compilers:
# Take the newest compiler that satisfies the spec
matches = sorted(spack.compilers.find(nearest))
if not matches:
raise UnavailableCompilerVersionError(nearest)
# copy concrete version into nearest spec
nearest.versions = matches[-1].versions.copy()
assert(nearest.concrete)
spec.compiler = nearest.copy()
except StopIteration:
spec.compiler = spack.compilers.default_compiler().copy()
def choose_provider(self, spec, providers):
@ -123,3 +149,12 @@ def choose_provider(self, spec, providers):
first_key = sorted(index.keys())[0]
latest_version = sorted(index[first_key])[-1]
return latest_version
class UnavailableCompilerVersionError(spack.error.SpackError):
"""Raised when there is no available compiler that satisfies a
compiler spec."""
def __init__(self, compiler_spec):
super(UnavailableCompilerVersionError, self).__init__(
"No available compiler version matches '%s'" % compiler_spec,
"Run 'spack compilers' to see available compiler Options.")

492
lib/spack/spack/config.py Normal file
View file

@ -0,0 +1,492 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""This module implements Spack's configuration file handling.
Configuration file scopes
===============================
When Spack runs, it pulls configuration data from several config
files, much like bash shells. In Spack, there are two configuration
scopes:
1. ``site``: Spack loads site-wide configuration options from
``$(prefix)/etc/spackconfig``.
2. ``user``: Spack next loads per-user configuration options from
~/.spackconfig.
If user options have the same names as site options, the user options
take precedence.
Configuration file format
===============================
Configuration files are formatted using .gitconfig syntax, which is
much like Windows .INI format. This format is implemented by Python's
ConfigParser class, and it's easy to read and versatile.
The file is divided into sections, like this ``compiler`` section::
[compiler]
cc = /usr/bin/gcc
In each section there are options (cc), and each option has a value
(/usr/bin/gcc).
Borrowing from git, we also allow named sections, e.g.:
[compiler "gcc@4.7.3"]
cc = /usr/bin/gcc
This is a compiler section, but it's for the specific compiler,
``gcc@4.7.3``. ``gcc@4.7.3`` is the name.
Keys
===============================
Together, the section, name, and option, separated by periods, are
called a ``key``. Keys can be used on the command line to set
configuration options explicitly (this is also borrowed from git).
For example, to change the C compiler used by gcc@4.7.3, you could do
this:
spack config compiler.gcc@4.7.3.cc /usr/local/bin/gcc
That will create a named compiler section in the user's .spackconfig
like the one shown above.
"""
import os
import re
import inspect
import ConfigParser as cp
from collections import OrderedDict
from llnl.util.lang import memoized
import spack.error
__all__ = [
'SpackConfigParser', 'get_config', 'SpackConfigurationError',
'InvalidConfigurationScopeError', 'InvalidSectionNameError',
'ReadOnlySpackConfigError', 'ConfigParserError', 'NoOptionError',
'NoSectionError']
# Matches a named section header, e.g.: compiler "gcc@4.7.3"
_named_section_re = r'([^ ]+) "([^"]+)"'

# Names of scopes and their corresponding configuration files.
# BUG FIX: built from a list of pairs instead of a dict literal --
# OrderedDict({...}) does not preserve any particular order, and 'site'
# must be read before 'user' so that user options override site options.
_scopes = OrderedDict([
    ('site', os.path.join(spack.etc_path, 'spackconfig')),
    ('user', os.path.expanduser('~/.spackconfig')),
])

# Splits 'section.name.option' keys into their three parts (see _parse_key).
_field_regex = r'^([\w-]*)' \
               r'(?:\.(.*(?=.)))?' \
               r'(?:\.([\w-]+))?$'

# Matches a quoted section name.
# FIX: the original had r'\"([^"]*\)\"$', whose stray '\)' required a
# literal ')' before the closing quote, so it could never match normal
# section names.
_section_regex = r'^([\w-]*)\s*' \
                 r'\"([^"]*)\"$'

# Cache of configs -- we memoize this for performance.
_config = {}
def get_config(scope=None, **kwargs):
    """Get a Spack configuration object, which can be used to set options.

    With no arguments, this returns a SpackConfigParser with config
    options loaded from all config files.  This is how client code
    should read Spack configuration options.

    Optionally, a scope parameter can be provided.  Valid scopes
    are ``site`` and ``user``.  If a scope is provided, only the
    options from that scope's configuration file are loaded.  The
    caller can set or unset options, then call ``write()`` on the
    config object to write it back out to the original config file.

    By default, this will cache configurations and return the last
    read version of the config file.  If the config file is
    modified and you need to refresh, call get_config with the
    refresh=True keyword argument.  This will force all files to be
    re-read.
    """
    refresh = kwargs.get('refresh', False)
    if refresh:
        _config.clear()

    if scope not in _config:
        if scope is None:
            # Merge every scope's file, in precedence order.
            # (list() replaces a redundant identity comprehension.)
            _config[scope] = SpackConfigParser(list(_scopes.values()))
        elif scope not in _scopes:
            # BUG FIX: the original raised UnknownConfigurationScopeError,
            # which is not defined anywhere in this module (it would have
            # raised NameError); InvalidConfigurationScopeError is the
            # error class this module actually defines and exports.
            raise InvalidConfigurationScopeError(scope)
        else:
            _config[scope] = SpackConfigParser(_scopes[scope])

    return _config[scope]
def get_filename(scope):
    """Get the filename for a particular config scope.

    Raises:
        InvalidConfigurationScopeError: if scope is not 'site' or 'user'.
    """
    # BUG FIX: raised undefined UnknownConfigurationScopeError before
    # (NameError); also 'scope not in' is the idiomatic negation.
    if scope not in _scopes:
        raise InvalidConfigurationScopeError(scope)
    return _scopes[scope]
def _parse_key(key):
    """Return the section, name, and option the field describes.
    Values are returned in a 3-tuple.

    e.g.:

    The field name ``compiler.gcc@4.7.3.cc`` refers to the 'cc' key
    in a section that looks like this:

        [compiler "gcc@4.7.3"]
            cc = /usr/local/bin/gcc

    * The section is ``compiler``
    * The name is ``gcc@4.7.3``
    * The key is ``cc``

    Raises InvalidSectionNameError if the key doesn't match _field_regex.
    """
    # Unmatched middle/last groups come back as None, so callers get
    # (section, None, None) for a bare section key.
    match = re.search(_field_regex, key)
    if match:
        return match.groups()
    else:
        raise InvalidSectionNameError(key)
def _make_section_name(section, name):
    """Build a raw ConfigParser section header.

    Named sections render as 'section "name"'; an empty or None name
    yields just the plain section string.
    """
    if name:
        return '%s "%s"' % (section, name)
    return section
def _autokey(fun):
    """Allow a function to be called with a string key like
    'compiler.gcc.cc', or with the section, name, and option
    separated.  Function should take at least three args, e.g.:

        fun(self, section, name, option, [...])

    This will allow the function above to be called normally or
    with a string key, e.g.:

        fun(self, key, [...])
    """
    # Local import keeps the module's top-level imports untouched.
    from functools import wraps

    # Number of parameters the wrapped function declares; used to decide
    # whether the caller passed a dotted key or the separated parts.
    argspec = inspect.getargspec(fun)
    fun_nargs = len(argspec[0])

    # wraps() preserves fun's __name__/__doc__ on the wrapper (previously
    # every decorated method reported itself as 'string_key_func').
    @wraps(fun)
    def string_key_func(*args):
        nargs = len(args)
        if nargs == fun_nargs - 2:
            # One arg in place of (section, name, option): a dotted key.
            section, name, option = _parse_key(args[1])
            return fun(args[0], section, name, option, *args[2:])
        elif nargs == fun_nargs:
            return fun(*args)
        else:
            raise TypeError(
                "%s takes %d or %d args (found %d)."
                % (fun.__name__, fun_nargs - 2, fun_nargs, len(args)))
    return string_key_func
class SpackConfigParser(cp.RawConfigParser):
    """Slightly modified from Python's raw config file parser to accept
    leading whitespace and preserve comments.

    Comments and blank lines are stored in self._sections under synthetic
    'comment-N' keys so that write() can reproduce them verbatim.
    """
    # Slightly modify Python option expressions to allow leading whitespace
    OPTCRE = re.compile(r'\s*' + cp.RawConfigParser.OPTCRE.pattern)
    OPTCRE_NV = re.compile(r'\s*' + cp.RawConfigParser.OPTCRE_NV.pattern)

    def __init__(self, file_or_files):
        # allow_no_value supports valueless options (e.g. bare mirror URLs);
        # OrderedDict preserves section order for round-tripping.
        cp.RawConfigParser.__init__(
            self, dict_type=OrderedDict, allow_no_value=True)

        if isinstance(file_or_files, basestring):
            # Single file: remember it so write() can write back in place.
            self.read([file_or_files])
            self.filename = file_or_files
        else:
            # Multiple files merged: read-only (filename is None).
            self.read(file_or_files)
            self.filename = None

    @_autokey
    def set_value(self, section, name, option, value):
        """Set the value for a key.  If the key is in a section or named
        section that does not yet exist, add that section.
        """
        sn = _make_section_name(section, name)
        if not self.has_section(sn):
            self.add_section(sn)

        # Allow valueless config options to be set like this:
        #     spack config set mirror https://foo.bar.com
        #
        # Instead of this, which parses incorrectly:
        #     spack config set mirror.https://foo.bar.com
        #
        if option is None:
            option = value
            value = None

        self.set(sn, option, value)

    @_autokey
    def get_value(self, section, name, option):
        """Get the value for a key.  Raises NoOptionError or NoSectionError if
        the key is not present."""
        sn = _make_section_name(section, name)
        try:
            if not option:
                # TODO: format this better
                return self.items(sn)
            return self.get(sn, option)

        # Wrap ConfigParser exceptions in SpackExceptions
        # (Python 2 except syntax, as used throughout this file.)
        except cp.NoOptionError, e: raise NoOptionError(e)
        except cp.NoSectionError, e: raise NoSectionError(e)
        except cp.Error, e: raise ConfigParserError(e)

    @_autokey
    def has_value(self, section, name, option):
        """Return whether the configuration file has a value for a
        particular key."""
        sn = _make_section_name(section, name)
        return self.has_option(sn, option)

    def get_section_names(self, sectype):
        """Get all named sections with the specified type.

        A named section looks like this:

            [compiler "gcc@4.7"]

        Names of sections are returned as a list, e.g.:

            ['gcc@4.7', 'intel@12.3', 'pgi@4.2']
        """
        sections = []
        for secname in self.sections():
            match = re.match(_named_section_re, secname)
            if match:
                t, name = match.groups()
                if t == sectype:
                    sections.append(name)
        return sections

    def write(self, path_or_fp=None):
        """Write this configuration out to a file.

        If called with no arguments, this will write the
        configuration out to the file from which it was read.  If
        this config was read from multiple files, e.g. site
        configuration and then user configuration, write will
        simply raise an error.

        If called with a path or file object, this will write the
        configuration out to the supplied path or file object.
        """
        if path_or_fp is None:
            if not self.filename:
                raise ReadOnlySpackConfigError()
            path_or_fp = self.filename

        if isinstance(path_or_fp, basestring):
            path_or_fp = open(path_or_fp, 'w')

        self._write(path_or_fp)

    def _read(self, fp, fpname):
        """This is a copy of Python 2.7's _read() method, with support for
        continuation lines removed.

        Additionally, comments and blank lines are preserved as
        'comment-N' entries in self._sections instead of being skipped.
        """
        cursect = None  # None, or a dictionary
        optname = None
        lineno = 0
        comment = 0
        e = None  # None, or an exception
        while True:
            line = fp.readline()
            if not line:
                break
            lineno = lineno + 1
            # comment or blank line?
            if ((line.strip() == '' or line[0] in '#;') or
                (line.split(None, 1)[0].lower() == 'rem' and line[0] in "rR")):
                # Preserve the comment/blank line for round-trip writes.
                self._sections["comment-%d" % comment] = line
                comment += 1
                continue
            # a section header or option header?
            else:
                # is it a section header?
                mo = self.SECTCRE.match(line)
                if mo:
                    sectname = mo.group('header')
                    if sectname in self._sections:
                        cursect = self._sections[sectname]
                    elif sectname == cp.DEFAULTSECT:
                        cursect = self._defaults
                    else:
                        cursect = self._dict()
                        cursect['__name__'] = sectname
                        self._sections[sectname] = cursect
                    # So sections can't start with a continuation line
                    optname = None
                # no section header in the file?
                elif cursect is None:
                    raise cp.MissingSectionHeaderError(fpname, lineno, line)
                # an option line?
                else:
                    mo = self._optcre.match(line)
                    if mo:
                        optname, vi, optval = mo.group('option', 'vi', 'value')
                        optname = self.optionxform(optname.rstrip())
                        # This check is fine because the OPTCRE cannot
                        # match if it would set optval to None
                        if optval is not None:
                            if vi in ('=', ':') and ';' in optval:
                                # ';' is a comment delimiter only if it follows
                                # a spacing character
                                pos = optval.find(';')
                                if pos != -1 and optval[pos-1].isspace():
                                    optval = optval[:pos]
                            optval = optval.strip()
                            # allow empty values
                            if optval == '""':
                                optval = ''
                            cursect[optname] = [optval]
                        else:
                            # valueless option handling
                            cursect[optname] = optval
                    else:
                        # a non-fatal parsing error occurred.  set up the
                        # exception but keep going. the exception will be
                        # raised at the end of the file and will contain a
                        # list of all bogus lines
                        if not e:
                            e = cp.ParsingError(fpname)
                        e.append(lineno, repr(line))
        # if any parsing errors occurred, raise an exception
        if e:
            raise e

        # join the multi-line values collected while reading
        all_sections = [self._defaults]
        all_sections.extend(self._sections.values())
        for options in all_sections:
            # skip comments
            if isinstance(options, basestring):
                continue
            for name, val in options.items():
                if isinstance(val, list):
                    options[name] = '\n'.join(val)

    def _write(self, fp):
        """Write an .ini-format representation of the configuration state.

        This is taken from the default Python 2.7 source.  It writes 4
        spaces at the beginning of lines instead of no leading space.
        """
        # NOTE(review): interior spacing of the literals below was
        # collapsed by rendering; the docstring says option lines are
        # indented 4 spaces -- confirm against the committed file.
        if self._defaults:
            fp.write("[%s]\n" % cp.DEFAULTSECT)
            for (key, value) in self._defaults.items():
                fp.write("    %s = %s\n" % (key, str(value).replace('\n', '\n\t')))
            fp.write("\n")
        for section in self._sections:
            # Handles comments and blank lines.
            if isinstance(self._sections[section], basestring):
                fp.write(self._sections[section])
                continue
            else:
                # Allow leading whitespace
                fp.write("[%s]\n" % section)
                for (key, value) in self._sections[section].items():
                    if key == "__name__":
                        continue
                    if (value is not None) or (self._optcre == self.OPTCRE):
                        key = " = ".join((key, str(value).replace('\n', '\n\t')))
                    fp.write("    %s\n" % (key))
class SpackConfigurationError(spack.error.SpackError):
    """Base class for all Spack configuration errors."""
    def __init__(self, *args):
        super(SpackConfigurationError, self).__init__(*args)
class InvalidConfigurationScopeError(SpackConfigurationError):
    """Raised when an unrecognized configuration scope name is used."""
    def __init__(self, scope):
        super(InvalidConfigurationScopeError, self).__init__(
            "Invalid configuration scope: '%s'" % scope,
            # BUG FIX: str.join takes a single iterable; the original
            # ", ".join(*_scopes.values()) unpacked the two paths as
            # separate arguments and would raise TypeError while
            # formatting the error.
            "Options are: %s" % ", ".join(_scopes.values()))
class InvalidSectionNameError(SpackConfigurationError):
    """Raised when the name for a section is invalid (does not match
    the section.name.option key grammar)."""
    def __init__(self, name):
        super(InvalidSectionNameError, self).__init__(
            "Invalid section specifier: '%s'" % name)
class ReadOnlySpackConfigError(SpackConfigurationError):
    """Raised when user attempts to write to a config read from multiple files."""
    def __init__(self):
        super(ReadOnlySpackConfigError, self).__init__(
            "Can only write to a single-file SpackConfigParser")
class ConfigParserError(SpackConfigurationError):
    """Wrapper for the Python ConfigParser's errors.

    Keeps the original error available as self.error for callers that
    need the underlying details.
    """
    def __init__(self, error):
        super(ConfigParserError, self).__init__(str(error))
        self.error = error
class NoOptionError(ConfigParserError):
    """Wrapper for ConfigParser NoOptionError."""
    def __init__(self, error):
        super(NoOptionError, self).__init__(error)
class NoSectionError(ConfigParserError):
    """Wrapper for ConfigParser NoSectionError."""
    # (docstring fixed: previously copy-pasted as "NoOptionError")
    def __init__(self, error):
        super(NoSectionError, self).__init__(error)

View file

@ -1,135 +0,0 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import tempfile
from llnl.util.filesystem import *
from spack.version import Version
from spack.util.executable import *
from spack.directory_layout import SpecHashDirectoryLayout
from spack.concretize import DefaultConcretizer
from spack.packages import PackageDB
# This file lives in $prefix/lib/spack/spack/__init__.py, so walking
# four directory levels up from __file__ yields the install prefix.
prefix = ancestor(__file__, 4)

# The spack script itself
spack_file = join_path(prefix, "bin", "spack")

# spack directory hierarchy
lib_path = join_path(prefix, "lib", "spack")
build_env_path = join_path(lib_path, "env")
module_path = join_path(lib_path, "spack")
compilers_path = join_path(module_path, "compilers")
test_path = join_path(module_path, "test")
var_path = join_path(prefix, "var", "spack")
stage_path = join_path(var_path, "stage")
install_path = join_path(prefix, "opt")

#
# Set up the packages database.
#
packages_path = join_path(var_path, "packages")
db = PackageDB(packages_path)

#
# This is the path to mock packages used by spack for testing.
#
mock_packages_path = join_path(var_path, "mock_packages")

#
# This controls how spack lays out install prefixes and
# stage directories.
#
install_layout = SpecHashDirectoryLayout(install_path, prefix_size=6)

#
# This controls how things are concretized in spack.
# Replace it with a subclass if you want different
# policies.
#
concretizer = DefaultConcretizer()

# Version information
spack_version = Version("1.0")

# User's editor from the environment.  NOTE(review): an unset EDITOR
# yields Executable("") here -- presumably callers handle that; verify.
editor = Executable(os.environ.get("EDITOR", ""))

# Curl tool for fetching files.
curl = which("curl", required=True)

# Whether to build in tmp space or directly in the stage_path.
# If this is true, then spack will make stage directories in
# a tmp filesystem, and it will symlink them into stage_path.
use_tmp_stage = True

# Locations to use for staging and building, in order of preference.
# Use a %u to add a username to the stage paths here, in case this
# is a shared filesystem.  Spack will use the first of these paths
# that it can create.
tmp_dirs = []
_default_tmp = tempfile.gettempdir()
if _default_tmp != os.getcwd():
    tmp_dirs.append(os.path.join(_default_tmp, 'spack-stage'))
tmp_dirs.append('/nfs/tmp2/%u/spack-stage')

# Whether spack should allow installation of unsafe versions of
# software.  "Unsafe" versions are ones it doesn't have a checksum
# for.
do_checksum = True

#
# SYS_TYPE to use for the spack installation.
# Value of this determines what platform spack thinks it is by
# default.  You can assign three types of values:
# 1. None
#    Spack will try to determine the sys_type automatically.
#
# 2. A string
#    Spack will assume that the sys_type is hardcoded to the value.
#
# 3. A function that returns a string:
#    Spack will use this function to determine the sys_type.
#
sys_type = None

#
# Places to download tarballs from.  Examples:
#
# For a local directory:
#   mirrors = ['file:///Users/gamblin2/spack-mirror']
#
# For a website:
#   mirrors = ['http://spackports.org/spack-mirror/']
#
# For no mirrors:
#   mirrors = []
#
mirrors = []

View file

@ -117,7 +117,7 @@ def __call__(self, package_self, *args, **kwargs):
or if there is none, then raise a NoSuchMethodError.
"""
for spec, method in self.method_list:
if spec.satisfies(package_self.spec):
if package_self.spec.satisfies(spec):
return method(package_self, *args, **kwargs)
if self.default:

View file

@ -626,6 +626,7 @@ def do_install(self, **kwargs):
"""
# whether to keep the prefix on failure. Default is to destroy it.
keep_prefix = kwargs.get('keep_prefix', False)
keep_stage = kwargs.get('keep_stage', False)
ignore_deps = kwargs.get('ignore_deps', False)
if not self.spec.concrete:
@ -650,29 +651,31 @@ def do_install(self, **kwargs):
raise InstallError("Unable to fork build process: %s" % e)
if pid == 0:
tty.msg("Building %s." % self.name)
# create the install directory (allow the layout to handle
# this in case it needs to add extra files)
spack.install_layout.make_path_for_spec(self.spec)
# Set up process's build environment before running install.
build_env.set_build_environment_variables(self)
build_env.set_module_variables_for_package(self)
try:
# Subclasses implement install() to do the build &
# install work.
tty.msg("Building %s." % self.name)
# create the install directory. The install layout
handles this so that it can use whatever
# package naming scheme it likes.
spack.install_layout.make_path_for_spec(self.spec)
# Set up process's build environment before running install.
build_env.set_compiler_environment_variables(self)
build_env.set_build_environment_variables(self)
build_env.set_module_variables_for_package(self)
# Subclasses implement install() to do the real work.
self.install(self.spec, self.prefix)
# Ensure that something was actually installed.
if not os.listdir(self.prefix):
raise InstallError(
"Install failed for %s. Nothing was installed!"
% self.name)
# On successful install, remove the stage.
# Leave if there is an error
self.stage.destroy()
if not keep_stage:
self.stage.destroy()
tty.msg("Successfully installed %s" % self.name)
print_pkg(self.prefix)
@ -690,7 +693,11 @@ def do_install(self, **kwargs):
"Spack will think this package is installed." +
"Manually remove this directory to fix:",
self.prefix)
raise
# Child doesn't raise or return to main spack code.
# Just runs default exception handler and exits.
sys.excepthook(*sys.exc_info())
os._exit(1)
# Parent process just waits for the child to complete. If the
# child exited badly, assume it already printed an appropriate
@ -724,16 +731,16 @@ def do_uninstall(self, **kwargs):
force = kwargs.get('force', False)
if not self.installed:
raise InstallError(self.name + " is not installed.")
raise InstallError(str(self.spec) + " is not installed.")
if not force:
deps = self.installed_dependents
if deps: raise InstallError(
"Cannot uninstall %s. The following installed packages depend on it: %s"
% (self.name, deps))
% (self.spec, deps))
self.remove_prefix()
tty.msg("Successfully uninstalled %s." % self.name)
tty.msg("Successfully uninstalled %s." % self.spec)
def do_clean(self):

View file

@ -22,10 +22,8 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import re
import os
import sys
import string
import inspect
import glob
import imp
@ -34,10 +32,10 @@
from llnl.util.filesystem import join_path
from llnl.util.lang import memoized
import spack
import spack.error
import spack.spec
from spack.virtual import ProviderIndex
from spack.util.naming import mod_to_class, validate_module_name
# Name of module under which packages are imported
_imported_packages_module = 'spack.packages'
@ -45,42 +43,6 @@
# Name of the package file inside a package directory
_package_file_name = 'package.py'
# Valid package names can contain '-' but can't start with it.
valid_package_re = r'^\w[\w-]*$'
# Don't allow consecutive [_-] in package names
invalid_package_re = r'[_-][_-]+'
def valid_package_name(pkg_name):
"""Return whether the pkg_name is valid for use in Spack."""
return (re.match(valid_package_re, pkg_name) and
not re.search(invalid_package_re, pkg_name))
def validate_package_name(pkg_name):
    """Raise InvalidPackageNameError if pkg_name is not a valid package name."""
    if not valid_package_name(pkg_name):
        raise InvalidPackageNameError(pkg_name)
def class_name_for_package_name(pkg_name):
    """Return the CamelCase class name a package file should define.

    Name conflicts between packages don't matter because each class
    lives in its own module.  Raises InvalidPackageNameError (via
    validate_package_name) for malformed names.
    """
    validate_package_name(pkg_name)

    # Treat '_' like '-', capitalize each dash-separated word, and
    # glue the words together (equivalent to capwords + strip dashes).
    words = pkg_name.replace('_', '-').split('-')
    class_name = ''.join(word.capitalize() for word in words)

    # A leading digit would make an invalid Python class name, so
    # prefix it with "Num_".
    if re.match(r'^[0-9]', class_name):
        class_name = "Num_%s" % class_name

    return class_name
def _autospec(function):
"""Decorator that automatically converts the argument of a single-arg
@ -114,6 +76,7 @@ def get(self, spec):
@_autospec
def get_installed(self, spec):
"""Get all the installed specs that satisfy the provided spec constraint."""
return [s for s in self.installed_package_specs() if s.satisfies(spec)]
@ -143,7 +106,7 @@ def filename_for_package_name(self, pkg_name):
package doesn't exist yet, so callers will need to ensure
the package exists before importing.
"""
validate_package_name(pkg_name)
validate_module_name(pkg_name)
pkg_dir = self.dirname_for_package_name(pkg_name)
return join_path(pkg_dir, _package_file_name)
@ -200,7 +163,7 @@ def get_class_for_package_name(self, pkg_name):
else:
raise UnknownPackageError(pkg_name)
class_name = class_name_for_package_name(pkg_name)
class_name = mod_to_class(pkg_name)
try:
module_name = _imported_packages_module + '.' + pkg_name
module = imp.load_source(module_name, file_path)
@ -259,14 +222,6 @@ def quote(string):
out.write('}\n')
class InvalidPackageNameError(spack.error.SpackError):
"""Raised when we encounter a bad package name."""
def __init__(self, name):
super(InvalidPackageNameError, self).__init__(
"Invalid package name: " + name)
self.name = name
class UnknownPackageError(spack.error.SpackError):
"""Raised when we encounter a package spack doesn't have."""
def __init__(self, name):

View file

@ -102,8 +102,7 @@
import spack
import spack.parse
import spack.error
import spack.compilers
import spack.compilers.gcc
import spack.compilers as compilers
from spack.version import *
from spack.util.string import *
@ -169,36 +168,71 @@ def __call__(self, match):
@key_ordering
class Compiler(object):
"""The Compiler field represents the compiler or range of compiler
versions that a package should be built with. Compilers have a
class CompilerSpec(object):
"""The CompilerSpec field represents the compiler or range of compiler
versions that a package should be built with. CompilerSpecs have a
name and a version list. """
def __init__(self, name, version=None):
self.name = name
self.versions = VersionList()
if version:
self.versions.add(version)
def __init__(self, *args):
nargs = len(args)
if nargs == 1:
arg = args[0]
# If there is one argument, it's either another CompilerSpec
# to copy or a string to parse
if isinstance(arg, basestring):
c = SpecParser().parse_compiler(arg)
self.name = c.name
self.versions = c.versions
elif isinstance(arg, CompilerSpec):
self.name = arg.name
self.versions = arg.versions.copy()
else:
raise TypeError(
"Can only build CompilerSpec from string or CompilerSpec." +
" Found %s" % type(arg))
elif nargs == 2:
name, version = args
self.name = name
self.versions = VersionList()
self.versions.add(ver(version))
else:
raise TypeError(
"__init__ takes 1 or 2 arguments. (%d given)" % nargs)
def _add_version(self, version):
self.versions.add(version)
def _autospec(self, compiler_spec_like):
if isinstance(compiler_spec_like, CompilerSpec):
return compiler_spec_like
return CompilerSpec(compiler_spec_like)
def satisfies(self, other):
other = self._autospec(other)
return (self.name == other.name and
self.versions.overlaps(other.versions))
self.versions.satisfies(other.versions))
def constrain(self, other):
if not self.satisfies(other):
raise UnsatisfiableCompilerSpecError(self, other)
other = self._autospec(other)
# ensure that other will actually constrain this spec.
if not other.satisfies(self):
raise UnsatisfiableCompilerSpecError(other, self)
self.versions.intersect(other.versions)
@property
def concrete(self):
"""A Compiler spec is concrete if its versions are concrete."""
"""A CompilerSpec is concrete if its versions are concrete and there
is an available compiler with the right version."""
return self.versions.concrete
@ -210,7 +244,8 @@ def version(self):
def copy(self):
clone = Compiler(self.name)
clone = CompilerSpec.__new__(CompilerSpec)
clone.name = self.name
clone.versions = self.versions.copy()
return clone
@ -226,6 +261,9 @@ def __str__(self):
out += "@%s" % vlist
return out
def __repr__(self):
return str(self)
@key_ordering
class Variant(object):
@ -332,7 +370,7 @@ def _add_variant(self, name, enabled):
def _set_compiler(self, compiler):
"""Called by the parser to set the compiler."""
if self.compiler: raise DuplicateCompilerError(
if self.compiler: raise DuplicateCompilerSpecError(
"Spec for '%s' cannot have two compilers." % self.name)
self.compiler = compiler
@ -361,14 +399,14 @@ def root(self):
"""
if not self.dependents:
return self
else:
# If the spec has multiple dependents, ensure that they all
# lead to the same place. Spack shouldn't deal with any DAGs
# with multiple roots, so something's wrong if we find one.
depiter = iter(self.dependents.values())
first_root = next(depiter).root
assert(all(first_root is d.root for d in depiter))
return first_root
# If the spec has multiple dependents, ensure that they all
# lead to the same place. Spack shouldn't deal with any DAGs
# with multiple roots, so something's wrong if we find one.
depiter = iter(self.dependents.values())
first_root = next(depiter).root
assert(all(first_root is d.root for d in depiter))
return first_root
@property
@ -428,17 +466,28 @@ def preorder_traversal(self, visited=None, d=0, **kwargs):
root [=True]
If false, this won't yield the root node, just its descendents.
direction [=children|parents]
If 'children', does a traversal of this spec's children. If
'parents', traverses upwards in the DAG towards the root.
"""
depth = kwargs.get('depth', False)
key_fun = kwargs.get('key', id)
yield_root = kwargs.get('root', True)
cover = kwargs.get('cover', 'nodes')
direction = kwargs.get('direction', 'children')
cover_values = ('nodes', 'edges', 'paths')
if cover not in cover_values:
raise ValueError("Invalid value for cover: %s. Choices are %s"
% (cover, ",".join(cover_values)))
direction_values = ('children', 'parents')
if direction not in direction_values:
raise ValueError("Invalid value for direction: %s. Choices are %s"
% (direction, ",".join(direction_values)))
if visited is None:
visited = set()
@ -452,9 +501,13 @@ def preorder_traversal(self, visited=None, d=0, **kwargs):
else:
if yield_root or d > 0: yield result
successors = self.dependencies
if direction == 'parents':
successors = self.dependents
visited.add(key)
for name in sorted(self.dependencies):
child = self.dependencies[name]
for name in sorted(successors):
child = successors[name]
for elt in child.preorder_traversal(visited, d+1, **kwargs):
yield elt
@ -763,22 +816,22 @@ def normalized(self):
def validate_names(self):
"""This checks that names of packages and compilers in this spec are real.
If they're not, it will raise either UnknownPackageError or
UnknownCompilerError.
UnsupportedCompilerError.
"""
for spec in self.preorder_traversal():
# Don't get a package for a virtual name.
if not spec.virtual:
spack.db.get(spec.name)
# validate compiler name in addition to the package name.
# validate compiler in addition to the package name.
if spec.compiler:
compiler_name = spec.compiler.name
if not spack.compilers.supported(compiler_name):
raise UnknownCompilerError(compiler_name)
if not compilers.supported(spec.compiler):
raise UnsupportedCompilerError(spec.compiler.name)
def constrain(self, other, **kwargs):
other = self._autospec(other)
constrain_deps = kwargs.get('deps', True)
if not self.name == other.name:
raise UnsatisfiableSpecNameError(self.name, other.name)
@ -806,7 +859,7 @@ def constrain(self, other, **kwargs):
self.variants.update(other.variants)
self.architecture = self.architecture or other.architecture
if kwargs.get('deps', True):
if constrain_deps:
self._constrain_dependencies(other)
@ -818,8 +871,8 @@ def _constrain_dependencies(self, other):
# TODO: might want more detail than this, e.g. specific deps
# in violation. if this becomes a priority get rid of this
# check and be more specific about what's wrong.
if not self.satisfies_dependencies(other):
raise UnsatisfiableDependencySpecError(self, other)
if not other.satisfies_dependencies(self):
raise UnsatisfiableDependencySpecError(other, self)
# Handle common first-order constraints directly
for name in self.common_dependencies(other):
@ -863,28 +916,28 @@ def _autospec(self, spec_like):
def satisfies(self, other, **kwargs):
other = self._autospec(other)
satisfy_deps = kwargs.get('deps', True)
# First thing we care about is whether the name matches
if self.name != other.name:
return False
# This function simplifies null checking below
def check(attribute, op):
s = getattr(self, attribute)
o = getattr(other, attribute)
return not s or not o or op(s,o)
# All these attrs have satisfies criteria of their own
for attr in ('versions', 'variants', 'compiler'):
if not check(attr, lambda s, o: s.satisfies(o)):
# All these attrs have satisfies criteria of their own,
# but can be None to indicate no constraints.
for s, o in ((self.versions, other.versions),
(self.variants, other.variants),
(self.compiler, other.compiler)):
if s and o and not s.satisfies(o):
return False
# Architecture is just a string
# TODO: investigate making an Architecture class for symmetry
if not check('architecture', lambda s,o: s == o):
# Architecture satisfaction is currently just string equality.
# Can be None for unconstrained, though.
if (self.architecture and other.architecture and
self.architecture != other.architecture):
return False
if kwargs.get('deps', True):
# If we need to descend into dependencies, do it, otherwise we're done.
if satisfy_deps:
return self.satisfies_dependencies(other)
else:
return True
@ -1188,6 +1241,11 @@ def do_parse(self):
return specs
def parse_compiler(self, text):
self.setup(text)
return self.compiler()
def spec(self):
"""Parse a spec out of the input. If a spec is supplied, then initialize
and return it instead of creating a new one."""
@ -1279,7 +1337,10 @@ def version_list(self):
def compiler(self):
self.expect(ID)
self.check_identifier()
compiler = Compiler(self.token.value)
compiler = CompilerSpec.__new__(CompilerSpec)
compiler.name = self.token.value
compiler.versions = VersionList()
if self.accept(AT):
vlist = self.version_list()
for version in vlist:
@ -1359,17 +1420,17 @@ def __init__(self, message):
super(DuplicateVariantError, self).__init__(message)
class DuplicateCompilerError(SpecError):
class DuplicateCompilerSpecError(SpecError):
"""Raised when the same compiler occurs in a spec twice."""
def __init__(self, message):
super(DuplicateCompilerError, self).__init__(message)
super(DuplicateCompilerSpecError, self).__init__(message)
class UnknownCompilerError(SpecError):
class UnsupportedCompilerError(SpecError):
"""Raised when the user asks for a compiler spack doesn't know about."""
def __init__(self, compiler_name):
super(UnknownCompilerError, self).__init__(
"Unknown compiler: %s" % compiler_name)
super(UnsupportedCompilerError, self).__init__(
"The '%s' compiler is not yet supported." % compiler_name)
class DuplicateArchitectureError(SpecError):

View file

@ -145,7 +145,7 @@ def _setup(self):
back to making the stage inside spack.stage_path.
"""
# Create the top-level stage directory
spack.mkdirp(spack.stage_path)
mkdirp(spack.stage_path)
self._cleanup_dead_links()
# If this is a named stage, then construct a named path.

View file

@ -44,7 +44,8 @@
'concretize',
'multimethod',
'install',
'package_sanity']
'package_sanity',
'config']
def list_tests():

View file

@ -25,7 +25,7 @@
import unittest
import spack
from spack.spec import Spec
from spack.spec import Spec, CompilerSpec
from spack.test.mock_packages_test import *
class ConcretizeTest(MockPackagesTest):
@ -163,3 +163,15 @@ def test_my_dep_depends_on_provider_of_my_virtual_dep(self):
spec = Spec('indirect_mpich')
spec.normalize()
spec.concretize()
def test_compiler_inheritance(self):
    """Concretizing after setting clang on dyninst should make dyninst's
    dependencies (libdwarf, libelf) use clang as well."""
    spec = Spec('mpileaks')
    spec.normalize()
    spec['dyninst'].compiler = CompilerSpec('clang')
    spec.concretize()

    # TODO: not exactly the syntax I would like.
    self.assertTrue(spec['libdwarf'].compiler.satisfies('clang'))
    self.assertTrue(spec['libelf'].compiler.satisfies('clang'))

View file

@ -0,0 +1,69 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import unittest
import shutil
import os
from tempfile import mkdtemp
from spack.config import *
class ConfigTest(unittest.TestCase):
    """Round-trip test for SpackConfigParser: write keys, read them back.

    Fix: setUp/tearDown were decorated @classmethod and stored tmp_dir on
    the class.  unittest runs them per test instance, so they should be
    plain instance methods with per-instance state.
    """

    def setUp(self):
        # Fresh scratch directory for each test.
        self.tmp_dir = mkdtemp('.tmp', 'spack-config-test-')

    def tearDown(self):
        # ignore_errors=True: don't fail cleanup if the dir is partly gone.
        shutil.rmtree(self.tmp_dir, True)

    def get_path(self):
        """Path of the config file inside this test's scratch directory."""
        return os.path.join(self.tmp_dir, "spackconfig")

    def test_write_key(self):
        # Write values both at the section level ('compiler.cc') and at a
        # subsection level (compiler 'gcc@4.7.3'), then flush to disk.
        config = SpackConfigParser(self.get_path())
        config.set_value('compiler.cc', 'a')
        config.set_value('compiler.cxx', 'b')
        config.set_value('compiler', 'gcc@4.7.3', 'cc', 'c')
        config.set_value('compiler', 'gcc@4.7.3', 'cxx', 'd')
        config.write()

        # Re-read with a fresh parser and verify every addressing form:
        # dotted key, (section, subsection, option), and subsection=None
        # falling back to the section-level value.
        config = SpackConfigParser(self.get_path())

        self.assertEqual(config.get_value('compiler.cc'), 'a')
        self.assertEqual(config.get_value('compiler.cxx'), 'b')
        self.assertEqual(config.get_value('compiler', 'gcc@4.7.3', 'cc'), 'c')
        self.assertEqual(config.get_value('compiler', 'gcc@4.7.3', 'cxx'), 'd')

        self.assertEqual(config.get_value('compiler', None, 'cc'), 'a')
        self.assertEqual(config.get_value('compiler', None, 'cxx'), 'b')

        self.assertEqual(config.get_value('compiler.gcc@4.7.3.cc'), 'c')
        self.assertEqual(config.get_value('compiler.gcc@4.7.3.cxx'), 'd')

        # A never-written option raises NoOptionError.
        self.assertRaises(NoOptionError, config.get_value, 'compiler', None, 'fc')

View file

@ -22,6 +22,7 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import unittest
import shutil
from contextlib import closing
@ -82,11 +83,12 @@ def test_install_and_uninstall(self):
# Get a basic concrete spec for the trivial install package.
spec = Spec(install_test_package)
spec.concretize()
self.assertTrue(spec.concrete)
# Get the package
pkg = spack.db.get(spec)
# Fake some values
# Fake the URL for the package so it downloads from a file.
archive_path = join_path(self.stage.path, archive_name)
pkg.url = 'file://' + archive_path
@ -94,5 +96,5 @@ def test_install_and_uninstall(self):
pkg.do_install()
pkg.do_uninstall()
except Exception, e:
if pkg: pkg.remove_prefix()
pkg.remove_prefix()
raise

View file

@ -25,9 +25,11 @@
import unittest
import spack
import spack.config
from spack.packages import PackageDB
from spack.spec import Spec
def set_pkg_dep(pkg, spec):
"""Alters dependence information for a package.
Use this to mock up constraints.
@ -45,9 +47,14 @@ def setUp(self):
self.real_db = spack.db
spack.db = PackageDB(spack.mock_packages_path)
self.real_scopes = spack.config._scopes
spack.config._scopes = {
'site' : spack.mock_site_config,
'user' : spack.mock_user_config }
@classmethod
def tearDown(self):
"""Restore the real packages path after any test."""
#restore_dependencies()
spack.db = self.real_db
spack.config._scopes = self.real_scopes

View file

@ -31,6 +31,13 @@
import spack.url as url
class PackageSanityTest(unittest.TestCase):
def test_get_all_packages(self):
    """Instantiate every package in the database once; ensures that all
    package files load without error."""
    for name in spack.db.all_package_names():
        spack.db.get(name)
def test_url_versions(self):
"""Ensure that url_for_version does the right thing for at least the
default version of each package.

View file

@ -28,6 +28,7 @@
import spack
import spack.packages as packages
from spack.util.naming import mod_to_class
from spack.test.mock_packages_test import *
@ -58,8 +59,8 @@ def test_nonexisting_package_filename(self):
def test_package_class_names(self):
self.assertEqual('Mpich', packages.class_name_for_package_name('mpich'))
self.assertEqual('PmgrCollective', packages.class_name_for_package_name('pmgr_collective'))
self.assertEqual('PmgrCollective', packages.class_name_for_package_name('pmgr-collective'))
self.assertEqual('Pmgrcollective', packages.class_name_for_package_name('PmgrCollective'))
self.assertEqual('Num_3db', packages.class_name_for_package_name('3db'))
self.assertEqual('Mpich', mod_to_class('mpich'))
self.assertEqual('PmgrCollective', mod_to_class('pmgr_collective'))
self.assertEqual('PmgrCollective', mod_to_class('pmgr-collective'))
self.assertEqual('Pmgrcollective', mod_to_class('PmgrCollective'))
self.assertEqual('_3db', mod_to_class('3db'))

View file

@ -37,16 +37,13 @@ def check_satisfies(self, spec, anon_spec):
left = Spec(spec)
right = parse_anonymous_spec(anon_spec, left.name)
# Satisfies is one-directional.
self.assertTrue(left.satisfies(right))
self.assertTrue(left.satisfies(anon_spec))
self.assertTrue(right.satisfies(left))
try:
left.copy().constrain(right)
left.copy().constrain(anon_spec)
right.copy().constrain(left)
except SpecError, e:
self.fail("Got a SpecError in constrain! " + e.message)
# if left satisfies right, then we should be able to constrain
# right by left. Reverse is not always true.
right.copy().constrain(left)
def check_unsatisfiable(self, spec, anon_spec):
@ -56,25 +53,21 @@ def check_unsatisfiable(self, spec, anon_spec):
self.assertFalse(left.satisfies(right))
self.assertFalse(left.satisfies(anon_spec))
self.assertFalse(right.satisfies(left))
self.assertRaises(UnsatisfiableSpecError, left.constrain, right)
self.assertRaises(UnsatisfiableSpecError, left.constrain, anon_spec)
self.assertRaises(UnsatisfiableSpecError, right.constrain, left)
self.assertRaises(UnsatisfiableSpecError, right.copy().constrain, left)
def check_constrain(self, expected, constrained, constraint):
def check_constrain(self, expected, spec, constraint):
exp = Spec(expected)
constrained = Spec(constrained)
spec = Spec(spec)
constraint = Spec(constraint)
constrained.constrain(constraint)
self.assertEqual(exp, constrained)
spec.constrain(constraint)
self.assertEqual(exp, spec)
def check_invalid_constraint(self, constrained, constraint):
constrained = Spec(constrained)
def check_invalid_constraint(self, spec, constraint):
spec = Spec(spec)
constraint = Spec(constraint)
self.assertRaises(UnsatisfiableSpecError, constrained.constrain, constraint)
self.assertRaises(UnsatisfiableSpecError, spec.constrain, constraint)
# ================================================================================
@ -177,3 +170,8 @@ def test_invalid_constraint(self):
self.check_invalid_constraint('libelf+debug~foo', 'libelf+debug+foo')
self.check_invalid_constraint('libelf=bgqos_0', 'libelf=x86_54')
def test_compiler_satisfies(self):
    """Compiler satisfaction is one-directional: %gcc@4.7.3 satisfies
    the less specific %gcc@4.7, but not the reverse."""
    self.check_satisfies('foo %gcc@4.7.3', '%gcc@4.7')
    self.check_unsatisfiable('foo %gcc@4.7', '%gcc@4.7.3')

View file

@ -133,12 +133,12 @@ def test_duplicate_depdendence(self):
self.assertRaises(DuplicateDependencyError, self.check_parse, "x ^y ^y")
def test_duplicate_compiler(self):
self.assertRaises(DuplicateCompilerError, self.check_parse, "x%intel%intel")
self.assertRaises(DuplicateCompilerError, self.check_parse, "x%intel%gcc")
self.assertRaises(DuplicateCompilerError, self.check_parse, "x%gcc%intel")
self.assertRaises(DuplicateCompilerError, self.check_parse, "x ^y%intel%intel")
self.assertRaises(DuplicateCompilerError, self.check_parse, "x ^y%intel%gcc")
self.assertRaises(DuplicateCompilerError, self.check_parse, "x ^y%gcc%intel")
self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x%intel%intel")
self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x%intel%gcc")
self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x%gcc%intel")
self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x ^y%intel%intel")
self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x ^y%intel%gcc")
self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x ^y%gcc%intel")
# ================================================================================

View file

@ -83,6 +83,14 @@ def assert_no_overlap(self, v1, v2):
self.assertFalse(ver(v1).overlaps(ver(v2)))
def assert_satisfies(self, v1, v2):
    """Assert that version (or version list) v1 satisfies v2."""
    self.assertTrue(ver(v1).satisfies(ver(v2)))
def assert_does_not_satisfy(self, v1, v2):
    """Assert that version (or version list) v1 does NOT satisfy v2."""
    self.assertFalse(ver(v1).satisfies(ver(v2)))
def check_intersection(self, expected, a, b):
    """Assert that intersecting version lists a and b yields expected."""
    self.assertEqual(ver(expected), ver(a).intersection(ver(b)))
@ -301,3 +309,68 @@ def test_intersection(self):
self.check_intersection(['2.5:2.7'],
['1.1:2.7'], ['2.5:3.0','1.0'])
self.check_intersection(['0:1'], [':'], ['0:1'])
def test_basic_version_satisfaction(self):
    """A version satisfies itself and any prefix of itself
    (e.g. 4.7.3 satisfies 4.7 and 4), but not a different minor."""
    self.assert_satisfies('4.7.3', '4.7.3')

    self.assert_satisfies('4.7.3', '4.7')
    self.assert_satisfies('4.7.3b2', '4.7')
    self.assert_satisfies('4.7b6', '4.7')

    self.assert_satisfies('4.7.3', '4')
    self.assert_satisfies('4.7.3b2', '4')
    self.assert_satisfies('4.7b6', '4')

    self.assert_does_not_satisfy('4.8.0', '4.9')
    self.assert_does_not_satisfy('4.8', '4.9')
    self.assert_does_not_satisfy('4', '4.9')
def test_basic_version_satisfaction_in_lists(self):
    """Same prefix-satisfaction checks as above, but on version lists."""
    self.assert_satisfies(['4.7.3'], ['4.7.3'])

    self.assert_satisfies(['4.7.3'], ['4.7'])
    self.assert_satisfies(['4.7.3b2'], ['4.7'])
    self.assert_satisfies(['4.7b6'], ['4.7'])

    self.assert_satisfies(['4.7.3'], ['4'])
    self.assert_satisfies(['4.7.3b2'], ['4'])
    self.assert_satisfies(['4.7b6'], ['4'])

    self.assert_does_not_satisfy(['4.8.0'], ['4.9'])
    self.assert_does_not_satisfy(['4.8'], ['4.9'])
    self.assert_does_not_satisfy(['4'], ['4.9'])
def test_version_range_satisfaction(self):
    """Versions inside a range satisfy it; versions outside do not."""
    self.assert_satisfies('4.7b6', '4.3:4.7')
    self.assert_satisfies('4.3.0', '4.3:4.7')
    self.assert_satisfies('4.3.2', '4.3:4.7')

    self.assert_does_not_satisfy('4.8.0', '4.3:4.7')
    self.assert_does_not_satisfy('4.3', '4.4:4.7')

    # Repeated checks from above, kept as-is.
    self.assert_satisfies('4.7b6', '4.3:4.7')
    self.assert_does_not_satisfy('4.8.0', '4.3:4.7')
def test_version_range_satisfaction_in_lists(self):
    """Same range-satisfaction checks as above, but on version lists."""
    self.assert_satisfies(['4.7b6'], ['4.3:4.7'])
    self.assert_satisfies(['4.3.0'], ['4.3:4.7'])
    self.assert_satisfies(['4.3.2'], ['4.3:4.7'])

    self.assert_does_not_satisfy(['4.8.0'], ['4.3:4.7'])
    self.assert_does_not_satisfy(['4.3'], ['4.4:4.7'])

    # Repeated checks from above, kept as-is.
    self.assert_satisfies(['4.7b6'], ['4.3:4.7'])
    self.assert_does_not_satisfy(['4.8.0'], ['4.3:4.7'])
def test_satisfaction_with_lists(self):
    """A version satisfies a comma-separated list if it satisfies any
    element (single version or range) of that list."""
    self.assert_satisfies('4.7', '4.3, 4.6, 4.7')
    self.assert_satisfies('4.7.3', '4.3, 4.6, 4.7')
    self.assert_satisfies('4.6.5', '4.3, 4.6, 4.7')
    self.assert_satisfies('4.6.5.2', '4.3, 4.6, 4.7')

    self.assert_does_not_satisfy('4', '4.3, 4.6, 4.7')
    self.assert_does_not_satisfy('4.8.0', '4.2, 4.3:4.7')

    self.assert_satisfies('4.8.0', '4.2, 4.3:4.8')
    self.assert_satisfies('4.8.2', '4.2, 4.3:4.8')

View file

@ -22,14 +22,15 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
__all__ = ['Executable', 'which', 'ProcessError']
import os
import sys
import re
import subprocess
import llnl.util.tty as tty
from spack.error import SpackError
import spack.error
class Executable(object):
"""Class representing a program that can be run on the command line."""
@ -37,19 +38,21 @@ def __init__(self, name):
self.exe = name.split(' ')
self.returncode = None
def add_default_arg(self, arg):
self.exe.append(arg)
@property
def command(self):
return self.exe[0]
return ' '.join(self.exe)
def __call__(self, *args, **kwargs):
"""Run the executable with subprocess.check_output, return output."""
return_output = kwargs.get("return_output", False)
fail_on_error = kwargs.get("fail_on_error", True)
error = kwargs.get("error", sys.stderr)
quoted_args = [arg for arg in args if re.search(r'^"|^\'|"$|\'$', arg)]
if quoted_args:
@ -60,24 +63,49 @@ def __call__(self, *args, **kwargs):
"Consider removing them")
cmd = self.exe + list(args)
tty.verbose(" ".join(cmd))
tty.debug(" ".join(cmd))
close_error = False
try:
if error is None:
error = open(os.devnull, 'w')
close_error = True
proc = subprocess.Popen(
cmd,
stderr=sys.stderr,
stderr=error,
stdout=subprocess.PIPE if return_output else sys.stdout)
out, err = proc.communicate()
self.returncode = proc.returncode
if fail_on_error and proc.returncode != 0:
raise SpackError("command '%s' returned error code %d"
% (" ".join(cmd), proc.returncode))
raise ProcessError("command '%s' returned error code %d"
% (" ".join(cmd), proc.returncode))
if return_output:
return out
except subprocess.CalledProcessError, e:
if fail_on_error: raise
if fail_on_error:
raise ProcessError(
"command '%s' failed to run." % (
" ".join(cmd), proc.returncode), str(e))
finally:
if close_error:
error.close()
def __eq__(self, other):
    # Equal when the full argv (program + default args) matches.
    # NOTE(review): assumes `other` is an Executable -- no
    # NotImplemented fallback; a non-Executable raises AttributeError.
    return self.exe == other.exe
def __ne__(self, other):
    """Standard inverse of __eq__.

    Renamed from ``__neq__``: that is not a Python special method
    name, so ``!=`` never invoked it and fell back to the default
    identity comparison, disagreeing with ``__eq__``.
    """
    return not (self == other)
def __hash__(self):
    # Hash type + argv so Executables that compare equal hash equally
    # (self.exe is a list, hence the tuple() conversion).
    return hash((type(self),) + tuple(self.exe))
def __repr__(self):
    # Debug form, e.g. "<exe: ['gcc', '-v']>" (self.exe is the argv list).
    return "<exe: %s>" % self.exe
@ -99,3 +127,8 @@ def which(name, **kwargs):
if required:
tty.die("spack requires %s. Make sure it is in your path." % name)
return None
class ProcessError(spack.error.SpackError):
    """Raised when a command run by an Executable fails.

    Replaces the generic SpackError previously raised by
    Executable.__call__ so callers can catch process failures
    specifically.
    """
    def __init__(self, msg, *long_msg):
        # Forward any additional long-message args to SpackError.
        super(ProcessError, self).__init__(msg, *long_msg)

View file

@ -0,0 +1,45 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""
This implements a parallel map operation but it can accept more values
than multiprocessing.Pool.apply() can. For example, apply() will fail
to pickle functions if they're passed indirectly as parameters.
"""
from multiprocessing import Process, Pipe
from itertools import izip
def spawn(f):
    """Wrap *f* for use as a multiprocessing target.

    Returns a function of (connection, value) that computes f(value),
    writes the result to the connection (the child end of a Pipe),
    and closes it.
    """
    def fun(conn, value):
        result = f(value)
        conn.send(result)
        conn.close()
    return fun
def parmap(f, X):
    """Apply f to each element of X in a separate process; return the
    results in input order.

    Unlike multiprocessing.Pool.apply(), this does not need to pickle
    *f* directly (it is captured by the spawn() closure in each child).

    Fix: receive from the pipes BEFORE joining the children.  A child
    blocks in Connection.send() until the parent reads once the result
    exceeds the pipe buffer, so joining first could deadlock.  Also
    replaced the side-effect-only list comprehensions with plain loops.
    """
    pipes = [Pipe() for _ in X]
    procs = [Process(target=spawn(f), args=(child, x))
             for x, (parent, child) in izip(X, pipes)]
    for p in procs:
        p.start()
    # Drain results first (see note above), then reap the children.
    results = [parent.recv() for (parent, child) in pipes]
    for p in procs:
        p.join()
    return results

View file

@ -0,0 +1,61 @@
# Need this because of spack.util.string
from __future__ import absolute_import
import string
import re
import spack
# Valid module names can contain '-' but can't start with it.
_valid_module_re = r'^\w[\w-]*$'
def mod_to_class(mod_name):
    """Convert a name from module style to class name style.

    Spack mostly follows `PEP-8
    <http://legacy.python.org/dev/peps/pep-0008/>`_:

      * Module and package names use lowercase_with_underscores.
      * Class names use the CapWords convention.

    Regular source code follows these conventions.  Spack is a bit
    more liberal with its Package names and Compiler names:

      * They can contain '-' as well as '_', but cannot start with '-'.
      * They can start with numbers, e.g. "3proxy".

    This converts from the module convention to the class convention
    by removing '_' and '-' and CapWords-ing the text around them.  If
    mod_name starts with a digit, the class name returned is prefixed
    with '_' to make a valid Python identifier.
    """
    validate_module_name(mod_name)

    # Collapse every run of separators to one '-', capitalize each
    # '-'-delimited word, then drop the separators entirely.
    result = re.sub(r'[-_]+', '-', mod_name)
    result = string.capwords(result, '-')
    result = result.replace('-', '')

    # A leading digit would make an invalid identifier; guard with '_'.
    if re.match(r'^[0-9]', result):
        result = "_%s" % result

    return result
def valid_module_name(mod_name):
    """True if mod_name is usable as a Spack module name
    (word characters and '-', not starting with '-')."""
    return re.match(_valid_module_re, mod_name) is not None
def validate_module_name(mod_name):
    """Raise InvalidModuleNameError if mod_name is not a valid
    Spack module name; otherwise do nothing."""
    if valid_module_name(mod_name):
        return
    raise InvalidModuleNameError(mod_name)
class InvalidModuleNameError(spack.error.SpackError):
    """Raised when a module name is not usable in Spack."""

    # NOTE(review): this references spack.error but only 'import spack'
    # appears at the top of this module -- works only if spack.error was
    # already imported elsewhere; verify.
    def __init__(self, name):
        message = "Invalid module name: " + name
        super(InvalidModuleNameError, self).__init__(message)
        self.name = name

View file

@ -143,6 +143,18 @@ def highest(self):
return self
@coerced
def satisfies(self, other):
    """True if this Version is at least as specific as *other* and
    shares its leading components.

    E.g., gcc@4.7.3 satisfies a request for gcc@4.7: when a user asks
    to build with gcc@4.7, any 4.7.x compiler is a suitable match.
    """
    prefix_len = len(other.version)
    if len(self.version) < prefix_len:
        return False
    return self.version[:prefix_len] == other.version
def wildcard(self):
"""Create a regex that will match variants of this version string."""
def a_or_n(seg):
@ -326,6 +338,37 @@ def __contains__(self, other):
none_high.le(other.end, self.end))
@coerced
def satisfies(self, other):
    """True if some version in this range would satisfy some version
    in the other range.  That holds when either:

      a) the two ranges overlap, or
      b) the start of this range satisfies the end of the other range.

    This is essentially the same as overlaps(), except that overlaps()
    treats endpoints as fully specific (4.7 means 4.7.0.0.0...), while
    this function allows 4.7 to be satisfied by 4.7.3.5, etc.

    Rationale: if a user asks for gcc@4.5:4.7 and a package is only
    compatible with gcc@4.7.3:4.8, the package should still be
    buildable under those constraints; overlaps() alone would say no.

    We need not check whether the end of this range satisfies the
    start of the other -- overlaps() already covers that case.  Note
    also that overlaps() is symmetric while satisfies() is not.
    """
    if self.overlaps(other):
        return True
    # If self.start or other.end is open-ended (None), only an overlap
    # could have satisfied this, and that was checked above.
    return self.start and other.end and self.start.satisfies(other.end)
@coerced
def overlaps(self, other):
return (other in self or self in other or
@ -444,11 +487,6 @@ def highest(self):
return self[-1].highest()
def satisfies(self, other):
"""Synonym for overlaps."""
return self.overlaps(other)
@coerced
def overlaps(self, other):
if not other or not self:
@ -465,6 +503,27 @@ def overlaps(self, other):
return False
@coerced
def satisfies(self, other):
    """True if some version in this list would satisfy some version in
    the other list.  Uses the same two-pointer walk over the sorted
    members as VersionList.overlaps(), but calls satisfies() on the
    member Versions and VersionRanges instead."""
    if not self or not other:
        return False

    # Advance whichever side is behind until a satisfying pair appears.
    i = j = 0
    while i < len(self) and j < len(other):
        if self[i].satisfies(other[j]):
            return True
        if self[i] < other[j]:
            i += 1
        else:
            j += 1
    return False
@coerced
def update(self, other):
for v in other.versions:

View file

@ -0,0 +1,12 @@
[compiler "gcc@4.5.0"]
cc = /path/to/gcc
cxx = /path/to/g++
f77 = /path/to/gfortran
fc = /path/to/gfortran
[compiler "clang@3.3"]
cc = /path/to/clang
cxx = /path/to/clang++
f77 = None
fc = None

View file

View file

@ -39,6 +39,8 @@ class Libdwarf(Package):
depends_on("libelf")
parallel = False
def clean(self):
for dir in dwarf_dirs: