YAML config files for compilers and mirrors
parent cd1ca36488
commit 46b91ddf57

12 changed files with 358 additions and 504 deletions
.gitignore (vendored)
@@ -4,6 +4,7 @@
 *~
 .DS_Store
 .idea
+/etc/spack/*
 /etc/spackconfig
 /share/spack/dotkit
 /share/spack/modules

@@ -68,7 +68,7 @@ def compiler_add(args):
         spack.compilers.add_compilers_to_config('user', *compilers)
         n = len(compilers)
         tty.msg("Added %d new compiler%s to %s" % (
-            n, 's' if n > 1 else '', spack.config.get_filename('user')))
+            n, 's' if n > 1 else '', spack.config.get_config_scope_filename('user', 'compilers')))
         colify(reversed(sorted(c.spec for c in compilers)), indent=4)
     else:
         tty.msg("Found no new compilers")

@@ -43,42 +43,27 @@ def setup_parser(subparser):

     sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='config_command')

-    set_parser = sp.add_parser('set', help='Set configuration values.')
-    set_parser.add_argument('key', help="Key to set value for.")
-    set_parser.add_argument('value', nargs='?', default=None,
-                            help="Value to associate with key")
-
-    get_parser = sp.add_parser('get', help='Get configuration values.')
-    get_parser.add_argument('key', help="Key to get value for.")
+    get_parser = sp.add_parser('get', help='Print configuration values.')
+    get_parser.add_argument('category', help="Configuration category to print.")

     edit_parser = sp.add_parser('edit', help='Edit configuration file.')
+    edit_parser.add_argument('category', help="Configuration category to edit")


-def config_set(args):
-    # default scope for writing is 'user'
-    if not args.scope:
-        args.scope = 'user'
-
-    config = spack.config.get_config(args.scope)
-    config.set_value(args.key, args.value)
-    config.write()
-
-
 def config_get(args):
-    config = spack.config.get_config(args.scope)
-    print config.get_value(args.key)
+    spack.config.print_category(args.category)


 def config_edit(args):
     if not args.scope:
         args.scope = 'user'
-    config_file = spack.config.get_filename(args.scope)
+    if not args.category:
+        args.category = None
+    config_file = spack.config.get_config_scope_filename(args.scope, args.category)
     spack.editor(config_file)


 def config(parser, args):
-    action = { 'set' : config_set,
-               'get' : config_get,
+    action = { 'get' : config_get,
                'edit' : config_edit }
     action[args.config_command](args)

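For orientation only (not part of the diff): the reworked `spack config` subcommands take a configuration category such as compilers or mirrors instead of a dotted key. A minimal sketch of the same operations driven from Python, using the functions this commit adds to spack.config:

    import spack.config

    # What `spack config get mirrors` now does: dump the merged tree as YAML.
    spack.config.print_category('mirrors')

    # The file that `spack config edit compilers` opens for the user scope.
    path = spack.config.get_config_scope_filename('user', 'compilers')
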
@@ -75,27 +75,22 @@ def mirror_add(args):
     if url.startswith('/'):
         url = 'file://' + url

-    config = spack.config.get_config('user')
-    config.set_value('mirror', args.name, 'url', url)
-    config.write()
+    mirror_dict = { args.name : url }
+    spack.config.add_to_mirror_config({ args.name : url })


 def mirror_remove(args):
     """Remove a mirror by name."""
-    config = spack.config.get_config('user')
     name = args.name

-    if not config.has_named_section('mirror', name):
+    rmd_something = spack.config.remove_from_config('mirrors', name)
+    if not rmd_something:
         tty.die("No such mirror: %s" % name)
-    config.remove_named_section('mirror', name)
-    config.write()


 def mirror_list(args):
     """Print out available mirrors to the console."""
-    config = spack.config.get_config()
-    sec_names = config.get_section_names('mirror')
+    sec_names = spack.config.get_mirror_config()

     if not sec_names:
         tty.msg("No mirrors configured.")
         return

@@ -103,8 +98,7 @@ def mirror_list(args):
     max_len = max(len(s) for s in sec_names)
     fmt = "%%-%ds%%s" % (max_len + 4)

-    for name in sec_names:
-        val = config.get_value('mirror', name, 'url')
+    for name, val in sec_names.iteritems():
         print fmt % (name, val)

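An illustrative sketch (not part of the commit) of what the mirror commands now do at the configuration layer; the mirror name and URL below are made up:

    import spack.config

    # `spack mirror add` merges a { name : url } entry into the 'mirrors'
    # tree, which is persisted to mirrors.yaml in the chosen scope.
    spack.config.add_to_mirror_config({ 'local_mirror' : 'file:///tmp/spack-mirror' })

    # `spack mirror list` iterates the same tree.
    for name, url in spack.config.get_mirror_config().iteritems():
        print name, url
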
@@ -60,24 +60,25 @@ def _get_config():
       first."""
     # If any configuration file has compilers, just stick with the
     # ones already configured.
-    config = spack.config.get_config()
+    config = spack.config.get_compilers_config()
     existing = [spack.spec.CompilerSpec(s)
-                for s in config.get_section_names('compiler')]
+                for s in config]
     if existing:
         return config

     compilers = find_compilers(*get_path('PATH'))
-    new_compilers = [
-        c for c in compilers if c.spec not in existing]
-    add_compilers_to_config('user', *new_compilers)
+    add_compilers_to_config('user', *compilers)

     # After writing compilers to the user config, return a full config
     # from all files.
-    return spack.config.get_config(refresh=True)
+    return spack.config.get_compilers_config()


-@memoized
+_cached_default_compiler = None
 def default_compiler():
+    global _cached_default_compiler
+    if _cached_default_compiler:
+        return _cached_default_compiler
     versions = []
     for name in _default_order:  # TODO: customize order.
         versions = find(name)

@@ -86,7 +87,8 @@ def default_compiler():
     if not versions:
         raise NoCompilersError()

-    return sorted(versions)[-1]
+    _cached_default_compiler = sorted(versions)[-1]
+    return _cached_default_compiler


 def find_compilers(*path):

@@ -122,19 +124,17 @@ def find_compilers(*path):


 def add_compilers_to_config(scope, *compilers):
-    config = spack.config.get_config(scope)
-    for compiler in compilers:
-        add_compiler(config, compiler)
-    config.write()
-
-
-def add_compiler(config, compiler):
-    def setup_field(cspec, name, exe):
-        path = exe if exe else "None"
-        config.set_value('compiler', cspec, name, path)
-
-    for c in _required_instance_vars:
-        setup_field(compiler.spec, c, getattr(compiler, c))
+    compiler_config_tree = {}
+    for compiler in compilers:
+        compiler_entry = {}
+        for c in _required_instance_vars:
+            val = getattr(compiler, c)
+            if not val:
+                val = "None"
+            compiler_entry[c] = val
+        compiler_config_tree[str(compiler.spec)] = compiler_entry
+    spack.config.add_to_compiler_config(compiler_config_tree, scope)


 def supported_compilers():

@@ -157,8 +157,7 @@ def all_compilers():
        available to build with.  These are instances of CompilerSpec.
     """
     configuration = _get_config()
-    return [spack.spec.CompilerSpec(s)
-            for s in configuration.get_section_names('compiler')]
+    return [spack.spec.CompilerSpec(s) for s in configuration]


 @_auto_compiler_spec

@@ -176,7 +175,7 @@ def compilers_for_spec(compiler_spec):
     config = _get_config()

     def get_compiler(cspec):
-        items = dict((k,v) for k,v in config.items('compiler "%s"' % cspec))
+        items = config[str(cspec)]

         if not all(n in items for n in _required_instance_vars):
             raise InvalidCompilerConfigurationError(cspec)

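To make the new data flow concrete, here is a sketch (not from the commit) of the tree that add_compilers_to_config() builds from _required_instance_vars and hands to spack.config.add_to_compiler_config(); the compiler spec and paths are illustrative:

    import spack.config

    # One entry per detected compiler spec, one key per required tool.
    compiler_config_tree = {
        'gcc@4.4.7' : {
            'cc'  : '/usr/bin/gcc',
            'cxx' : '/usr/bin/g++',
            'f77' : '/usr/bin/gfortran',
            'fc'  : '/usr/bin/gfortran',
        }
    }

    # add_to_compiler_config() nests this under the current arch, e.g.
    # { 'chaos_5_x86_64_ib' : compiler_config_tree }, and merges it into
    # the 'compilers' category of the chosen scope.
    spack.config.add_to_compiler_config(compiler_config_tree, 'user')
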
@@ -28,452 +28,315 @@
 ===============================

 When Spack runs, it pulls configuration data from several config
-files, much like bash shells.  In Spack, there are two configuration
-scopes:
+directories, each of which contains configuration files.  In Spack,
+there are two configuration scopes:

 1. ``site``: Spack loads site-wide configuration options from
-   ``$(prefix)/etc/spackconfig``.
+   ``$(prefix)/etc/spack/``.

 2. ``user``: Spack next loads per-user configuration options from
-   ~/.spackconfig.
+   ~/.spack/.

-If user options have the same names as site options, the user options
-take precedence.
+Spack may read configuration files from both of these locations. When
+configurations conflict, the user config options take precedence over
+the site configurations. Each configuration directory may contain
+several configuration files, such as compilers.yaml or mirrors.yaml.

 Configuration file format
 ===============================

-Configuration files are formatted using .gitconfig syntax, which is
-much like Windows .INI format.  This format is implemented by Python's
-ConfigParser class, and it's easy to read and versatile.
+Configuration files are formatted using YAML syntax.
+This format is implemented by Python's
+yaml class, and it's easy to read and versatile.

-The file is divided into sections, like this ``compiler`` section::
+The config files are structured as trees, like this ``compiler`` section::

-     [compiler]
-         cc = /usr/bin/gcc
+     compilers:
+       chaos_5_x86_64_ib:
+         gcc@4.4.7:
+           cc: /usr/bin/gcc
+           cxx: /usr/bin/g++
+           f77: /usr/bin/gfortran
+           fc: /usr/bin/gfortran
+       bgqos_0:
+         xlc@12.1:
+           cc: /usr/local/bin/mpixlc
+           ...

-In each section there are options (cc), and each option has a value
-(/usr/bin/gcc).
+In this example, entries like ''compilers'' and ''xlc@12.1'' are used to
+categorize entries beneath them in the tree.  At the root of the tree,
+entries like ''cc'' and ''cxx'' are specified as name/value pairs.

-Borrowing from git, we also allow named sections, e.g.:
+Spack returns these trees as nested dicts.  The dict for the above example
+would looks like:

-  [compiler "gcc@4.7.3"]
-      cc = /usr/bin/gcc
+   { 'compilers' :
+     { 'chaos_5_x86_64_ib' :
+       { 'gcc@4.4.7' :
+         { 'cc' : '/usr/bin/gcc',
+           'cxx' : '/usr/bin/g++'
+           'f77' : '/usr/bin/gfortran'
+           'fc' : '/usr/bin/gfortran' }
+       }
+       { 'bgqos_0' :
+         { 'cc' : '/usr/local/bin/mpixlc' }
+       }
+     }
+   }

-This is a compiler section, but it's for the specific compiler,
-``gcc@4.7.3``.  ``gcc@4.7.3`` is the name.
-
-
-Keys
-===============================
-
-Together, the section, name, and option, separated by periods, are
-called a ``key``.  Keys can be used on the command line to set
-configuration options explicitly (this is also borrowed from git).
-
-For example, to change the C compiler used by gcc@4.7.3, you could do
-this:
-
-    spack config compiler.gcc@4.7.3.cc /usr/local/bin/gcc
-
-That will create a named compiler section in the user's .spackconfig
-like the one shown above.
+Some routines, like get_mirrors_config and get_compilers_config may strip
+off the top-levels of the tree and return subtrees.
 """
 import os
-import re
-import inspect
-import ConfigParser as cp
+import exceptions
+import sys

 from external.ordereddict import OrderedDict
 from llnl.util.lang import memoized
 import spack.error

-__all__ = [
-    'SpackConfigParser', 'get_config', 'SpackConfigurationError',
-    'InvalidConfigurationScopeError', 'InvalidSectionNameError',
-    'ReadOnlySpackConfigError', 'ConfigParserError', 'NoOptionError',
-    'NoSectionError']
+from contextlib import closing
+from external import yaml
+from external.yaml.error import MarkedYAMLError
+import llnl.util.tty as tty
+from llnl.util.filesystem import mkdirp

-_named_section_re = r'([^ ]+) "([^"]+)"'
+_config_sections = {}
+class _ConfigCategory:
+    name = None
+    filename = None
+    merge = True
+    def __init__(self, n, f, m):
+        self.name = n
+        self.filename = f
+        self.merge = m
+        self.files_read_from = []
+        self.result_dict = {}
+        _config_sections[n] = self
+
+_ConfigCategory('compilers', 'compilers.yaml', True)
+_ConfigCategory('mirrors', 'mirrors.yaml', True)
+_ConfigCategory('view', 'views.yaml', True)
+_ConfigCategory('order', 'orders.yaml', True)

 """Names of scopes and their corresponding configuration files."""
-_scopes = OrderedDict({
-    'site' : os.path.join(spack.etc_path, 'spackconfig'),
-    'user' : os.path.expanduser('~/.spackconfig')
-})
+config_scopes = [('site', os.path.join(spack.etc_path, 'spack')),
+                 ('user', os.path.expanduser('~/.spack'))]

-_field_regex = r'^([\w-]*)' \
-               r'(?:\.(.*(?=.)))?' \
-               r'(?:\.([\w-]+))?$'
-
-_section_regex = r'^([\w-]*)\s*' \
-                 r'\"([^"]*\)\"$'
-
-# Cache of configs -- we memoize this for performance.
-_config = {}
-
-
-def get_config(scope=None, **kwargs):
-    """Get a Spack configuration object, which can be used to set options.
-
-       With no arguments, this returns a SpackConfigParser with config
-       options loaded from all config files.  This is how client code
-       should read Spack configuration options.
-
-       Optionally, a scope parameter can be provided.  Valid scopes
-       are ``site`` and ``user``.  If a scope is provided, only the
-       options from that scope's configuration file are loaded.  The
-       caller can set or unset options, then call ``write()`` on the
-       config object to write it back out to the original config file.
-
-       By default, this will cache configurations and return the last
-       read version of the config file.  If the config file is
-       modified and you need to refresh, call get_config with the
-       refresh=True keyword argument.  This will force all files to be
-       re-read.
-    """
-    refresh = kwargs.get('refresh', False)
-    if refresh:
-        _config.clear()
-
-    if scope not in _config:
-        if scope is None:
-            _config[scope] = SpackConfigParser([path for path in _scopes.values()])
-        elif scope not in _scopes:
-            raise UnknownConfigurationScopeError(scope)
-        else:
-            _config[scope] = SpackConfigParser(_scopes[scope])
-
-    return _config[scope]
-
-
-def get_filename(scope):
-    """Get the filename for a particular config scope."""
-    if not scope in _scopes:
-        raise UnknownConfigurationScopeError(scope)
-    return _scopes[scope]
-
-
-def _parse_key(key):
-    """Return the section, name, and option the field describes.
-       Values are returned in a 3-tuple.
-
-       e.g.:
-       The field name ``compiler.gcc@4.7.3.cc`` refers to the 'cc' key
-       in a section that looks like this:
-
-           [compiler "gcc@4.7.3"]
-               cc = /usr/local/bin/gcc
-
-       * The section is ``compiler``
-       * The name is ``gcc@4.7.3``
-       * The key is ``cc``
-    """
-    match = re.search(_field_regex, key)
-    if match:
-        return match.groups()
-    else:
-        raise InvalidSectionNameError(key)
-
-
-def _make_section_name(section, name):
-    if not name:
-        return section
-    return '%s "%s"' % (section, name)
-
-
-def _autokey(fun):
-    """Allow a function to be called with a string key like
-       'compiler.gcc.cc', or with the section, name, and option
-       separated. Function should take at least three args, e.g.:
-
-           fun(self, section, name, option, [...])
-
-       This will allow the function above to be called normally or
-       with a string key, e.g.:
-
-           fun(self, key, [...])
-    """
-    argspec = inspect.getargspec(fun)
-    fun_nargs = len(argspec[0])
-
-    def string_key_func(*args):
-        nargs = len(args)
-        if nargs == fun_nargs - 2:
-            section, name, option = _parse_key(args[1])
-            return fun(args[0], section, name, option, *args[2:])
-
-        elif nargs == fun_nargs:
-            return fun(*args)
-
-        else:
-            raise TypeError(
-                "%s takes %d or %d args (found %d)."
-                % (fun.__name__, fun_nargs - 2, fun_nargs, len(args)))
-    return string_key_func
-
-
-class SpackConfigParser(cp.RawConfigParser):
-    """Slightly modified from Python's raw config file parser to accept
-       leading whitespace and preserve comments.
-    """
-    # Slightly modify Python option expressions to allow leading whitespace
-    OPTCRE = re.compile(r'\s*' + cp.RawConfigParser.OPTCRE.pattern)
-
-    def __init__(self, file_or_files):
-        cp.RawConfigParser.__init__(self, dict_type=OrderedDict)
-
-        if isinstance(file_or_files, basestring):
-            self.read([file_or_files])
-            self.filename = file_or_files
-
-        else:
-            self.read(file_or_files)
-            self.filename = None
-
-    @_autokey
-    def set_value(self, section, name, option, value):
-        """Set the value for a key.  If the key is in a section or named
-           section that does not yet exist, add that section.
-        """
-        sn = _make_section_name(section, name)
-        if not self.has_section(sn):
-            self.add_section(sn)
-
-        # Allow valueless config options to be set like this:
-        #     spack config set mirror https://foo.bar.com
-        #
-        # Instead of this, which parses incorrectly:
-        #     spack config set mirror.https://foo.bar.com
-        #
-        if option is None:
-            option = value
-            value = None
-
-        self.set(sn, option, value)
-
-    @_autokey
-    def get_value(self, section, name, option):
-        """Get the value for a key.  Raises NoOptionError or NoSectionError if
-           the key is not present."""
-        sn = _make_section_name(section, name)
-
-        try:
-            if not option:
-                # TODO: format this better
-                return self.items(sn)
-
-            return self.get(sn, option)
-
-        # Wrap ConfigParser exceptions in SpackExceptions
-        except cp.NoOptionError, e: raise NoOptionError(e)
-        except cp.NoSectionError, e: raise NoSectionError(e)
-        except cp.Error, e: raise ConfigParserError(e)
+_compiler_by_arch = {}
+_read_config_file_result = {}
+def _read_config_file(filename):
+    """Read a given YAML configuration file"""
+    global _read_config_file_result
+    if filename in _read_config_file_result:
+        return _read_config_file_result[filename]
+
+    try:
+        with open(filename) as f:
+            ydict = yaml.load(f)
+    except MarkedYAMLError, e:
+        tty.die("Error parsing yaml%s: %s" % (str(e.context_mark), e.problem))
+    except exceptions.IOError, e:
+        _read_config_file_result[filename] = None
+        return None
+    _read_config_file_result[filename] = ydict
+    return ydict

-    @_autokey
-    def has_value(self, section, name, option):
-        """Return whether the configuration file has a value for a
-           particular key."""
-        sn = _make_section_name(section, name)
-        return self.has_option(sn, option)
-
-
-    def has_named_section(self, section, name):
-        sn = _make_section_name(section, name)
-        return self.has_section(sn)
-
-
-    def remove_named_section(self, section, name):
-        sn = _make_section_name(section, name)
-        self.remove_section(sn)
-
-
-    def get_section_names(self, sectype):
-        """Get all named sections with the specified type.
-           A named section looks like this:
-
-               [compiler "gcc@4.7"]
-
-           Names of sections are returned as a list, e.g.:
-
-               ['gcc@4.7', 'intel@12.3', 'pgi@4.2']
-
-           You can get items in the sections like this:
-        """
-        sections = []
-        for secname in self.sections():
-            match = re.match(_named_section_re, secname)
-            if match:
-                t, name = match.groups()
-                if t == sectype:
-                    sections.append(name)
-        return sections
-
-
-    def write(self, path_or_fp=None):
-        """Write this configuration out to a file.
-
-           If called with no arguments, this will write the
-           configuration out to the file from which it was read.  If
-           this config was read from multiple files, e.g. site
-           configuration and then user configuration, write will
-           simply raise an error.
-
-           If called with a path or file object, this will write the
-           configuration out to the supplied path or file object.
-        """
-        if path_or_fp is None:
-            if not self.filename:
-                raise ReadOnlySpackConfigError()
-            path_or_fp = self.filename
-
-        if isinstance(path_or_fp, basestring):
-            path_or_fp = open(path_or_fp, 'w')
-
-        self._write(path_or_fp)
-
-
-    def _read(self, fp, fpname):
-        """This is a copy of Python 2.6's _read() method, with support for
-           continuation lines removed."""
-        cursect = None                        # None, or a dictionary
-        optname = None
-        comment = 0
-        lineno = 0
-        e = None                              # None, or an exception
-        while True:
-            line = fp.readline()
-            if not line:
-                break
-            lineno = lineno + 1
-            # comment or blank line?
-            if ((line.strip() == '' or line[0] in '#;') or
-                (line.split(None, 1)[0].lower() == 'rem' and line[0] in "rR")):
-                self._sections["comment-%d" % comment] = line
-                comment += 1
-            # a section header or option header?
-            else:
-                # is it a section header?
-                mo = self.SECTCRE.match(line)
-                if mo:
-                    sectname = mo.group('header')
-                    if sectname in self._sections:
-                        cursect = self._sections[sectname]
-                    elif sectname == cp.DEFAULTSECT:
-                        cursect = self._defaults
-                    else:
-                        cursect = self._dict()
-                        cursect['__name__'] = sectname
-                        self._sections[sectname] = cursect
-                    # So sections can't start with a continuation line
-                    optname = None
-                # no section header in the file?
-                elif cursect is None:
-                    raise cp.MissingSectionHeaderError(fpname, lineno, line)
-                # an option line?
-                else:
-                    mo = self.OPTCRE.match(line)
-                    if mo:
-                        optname, vi, optval = mo.group('option', 'vi', 'value')
-                        if vi in ('=', ':') and ';' in optval:
-                            # ';' is a comment delimiter only if it follows
-                            # a spacing character
-                            pos = optval.find(';')
-                            if pos != -1 and optval[pos-1].isspace():
-                                optval = optval[:pos]
-                        optval = optval.strip()
-                        # allow empty values
-                        if optval == '""':
-                            optval = ''
-                        optname = self.optionxform(optname.rstrip())
-                        cursect[optname] = optval
-                    else:
-                        # a non-fatal parsing error occurred.  set up the
-                        # exception but keep going. the exception will be
-                        # raised at the end of the file and will contain a
-                        # list of all bogus lines
-                        if not e:
-                            e = cp.ParsingError(fpname)
-                        e.append(lineno, repr(line))
-        # if any parsing errors occurred, raise an exception
-        if e:
-            raise e
-
-
-    def _write(self, fp):
-        """Write an .ini-format representation of the configuration state.
-
-           This is taken from the default Python 2.6 source.  It writes 4
-           spaces at the beginning of lines instead of no leading space.
-        """
-        if self._defaults:
-            fp.write("[%s]\n" % cp.DEFAULTSECT)
-            for (key, value) in self._defaults.items():
-                fp.write("    %s = %s\n" % (key, str(value).replace('\n', '\n\t')))
-            fp.write("\n")
-
-        for section in self._sections:
-            # Handles comments and blank lines.
-            if isinstance(self._sections[section], basestring):
-                fp.write(self._sections[section])
-                continue
-
-            else:
-                # Allow leading whitespace
-                fp.write("[%s]\n" % section)
-                for (key, value) in self._sections[section].items():
-                    if key != "__name__":
-                        fp.write("    %s = %s\n" %
-                                 (key, str(value).replace('\n', '\n\t')))
+def clear_config_caches():
+    """Clears the caches for configuration files, which will cause them
+       to be re-read upon the next request"""
+    for key,s in _config_sections.iteritems():
+        s.files_read_from = []
+        s.result_dict = {}
+    spack.config._read_config_file_result = {}
+    spack.config._compiler_by_arch = {}
+    spack.compilers._cached_default_compiler = None
+
+
+def _merge_dicts(d1, d2):
+    """Recursively merges two configuration trees, with entries
+       in d2 taking precedence over d1"""
+    if not d1:
+        return d2.copy()
+    if not d2:
+        return d1
+
+    for key2, val2 in d2.iteritems():
+        if not key2 in d1:
+            d1[key2] = val2
+            continue
+        val1 = d1[key2]
+        if isinstance(val1, dict) and isinstance(val2, dict):
+            d1[key2] = _merge_dicts(val1, val2)
+            continue
+        if isinstance(val1, list) and isinstance(val2, list):
+            val1.extend(val2)
+            seen = set()
+            d1[key2] = [ x for x in val1 if not (x in seen or seen.add(x)) ]
+            continue
+        d1[key2] = val2
+    return d1

+def get_config(category_name):
+    """Get the confguration tree for the names category.  Strips off the
+       top-level category entry from the dict"""
+    global config_scopes
+    category = _config_sections[category_name]
+    if category.result_dict:
+        return category.result_dict
+
+    category.result_dict = {}
+    for scope, scope_path in config_scopes:
+        path = os.path.join(scope_path, category.filename)
+        result = _read_config_file(path)
+        if not result:
+            continue
+        if not category_name in result:
+            continue
+        category.files_read_from.insert(0, path)
+        result = result[category_name]
+        if category.merge:
+            category.result_dict = _merge_dicts(category.result_dict, result)
+        else:
+            category.result_dict = result
+    return category.result_dict

-class SpackConfigurationError(spack.error.SpackError):
-    def __init__(self, *args):
-        super(SpackConfigurationError, self).__init__(*args)
-
-
-class InvalidConfigurationScopeError(SpackConfigurationError):
-    def __init__(self, scope):
-        super(InvalidConfigurationScopeError, self).__init__(
-            "Invalid configuration scope: '%s'" % scope,
-            "Options are: %s" % ", ".join(*_scopes.values()))
-
-
-class InvalidSectionNameError(SpackConfigurationError):
-    """Raised when the name for a section is invalid."""
-    def __init__(self, name):
-        super(InvalidSectionNameError, self).__init__(
-            "Invalid section specifier: '%s'" % name)
-
-
-class ReadOnlySpackConfigError(SpackConfigurationError):
-    """Raised when user attempts to write to a config read from multiple files."""
-    def __init__(self):
-        super(ReadOnlySpackConfigError, self).__init__(
-            "Can only write to a single-file SpackConfigParser")
-
-
-class ConfigParserError(SpackConfigurationError):
-    """Wrapper for the Python ConfigParser's errors"""
-    def __init__(self, error):
-        super(ConfigParserError, self).__init__(str(error))
-        self.error = error
-
-
-class NoOptionError(ConfigParserError):
-    """Wrapper for ConfigParser NoOptionError"""
-    def __init__(self, error):
-        super(NoOptionError, self).__init__(error)
-
-
-class NoSectionError(ConfigParserError):
-    """Wrapper for ConfigParser NoOptionError"""
-    def __init__(self, error):
-        super(NoSectionError, self).__init__(error)

+def get_compilers_config(arch=None):
+    """Get the compiler configuration from config files for the given
+       architecture.  Strips off the architecture component of the
+       configuration"""
+    global _compiler_by_arch
+    if not arch:
+        arch = spack.architecture.sys_type()
+    if arch in _compiler_by_arch:
+        return _compiler_by_arch[arch]
+
+    cc_config = get_config('compilers')
+    if arch in cc_config and 'all' in cc_config:
+        arch_compiler = dict(cc_config[arch])
+        _compiler_by_arch[arch] = _merge_dict(arch_compiler, cc_config['all'])
+    elif arch in cc_config:
+        _compiler_by_arch[arch] = cc_config[arch]
+    elif 'all' in cc_config:
+        _compiler_by_arch[arch] = cc_config['all']
+    else:
+        _compiler_by_arch[arch] = {}
+    return _compiler_by_arch[arch]
+
+
+def get_mirror_config():
+    """Get the mirror configuration from config files"""
+    return get_config('mirrors')
+
+
+def get_config_scope_dirname(scope):
+    """For a scope return the config directory"""
+    global config_scopes
+    for s,p in config_scopes:
+        if s == scope:
+            return p
+    tty.die("Unknown scope %s. Valid options are %s" %
+            (scope, ", ".join([s for s,p in config_scopes])))
+
+
+def get_config_scope_filename(scope, category_name):
+    """For some scope and category, get the name of the configuration file"""
+    if not category_name in _config_sections:
+        tty.die("Unknown config category %s. Valid options are: %s" %
+                (category_name, ", ".join([s for s in _config_sections])))
+    return os.path.join(get_config_scope_dirname(scope), _config_sections[category_name].filename)
+
+
+def add_to_config(category_name, addition_dict, scope=None):
+    """Merge a new dict into a configuration tree and write the new
+       configuration to disk"""
+    global _read_config_file_result
+    get_config(category_name)
+    category = _config_sections[category_name]
+
+    #If scope is specified, use it. Otherwise use the last config scope that
+    #we successfully parsed data from.
+    file = None
+    path = None
+    if not scope and not category.files_read_from:
+        scope = 'user'
+    if scope:
+        try:
+            dir = get_config_scope_dirname(scope)
+            if not os.path.exists(dir):
+                mkdirp(dir)
+            path = os.path.join(dir, category.filename)
+            file = open(path, 'w')
+        except exceptions.IOError, e:
+            pass
+    else:
+        for p in category.files_read_from:
+            try:
+                file = open(p, 'w')
+            except exceptions.IOError, e:
+                pass
+            if file:
+                path = p
+                break;
+    if not file:
+        tty.die('Unable to write to config file %s' % path)
+
+    #Merge the new information into the existing file info, then write to disk
+    new_dict = _read_config_file_result[path]
+    if new_dict and category_name in new_dict:
+        new_dict = new_dict[category_name]
+    new_dict = _merge_dicts(new_dict, addition_dict)
+    new_dict = { category_name : new_dict }
+    _read_config_file_result[path] = new_dict
+    yaml.dump(new_dict, stream=file, default_flow_style=False)
+    file.close()
+
+    #Merge the new information into the cached results
+    category.result_dict = _merge_dicts(category.result_dict, addition_dict)
+
+
+def add_to_mirror_config(addition_dict, scope=None):
+    """Add mirrors to the configuration files"""
+    add_to_config('mirrors', addition_dict, scope)
+
+
+def add_to_compiler_config(addition_dict, scope=None, arch=None):
+    """Add compilerss to the configuration files"""
+    if not arch:
+        arch = spack.architecture.sys_type()
+    add_to_config('compilers', { arch : addition_dict }, scope)
+    clear_config_caches()
+
+
+def remove_from_config(category_name, key_to_rm, scope=None):
+    """Remove a configuration key and write a new configuration to disk"""
+    global config_scopes
+    get_config(category_name)
+    scopes_to_rm_from = [scope] if scope else [s for s,p in config_scopes]
+    category = _config_sections[category_name]
+
+    rmd_something = False
+    for s in scopes_to_rm_from:
+        path = get_config_scope_filename(scope, category_name)
+        result = _read_config_file(path)
+        if not result:
+            continue
+        if not key_to_rm in result[category_name]:
+            continue
+        with closing(open(path, 'w')) as f:
+            result[category_name].pop(key_to_rm, None)
+            yaml.dump(result, stream=f, default_flow_style=False)
+        category.result_dict.pop(key_to_rm, None)
+        rmd_something = True
+    return rmd_something
+
+
+"""Print a configuration to stdout"""
+def print_category(category_name):
+    if not category_name in _config_sections:
+        tty.die("Unknown config category %s. Valid options are: %s" %
+                (category_name, ", ".join([s for s in _config_sections])))
+    yaml.dump(get_config(category_name), stream=sys.stdout, default_flow_style=False)

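As a reading aid (not part of the commit), the merge semantics implemented by _merge_dicts() and used by get_config(): scopes are read in order, so entries from the later scope (user) override the earlier one (site) key by key, while unrelated keys are kept and lists are concatenated with duplicates dropped. A tiny trace with made-up paths:

    site = { 'compilers' : { 'all' : { 'gcc@4.5.0' : { 'cc' : '/usr/bin/gcc' } } } }
    user = { 'compilers' : { 'all' : { 'gcc@4.5.0' : { 'cc'  : '/opt/gcc/bin/gcc',
                                                       'cxx' : '/opt/gcc/bin/g++' } } } }

    merged = _merge_dicts(site, user)
    # cc comes from the user tree, cxx is added alongside it:
    # { 'compilers' : { 'all' : { 'gcc@4.5.0' :
    #     { 'cc' : '/opt/gcc/bin/gcc', 'cxx' : '/opt/gcc/bin/g++' } } } }
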
@@ -344,13 +344,9 @@ def destroy(self):

 def _get_mirrors():
     """Get mirrors from spack configuration."""
-    config = spack.config.get_config()
-
-    mirrors = []
-    sec_names = config.get_section_names('mirror')
-    for name in sec_names:
-        mirrors.append(config.get_value('mirror', name, 'url'))
-    return mirrors
+    config = spack.config.get_mirror_config()
+    return [val for name, val in config.iteritems()]


 def ensure_access(file=spack.stage_path):

@@ -26,44 +26,49 @@
 import shutil
 import os
 from tempfile import mkdtemp
+import spack
+from spack.packages import PackageDB
+from spack.test.mock_packages_test import *

-from spack.config import *
-
-
-class ConfigTest(unittest.TestCase):
-
-    @classmethod
-    def setUp(cls):
-        cls.tmp_dir = mkdtemp('.tmp', 'spack-config-test-')
-
-    @classmethod
-    def tearDown(cls):
-        shutil.rmtree(cls.tmp_dir, True)
-
-    def get_path(self):
-        return os.path.join(ConfigTest.tmp_dir, "spackconfig")
+class ConfigTest(MockPackagesTest):
+
+    def setUp(self):
+        self.initmock()
+        self.tmp_dir = mkdtemp('.tmp', 'spack-config-test-')
+        spack.config.config_scopes = [('test_low_priority', os.path.join(self.tmp_dir, 'low')),
+                                      ('test_high_priority', os.path.join(self.tmp_dir, 'high'))]
+
+    def tearDown(self):
+        self.cleanmock()
+        shutil.rmtree(self.tmp_dir, True)
+
+    def check_config(self, comps):
+        config = spack.config.get_compilers_config()
+        compiler_list = ['cc', 'cxx', 'f77', 'f90']
+        for key in comps:
+            for c in compiler_list:
+                if comps[key][c] == '/bad':
+                    continue
+                self.assertEqual(comps[key][c], config[key][c])


     def test_write_key(self):
-        config = SpackConfigParser(self.get_path())
-        config.set_value('compiler.cc', 'a')
-        config.set_value('compiler.cxx', 'b')
-        config.set_value('compiler', 'gcc@4.7.3', 'cc', 'c')
-        config.set_value('compiler', 'gcc@4.7.3', 'cxx', 'd')
-        config.write()
-
-        config = SpackConfigParser(self.get_path())
-
-        self.assertEqual(config.get_value('compiler.cc'), 'a')
-        self.assertEqual(config.get_value('compiler.cxx'), 'b')
-        self.assertEqual(config.get_value('compiler', 'gcc@4.7.3', 'cc'), 'c')
-        self.assertEqual(config.get_value('compiler', 'gcc@4.7.3', 'cxx'), 'd')
-
-        self.assertEqual(config.get_value('compiler', None, 'cc'), 'a')
-        self.assertEqual(config.get_value('compiler', None, 'cxx'), 'b')
-        self.assertEqual(config.get_value('compiler.gcc@4.7.3.cc'), 'c')
-        self.assertEqual(config.get_value('compiler.gcc@4.7.3.cxx'), 'd')
-
-        self.assertRaises(NoOptionError, config.get_value, 'compiler', None, 'fc')
+        a_comps = {"gcc@4.7.3" : { "cc" : "/gcc473", "cxx" : "/g++473", "f77" : None, "f90" : None },
+                   "gcc@4.5.0" : { "cc" : "/gcc450", "cxx" : "/g++450", "f77" : "/gfortran", "f90" : "/gfortran" },
+                   "clang@3.3" : { "cc" : "/bad", "cxx" : "/bad", "f77" : "/bad", "f90" : "/bad" }}
+
+        b_comps = {"icc@10.0" : { "cc" : "/icc100", "cxx" : "/icc100", "f77" : None, "f90" : None },
+                   "icc@11.1" : { "cc" : "/icc111", "cxx" : "/icp111", "f77" : "/ifort", "f90" : "/ifort" },
+                   "clang@3.3" : { "cc" : "/clang", "cxx" : "/clang++", "f77" : None, "f90" : None}}
+
+        spack.config.add_to_compiler_config(a_comps, 'test_low_priority')
+        spack.config.add_to_compiler_config(b_comps, 'test_high_priority')
+
+        self.check_config(a_comps)
+        self.check_config(b_comps)
+
+        spack.config.clear_config_caches()
+
+        self.check_config(a_comps)
+        self.check_config(b_comps)

@@ -31,7 +31,7 @@


 def set_pkg_dep(pkg, spec):
-    """Alters dependence information for a pacakge.
+    """Alters dependence information for a package.
        Use this to mock up constraints.
     """
     spec = Spec(spec)

@@ -39,21 +39,32 @@ def set_pkg_dep(pkg, spec):


 class MockPackagesTest(unittest.TestCase):
-    def setUp(self):
+    def initmock(self):
         # Use the mock packages database for these tests.  This allows
         # us to set up contrived packages that don't interfere with
         # real ones.
         self.real_db = spack.db
         spack.db = PackageDB(spack.mock_packages_path)

-        self.real_scopes = spack.config._scopes
-        spack.config._scopes = {
-            'site' : spack.mock_site_config,
-            'user' : spack.mock_user_config }
+        spack.config.clear_config_caches()
+        self.real_scopes = spack.config.config_scopes
+        spack.config.config_scopes = [
+            ('site', spack.mock_site_config),
+            ('user', spack.mock_user_config)]
+
+
+    def cleanmock(self):
+        """Restore the real packages path after any test."""
+        spack.db = self.real_db
+        spack.config.config_scopes = self.real_scopes
+        spack.config.clear_config_caches()
+
+
+    def setUp(self):
+        self.initmock()


     def tearDown(self):
-        """Restore the real packages path after any test."""
-        spack.db = self.real_db
-        spack.config._scopes = self.real_scopes
+        self.cleanmock()

@@ -1,12 +0,0 @@
-[compiler "gcc@4.5.0"]
-    cc = /path/to/gcc
-    cxx = /path/to/g++
-    f77 = /path/to/gfortran
-    fc = /path/to/gfortran
-
-[compiler "clang@3.3"]
-    cc = /path/to/clang
-    cxx = /path/to/clang++
-    f77 = None
-    fc = None

var/spack/mock_configs/site_spackconfig/compilers.yaml (new file)
@@ -0,0 +1,12 @@
+compilers:
+  all:
+    clang@3.3:
+      cc: /path/to/clang
+      cxx: /path/to/clang++
+      f77: None
+      fc: None
+    gcc@4.5.0:
+      cc: /path/to/gcc
+      cxx: /path/to/g++
+      f77: /path/to/gfortran
+      fc: /path/to/gfortran
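For completeness, a small sketch (not part of the commit) of how this mock file round-trips through the YAML layer; it assumes Spack's lib directory is on sys.path (for the bundled yaml package under external) and the repository root as the working directory:

    from external import yaml

    with open('var/spack/mock_configs/site_spackconfig/compilers.yaml') as f:
        tree = yaml.load(f)

    # 'all' acts as the architecture entry that applies everywhere;
    # get_compilers_config() falls back to it when there is no
    # arch-specific subtree.
    print tree['compilers']['all']['gcc@4.5.0']['cc']    # /path/to/gcc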