Rework Spack config: keep user & site config in memory.

- User and site config are now kept separately in memory.
- Merging is done on demand when client code requests the configuration.
- Allows user/site config to be updated independently of each other by commands.
- Simplifies config logic (no more tracking merged files)
This commit is contained in:
Todd Gamblin 2015-12-25 14:00:33 -08:00
parent cc349e9a32
commit 34401cf0c3
11 changed files with 281 additions and 264 deletions

1
.gitignore vendored
View file

@ -8,3 +8,4 @@
/etc/spackconfig /etc/spackconfig
/share/spack/dotkit /share/spack/dotkit
/share/spack/modules /share/spack/modules
/TAGS

View file

@ -65,10 +65,11 @@ def compiler_add(args):
if c.spec not in spack.compilers.all_compilers()] if c.spec not in spack.compilers.all_compilers()]
if compilers: if compilers:
spack.compilers.add_compilers_to_config('user', *compilers) spack.compilers.add_compilers_to_config('user', compilers)
n = len(compilers) n = len(compilers)
tty.msg("Added %d new compiler%s to %s" % ( s = 's' if n > 1 else ''
n, 's' if n > 1 else '', spack.config.get_config_scope_filename('user', 'compilers'))) filename = spack.config.get_config_filename('user', 'compilers')
tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
colify(reversed(sorted(c.spec for c in compilers)), indent=4) colify(reversed(sorted(c.spec for c in compilers)), indent=4)
else: else:
tty.msg("Found no new compilers") tty.msg("Found no new compilers")

View file

@ -44,22 +44,22 @@ def setup_parser(subparser):
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='config_command') sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='config_command')
get_parser = sp.add_parser('get', help='Print configuration values.') get_parser = sp.add_parser('get', help='Print configuration values.')
get_parser.add_argument('category', help="Configuration category to print.") get_parser.add_argument('section', help="Configuration section to print.")
edit_parser = sp.add_parser('edit', help='Edit configuration file.') edit_parser = sp.add_parser('edit', help='Edit configuration file.')
edit_parser.add_argument('category', help="Configuration category to edit") edit_parser.add_argument('section', help="Configuration section to edit")
def config_get(args): def config_get(args):
spack.config.print_category(args.category) spack.config.print_section(args.section)
def config_edit(args): def config_edit(args):
if not args.scope: if not args.scope:
args.scope = 'user' args.scope = 'user'
if not args.category: if not args.section:
args.category = None args.section = None
config_file = spack.config.get_config_scope_filename(args.scope, args.category) config_file = spack.config.get_config_filename(args.scope, args.section)
spack.editor(config_file) spack.editor(config_file)

View file

@ -76,7 +76,7 @@ def mirror_add(args):
url = 'file://' + url url = 'file://' + url
mirror_dict = { args.name : url } mirror_dict = { args.name : url }
spack.config.add_to_mirror_config({ args.name : url }) spack.config.update_config('mirrors', { args.name : url }, 'user')
def mirror_remove(args): def mirror_remove(args):
@ -90,7 +90,7 @@ def mirror_remove(args):
def mirror_list(args): def mirror_list(args):
"""Print out available mirrors to the console.""" """Print out available mirrors to the console."""
sec_names = spack.config.get_mirror_config() sec_names = spack.config.get_config('mirrors')
if not sec_names: if not sec_names:
tty.msg("No mirrors configured.") tty.msg("No mirrors configured.")
return return

View file

@ -35,6 +35,7 @@
import spack.error import spack.error
import spack.spec import spack.spec
import spack.config import spack.config
import spack.architecture
from spack.util.multiproc import parmap from spack.util.multiproc import parmap
from spack.compiler import Compiler from spack.compiler import Compiler
@ -55,23 +56,48 @@ def converter(cspec_like):
return converter return converter
def _get_config(): def _to_dict(compiler):
"""Get a Spack config, but make sure it has compiler configuration """Return a dict version of compiler suitable to insert in YAML."""
first.""" return {
str(compiler.spec) : dict(
(attr, getattr(compiler, attr, None))
for attr in _required_instance_vars)
}
def get_compiler_config(arch=None):
"""Return the compiler configuration for the specified architecture.
If the compiler configuration designates some compilers for
'all' architectures, those are merged into the result, as well.
"""
# If any configuration file has compilers, just stick with the # If any configuration file has compilers, just stick with the
# ones already configured. # ones already configured.
config = spack.config.get_compilers_config() config = spack.config.get_config('compilers')
existing = [spack.spec.CompilerSpec(s)
for s in config]
if existing:
return config
compilers = find_compilers(*get_path('PATH')) if arch is None:
add_compilers_to_config('user', *compilers) arch = spack.architecture.sys_type()
# After writing compilers to the user config, return a full config if arch not in config:
# from all files. config[arch] = {}
return spack.config.get_compilers_config() compilers = find_compilers(*get_path('PATH'))
for compiler in compilers:
config[arch].update(_to_dict(compiler))
spack.config.update_config('compilers', config, 'user')
# Merge 'all' compilers with arch-specific ones.
merged_config = config.get('all', {})
merged_config = spack.config._merge_yaml(merged_config, config[arch])
return merged_config
def all_compilers(arch=None):
"""Return a set of specs for all the compiler versions currently
available to build with. These are instances of CompilerSpec.
"""
return [spack.spec.CompilerSpec(s) for s in get_compiler_config(arch)]
_cached_default_compiler = None _cached_default_compiler = None
@ -123,20 +149,6 @@ def find_compilers(*path):
return clist return clist
def add_compilers_to_config(scope, *compilers):
compiler_config_tree = {}
for compiler in compilers:
compiler_entry = {}
for c in _required_instance_vars:
val = getattr(compiler, c)
if not val:
val = "None"
compiler_entry[c] = val
compiler_config_tree[str(compiler.spec)] = compiler_entry
spack.config.add_to_compiler_config(compiler_config_tree, scope)
def supported_compilers(): def supported_compilers():
"""Return a set of names of compilers supported by Spack. """Return a set of names of compilers supported by Spack.
@ -152,14 +164,6 @@ def supported(compiler_spec):
return compiler_spec.name in supported_compilers() return compiler_spec.name in supported_compilers()
def all_compilers():
"""Return a set of specs for all the compiler versions currently
available to build with. These are instances of CompilerSpec.
"""
configuration = _get_config()
return [spack.spec.CompilerSpec(s) for s in configuration]
@_auto_compiler_spec @_auto_compiler_spec
def find(compiler_spec): def find(compiler_spec):
"""Return specs of available compilers that match the supplied """Return specs of available compilers that match the supplied
@ -172,7 +176,7 @@ def compilers_for_spec(compiler_spec):
"""This gets all compilers that satisfy the supplied CompilerSpec. """This gets all compilers that satisfy the supplied CompilerSpec.
Returns an empty list if none are found. Returns an empty list if none are found.
""" """
config = _get_config() config = get_compiler_config()
def get_compiler(cspec): def get_compiler(cspec):
items = config[str(cspec)] items = config[str(cspec)]

View file

@ -67,25 +67,54 @@
categorize entries beneath them in the tree. At the root of the tree, categorize entries beneath them in the tree. At the root of the tree,
entries like ''cc'' and ''cxx'' are specified as name/value pairs. entries like ''cc'' and ''cxx'' are specified as name/value pairs.
Spack returns these trees as nested dicts. The dict for the above example ``config.get_config()`` returns these trees as nested dicts, but it
would look like: strips the first level off. So, ``config.get_config('compilers')``
would return something like this for the above example:
{ 'compilers' : { 'chaos_5_x86_64_ib' :
{ 'chaos_5_x86_64_ib' : { 'gcc@4.4.7' :
{ 'gcc@4.4.7' : { 'cc' : '/usr/bin/gcc',
{ 'cc' : '/usr/bin/gcc', 'cxx' : '/usr/bin/g++'
'cxx' : '/usr/bin/g++' 'f77' : '/usr/bin/gfortran'
'f77' : '/usr/bin/gfortran' 'fc' : '/usr/bin/gfortran' }
'fc' : '/usr/bin/gfortran' } }
} { 'bgqos_0' :
{ 'bgqos_0' : { 'cc' : '/usr/local/bin/mpixlc' } }
{ 'cc' : '/usr/local/bin/mpixlc' }
}
}
Some convenience functions, like get_mirrors_config and Likewise, the ``mirrors.yaml`` file's first line must be ``mirrors:``,
``get_compilers_config`` may strip off the top-levels of the tree and but ``get_config()`` strips that off too.
return subtrees.
Precedence
===============================
``config.py`` routines attempt to recursively merge configuration
across scopes. So if there are ``compilers.py`` files in both the
site scope and the user scope, ``get_config('compilers')`` will return
merged dictionaries of *all* the compilers available. If a user
compiler conflicts with a site compiler, Spack will overwrite the site
configuration with the user configuration. If both the user and site
``mirrors.yaml`` files contain lists of mirrors, then ``get_config()``
will return a concatenated list of mirrors, with the user config items
first.
Sometimes, it is useful to *completely* override a site setting with a
user one. To accomplish this, you can use *two* colons at the end of
a key in a configuration file. For example, this:
compilers::
chaos_5_x86_64_ib:
gcc@4.4.7:
cc: /usr/bin/gcc
cxx: /usr/bin/g++
f77: /usr/bin/gfortran
fc: /usr/bin/gfortran
bgqos_0:
xlc@12.1:
cc: /usr/local/bin/mpixlc
...
Will make Spack take compilers *only* from the user configuration, and
the site configuration will be ignored.
""" """
import os import os
@ -96,80 +125,132 @@
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp from llnl.util.filesystem import mkdirp
from llnl.util.lang import memoized
import spack import spack
from spack.error import SpackError
"""List of valid config sections."""
valid_sections = ('compilers', 'mirrors', 'repos')
_config_sections = {} def check_section(section):
class _ConfigCategory: """Raise a ValueError if the section is not a valid section."""
name = None if section not in valid_sections:
filename = None raise ValueError("Invalid config section: '%s'. Options are %s."
merge = True % (section, valid_sections))
def __init__(self, name, filename, merge, strip):
self.name = name
self.filename = filename
self.merge = merge
self.strip = strip
self.files_read_from = []
self.result_dict = {}
_config_sections[name] = self
_ConfigCategory('repos', 'repos.yaml', True, True)
_ConfigCategory('compilers', 'compilers.yaml', True, True)
_ConfigCategory('mirrors', 'mirrors.yaml', True, True)
_ConfigCategory('view', 'views.yaml', True, True)
_ConfigCategory('order', 'orders.yaml', True, True)
"""Names of scopes and their corresponding configuration files.""" class ConfigScope(object):
config_scopes = [('site', os.path.join(spack.etc_path, 'spack')), """This class represents a configuration scope.
('user', os.path.expanduser('~/.spack'))]
A scope is one directory containing named configuration files.
Each file is a config "section" (e.g., mirrors, compilers, etc).
"""
def __init__(self, name, path):
self.name = name # scope name.
self.path = path # path to directory containing configs.
self.sections = {} # sections read from config files.
def get_section_filename(self, section):
check_section(section)
return os.path.join(self.path, "%s.yaml" % section)
def get_section(self, section):
if not section in self.sections:
path = self.get_section_filename(section)
data = _read_config_file(path)
self.sections[section] = {} if data is None else data
return self.sections[section]
def write_section(self, section):
filename = self.get_section_filename(section)
data = self.get_section(section)
try:
mkdirp(self.path)
with open(filename, 'w') as f:
yaml.dump(data, stream=f, default_flow_style=False)
except (yaml.YAMLError, IOError) as e:
raise ConfigFileError("Error writing to config file: '%s'" % str(e))
def clear(self):
"""Empty cached config information."""
self.sections = {}
"""List of config scopes by name.
Later scopes in the list will override earlier scopes.
"""
config_scopes = [
ConfigScope('site', os.path.join(spack.etc_path, 'spack')),
ConfigScope('user', os.path.expanduser('~/.spack'))]
"""List of valid scopes, for convenience."""
valid_scopes = (s.name for s in config_scopes)
def check_scope(scope):
if scope is None:
return 'user'
elif scope not in valid_scopes:
raise ValueError("Invalid config scope: '%s'. Must be one of %s."
% (scope, valid_scopes))
return scope
def get_scope(scope):
scope = check_scope(scope)
return next(s for s in config_scopes if s.name == scope)
_compiler_by_arch = {}
@memoized
def _read_config_file(filename): def _read_config_file(filename):
"""Read a YAML configuration file""" """Read a YAML configuration file."""
# Ignore nonexisting files. # Ignore nonexisting files.
if not os.path.exists(filename): if not os.path.exists(filename):
return None return None
elif not os.path.isfile(filename): elif not os.path.isfile(filename):
tty.die("Invlaid configuration. %s exists but is not a file." % filename) raise ConfigFileError(
"Invlaid configuration. %s exists but is not a file." % filename)
elif not os.access(filename, os.R_OK): elif not os.access(filename, os.R_OK):
tty.die("Configuration file %s is not readable." % filename) raise ConfigFileError("Config file is not readable: %s." % filename)
try: try:
with open(filename) as f: with open(filename) as f:
return yaml.load(f) return yaml.load(f)
except MarkedYAMLError, e: except MarkedYAMLError, e:
tty.die("Error parsing yaml%s: %s" % (str(e.context_mark), e.problem)) raise ConfigFileError(
"Error parsing yaml%s: %s" % (str(e.context_mark), e.problem))
except IOError, e: except IOError, e:
tty.die("Error reading configuration file %s: %s" % (filename, str(e))) raise ConfigFileError(
"Error reading configuration file %s: %s" % (filename, str(e)))
def clear_config_caches(): def clear_config_caches():
"""Clears the caches for configuration files, which will cause them """Clears the caches for configuration files, which will cause them
to be re-read upon the next request""" to be re-read upon the next request"""
for key,s in _config_sections.iteritems(): for scope in config_scopes:
s.files_read_from = [] scope.clear()
s.result_dict = {}
_read_config_file.clear()
spack.config._compiler_by_arch = {}
spack.compilers._cached_default_compiler = None
def _merge_yaml(dest, source): def _merge_yaml(dest, source):
"""Merges source into dest; entries in source take precedence over dest. """Merges source into dest; entries in source take precedence over dest.
This routine may modify dest and should be assigned to dest, in
case dest was None to begin with, e.g.:
dest = _merge_yaml(dest, source)
Config file authors can optionally end any attribute in a dict Config file authors can optionally end any attribute in a dict
with `::` instead of `:`, and the key will override that of the with `::` instead of `:`, and the key will override that of the
parent instead of merging. parent instead of merging.
""" """
def they_are(t): def they_are(t):
return isinstance(dest, t) and isinstance(source, t) return isinstance(dest, t) and isinstance(source, t)
@ -212,61 +293,31 @@ def substitute_spack_prefix(path):
return path.replace('$spack', spack.prefix) return path.replace('$spack', spack.prefix)
def get_config(category): def get_config(section):
"""Get the confguration tree for a category. """Get configuration settings for a section.
Strips off the top-level category entry from the dict Strips off the top-level section name from the YAML dict.
""" """
category = _config_sections[category] check_section(section)
if category.result_dict: merged_section = {}
return category.result_dict
category.result_dict = {} for scope in config_scopes:
for scope, scope_path in config_scopes: # read potentially cached data from the scope.
path = os.path.join(scope_path, category.filename) data = scope.get_section(section)
result = _read_config_file(path) if not data or not section in data:
if not result:
continue continue
if category.strip: # extract data under the section name header
if not category.name in result: data = data[section]
continue
result = result[category.name]
# ignore empty sections for easy commenting of single-line configs. # ignore empty sections for easy commenting of single-line configs.
if result is None: if not data:
continue continue
category.files_read_from.insert(0, path) # merge config data from scopes.
if category.merge: merged_section = _merge_yaml(merged_section, data)
category.result_dict = _merge_yaml(category.result_dict, result)
else:
category.result_dict = result
return category.result_dict return merged_section
def get_compilers_config(arch=None):
"""Get the compiler configuration from config files for the given
architecture. Strips off the architecture component of the
configuration"""
global _compiler_by_arch
if not arch:
arch = spack.architecture.sys_type()
if arch in _compiler_by_arch:
return _compiler_by_arch[arch]
cc_config = get_config('compilers')
if arch in cc_config and 'all' in cc_config:
arch_compiler = dict(cc_config[arch])
_compiler_by_arch[arch] = _merge_yaml(arch_compiler, cc_config['all'])
elif arch in cc_config:
_compiler_by_arch[arch] = cc_config[arch]
elif 'all' in cc_config:
_compiler_by_arch[arch] = cc_config['all']
else:
_compiler_by_arch[arch] = {}
return _compiler_by_arch[arch]
def get_repos_config(): def get_repos_config():
@ -284,119 +335,71 @@ def expand_repo_path(path):
return [expand_repo_path(repo) for repo in repo_list] return [expand_repo_path(repo) for repo in repo_list]
def get_mirror_config(): def get_config_filename(scope, section):
"""Get the mirror configuration from config files""" """For some scope and section, get the name of the configuration file"""
return get_config('mirrors') scope = get_scope(scope)
return scope.get_section_filename(section)
def get_config_scope_dirname(scope): def update_config(section, update_data, scope=None):
"""For a scope return the config directory""" """Update the configuration file for a particular scope.
for s,p in config_scopes:
if s == scope: Merges contents of update_data into the scope's data for the
return p specified section, then writes out the config file.
tty.die("Unknown scope %s. Valid options are %s" %
(scope, ", ".join([s for s,p in config_scopes])) update_data should contain only the section's data, with the
top-level name stripped off. This can be a list, dict, or any
other yaml-ish structure.
"""
# read in the config to ensure we've got current data
get_config(section)
check_section(section) # validate section name
scope = get_scope(scope) # get ConfigScope object from string.
# read only the requested section's data.
data = scope.get_section(section)
data = _merge_yaml(data, { section : update_data })
scope.write_section(section)
def get_config_scope_filename(scope, category_name): def remove_from_config(section, key_to_rm, scope=None):
"""For some scope and category, get the name of the configuration file""" """Remove a configuration key and write updated configuration to disk.
if not category_name in _config_sections:
tty.die("Unknown config category %s. Valid options are: %s" %
(category_name, ", ".join([s for s in _config_sections])))
return os.path.join(get_config_scope_dirname(scope), _config_sections[category_name].filename)
Return True if something was removed, False otherwise.
def add_to_config(category_name, addition_dict, scope=None): """
"""Merge a new dict into a configuration tree and write the new # ensure configs are current by reading in.
configuration to disk""" get_config(section)
get_config(category_name)
category = _config_sections[category_name]
# If scope is specified, use it. Otherwise use the last config scope that # check args and get the objects we need.
# we successfully parsed data from. scope = get_scope(scope)
file = None data = scope.get_section(section)
path = None filename = scope.get_section_filename(section)
if not scope and not category.files_read_from:
scope = 'user'
if scope: # do some checks
try: if not data:
dir = get_config_scope_dirname(scope) return False
if not os.path.exists(dir):
mkdirp(dir)
path = os.path.join(dir, category.filename)
file = open(path, 'w')
except IOError, e:
pass
else:
for p in category.files_read_from:
try:
file = open(p, 'w')
except IOError, e:
pass
if file:
path = p
break;
if not file: if not section in data:
tty.die('Unable to write to config file %s' % path) raise ConfigFileError("Invalid configuration file: '%s'" % filename)
# Merge the new information into the existing file info, then write to disk if key_to_rm not in section[section]:
new_dict = _read_config_file(path) return False
if new_dict and category_name in new_dict: # remove the key from the section's configuration
new_dict = new_dict[category_name] del data[section][key_to_rm]
scope.write_section(section)
new_dict = _merge_yaml(new_dict, addition_dict)
new_dict = { category_name : new_dict }
# Install new dict as memoized value, and dump to disk
_read_config_file.cache[path] = new_dict
yaml.dump(new_dict, stream=file, default_flow_style=False)
file.close()
# Merge the new information into the cached results
category.result_dict = _merge_yaml(category.result_dict, addition_dict)
def add_to_mirror_config(addition_dict, scope=None):
"""Add mirrors to the configuration files"""
add_to_config('mirrors', addition_dict, scope)
def add_to_compiler_config(addition_dict, scope=None, arch=None):
"""Add compilerss to the configuration files"""
if not arch:
arch = spack.architecture.sys_type()
add_to_config('compilers', { arch : addition_dict }, scope)
clear_config_caches()
def remove_from_config(category_name, key_to_rm, scope=None):
"""Remove a configuration key and write a new configuration to disk"""
get_config(category_name)
scopes_to_rm_from = [scope] if scope else [s for s,p in config_scopes]
category = _config_sections[category_name]
rmd_something = False
for s in scopes_to_rm_from:
path = get_config_scope_filename(scope, category_name)
result = _read_config_file(path)
if not result:
continue
if not key_to_rm in result[category_name]:
continue
with open(path, 'w') as f:
result[category_name].pop(key_to_rm, None)
yaml.dump(result, stream=f, default_flow_style=False)
category.result_dict.pop(key_to_rm, None)
rmd_something = True
return rmd_something
"""Print a configuration to stdout""" """Print a configuration to stdout"""
def print_category(category_name): def print_section(section):
if not category_name in _config_sections: try:
tty.die("Unknown config category %s. Valid options are: %s" % yaml.dump(get_config(section), stream=sys.stdout, default_flow_style=False)
(category_name, ", ".join([s for s in _config_sections]))) except (yaml.YAMLError, IOError) as e:
yaml.dump(get_config(category_name), stream=sys.stdout, default_flow_style=False) raise ConfigError("Error reading configuration: %s" % section)
class ConfigError(SpackError): pass
class ConfigFileError(ConfigError): pass

View file

@ -531,6 +531,7 @@ def concrete(self):
and self.architecture and self.architecture
and self.compiler and self.compiler.concrete and self.compiler and self.compiler.concrete
and self.dependencies.concrete) and self.dependencies.concrete)
return self._concrete return self._concrete

View file

@ -242,7 +242,8 @@ def fetch(self):
# TODO: move mirror logic out of here and clean it up! # TODO: move mirror logic out of here and clean it up!
if self.mirror_path: if self.mirror_path:
urls = ["%s/%s" % (m, self.mirror_path) for m in _get_mirrors()] urls = ["%s/%s" % (u, self.mirror_path)
for name, u in spack.config.get_config('mirrors')]
digest = None digest = None
if isinstance(self.fetcher, fs.URLFetchStrategy): if isinstance(self.fetcher, fs.URLFetchStrategy):
@ -345,7 +346,7 @@ def destroy(self):
def _get_mirrors(): def _get_mirrors():
"""Get mirrors from spack configuration.""" """Get mirrors from spack configuration."""
config = spack.config.get_mirror_config() config = spack.config.get_config('mirrors')
return [val for name, val in config.iteritems()] return [val for name, val in config.iteritems()]

View file

@ -27,6 +27,7 @@
import os import os
from tempfile import mkdtemp from tempfile import mkdtemp
import spack import spack
import spack.config
from spack.test.mock_packages_test import * from spack.test.mock_packages_test import *
# Some sample compiler config data # Some sample compiler config data
@ -72,9 +73,9 @@ def setUp(self):
super(ConfigTest, self).setUp() super(ConfigTest, self).setUp()
self.tmp_dir = mkdtemp('.tmp', 'spack-config-test-') self.tmp_dir = mkdtemp('.tmp', 'spack-config-test-')
spack.config.config_scopes = [ spack.config.config_scopes = [
('test_low_priority', os.path.join(self.tmp_dir, 'low')), spack.config.ConfigScope('test_low_priority', os.path.join(self.tmp_dir, 'low')),
('test_high_priority', os.path.join(self.tmp_dir, 'high'))] spack.config.ConfigScope('test_high_priority', os.path.join(self.tmp_dir, 'high'))]
spack.config.valid_scopes = ('test_low_priority', 'test_high_priority')
def tearDown(self): def tearDown(self):
super(ConfigTest, self).tearDown() super(ConfigTest, self).tearDown()
@ -83,17 +84,19 @@ def tearDown(self):
def check_config(self, comps, *compiler_names): def check_config(self, comps, *compiler_names):
"""Check that named compilers in comps match Spack's config.""" """Check that named compilers in comps match Spack's config."""
config = spack.config.get_compilers_config() config = spack.config.get_config('compilers')
compiler_list = ['cc', 'cxx', 'f77', 'f90'] compiler_list = ['cc', 'cxx', 'f77', 'f90']
for key in compiler_names: for key in compiler_names:
for c in compiler_list: for c in compiler_list:
self.assertEqual(comps[key][c], config[key][c]) expected = comps[key][c]
actual = config[key][c]
self.assertEqual(expected, actual)
def test_write_key_in_memory(self): def test_write_key_in_memory(self):
# Write b_comps "on top of" a_comps. # Write b_comps "on top of" a_comps.
spack.config.add_to_compiler_config(a_comps, 'test_low_priority') spack.config.update_config('compilers', a_comps, 'test_low_priority')
spack.config.add_to_compiler_config(b_comps, 'test_high_priority') spack.config.update_config('compilers', b_comps, 'test_high_priority')
# Make sure the config looks how we expect. # Make sure the config looks how we expect.
self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0') self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0')
@ -102,8 +105,8 @@ def test_write_key_in_memory(self):
def test_write_key_to_disk(self): def test_write_key_to_disk(self):
# Write b_comps "on top of" a_comps. # Write b_comps "on top of" a_comps.
spack.config.add_to_compiler_config(a_comps, 'test_low_priority') spack.config.update_config('compilers', a_comps, 'test_low_priority')
spack.config.add_to_compiler_config(b_comps, 'test_high_priority') spack.config.update_config('compilers', b_comps, 'test_high_priority')
# Clear caches so we're forced to read from disk. # Clear caches so we're forced to read from disk.
spack.config.clear_config_caches() spack.config.clear_config_caches()

View file

@ -79,7 +79,8 @@ class DatabaseTest(MockPackagesTest):
def _mock_install(self, spec): def _mock_install(self, spec):
s = Spec(spec) s = Spec(spec)
pkg = spack.repo.get(s.concretized()) s.concretize()
pkg = spack.repo.get(s)
pkg.do_install(fake=True) pkg.do_install(fake=True)

View file

@ -41,9 +41,10 @@ def initmock(self):
spack.config.clear_config_caches() spack.config.clear_config_caches()
self.real_scopes = spack.config.config_scopes self.real_scopes = spack.config.config_scopes
self.real_valid_scopes = spack.config.valid_scopes
spack.config.config_scopes = [ spack.config.config_scopes = [
('site', spack.mock_site_config), spack.config.ConfigScope('site', spack.mock_site_config),
('user', spack.mock_user_config)] spack.config.ConfigScope('user', spack.mock_user_config)]
# Store changes to the package's dependencies so we can # Store changes to the package's dependencies so we can
# restore later. # restore later.
@ -71,6 +72,7 @@ def cleanmock(self):
"""Restore the real packages path after any test.""" """Restore the real packages path after any test."""
spack.repo.swap(self.db) spack.repo.swap(self.db)
spack.config.config_scopes = self.real_scopes spack.config.config_scopes = self.real_scopes
spack.config.valid_scopes = self.real_valid_scopes
spack.config.clear_config_caches() spack.config.clear_config_caches()
# Restore dependency changes that happened during the test # Restore dependency changes that happened during the test