Merge branch 'features/optional-deps' into develop

This includes:
- Much better variant support (+debug/-debug)
- Optional dependency support (depends_on(..., when='<condition>'))
- New config file format (YAML in ~/.spack)
- New Spec format (YAML in $prefix/.spack/spec.yaml)
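
To make the first two items concrete, here is a minimal sketch of a package recipe using the new directives. The package name, URL, checksum, and dependency are invented for illustration and are not part of this commit's diff:

    from spack import *

    class Mylib(Package):
        """Hypothetical library with an optional debug build."""
        homepage = "http://example.com/mylib"
        url      = "http://example.com/mylib-1.0.tar.gz"

        version('1.0', '0123456789abcdef0123456789abcdef')

        # Variant support: users can request mylib+debug or mylib-debug.
        variant('debug', default=False, description="Build with debug symbols")

        # Optional dependency: only added to the spec when the condition holds.
        depends_on('libelf', when='+debug')

        def install(self, spec, prefix):
            configure_args = ['--prefix=%s' % prefix]
            if '+debug' in spec:
                configure_args.append('--enable-debug')
            configure(*configure_args)
            make()
            make('install')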
Todd Gamblin 2015-05-18 16:16:20 -07:00
commit f813d823a1
67 changed files with 7631 additions and 1164 deletions

1
.gitignore vendored

@@ -4,6 +4,7 @@
 *~
 .DS_Store
 .idea
+/etc/spack/*
 /etc/spackconfig
 /share/spack/dotkit
 /share/spack/modules

bin/spack

@@ -107,6 +107,10 @@ def main():
     tty.set_debug(args.debug)
     spack.debug = args.debug
+    if spack.debug:
+        import spack.util.debug as debug
+        debug.register_interrupt_handler()
     spack.spack_working_dir = working_dir
     if args.mock:
         from spack.packages import PackageDB
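
The handler itself lives in spack.util.debug, which is not shown in this hunk. As a rough, generic illustration of the idea (not Spack's actual implementation), an interrupt handler that drops into a debugger can be registered like this:

    import pdb
    import signal

    def register_interrupt_handler():
        """On SIGINT (Ctrl-C), enter pdb at the interrupted frame instead of exiting."""
        def handler(signum, frame):
            print("Interrupt received; entering debugger (type 'c' to continue).")
            pdb.Pdb().set_trace(frame)
        signal.signal(signal.SIGINT, handler)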


@@ -22,7 +22,12 @@
 {%- endif %}
 {%- endif %}
+<br/>
+Written by Todd Gamblin (<a href="mailto:tgamblin@llnl.gov">tgamblin@llnl.gov</a>) and
+many contributors. LLNL-CODE-647188.
 {%- if last_updated %}
+<br/>
 {% trans last_updated=last_updated|e %}Last updated on {{ last_updated }}.{% endtrans %}
 {%- endif %}
 </p>
@@ -33,4 +38,3 @@
 {%- endif %}
 </footer>

lib/spack/docs/conf.py

@@ -94,7 +94,7 @@
 # General information about the project.
 project = u'Spack'
-copyright = u'2013-2014, Lawrence Livermore National Laboratory'
+copyright = u'2013-2015, Lawrence Livermore National Laboratory.'
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
@@ -203,7 +203,7 @@
 #html_show_sourcelink = True
 # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-#html_show_sphinx = True
+#html_show_sphinx = False
 # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
 #html_show_copyright = True

lib/spack/external/__init__.py

@@ -28,6 +28,11 @@
 So far:
 argparse: We include our own version to be Python 2.6 compatible.
-pyqver2: External script to query required python version of python source code.
-    Used for ensuring 2.6 compatibility.
+pyqver2: External script to query required python version of
+    python source code. Used for ensuring 2.6 compatibility.
+functools: Used for implementation of total_ordering.
+yaml: Used for config files.
 """

19
lib/spack/external/yaml/LICENSE vendored Normal file

@@ -0,0 +1,19 @@
Copyright (c) 2006 Kirill Simonov
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

35
lib/spack/external/yaml/README vendored Normal file

@@ -0,0 +1,35 @@
PyYAML - The next generation YAML parser and emitter for Python.
To install, type 'python setup.py install'.
By default, the setup.py script checks whether LibYAML is installed
and if so, builds and installs LibYAML bindings. To skip the check
and force installation of LibYAML bindings, use the option '--with-libyaml':
'python setup.py --with-libyaml install'. To disable the check and
skip building and installing LibYAML bindings, use '--without-libyaml':
'python setup.py --without-libyaml install'.
When LibYAML bindings are installed, you may use fast LibYAML-based
parser and emitter as follows:
>>> yaml.load(stream, Loader=yaml.CLoader)
>>> yaml.dump(data, Dumper=yaml.CDumper)
PyYAML includes a comprehensive test suite. To run the tests,
type 'python setup.py test'.
For more information, check the PyYAML homepage:
'http://pyyaml.org/wiki/PyYAML'.
For PyYAML tutorial and reference, see:
'http://pyyaml.org/wiki/PyYAMLDocumentation'.
Post your questions and opinions to the YAML-Core mailing list:
'http://lists.sourceforge.net/lists/listinfo/yaml-core'.
Submit bug reports and feature requests to the PyYAML bug tracker:
'http://pyyaml.org/newticket?component=pyyaml'.
PyYAML is written by Kirill Simonov <xi@resolvent.net>. It is released
under the MIT license. See the file LICENSE for more details.
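
A usage note on the CLoader/CDumper snippet above: since the C bindings are optional, callers typically fall back to the pure-Python classes when they are missing. This is standard PyYAML usage, not something added by this commit:

    import yaml

    try:
        # C-accelerated classes exist only if the LibYAML bindings were built.
        from yaml import CLoader as Loader, CDumper as Dumper
    except ImportError:
        from yaml import Loader, Dumper

    data = yaml.load("packages: [libelf, mpileaks]", Loader=Loader)
    print(yaml.dump(data, Dumper=Dumper))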

315
lib/spack/external/yaml/__init__.py vendored Normal file

@@ -0,0 +1,315 @@
from error import *
from tokens import *
from events import *
from nodes import *
from loader import *
from dumper import *
__version__ = '3.10'
try:
from cyaml import *
__with_libyaml__ = True
except ImportError:
__with_libyaml__ = False
def scan(stream, Loader=Loader):
"""
Scan a YAML stream and produce scanning tokens.
"""
loader = Loader(stream)
try:
while loader.check_token():
yield loader.get_token()
finally:
loader.dispose()
def parse(stream, Loader=Loader):
"""
Parse a YAML stream and produce parsing events.
"""
loader = Loader(stream)
try:
while loader.check_event():
yield loader.get_event()
finally:
loader.dispose()
def compose(stream, Loader=Loader):
"""
Parse the first YAML document in a stream
and produce the corresponding representation tree.
"""
loader = Loader(stream)
try:
return loader.get_single_node()
finally:
loader.dispose()
def compose_all(stream, Loader=Loader):
"""
Parse all YAML documents in a stream
and produce corresponding representation trees.
"""
loader = Loader(stream)
try:
while loader.check_node():
yield loader.get_node()
finally:
loader.dispose()
def load(stream, Loader=Loader):
"""
Parse the first YAML document in a stream
and produce the corresponding Python object.
"""
loader = Loader(stream)
try:
return loader.get_single_data()
finally:
loader.dispose()
def load_all(stream, Loader=Loader):
"""
Parse all YAML documents in a stream
and produce corresponding Python objects.
"""
loader = Loader(stream)
try:
while loader.check_data():
yield loader.get_data()
finally:
loader.dispose()
def safe_load(stream):
"""
Parse the first YAML document in a stream
and produce the corresponding Python object.
Resolve only basic YAML tags.
"""
return load(stream, SafeLoader)
def safe_load_all(stream):
"""
Parse all YAML documents in a stream
and produce corresponding Python objects.
Resolve only basic YAML tags.
"""
return load_all(stream, SafeLoader)
def emit(events, stream=None, Dumper=Dumper,
canonical=None, indent=None, width=None,
allow_unicode=None, line_break=None):
"""
Emit YAML parsing events into a stream.
If stream is None, return the produced string instead.
"""
getvalue = None
if stream is None:
from StringIO import StringIO
stream = StringIO()
getvalue = stream.getvalue
dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
allow_unicode=allow_unicode, line_break=line_break)
try:
for event in events:
dumper.emit(event)
finally:
dumper.dispose()
if getvalue:
return getvalue()
def serialize_all(nodes, stream=None, Dumper=Dumper,
canonical=None, indent=None, width=None,
allow_unicode=None, line_break=None,
encoding='utf-8', explicit_start=None, explicit_end=None,
version=None, tags=None):
"""
Serialize a sequence of representation trees into a YAML stream.
If stream is None, return the produced string instead.
"""
getvalue = None
if stream is None:
if encoding is None:
from StringIO import StringIO
else:
from cStringIO import StringIO
stream = StringIO()
getvalue = stream.getvalue
dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
allow_unicode=allow_unicode, line_break=line_break,
encoding=encoding, version=version, tags=tags,
explicit_start=explicit_start, explicit_end=explicit_end)
try:
dumper.open()
for node in nodes:
dumper.serialize(node)
dumper.close()
finally:
dumper.dispose()
if getvalue:
return getvalue()
def serialize(node, stream=None, Dumper=Dumper, **kwds):
"""
Serialize a representation tree into a YAML stream.
If stream is None, return the produced string instead.
"""
return serialize_all([node], stream, Dumper=Dumper, **kwds)
def dump_all(documents, stream=None, Dumper=Dumper,
default_style=None, default_flow_style=None,
canonical=None, indent=None, width=None,
allow_unicode=None, line_break=None,
encoding='utf-8', explicit_start=None, explicit_end=None,
version=None, tags=None):
"""
Serialize a sequence of Python objects into a YAML stream.
If stream is None, return the produced string instead.
"""
getvalue = None
if stream is None:
if encoding is None:
from StringIO import StringIO
else:
from cStringIO import StringIO
stream = StringIO()
getvalue = stream.getvalue
dumper = Dumper(stream, default_style=default_style,
default_flow_style=default_flow_style,
canonical=canonical, indent=indent, width=width,
allow_unicode=allow_unicode, line_break=line_break,
encoding=encoding, version=version, tags=tags,
explicit_start=explicit_start, explicit_end=explicit_end)
try:
dumper.open()
for data in documents:
dumper.represent(data)
dumper.close()
finally:
dumper.dispose()
if getvalue:
return getvalue()
def dump(data, stream=None, Dumper=Dumper, **kwds):
"""
Serialize a Python object into a YAML stream.
If stream is None, return the produced string instead.
"""
return dump_all([data], stream, Dumper=Dumper, **kwds)
def safe_dump_all(documents, stream=None, **kwds):
"""
Serialize a sequence of Python objects into a YAML stream.
Produce only basic YAML tags.
If stream is None, return the produced string instead.
"""
return dump_all(documents, stream, Dumper=SafeDumper, **kwds)
def safe_dump(data, stream=None, **kwds):
"""
Serialize a Python object into a YAML stream.
Produce only basic YAML tags.
If stream is None, return the produced string instead.
"""
return dump_all([data], stream, Dumper=SafeDumper, **kwds)
def add_implicit_resolver(tag, regexp, first=None,
Loader=Loader, Dumper=Dumper):
"""
Add an implicit scalar detector.
If an implicit scalar value matches the given regexp,
the corresponding tag is assigned to the scalar.
first is a sequence of possible initial characters or None.
"""
Loader.add_implicit_resolver(tag, regexp, first)
Dumper.add_implicit_resolver(tag, regexp, first)
def add_path_resolver(tag, path, kind=None, Loader=Loader, Dumper=Dumper):
"""
Add a path based resolver for the given tag.
A path is a list of keys that forms a path
to a node in the representation tree.
Keys can be string values, integers, or None.
"""
Loader.add_path_resolver(tag, path, kind)
Dumper.add_path_resolver(tag, path, kind)
def add_constructor(tag, constructor, Loader=Loader):
"""
Add a constructor for the given tag.
Constructor is a function that accepts a Loader instance
and a node object and produces the corresponding Python object.
"""
Loader.add_constructor(tag, constructor)
def add_multi_constructor(tag_prefix, multi_constructor, Loader=Loader):
"""
Add a multi-constructor for the given tag prefix.
Multi-constructor is called for a node if its tag starts with tag_prefix.
Multi-constructor accepts a Loader instance, a tag suffix,
and a node object and produces the corresponding Python object.
"""
Loader.add_multi_constructor(tag_prefix, multi_constructor)
def add_representer(data_type, representer, Dumper=Dumper):
"""
Add a representer for the given type.
Representer is a function accepting a Dumper instance
and an instance of the given data type
and producing the corresponding representation node.
"""
Dumper.add_representer(data_type, representer)
def add_multi_representer(data_type, multi_representer, Dumper=Dumper):
"""
Add a representer for the given type.
Multi-representer is a function accepting a Dumper instance
and an instance of the given data type or subtype
and producing the corresponding representation node.
"""
Dumper.add_multi_representer(data_type, multi_representer)
class YAMLObjectMetaclass(type):
"""
The metaclass for YAMLObject.
"""
def __init__(cls, name, bases, kwds):
super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None:
cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml)
cls.yaml_dumper.add_representer(cls, cls.to_yaml)
class YAMLObject(object):
"""
An object that can dump itself to a YAML stream
and load itself from a YAML stream.
"""
__metaclass__ = YAMLObjectMetaclass
__slots__ = () # no direct instantiation, so allow immutable subclasses
yaml_loader = Loader
yaml_dumper = Dumper
yaml_tag = None
yaml_flow_style = None
def from_yaml(cls, loader, node):
"""
Convert a representation node to a Python object.
"""
return loader.construct_yaml_object(node, cls)
from_yaml = classmethod(from_yaml)
def to_yaml(cls, dumper, data):
"""
Convert a Python object to a representation node.
"""
return dumper.represent_yaml_object(cls.yaml_tag, data, cls,
flow_style=cls.yaml_flow_style)
to_yaml = classmethod(to_yaml)
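
To show the module-level helpers above in action, here is a small round trip through safe_load and safe_dump. The configuration keys are invented and do not reflect Spack's actual config schema; this assumes lib/spack/external is on sys.path so the package imports as plain 'yaml':

    import yaml

    text = """
    mirrors:
      local: /tmp/mirror
    compilers:
      - gcc@4.9.2
      - clang@3.5
    """

    # safe_load resolves only basic YAML tags, so untrusted input cannot
    # construct arbitrary Python objects.
    config = yaml.safe_load(text)
    assert config['mirrors']['local'] == '/tmp/mirror'

    # safe_dump writes it back out; default_flow_style=False keeps block style.
    print(yaml.safe_dump(config, default_flow_style=False))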

139
lib/spack/external/yaml/composer.py vendored Normal file

@@ -0,0 +1,139 @@
__all__ = ['Composer', 'ComposerError']
from error import MarkedYAMLError
from events import *
from nodes import *
class ComposerError(MarkedYAMLError):
pass
class Composer(object):
def __init__(self):
self.anchors = {}
def check_node(self):
# Drop the STREAM-START event.
if self.check_event(StreamStartEvent):
self.get_event()
# If there are more documents available?
return not self.check_event(StreamEndEvent)
def get_node(self):
# Get the root node of the next document.
if not self.check_event(StreamEndEvent):
return self.compose_document()
def get_single_node(self):
# Drop the STREAM-START event.
self.get_event()
# Compose a document if the stream is not empty.
document = None
if not self.check_event(StreamEndEvent):
document = self.compose_document()
# Ensure that the stream contains no more documents.
if not self.check_event(StreamEndEvent):
event = self.get_event()
raise ComposerError("expected a single document in the stream",
document.start_mark, "but found another document",
event.start_mark)
# Drop the STREAM-END event.
self.get_event()
return document
def compose_document(self):
# Drop the DOCUMENT-START event.
self.get_event()
# Compose the root node.
node = self.compose_node(None, None)
# Drop the DOCUMENT-END event.
self.get_event()
self.anchors = {}
return node
def compose_node(self, parent, index):
if self.check_event(AliasEvent):
event = self.get_event()
anchor = event.anchor
if anchor not in self.anchors:
raise ComposerError(None, None, "found undefined alias %r"
% anchor.encode('utf-8'), event.start_mark)
return self.anchors[anchor]
event = self.peek_event()
anchor = event.anchor
if anchor is not None:
if anchor in self.anchors:
raise ComposerError("found duplicate anchor %r; first occurence"
% anchor.encode('utf-8'), self.anchors[anchor].start_mark,
"second occurence", event.start_mark)
self.descend_resolver(parent, index)
if self.check_event(ScalarEvent):
node = self.compose_scalar_node(anchor)
elif self.check_event(SequenceStartEvent):
node = self.compose_sequence_node(anchor)
elif self.check_event(MappingStartEvent):
node = self.compose_mapping_node(anchor)
self.ascend_resolver()
return node
def compose_scalar_node(self, anchor):
event = self.get_event()
tag = event.tag
if tag is None or tag == u'!':
tag = self.resolve(ScalarNode, event.value, event.implicit)
node = ScalarNode(tag, event.value,
event.start_mark, event.end_mark, style=event.style)
if anchor is not None:
self.anchors[anchor] = node
return node
def compose_sequence_node(self, anchor):
start_event = self.get_event()
tag = start_event.tag
if tag is None or tag == u'!':
tag = self.resolve(SequenceNode, None, start_event.implicit)
node = SequenceNode(tag, [],
start_event.start_mark, None,
flow_style=start_event.flow_style)
if anchor is not None:
self.anchors[anchor] = node
index = 0
while not self.check_event(SequenceEndEvent):
node.value.append(self.compose_node(node, index))
index += 1
end_event = self.get_event()
node.end_mark = end_event.end_mark
return node
def compose_mapping_node(self, anchor):
start_event = self.get_event()
tag = start_event.tag
if tag is None or tag == u'!':
tag = self.resolve(MappingNode, None, start_event.implicit)
node = MappingNode(tag, [],
start_event.start_mark, None,
flow_style=start_event.flow_style)
if anchor is not None:
self.anchors[anchor] = node
while not self.check_event(MappingEndEvent):
#key_event = self.peek_event()
item_key = self.compose_node(node, None)
#if item_key in node.value:
# raise ComposerError("while composing a mapping", start_event.start_mark,
# "found duplicate key", key_event.start_mark)
item_value = self.compose_node(node, item_key)
#node.value[item_key] = item_value
node.value.append((item_key, item_value))
end_event = self.get_event()
node.end_mark = end_event.end_mark
return node

678
lib/spack/external/yaml/constructor.py vendored Normal file

@@ -0,0 +1,678 @@
__all__ = ['BaseConstructor', 'SafeConstructor', 'Constructor',
'ConstructorError']
from error import *
from nodes import *
import datetime
import binascii, re, sys, types
class ConstructorError(MarkedYAMLError):
pass
class BaseConstructor(object):
yaml_constructors = {}
yaml_multi_constructors = {}
def __init__(self):
self.constructed_objects = {}
self.recursive_objects = {}
self.state_generators = []
self.deep_construct = False
def check_data(self):
# If there are more documents available?
return self.check_node()
def get_data(self):
# Construct and return the next document.
if self.check_node():
return self.construct_document(self.get_node())
def get_single_data(self):
# Ensure that the stream contains a single document and construct it.
node = self.get_single_node()
if node is not None:
return self.construct_document(node)
return None
def construct_document(self, node):
data = self.construct_object(node)
while self.state_generators:
state_generators = self.state_generators
self.state_generators = []
for generator in state_generators:
for dummy in generator:
pass
self.constructed_objects = {}
self.recursive_objects = {}
self.deep_construct = False
return data
def construct_object(self, node, deep=False):
if node in self.constructed_objects:
return self.constructed_objects[node]
if deep:
old_deep = self.deep_construct
self.deep_construct = True
if node in self.recursive_objects:
raise ConstructorError(None, None,
"found unconstructable recursive node", node.start_mark)
self.recursive_objects[node] = None
constructor = None
tag_suffix = None
if node.tag in self.yaml_constructors:
constructor = self.yaml_constructors[node.tag]
else:
for tag_prefix in self.yaml_multi_constructors:
if node.tag.startswith(tag_prefix):
tag_suffix = node.tag[len(tag_prefix):]
constructor = self.yaml_multi_constructors[tag_prefix]
break
else:
if None in self.yaml_multi_constructors:
tag_suffix = node.tag
constructor = self.yaml_multi_constructors[None]
elif None in self.yaml_constructors:
constructor = self.yaml_constructors[None]
elif isinstance(node, ScalarNode):
constructor = self.__class__.construct_scalar
elif isinstance(node, SequenceNode):
constructor = self.__class__.construct_sequence
elif isinstance(node, MappingNode):
constructor = self.__class__.construct_mapping
if tag_suffix is None:
data = constructor(self, node)
else:
data = constructor(self, tag_suffix, node)
if isinstance(data, types.GeneratorType):
generator = data
data = generator.next()
if self.deep_construct:
for dummy in generator:
pass
else:
self.state_generators.append(generator)
self.constructed_objects[node] = data
del self.recursive_objects[node]
if deep:
self.deep_construct = old_deep
return data
def construct_scalar(self, node):
if not isinstance(node, ScalarNode):
raise ConstructorError(None, None,
"expected a scalar node, but found %s" % node.id,
node.start_mark)
return node.value
def construct_sequence(self, node, deep=False):
if not isinstance(node, SequenceNode):
raise ConstructorError(None, None,
"expected a sequence node, but found %s" % node.id,
node.start_mark)
return [self.construct_object(child, deep=deep)
for child in node.value]
def construct_mapping(self, node, deep=False):
if not isinstance(node, MappingNode):
raise ConstructorError(None, None,
"expected a mapping node, but found %s" % node.id,
node.start_mark)
mapping = {}
for key_node, value_node in node.value:
key = self.construct_object(key_node, deep=deep)
try:
hash(key)
except TypeError, exc:
raise ConstructorError("while constructing a mapping", node.start_mark,
"found unacceptable key (%s)" % exc, key_node.start_mark)
value = self.construct_object(value_node, deep=deep)
if key in mapping:
raise ConstructorError("while constructing a mapping", node.start_mark,
"found already in-use key (%s)" % key, key_node.start_mark)
mapping[key] = value
return mapping
def construct_pairs(self, node, deep=False):
if not isinstance(node, MappingNode):
raise ConstructorError(None, None,
"expected a mapping node, but found %s" % node.id,
node.start_mark)
pairs = []
for key_node, value_node in node.value:
key = self.construct_object(key_node, deep=deep)
value = self.construct_object(value_node, deep=deep)
pairs.append((key, value))
return pairs
def add_constructor(cls, tag, constructor):
if not 'yaml_constructors' in cls.__dict__:
cls.yaml_constructors = cls.yaml_constructors.copy()
cls.yaml_constructors[tag] = constructor
add_constructor = classmethod(add_constructor)
def add_multi_constructor(cls, tag_prefix, multi_constructor):
if not 'yaml_multi_constructors' in cls.__dict__:
cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy()
cls.yaml_multi_constructors[tag_prefix] = multi_constructor
add_multi_constructor = classmethod(add_multi_constructor)
class SafeConstructor(BaseConstructor):
def construct_scalar(self, node):
if isinstance(node, MappingNode):
for key_node, value_node in node.value:
if key_node.tag == u'tag:yaml.org,2002:value':
return self.construct_scalar(value_node)
return BaseConstructor.construct_scalar(self, node)
def flatten_mapping(self, node):
merge = []
index = 0
while index < len(node.value):
key_node, value_node = node.value[index]
if key_node.tag == u'tag:yaml.org,2002:merge':
del node.value[index]
if isinstance(value_node, MappingNode):
self.flatten_mapping(value_node)
merge.extend(value_node.value)
elif isinstance(value_node, SequenceNode):
submerge = []
for subnode in value_node.value:
if not isinstance(subnode, MappingNode):
raise ConstructorError("while constructing a mapping",
node.start_mark,
"expected a mapping for merging, but found %s"
% subnode.id, subnode.start_mark)
self.flatten_mapping(subnode)
submerge.append(subnode.value)
submerge.reverse()
for value in submerge:
merge.extend(value)
else:
raise ConstructorError("while constructing a mapping", node.start_mark,
"expected a mapping or list of mappings for merging, but found %s"
% value_node.id, value_node.start_mark)
elif key_node.tag == u'tag:yaml.org,2002:value':
key_node.tag = u'tag:yaml.org,2002:str'
index += 1
else:
index += 1
if merge:
node.value = merge + node.value
def construct_mapping(self, node, deep=False):
if isinstance(node, MappingNode):
self.flatten_mapping(node)
return BaseConstructor.construct_mapping(self, node, deep=deep)
def construct_yaml_null(self, node):
self.construct_scalar(node)
return None
bool_values = {
u'yes': True,
u'no': False,
u'true': True,
u'false': False,
u'on': True,
u'off': False,
}
def construct_yaml_bool(self, node):
value = self.construct_scalar(node)
return self.bool_values[value.lower()]
def construct_yaml_int(self, node):
value = str(self.construct_scalar(node))
value = value.replace('_', '')
sign = +1
if value[0] == '-':
sign = -1
if value[0] in '+-':
value = value[1:]
if value == '0':
return 0
elif value.startswith('0b'):
return sign*int(value[2:], 2)
elif value.startswith('0x'):
return sign*int(value[2:], 16)
elif value[0] == '0':
return sign*int(value, 8)
elif ':' in value:
digits = [int(part) for part in value.split(':')]
digits.reverse()
base = 1
value = 0
for digit in digits:
value += digit*base
base *= 60
return sign*value
else:
return sign*int(value)
inf_value = 1e300
while inf_value != inf_value*inf_value:
inf_value *= inf_value
nan_value = -inf_value/inf_value # Trying to make a quiet NaN (like C99).
def construct_yaml_float(self, node):
value = str(self.construct_scalar(node))
value = value.replace('_', '').lower()
sign = +1
if value[0] == '-':
sign = -1
if value[0] in '+-':
value = value[1:]
if value == '.inf':
return sign*self.inf_value
elif value == '.nan':
return self.nan_value
elif ':' in value:
digits = [float(part) for part in value.split(':')]
digits.reverse()
base = 1
value = 0.0
for digit in digits:
value += digit*base
base *= 60
return sign*value
else:
return sign*float(value)
def construct_yaml_binary(self, node):
value = self.construct_scalar(node)
try:
return str(value).decode('base64')
except (binascii.Error, UnicodeEncodeError), exc:
raise ConstructorError(None, None,
"failed to decode base64 data: %s" % exc, node.start_mark)
timestamp_regexp = re.compile(
ur'''^(?P<year>[0-9][0-9][0-9][0-9])
-(?P<month>[0-9][0-9]?)
-(?P<day>[0-9][0-9]?)
(?:(?:[Tt]|[ \t]+)
(?P<hour>[0-9][0-9]?)
:(?P<minute>[0-9][0-9])
:(?P<second>[0-9][0-9])
(?:\.(?P<fraction>[0-9]*))?
(?:[ \t]*(?P<tz>Z|(?P<tz_sign>[-+])(?P<tz_hour>[0-9][0-9]?)
(?::(?P<tz_minute>[0-9][0-9]))?))?)?$''', re.X)
def construct_yaml_timestamp(self, node):
value = self.construct_scalar(node)
match = self.timestamp_regexp.match(node.value)
values = match.groupdict()
year = int(values['year'])
month = int(values['month'])
day = int(values['day'])
if not values['hour']:
return datetime.date(year, month, day)
hour = int(values['hour'])
minute = int(values['minute'])
second = int(values['second'])
fraction = 0
if values['fraction']:
fraction = values['fraction'][:6]
while len(fraction) < 6:
fraction += '0'
fraction = int(fraction)
delta = None
if values['tz_sign']:
tz_hour = int(values['tz_hour'])
tz_minute = int(values['tz_minute'] or 0)
delta = datetime.timedelta(hours=tz_hour, minutes=tz_minute)
if values['tz_sign'] == '-':
delta = -delta
data = datetime.datetime(year, month, day, hour, minute, second, fraction)
if delta:
data -= delta
return data
def construct_yaml_omap(self, node):
# Note: we do not check for duplicate keys, because it's too
# CPU-expensive.
omap = []
yield omap
if not isinstance(node, SequenceNode):
raise ConstructorError("while constructing an ordered map", node.start_mark,
"expected a sequence, but found %s" % node.id, node.start_mark)
for subnode in node.value:
if not isinstance(subnode, MappingNode):
raise ConstructorError("while constructing an ordered map", node.start_mark,
"expected a mapping of length 1, but found %s" % subnode.id,
subnode.start_mark)
if len(subnode.value) != 1:
raise ConstructorError("while constructing an ordered map", node.start_mark,
"expected a single mapping item, but found %d items" % len(subnode.value),
subnode.start_mark)
key_node, value_node = subnode.value[0]
key = self.construct_object(key_node)
value = self.construct_object(value_node)
omap.append((key, value))
def construct_yaml_pairs(self, node):
# Note: the same code as `construct_yaml_omap`.
pairs = []
yield pairs
if not isinstance(node, SequenceNode):
raise ConstructorError("while constructing pairs", node.start_mark,
"expected a sequence, but found %s" % node.id, node.start_mark)
for subnode in node.value:
if not isinstance(subnode, MappingNode):
raise ConstructorError("while constructing pairs", node.start_mark,
"expected a mapping of length 1, but found %s" % subnode.id,
subnode.start_mark)
if len(subnode.value) != 1:
raise ConstructorError("while constructing pairs", node.start_mark,
"expected a single mapping item, but found %d items" % len(subnode.value),
subnode.start_mark)
key_node, value_node = subnode.value[0]
key = self.construct_object(key_node)
value = self.construct_object(value_node)
pairs.append((key, value))
def construct_yaml_set(self, node):
data = set()
yield data
value = self.construct_mapping(node)
data.update(value)
def construct_yaml_str(self, node):
value = self.construct_scalar(node)
try:
return value.encode('ascii')
except UnicodeEncodeError:
return value
def construct_yaml_seq(self, node):
data = []
yield data
data.extend(self.construct_sequence(node))
def construct_yaml_map(self, node):
data = {}
yield data
value = self.construct_mapping(node)
data.update(value)
def construct_yaml_object(self, node, cls):
data = cls.__new__(cls)
yield data
if hasattr(data, '__setstate__'):
state = self.construct_mapping(node, deep=True)
data.__setstate__(state)
else:
state = self.construct_mapping(node)
data.__dict__.update(state)
def construct_undefined(self, node):
raise ConstructorError(None, None,
"could not determine a constructor for the tag %r" % node.tag.encode('utf-8'),
node.start_mark)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:null',
SafeConstructor.construct_yaml_null)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:bool',
SafeConstructor.construct_yaml_bool)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:int',
SafeConstructor.construct_yaml_int)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:float',
SafeConstructor.construct_yaml_float)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:binary',
SafeConstructor.construct_yaml_binary)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:timestamp',
SafeConstructor.construct_yaml_timestamp)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:omap',
SafeConstructor.construct_yaml_omap)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:pairs',
SafeConstructor.construct_yaml_pairs)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:set',
SafeConstructor.construct_yaml_set)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:str',
SafeConstructor.construct_yaml_str)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:seq',
SafeConstructor.construct_yaml_seq)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:map',
SafeConstructor.construct_yaml_map)
SafeConstructor.add_constructor(None,
SafeConstructor.construct_undefined)
class Constructor(SafeConstructor):
def construct_python_str(self, node):
return self.construct_scalar(node).encode('utf-8')
def construct_python_unicode(self, node):
return self.construct_scalar(node)
def construct_python_long(self, node):
return long(self.construct_yaml_int(node))
def construct_python_complex(self, node):
return complex(self.construct_scalar(node))
def construct_python_tuple(self, node):
return tuple(self.construct_sequence(node))
def find_python_module(self, name, mark):
if not name:
raise ConstructorError("while constructing a Python module", mark,
"expected non-empty name appended to the tag", mark)
try:
__import__(name)
except ImportError, exc:
raise ConstructorError("while constructing a Python module", mark,
"cannot find module %r (%s)" % (name.encode('utf-8'), exc), mark)
return sys.modules[name]
def find_python_name(self, name, mark):
if not name:
raise ConstructorError("while constructing a Python object", mark,
"expected non-empty name appended to the tag", mark)
if u'.' in name:
module_name, object_name = name.rsplit('.', 1)
else:
module_name = '__builtin__'
object_name = name
try:
__import__(module_name)
except ImportError, exc:
raise ConstructorError("while constructing a Python object", mark,
"cannot find module %r (%s)" % (module_name.encode('utf-8'), exc), mark)
module = sys.modules[module_name]
if not hasattr(module, object_name):
raise ConstructorError("while constructing a Python object", mark,
"cannot find %r in the module %r" % (object_name.encode('utf-8'),
module.__name__), mark)
return getattr(module, object_name)
def construct_python_name(self, suffix, node):
value = self.construct_scalar(node)
if value:
raise ConstructorError("while constructing a Python name", node.start_mark,
"expected the empty value, but found %r" % value.encode('utf-8'),
node.start_mark)
return self.find_python_name(suffix, node.start_mark)
def construct_python_module(self, suffix, node):
value = self.construct_scalar(node)
if value:
raise ConstructorError("while constructing a Python module", node.start_mark,
"expected the empty value, but found %r" % value.encode('utf-8'),
node.start_mark)
return self.find_python_module(suffix, node.start_mark)
class classobj: pass
def make_python_instance(self, suffix, node,
args=None, kwds=None, newobj=False):
if not args:
args = []
if not kwds:
kwds = {}
cls = self.find_python_name(suffix, node.start_mark)
if newobj and isinstance(cls, type(self.classobj)) \
and not args and not kwds:
instance = self.classobj()
instance.__class__ = cls
return instance
elif newobj and isinstance(cls, type):
return cls.__new__(cls, *args, **kwds)
else:
return cls(*args, **kwds)
def set_python_instance_state(self, instance, state):
if hasattr(instance, '__setstate__'):
instance.__setstate__(state)
else:
slotstate = {}
if isinstance(state, tuple) and len(state) == 2:
state, slotstate = state
if hasattr(instance, '__dict__'):
instance.__dict__.update(state)
elif state:
slotstate.update(state)
for key, value in slotstate.items():
setattr(object, key, value)
def construct_python_object(self, suffix, node):
# Format:
# !!python/object:module.name { ... state ... }
instance = self.make_python_instance(suffix, node, newobj=True)
yield instance
deep = hasattr(instance, '__setstate__')
state = self.construct_mapping(node, deep=deep)
self.set_python_instance_state(instance, state)
def construct_python_object_apply(self, suffix, node, newobj=False):
# Format:
# !!python/object/apply # (or !!python/object/new)
# args: [ ... arguments ... ]
# kwds: { ... keywords ... }
# state: ... state ...
# listitems: [ ... listitems ... ]
# dictitems: { ... dictitems ... }
# or short format:
# !!python/object/apply [ ... arguments ... ]
# The difference between !!python/object/apply and !!python/object/new
# is how an object is created, check make_python_instance for details.
if isinstance(node, SequenceNode):
args = self.construct_sequence(node, deep=True)
kwds = {}
state = {}
listitems = []
dictitems = {}
else:
value = self.construct_mapping(node, deep=True)
args = value.get('args', [])
kwds = value.get('kwds', {})
state = value.get('state', {})
listitems = value.get('listitems', [])
dictitems = value.get('dictitems', {})
instance = self.make_python_instance(suffix, node, args, kwds, newobj)
if state:
self.set_python_instance_state(instance, state)
if listitems:
instance.extend(listitems)
if dictitems:
for key in dictitems:
instance[key] = dictitems[key]
return instance
def construct_python_object_new(self, suffix, node):
return self.construct_python_object_apply(suffix, node, newobj=True)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/none',
Constructor.construct_yaml_null)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/bool',
Constructor.construct_yaml_bool)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/str',
Constructor.construct_python_str)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/unicode',
Constructor.construct_python_unicode)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/int',
Constructor.construct_yaml_int)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/long',
Constructor.construct_python_long)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/float',
Constructor.construct_yaml_float)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/complex',
Constructor.construct_python_complex)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/list',
Constructor.construct_yaml_seq)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/tuple',
Constructor.construct_python_tuple)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/dict',
Constructor.construct_yaml_map)
Constructor.add_multi_constructor(
u'tag:yaml.org,2002:python/name:',
Constructor.construct_python_name)
Constructor.add_multi_constructor(
u'tag:yaml.org,2002:python/module:',
Constructor.construct_python_module)
Constructor.add_multi_constructor(
u'tag:yaml.org,2002:python/object:',
Constructor.construct_python_object)
Constructor.add_multi_constructor(
u'tag:yaml.org,2002:python/object/apply:',
Constructor.construct_python_object_apply)
Constructor.add_multi_constructor(
u'tag:yaml.org,2002:python/object/new:',
Constructor.construct_python_object_new)
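
The add_constructor registrations above can also be used by client code to map custom tags onto Python values. A hedged sketch with a made-up !color tag (not a tag used by Spack or PyYAML):

    import yaml

    def construct_color(loader, node):
        # The constructor receives the Loader and the node being constructed.
        value = loader.construct_scalar(node)
        r, g, b = [int(part) for part in value.split(',')]
        return (r, g, b)

    yaml.SafeLoader.add_constructor(u'!color', construct_color)

    print(yaml.safe_load("background: !color 255,200,0"))
    # -> {'background': (255, 200, 0)}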

62
lib/spack/external/yaml/dumper.py vendored Normal file

@@ -0,0 +1,62 @@
__all__ = ['BaseDumper', 'SafeDumper', 'Dumper']
from emitter import *
from serializer import *
from representer import *
from resolver import *
class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver):
def __init__(self, stream,
default_style=None, default_flow_style=None,
canonical=None, indent=None, width=None,
allow_unicode=None, line_break=None,
encoding=None, explicit_start=None, explicit_end=None,
version=None, tags=None):
Emitter.__init__(self, stream, canonical=canonical,
indent=indent, width=width,
allow_unicode=allow_unicode, line_break=line_break)
Serializer.__init__(self, encoding=encoding,
explicit_start=explicit_start, explicit_end=explicit_end,
version=version, tags=tags)
Representer.__init__(self, default_style=default_style,
default_flow_style=default_flow_style)
Resolver.__init__(self)
class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver):
def __init__(self, stream,
default_style=None, default_flow_style=None,
canonical=None, indent=None, width=None,
allow_unicode=None, line_break=None,
encoding=None, explicit_start=None, explicit_end=None,
version=None, tags=None):
Emitter.__init__(self, stream, canonical=canonical,
indent=indent, width=width,
allow_unicode=allow_unicode, line_break=line_break)
Serializer.__init__(self, encoding=encoding,
explicit_start=explicit_start, explicit_end=explicit_end,
version=version, tags=tags)
SafeRepresenter.__init__(self, default_style=default_style,
default_flow_style=default_flow_style)
Resolver.__init__(self)
class Dumper(Emitter, Serializer, Representer, Resolver):
def __init__(self, stream,
default_style=None, default_flow_style=None,
canonical=None, indent=None, width=None,
allow_unicode=None, line_break=None,
encoding=None, explicit_start=None, explicit_end=None,
version=None, tags=None):
Emitter.__init__(self, stream, canonical=canonical,
indent=indent, width=width,
allow_unicode=allow_unicode, line_break=line_break)
Serializer.__init__(self, encoding=encoding,
explicit_start=explicit_start, explicit_end=explicit_end,
version=version, tags=tags)
Representer.__init__(self, default_style=default_style,
default_flow_style=default_flow_style)
Resolver.__init__(self)

1140
lib/spack/external/yaml/emitter.py vendored Normal file

File diff suppressed because it is too large.

75
lib/spack/external/yaml/error.py vendored Normal file

@@ -0,0 +1,75 @@
__all__ = ['Mark', 'YAMLError', 'MarkedYAMLError']
class Mark(object):
def __init__(self, name, index, line, column, buffer, pointer):
self.name = name
self.index = index
self.line = line
self.column = column
self.buffer = buffer
self.pointer = pointer
def get_snippet(self, indent=4, max_length=75):
if self.buffer is None:
return None
head = ''
start = self.pointer
while start > 0 and self.buffer[start-1] not in u'\0\r\n\x85\u2028\u2029':
start -= 1
if self.pointer-start > max_length/2-1:
head = ' ... '
start += 5
break
tail = ''
end = self.pointer
while end < len(self.buffer) and self.buffer[end] not in u'\0\r\n\x85\u2028\u2029':
end += 1
if end-self.pointer > max_length/2-1:
tail = ' ... '
end -= 5
break
snippet = self.buffer[start:end].encode('utf-8')
return ' '*indent + head + snippet + tail + '\n' \
+ ' '*(indent+self.pointer-start+len(head)) + '^'
def __str__(self):
snippet = self.get_snippet()
where = " in \"%s\", line %d, column %d" \
% (self.name, self.line+1, self.column+1)
if snippet is not None:
where += ":\n"+snippet
return where
class YAMLError(Exception):
pass
class MarkedYAMLError(YAMLError):
def __init__(self, context=None, context_mark=None,
problem=None, problem_mark=None, note=None):
self.context = context
self.context_mark = context_mark
self.problem = problem
self.problem_mark = problem_mark
self.note = note
def __str__(self):
lines = []
if self.context is not None:
lines.append(self.context)
if self.context_mark is not None \
and (self.problem is None or self.problem_mark is None
or self.context_mark.name != self.problem_mark.name
or self.context_mark.line != self.problem_mark.line
or self.context_mark.column != self.problem_mark.column):
lines.append(str(self.context_mark))
if self.problem is not None:
lines.append(self.problem)
if self.problem_mark is not None:
lines.append(str(self.problem_mark))
if self.note is not None:
lines.append(self.note)
return '\n'.join(lines)
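
The Mark and MarkedYAMLError classes above are what let callers report a precise file position when a config file fails to parse. A brief sketch of the usual pattern:

    import yaml

    document = "mirrors:\n  local: [unclosed"

    try:
        yaml.safe_load(document)
    except yaml.YAMLError as err:
        # MarkedYAMLError instances carry a problem_mark with 0-based line/column.
        if hasattr(err, 'problem_mark'):
            mark = err.problem_mark
            print("YAML error at line %d, column %d" % (mark.line + 1, mark.column + 1))
        else:
            print("YAML error: %s" % err)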

86
lib/spack/external/yaml/events.py vendored Normal file

@@ -0,0 +1,86 @@
# Abstract classes.
class Event(object):
def __init__(self, start_mark=None, end_mark=None):
self.start_mark = start_mark
self.end_mark = end_mark
def __repr__(self):
attributes = [key for key in ['anchor', 'tag', 'implicit', 'value']
if hasattr(self, key)]
arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
for key in attributes])
return '%s(%s)' % (self.__class__.__name__, arguments)
class NodeEvent(Event):
def __init__(self, anchor, start_mark=None, end_mark=None):
self.anchor = anchor
self.start_mark = start_mark
self.end_mark = end_mark
class CollectionStartEvent(NodeEvent):
def __init__(self, anchor, tag, implicit, start_mark=None, end_mark=None,
flow_style=None):
self.anchor = anchor
self.tag = tag
self.implicit = implicit
self.start_mark = start_mark
self.end_mark = end_mark
self.flow_style = flow_style
class CollectionEndEvent(Event):
pass
# Implementations.
class StreamStartEvent(Event):
def __init__(self, start_mark=None, end_mark=None, encoding=None):
self.start_mark = start_mark
self.end_mark = end_mark
self.encoding = encoding
class StreamEndEvent(Event):
pass
class DocumentStartEvent(Event):
def __init__(self, start_mark=None, end_mark=None,
explicit=None, version=None, tags=None):
self.start_mark = start_mark
self.end_mark = end_mark
self.explicit = explicit
self.version = version
self.tags = tags
class DocumentEndEvent(Event):
def __init__(self, start_mark=None, end_mark=None,
explicit=None):
self.start_mark = start_mark
self.end_mark = end_mark
self.explicit = explicit
class AliasEvent(NodeEvent):
pass
class ScalarEvent(NodeEvent):
def __init__(self, anchor, tag, implicit, value,
start_mark=None, end_mark=None, style=None):
self.anchor = anchor
self.tag = tag
self.implicit = implicit
self.value = value
self.start_mark = start_mark
self.end_mark = end_mark
self.style = style
class SequenceStartEvent(CollectionStartEvent):
pass
class SequenceEndEvent(CollectionEndEvent):
pass
class MappingStartEvent(CollectionStartEvent):
pass
class MappingEndEvent(CollectionEndEvent):
pass

40
lib/spack/external/yaml/loader.py vendored Normal file

@@ -0,0 +1,40 @@
__all__ = ['BaseLoader', 'SafeLoader', 'Loader']
from reader import *
from scanner import *
from parser import *
from composer import *
from constructor import *
from resolver import *
class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):
def __init__(self, stream):
Reader.__init__(self, stream)
Scanner.__init__(self)
Parser.__init__(self)
Composer.__init__(self)
BaseConstructor.__init__(self)
BaseResolver.__init__(self)
class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, Resolver):
def __init__(self, stream):
Reader.__init__(self, stream)
Scanner.__init__(self)
Parser.__init__(self)
Composer.__init__(self)
SafeConstructor.__init__(self)
Resolver.__init__(self)
class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver):
def __init__(self, stream):
Reader.__init__(self, stream)
Scanner.__init__(self)
Parser.__init__(self)
Composer.__init__(self)
Constructor.__init__(self)
Resolver.__init__(self)

49
lib/spack/external/yaml/nodes.py vendored Normal file

@@ -0,0 +1,49 @@
class Node(object):
def __init__(self, tag, value, start_mark, end_mark):
self.tag = tag
self.value = value
self.start_mark = start_mark
self.end_mark = end_mark
def __repr__(self):
value = self.value
#if isinstance(value, list):
# if len(value) == 0:
# value = '<empty>'
# elif len(value) == 1:
# value = '<1 item>'
# else:
# value = '<%d items>' % len(value)
#else:
# if len(value) > 75:
# value = repr(value[:70]+u' ... ')
# else:
# value = repr(value)
value = repr(value)
return '%s(tag=%r, value=%s)' % (self.__class__.__name__, self.tag, value)
class ScalarNode(Node):
id = 'scalar'
def __init__(self, tag, value,
start_mark=None, end_mark=None, style=None):
self.tag = tag
self.value = value
self.start_mark = start_mark
self.end_mark = end_mark
self.style = style
class CollectionNode(Node):
def __init__(self, tag, value,
start_mark=None, end_mark=None, flow_style=None):
self.tag = tag
self.value = value
self.start_mark = start_mark
self.end_mark = end_mark
self.flow_style = flow_style
class SequenceNode(CollectionNode):
id = 'sequence'
class MappingNode(CollectionNode):
id = 'mapping'

589
lib/spack/external/yaml/parser.py vendored Normal file

@@ -0,0 +1,589 @@
# The following YAML grammar is LL(1) and is parsed by a recursive descent
# parser.
#
# stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
# implicit_document ::= block_node DOCUMENT-END*
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
# block_node_or_indentless_sequence ::=
# ALIAS
# | properties (block_content | indentless_block_sequence)?
# | block_content
# | indentless_block_sequence
# block_node ::= ALIAS
# | properties block_content?
# | block_content
# flow_node ::= ALIAS
# | properties flow_content?
# | flow_content
# properties ::= TAG ANCHOR? | ANCHOR TAG?
# block_content ::= block_collection | flow_collection | SCALAR
# flow_content ::= flow_collection | SCALAR
# block_collection ::= block_sequence | block_mapping
# flow_collection ::= flow_sequence | flow_mapping
# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
# block_mapping ::= BLOCK-MAPPING_START
# ((KEY block_node_or_indentless_sequence?)?
# (VALUE block_node_or_indentless_sequence?)?)*
# BLOCK-END
# flow_sequence ::= FLOW-SEQUENCE-START
# (flow_sequence_entry FLOW-ENTRY)*
# flow_sequence_entry?
# FLOW-SEQUENCE-END
# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
# flow_mapping ::= FLOW-MAPPING-START
# (flow_mapping_entry FLOW-ENTRY)*
# flow_mapping_entry?
# FLOW-MAPPING-END
# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
#
# FIRST sets:
#
# stream: { STREAM-START }
# explicit_document: { DIRECTIVE DOCUMENT-START }
# implicit_document: FIRST(block_node)
# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_sequence: { BLOCK-SEQUENCE-START }
# block_mapping: { BLOCK-MAPPING-START }
# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY }
# indentless_sequence: { ENTRY }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_sequence: { FLOW-SEQUENCE-START }
# flow_mapping: { FLOW-MAPPING-START }
# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
__all__ = ['Parser', 'ParserError']
from error import MarkedYAMLError
from tokens import *
from events import *
from scanner import *
class ParserError(MarkedYAMLError):
pass
class Parser(object):
# Since writing a recursive-descendant parser is a straightforward task, we
# do not give many comments here.
DEFAULT_TAGS = {
u'!': u'!',
u'!!': u'tag:yaml.org,2002:',
}
def __init__(self):
self.current_event = None
self.yaml_version = None
self.tag_handles = {}
self.states = []
self.marks = []
self.state = self.parse_stream_start
def dispose(self):
# Reset the state attributes (to clear self-references)
self.states = []
self.state = None
def check_event(self, *choices):
# Check the type of the next event.
if self.current_event is None:
if self.state:
self.current_event = self.state()
if self.current_event is not None:
if not choices:
return True
for choice in choices:
if isinstance(self.current_event, choice):
return True
return False
def peek_event(self):
# Get the next event.
if self.current_event is None:
if self.state:
self.current_event = self.state()
return self.current_event
def get_event(self):
# Get the next event and proceed further.
if self.current_event is None:
if self.state:
self.current_event = self.state()
value = self.current_event
self.current_event = None
return value
# stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
# implicit_document ::= block_node DOCUMENT-END*
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
def parse_stream_start(self):
# Parse the stream start.
token = self.get_token()
event = StreamStartEvent(token.start_mark, token.end_mark,
encoding=token.encoding)
# Prepare the next state.
self.state = self.parse_implicit_document_start
return event
def parse_implicit_document_start(self):
# Parse an implicit document.
if not self.check_token(DirectiveToken, DocumentStartToken,
StreamEndToken):
self.tag_handles = self.DEFAULT_TAGS
token = self.peek_token()
start_mark = end_mark = token.start_mark
event = DocumentStartEvent(start_mark, end_mark,
explicit=False)
# Prepare the next state.
self.states.append(self.parse_document_end)
self.state = self.parse_block_node
return event
else:
return self.parse_document_start()
def parse_document_start(self):
# Parse any extra document end indicators.
while self.check_token(DocumentEndToken):
self.get_token()
# Parse an explicit document.
if not self.check_token(StreamEndToken):
token = self.peek_token()
start_mark = token.start_mark
version, tags = self.process_directives()
if not self.check_token(DocumentStartToken):
raise ParserError(None, None,
"expected '<document start>', but found %r"
% self.peek_token().id,
self.peek_token().start_mark)
token = self.get_token()
end_mark = token.end_mark
event = DocumentStartEvent(start_mark, end_mark,
explicit=True, version=version, tags=tags)
self.states.append(self.parse_document_end)
self.state = self.parse_document_content
else:
# Parse the end of the stream.
token = self.get_token()
event = StreamEndEvent(token.start_mark, token.end_mark)
assert not self.states
assert not self.marks
self.state = None
return event
def parse_document_end(self):
# Parse the document end.
token = self.peek_token()
start_mark = end_mark = token.start_mark
explicit = False
if self.check_token(DocumentEndToken):
token = self.get_token()
end_mark = token.end_mark
explicit = True
event = DocumentEndEvent(start_mark, end_mark,
explicit=explicit)
# Prepare the next state.
self.state = self.parse_document_start
return event
def parse_document_content(self):
if self.check_token(DirectiveToken,
DocumentStartToken, DocumentEndToken, StreamEndToken):
event = self.process_empty_scalar(self.peek_token().start_mark)
self.state = self.states.pop()
return event
else:
return self.parse_block_node()
def process_directives(self):
self.yaml_version = None
self.tag_handles = {}
while self.check_token(DirectiveToken):
token = self.get_token()
if token.name == u'YAML':
if self.yaml_version is not None:
raise ParserError(None, None,
"found duplicate YAML directive", token.start_mark)
major, minor = token.value
if major != 1:
raise ParserError(None, None,
"found incompatible YAML document (version 1.* is required)",
token.start_mark)
self.yaml_version = token.value
elif token.name == u'TAG':
handle, prefix = token.value
if handle in self.tag_handles:
raise ParserError(None, None,
"duplicate tag handle %r" % handle.encode('utf-8'),
token.start_mark)
self.tag_handles[handle] = prefix
if self.tag_handles:
value = self.yaml_version, self.tag_handles.copy()
else:
value = self.yaml_version, None
for key in self.DEFAULT_TAGS:
if key not in self.tag_handles:
self.tag_handles[key] = self.DEFAULT_TAGS[key]
return value
# block_node_or_indentless_sequence ::= ALIAS
# | properties (block_content | indentless_block_sequence)?
# | block_content
# | indentless_block_sequence
# block_node ::= ALIAS
# | properties block_content?
# | block_content
# flow_node ::= ALIAS
# | properties flow_content?
# | flow_content
# properties ::= TAG ANCHOR? | ANCHOR TAG?
# block_content ::= block_collection | flow_collection | SCALAR
# flow_content ::= flow_collection | SCALAR
# block_collection ::= block_sequence | block_mapping
# flow_collection ::= flow_sequence | flow_mapping
def parse_block_node(self):
return self.parse_node(block=True)
def parse_flow_node(self):
return self.parse_node()
def parse_block_node_or_indentless_sequence(self):
return self.parse_node(block=True, indentless_sequence=True)
def parse_node(self, block=False, indentless_sequence=False):
if self.check_token(AliasToken):
token = self.get_token()
event = AliasEvent(token.value, token.start_mark, token.end_mark)
self.state = self.states.pop()
else:
anchor = None
tag = None
start_mark = end_mark = tag_mark = None
if self.check_token(AnchorToken):
token = self.get_token()
start_mark = token.start_mark
end_mark = token.end_mark
anchor = token.value
if self.check_token(TagToken):
token = self.get_token()
tag_mark = token.start_mark
end_mark = token.end_mark
tag = token.value
elif self.check_token(TagToken):
token = self.get_token()
start_mark = tag_mark = token.start_mark
end_mark = token.end_mark
tag = token.value
if self.check_token(AnchorToken):
token = self.get_token()
end_mark = token.end_mark
anchor = token.value
if tag is not None:
handle, suffix = tag
if handle is not None:
if handle not in self.tag_handles:
raise ParserError("while parsing a node", start_mark,
"found undefined tag handle %r" % handle.encode('utf-8'),
tag_mark)
tag = self.tag_handles[handle]+suffix
else:
tag = suffix
#if tag == u'!':
# raise ParserError("while parsing a node", start_mark,
# "found non-specific tag '!'", tag_mark,
# "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.")
if start_mark is None:
start_mark = end_mark = self.peek_token().start_mark
event = None
implicit = (tag is None or tag == u'!')
if indentless_sequence and self.check_token(BlockEntryToken):
end_mark = self.peek_token().end_mark
event = SequenceStartEvent(anchor, tag, implicit,
start_mark, end_mark)
self.state = self.parse_indentless_sequence_entry
else:
if self.check_token(ScalarToken):
token = self.get_token()
end_mark = token.end_mark
if (token.plain and tag is None) or tag == u'!':
implicit = (True, False)
elif tag is None:
implicit = (False, True)
else:
implicit = (False, False)
event = ScalarEvent(anchor, tag, implicit, token.value,
start_mark, end_mark, style=token.style)
self.state = self.states.pop()
elif self.check_token(FlowSequenceStartToken):
end_mark = self.peek_token().end_mark
event = SequenceStartEvent(anchor, tag, implicit,
start_mark, end_mark, flow_style=True)
self.state = self.parse_flow_sequence_first_entry
elif self.check_token(FlowMappingStartToken):
end_mark = self.peek_token().end_mark
event = MappingStartEvent(anchor, tag, implicit,
start_mark, end_mark, flow_style=True)
self.state = self.parse_flow_mapping_first_key
elif block and self.check_token(BlockSequenceStartToken):
end_mark = self.peek_token().start_mark
event = SequenceStartEvent(anchor, tag, implicit,
start_mark, end_mark, flow_style=False)
self.state = self.parse_block_sequence_first_entry
elif block and self.check_token(BlockMappingStartToken):
end_mark = self.peek_token().start_mark
event = MappingStartEvent(anchor, tag, implicit,
start_mark, end_mark, flow_style=False)
self.state = self.parse_block_mapping_first_key
elif anchor is not None or tag is not None:
# Empty scalars are allowed even if a tag or an anchor is
# specified.
event = ScalarEvent(anchor, tag, (implicit, False), u'',
start_mark, end_mark)
self.state = self.states.pop()
else:
if block:
node = 'block'
else:
node = 'flow'
token = self.peek_token()
raise ParserError("while parsing a %s node" % node, start_mark,
"expected the node content, but found %r" % token.id,
token.start_mark)
return event
# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
def parse_block_sequence_first_entry(self):
token = self.get_token()
self.marks.append(token.start_mark)
return self.parse_block_sequence_entry()
def parse_block_sequence_entry(self):
if self.check_token(BlockEntryToken):
token = self.get_token()
if not self.check_token(BlockEntryToken, BlockEndToken):
self.states.append(self.parse_block_sequence_entry)
return self.parse_block_node()
else:
self.state = self.parse_block_sequence_entry
return self.process_empty_scalar(token.end_mark)
if not self.check_token(BlockEndToken):
token = self.peek_token()
raise ParserError("while parsing a block collection", self.marks[-1],
"expected <block end>, but found %r" % token.id, token.start_mark)
token = self.get_token()
event = SequenceEndEvent(token.start_mark, token.end_mark)
self.state = self.states.pop()
self.marks.pop()
return event
# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
def parse_indentless_sequence_entry(self):
if self.check_token(BlockEntryToken):
token = self.get_token()
if not self.check_token(BlockEntryToken,
KeyToken, ValueToken, BlockEndToken):
self.states.append(self.parse_indentless_sequence_entry)
return self.parse_block_node()
else:
self.state = self.parse_indentless_sequence_entry
return self.process_empty_scalar(token.end_mark)
token = self.peek_token()
event = SequenceEndEvent(token.start_mark, token.start_mark)
self.state = self.states.pop()
return event
# block_mapping ::= BLOCK-MAPPING_START
# ((KEY block_node_or_indentless_sequence?)?
# (VALUE block_node_or_indentless_sequence?)?)*
# BLOCK-END
def parse_block_mapping_first_key(self):
token = self.get_token()
self.marks.append(token.start_mark)
return self.parse_block_mapping_key()
def parse_block_mapping_key(self):
if self.check_token(KeyToken):
token = self.get_token()
if not self.check_token(KeyToken, ValueToken, BlockEndToken):
self.states.append(self.parse_block_mapping_value)
return self.parse_block_node_or_indentless_sequence()
else:
self.state = self.parse_block_mapping_value
return self.process_empty_scalar(token.end_mark)
if not self.check_token(BlockEndToken):
token = self.peek_token()
raise ParserError("while parsing a block mapping", self.marks[-1],
"expected <block end>, but found %r" % token.id, token.start_mark)
token = self.get_token()
event = MappingEndEvent(token.start_mark, token.end_mark)
self.state = self.states.pop()
self.marks.pop()
return event
def parse_block_mapping_value(self):
if self.check_token(ValueToken):
token = self.get_token()
if not self.check_token(KeyToken, ValueToken, BlockEndToken):
self.states.append(self.parse_block_mapping_key)
return self.parse_block_node_or_indentless_sequence()
else:
self.state = self.parse_block_mapping_key
return self.process_empty_scalar(token.end_mark)
else:
self.state = self.parse_block_mapping_key
token = self.peek_token()
return self.process_empty_scalar(token.start_mark)
# flow_sequence ::= FLOW-SEQUENCE-START
# (flow_sequence_entry FLOW-ENTRY)*
# flow_sequence_entry?
# FLOW-SEQUENCE-END
# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
#
# Note that while production rules for both flow_sequence_entry and
# flow_mapping_entry are equal, their interpretations are different.
# For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
# generates an inline mapping (set syntax).
def parse_flow_sequence_first_entry(self):
token = self.get_token()
self.marks.append(token.start_mark)
return self.parse_flow_sequence_entry(first=True)
def parse_flow_sequence_entry(self, first=False):
if not self.check_token(FlowSequenceEndToken):
if not first:
if self.check_token(FlowEntryToken):
self.get_token()
else:
token = self.peek_token()
raise ParserError("while parsing a flow sequence", self.marks[-1],
"expected ',' or ']', but got %r" % token.id, token.start_mark)
if self.check_token(KeyToken):
token = self.peek_token()
event = MappingStartEvent(None, None, True,
token.start_mark, token.end_mark,
flow_style=True)
self.state = self.parse_flow_sequence_entry_mapping_key
return event
elif not self.check_token(FlowSequenceEndToken):
self.states.append(self.parse_flow_sequence_entry)
return self.parse_flow_node()
token = self.get_token()
event = SequenceEndEvent(token.start_mark, token.end_mark)
self.state = self.states.pop()
self.marks.pop()
return event
def parse_flow_sequence_entry_mapping_key(self):
token = self.get_token()
if not self.check_token(ValueToken,
FlowEntryToken, FlowSequenceEndToken):
self.states.append(self.parse_flow_sequence_entry_mapping_value)
return self.parse_flow_node()
else:
self.state = self.parse_flow_sequence_entry_mapping_value
return self.process_empty_scalar(token.end_mark)
def parse_flow_sequence_entry_mapping_value(self):
if self.check_token(ValueToken):
token = self.get_token()
if not self.check_token(FlowEntryToken, FlowSequenceEndToken):
self.states.append(self.parse_flow_sequence_entry_mapping_end)
return self.parse_flow_node()
else:
self.state = self.parse_flow_sequence_entry_mapping_end
return self.process_empty_scalar(token.end_mark)
else:
self.state = self.parse_flow_sequence_entry_mapping_end
token = self.peek_token()
return self.process_empty_scalar(token.start_mark)
def parse_flow_sequence_entry_mapping_end(self):
self.state = self.parse_flow_sequence_entry
token = self.peek_token()
return MappingEndEvent(token.start_mark, token.start_mark)
# flow_mapping ::= FLOW-MAPPING-START
# (flow_mapping_entry FLOW-ENTRY)*
# flow_mapping_entry?
# FLOW-MAPPING-END
# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
def parse_flow_mapping_first_key(self):
token = self.get_token()
self.marks.append(token.start_mark)
return self.parse_flow_mapping_key(first=True)
def parse_flow_mapping_key(self, first=False):
if not self.check_token(FlowMappingEndToken):
if not first:
if self.check_token(FlowEntryToken):
self.get_token()
else:
token = self.peek_token()
raise ParserError("while parsing a flow mapping", self.marks[-1],
"expected ',' or '}', but got %r" % token.id, token.start_mark)
if self.check_token(KeyToken):
token = self.get_token()
if not self.check_token(ValueToken,
FlowEntryToken, FlowMappingEndToken):
self.states.append(self.parse_flow_mapping_value)
return self.parse_flow_node()
else:
self.state = self.parse_flow_mapping_value
return self.process_empty_scalar(token.end_mark)
elif not self.check_token(FlowMappingEndToken):
self.states.append(self.parse_flow_mapping_empty_value)
return self.parse_flow_node()
token = self.get_token()
event = MappingEndEvent(token.start_mark, token.end_mark)
self.state = self.states.pop()
self.marks.pop()
return event
def parse_flow_mapping_value(self):
if self.check_token(ValueToken):
token = self.get_token()
if not self.check_token(FlowEntryToken, FlowMappingEndToken):
self.states.append(self.parse_flow_mapping_key)
return self.parse_flow_node()
else:
self.state = self.parse_flow_mapping_key
return self.process_empty_scalar(token.end_mark)
else:
self.state = self.parse_flow_mapping_key
token = self.peek_token()
return self.process_empty_scalar(token.start_mark)
def parse_flow_mapping_empty_value(self):
self.state = self.parse_flow_mapping_key
return self.process_empty_scalar(self.peek_token().start_mark)
def process_empty_scalar(self, mark):
return ScalarEvent(None, None, (True, False), u'', mark, mark)

189
lib/spack/external/yaml/reader.py vendored Normal file

@ -0,0 +1,189 @@
# This module contains abstractions for the input stream. You don't have to
# look further; there is no pretty code here.
#
# We define two classes here.
#
# Mark(source, line, column)
# It's just a record and its only use is producing nice error messages.
# Parser does not use it for any other purposes.
#
# Reader(source, data)
# Reader determines the encoding of `data` and converts it to unicode.
# Reader provides the following methods and attributes:
# reader.peek(length=1) - return the next `length` characters
# reader.forward(length=1) - move the current position `length` characters forward.
# reader.index - the number of the current character.
# reader.line, reader.column - the line and the column of the current character.
__all__ = ['Reader', 'ReaderError']
from error import YAMLError, Mark
import codecs, re
class ReaderError(YAMLError):
def __init__(self, name, position, character, encoding, reason):
self.name = name
self.character = character
self.position = position
self.encoding = encoding
self.reason = reason
def __str__(self):
if isinstance(self.character, str):
return "'%s' codec can't decode byte #x%02x: %s\n" \
" in \"%s\", position %d" \
% (self.encoding, ord(self.character), self.reason,
self.name, self.position)
else:
return "unacceptable character #x%04x: %s\n" \
" in \"%s\", position %d" \
% (self.character, self.reason,
self.name, self.position)
class Reader(object):
# Reader:
# - determines the data encoding and converts it to unicode,
# - checks if characters are in allowed range,
# - adds '\0' to the end.
# Reader accepts
# - a `str` object,
# - a `unicode` object,
# - a file-like object with its `read` method returning `str`,
# - a file-like object with its `read` method returning `unicode`.
# Yeah, it's ugly and slow.
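# A rough usage sketch (assuming a unicode string input; not exercised by
# this module itself):
#   r = Reader(u"key: value\n")
#   r.peek()                     # -> u'k'
#   r.forward(5)                 # skip past "key: "
#   r.peek()                     # -> u'v'
#   r.index, r.line, r.column    # -> (5, 0, 5)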
def __init__(self, stream, name=None):
self.stream = None
self.stream_pointer = 0
self.eof = True
self.buffer = u''
self.pointer = 0
self.raw_buffer = None
self.raw_decode = None
self.encoding = None
self.index = 0
self.line = 0
self.column = 0
if isinstance(stream, unicode):
self.name = "<unicode string>" if name is None else name
self.check_printable(stream)
self.buffer = stream+u'\0'
elif isinstance(stream, str):
self.name = "<string>" if name is None else name
self.raw_buffer = stream
self.determine_encoding()
else:
self.stream = stream
self.name = getattr(stream, 'name', "<file>") if name is None else name
self.eof = False
self.raw_buffer = ''
self.determine_encoding()
def peek(self, index=0):
try:
return self.buffer[self.pointer+index]
except IndexError:
self.update(index+1)
return self.buffer[self.pointer+index]
def prefix(self, length=1):
if self.pointer+length >= len(self.buffer):
self.update(length)
return self.buffer[self.pointer:self.pointer+length]
def forward(self, length=1):
if self.pointer+length+1 >= len(self.buffer):
self.update(length+1)
while length:
ch = self.buffer[self.pointer]
self.pointer += 1
self.index += 1
if ch in u'\n\x85\u2028\u2029' \
or (ch == u'\r' and self.buffer[self.pointer] != u'\n'):
self.line += 1
self.column = 0
elif ch != u'\uFEFF':
self.column += 1
length -= 1
def get_mark(self):
if self.stream is None:
return Mark(self.name, self.index, self.line, self.column,
self.buffer, self.pointer)
else:
return Mark(self.name, self.index, self.line, self.column,
None, None)
def determine_encoding(self):
while not self.eof and len(self.raw_buffer) < 2:
self.update_raw()
if not isinstance(self.raw_buffer, unicode):
if self.raw_buffer.startswith(codecs.BOM_UTF16_LE):
self.raw_decode = codecs.utf_16_le_decode
self.encoding = 'utf-16-le'
elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE):
self.raw_decode = codecs.utf_16_be_decode
self.encoding = 'utf-16-be'
else:
self.raw_decode = codecs.utf_8_decode
self.encoding = 'utf-8'
self.update(1)
NON_PRINTABLE = re.compile(u'[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD]')
def check_printable(self, data):
match = self.NON_PRINTABLE.search(data)
if match:
character = match.group()
position = self.index+(len(self.buffer)-self.pointer)+match.start()
raise ReaderError(self.name, position, ord(character),
'unicode', "special characters are not allowed")
def update(self, length):
if self.raw_buffer is None:
return
self.buffer = self.buffer[self.pointer:]
self.pointer = 0
while len(self.buffer) < length:
if not self.eof:
self.update_raw()
if self.raw_decode is not None:
try:
data, converted = self.raw_decode(self.raw_buffer,
'strict', self.eof)
except UnicodeDecodeError, exc:
character = exc.object[exc.start]
if self.stream is not None:
position = self.stream_pointer-len(self.raw_buffer)+exc.start
else:
position = exc.start
raise ReaderError(self.name, position, character,
exc.encoding, exc.reason)
else:
data = self.raw_buffer
converted = len(data)
self.check_printable(data)
self.buffer += data
self.raw_buffer = self.raw_buffer[converted:]
if self.eof:
self.buffer += u'\0'
self.raw_buffer = None
break
def update_raw(self, size=1024):
data = self.stream.read(size)
if data:
self.raw_buffer += data
self.stream_pointer += len(data)
else:
self.eof = True
#try:
# import psyco
# psyco.bind(Reader)
#except ImportError:
# pass

484
lib/spack/external/yaml/representer.py vendored Normal file

@ -0,0 +1,484 @@
__all__ = ['BaseRepresenter', 'SafeRepresenter', 'Representer',
'RepresenterError']
from error import *
from nodes import *
import datetime
import sys, copy_reg, types
class RepresenterError(YAMLError):
pass
class BaseRepresenter(object):
yaml_representers = {}
yaml_multi_representers = {}
def __init__(self, default_style=None, default_flow_style=None):
self.default_style = default_style
self.default_flow_style = default_flow_style
self.represented_objects = {}
self.object_keeper = []
self.alias_key = None
def represent(self, data):
node = self.represent_data(data)
self.serialize(node)
self.represented_objects = {}
self.object_keeper = []
self.alias_key = None
def get_classobj_bases(self, cls):
bases = [cls]
for base in cls.__bases__:
bases.extend(self.get_classobj_bases(base))
return bases
def represent_data(self, data):
if self.ignore_aliases(data):
self.alias_key = None
else:
self.alias_key = id(data)
if self.alias_key is not None:
if self.alias_key in self.represented_objects:
node = self.represented_objects[self.alias_key]
#if node is None:
# raise RepresenterError("recursive objects are not allowed: %r" % data)
return node
#self.represented_objects[alias_key] = None
self.object_keeper.append(data)
data_types = type(data).__mro__
if type(data) is types.InstanceType:
data_types = self.get_classobj_bases(data.__class__)+list(data_types)
if data_types[0] in self.yaml_representers:
node = self.yaml_representers[data_types[0]](self, data)
else:
for data_type in data_types:
if data_type in self.yaml_multi_representers:
node = self.yaml_multi_representers[data_type](self, data)
break
else:
if None in self.yaml_multi_representers:
node = self.yaml_multi_representers[None](self, data)
elif None in self.yaml_representers:
node = self.yaml_representers[None](self, data)
else:
node = ScalarNode(None, unicode(data))
#if alias_key is not None:
# self.represented_objects[alias_key] = node
return node
def add_representer(cls, data_type, representer):
if not 'yaml_representers' in cls.__dict__:
cls.yaml_representers = cls.yaml_representers.copy()
cls.yaml_representers[data_type] = representer
add_representer = classmethod(add_representer)
def add_multi_representer(cls, data_type, representer):
if not 'yaml_multi_representers' in cls.__dict__:
cls.yaml_multi_representers = cls.yaml_multi_representers.copy()
cls.yaml_multi_representers[data_type] = representer
add_multi_representer = classmethod(add_multi_representer)
def represent_scalar(self, tag, value, style=None):
if style is None:
style = self.default_style
node = ScalarNode(tag, value, style=style)
if self.alias_key is not None:
self.represented_objects[self.alias_key] = node
return node
def represent_sequence(self, tag, sequence, flow_style=None):
value = []
node = SequenceNode(tag, value, flow_style=flow_style)
if self.alias_key is not None:
self.represented_objects[self.alias_key] = node
best_style = True
for item in sequence:
node_item = self.represent_data(item)
if not (isinstance(node_item, ScalarNode) and not node_item.style):
best_style = False
value.append(node_item)
if flow_style is None:
if self.default_flow_style is not None:
node.flow_style = self.default_flow_style
else:
node.flow_style = best_style
return node
def represent_mapping(self, tag, mapping, flow_style=None):
value = []
node = MappingNode(tag, value, flow_style=flow_style)
if self.alias_key is not None:
self.represented_objects[self.alias_key] = node
best_style = True
if hasattr(mapping, 'items'):
mapping = mapping.items()
mapping.sort()
for item_key, item_value in mapping:
node_key = self.represent_data(item_key)
node_value = self.represent_data(item_value)
if not (isinstance(node_key, ScalarNode) and not node_key.style):
best_style = False
if not (isinstance(node_value, ScalarNode) and not node_value.style):
best_style = False
value.append((node_key, node_value))
if flow_style is None:
if self.default_flow_style is not None:
node.flow_style = self.default_flow_style
else:
node.flow_style = best_style
return node
def ignore_aliases(self, data):
return False
class SafeRepresenter(BaseRepresenter):
def ignore_aliases(self, data):
if data in [None, ()]:
return True
if isinstance(data, (str, unicode, bool, int, float)):
return True
def represent_none(self, data):
return self.represent_scalar(u'tag:yaml.org,2002:null',
u'null')
def represent_str(self, data):
tag = None
style = None
try:
data = unicode(data, 'ascii')
tag = u'tag:yaml.org,2002:str'
except UnicodeDecodeError:
try:
data = unicode(data, 'utf-8')
tag = u'tag:yaml.org,2002:str'
except UnicodeDecodeError:
data = data.encode('base64')
tag = u'tag:yaml.org,2002:binary'
style = '|'
return self.represent_scalar(tag, data, style=style)
def represent_unicode(self, data):
return self.represent_scalar(u'tag:yaml.org,2002:str', data)
def represent_bool(self, data):
if data:
value = u'true'
else:
value = u'false'
return self.represent_scalar(u'tag:yaml.org,2002:bool', value)
def represent_int(self, data):
return self.represent_scalar(u'tag:yaml.org,2002:int', unicode(data))
def represent_long(self, data):
return self.represent_scalar(u'tag:yaml.org,2002:int', unicode(data))
inf_value = 1e300
while repr(inf_value) != repr(inf_value*inf_value):
inf_value *= inf_value
def represent_float(self, data):
if data != data or (data == 0.0 and data == 1.0):
value = u'.nan'
elif data == self.inf_value:
value = u'.inf'
elif data == -self.inf_value:
value = u'-.inf'
else:
value = unicode(repr(data)).lower()
# Note that in some cases `repr(data)` represents a float number
# without the decimal parts. For instance:
# >>> repr(1e17)
# '1e17'
# Unfortunately, this is not a valid float representation according
# to the definition of the `!!float` tag. We fix this by adding
# '.0' before the 'e' symbol.
if u'.' not in value and u'e' in value:
value = value.replace(u'e', u'.0e', 1)
return self.represent_scalar(u'tag:yaml.org,2002:float', value)
def represent_list(self, data):
#pairs = (len(data) > 0 and isinstance(data, list))
#if pairs:
# for item in data:
# if not isinstance(item, tuple) or len(item) != 2:
# pairs = False
# break
#if not pairs:
return self.represent_sequence(u'tag:yaml.org,2002:seq', data)
#value = []
#for item_key, item_value in data:
# value.append(self.represent_mapping(u'tag:yaml.org,2002:map',
# [(item_key, item_value)]))
#return SequenceNode(u'tag:yaml.org,2002:pairs', value)
def represent_dict(self, data):
return self.represent_mapping(u'tag:yaml.org,2002:map', data)
def represent_set(self, data):
value = {}
for key in data:
value[key] = None
return self.represent_mapping(u'tag:yaml.org,2002:set', value)
def represent_date(self, data):
value = unicode(data.isoformat())
return self.represent_scalar(u'tag:yaml.org,2002:timestamp', value)
def represent_datetime(self, data):
value = unicode(data.isoformat(' '))
return self.represent_scalar(u'tag:yaml.org,2002:timestamp', value)
def represent_yaml_object(self, tag, data, cls, flow_style=None):
if hasattr(data, '__getstate__'):
state = data.__getstate__()
else:
state = data.__dict__.copy()
return self.represent_mapping(tag, state, flow_style=flow_style)
def represent_undefined(self, data):
raise RepresenterError("cannot represent an object: %s" % data)
SafeRepresenter.add_representer(type(None),
SafeRepresenter.represent_none)
SafeRepresenter.add_representer(str,
SafeRepresenter.represent_str)
SafeRepresenter.add_representer(unicode,
SafeRepresenter.represent_unicode)
SafeRepresenter.add_representer(bool,
SafeRepresenter.represent_bool)
SafeRepresenter.add_representer(int,
SafeRepresenter.represent_int)
SafeRepresenter.add_representer(long,
SafeRepresenter.represent_long)
SafeRepresenter.add_representer(float,
SafeRepresenter.represent_float)
SafeRepresenter.add_representer(list,
SafeRepresenter.represent_list)
SafeRepresenter.add_representer(tuple,
SafeRepresenter.represent_list)
SafeRepresenter.add_representer(dict,
SafeRepresenter.represent_dict)
SafeRepresenter.add_representer(set,
SafeRepresenter.represent_set)
SafeRepresenter.add_representer(datetime.date,
SafeRepresenter.represent_date)
SafeRepresenter.add_representer(datetime.datetime,
SafeRepresenter.represent_datetime)
SafeRepresenter.add_representer(None,
SafeRepresenter.represent_undefined)
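# Illustrative sketch of registering a custom representer (MyThing and
# represent_mything are hypothetical, not part of this module):
#   class MyThing(object):
#       def __init__(self, x):
#           self.x = x
#   def represent_mything(representer, data):
#       # emit the object's x attribute as a scalar with a custom tag
#       return representer.represent_scalar(u'!mything', unicode(data.x))
#   SafeRepresenter.add_representer(MyThing, represent_mything)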
class Representer(SafeRepresenter):
def represent_str(self, data):
tag = None
style = None
try:
data = unicode(data, 'ascii')
tag = u'tag:yaml.org,2002:str'
except UnicodeDecodeError:
try:
data = unicode(data, 'utf-8')
tag = u'tag:yaml.org,2002:python/str'
except UnicodeDecodeError:
data = data.encode('base64')
tag = u'tag:yaml.org,2002:binary'
style = '|'
return self.represent_scalar(tag, data, style=style)
def represent_unicode(self, data):
tag = None
try:
data.encode('ascii')
tag = u'tag:yaml.org,2002:python/unicode'
except UnicodeEncodeError:
tag = u'tag:yaml.org,2002:str'
return self.represent_scalar(tag, data)
def represent_long(self, data):
tag = u'tag:yaml.org,2002:int'
if int(data) is not data:
tag = u'tag:yaml.org,2002:python/long'
return self.represent_scalar(tag, unicode(data))
def represent_complex(self, data):
if data.imag == 0.0:
data = u'%r' % data.real
elif data.real == 0.0:
data = u'%rj' % data.imag
elif data.imag > 0:
data = u'%r+%rj' % (data.real, data.imag)
else:
data = u'%r%rj' % (data.real, data.imag)
return self.represent_scalar(u'tag:yaml.org,2002:python/complex', data)
def represent_tuple(self, data):
return self.represent_sequence(u'tag:yaml.org,2002:python/tuple', data)
def represent_name(self, data):
name = u'%s.%s' % (data.__module__, data.__name__)
return self.represent_scalar(u'tag:yaml.org,2002:python/name:'+name, u'')
def represent_module(self, data):
return self.represent_scalar(
u'tag:yaml.org,2002:python/module:'+data.__name__, u'')
def represent_instance(self, data):
# For instances of classic classes, we use __getinitargs__ and
# __getstate__ to serialize the data.
# If data.__getinitargs__ exists, the object must be reconstructed by
# calling cls(**args), where args is a tuple returned by
# __getinitargs__. Otherwise, the cls.__init__ method should never be
# called and the class instance is created by instantiating a trivial
# class and assigning to the instance's __class__ variable.
# If data.__getstate__ exists, it returns the state of the object.
# Otherwise, the state of the object is data.__dict__.
# We produce either a !!python/object or !!python/object/new node.
# If data.__getinitargs__ does not exist and state is a dictionary, we
# produce a !!python/object node. Otherwise we produce a
# !!python/object/new node.
cls = data.__class__
class_name = u'%s.%s' % (cls.__module__, cls.__name__)
args = None
state = None
if hasattr(data, '__getinitargs__'):
args = list(data.__getinitargs__())
if hasattr(data, '__getstate__'):
state = data.__getstate__()
else:
state = data.__dict__
if args is None and isinstance(state, dict):
return self.represent_mapping(
u'tag:yaml.org,2002:python/object:'+class_name, state)
if isinstance(state, dict) and not state:
return self.represent_sequence(
u'tag:yaml.org,2002:python/object/new:'+class_name, args)
value = {}
if args:
value['args'] = args
value['state'] = state
return self.represent_mapping(
u'tag:yaml.org,2002:python/object/new:'+class_name, value)
def represent_object(self, data):
# We use __reduce__ API to save the data. data.__reduce__ returns
# a tuple of length 2-5:
# (function, args, state, listitems, dictitems)
# For reconstructing, we call function(*args), then set its state,
# listitems, and dictitems if they are not None.
# A special case is when function.__name__ == '__newobj__'. In this
# case we create the object with args[0].__new__(*args).
# Another special case is when __reduce__ returns a string - we don't
# support it.
# We produce a !!python/object, !!python/object/new or
# !!python/object/apply node.
cls = type(data)
if cls in copy_reg.dispatch_table:
reduce = copy_reg.dispatch_table[cls](data)
elif hasattr(data, '__reduce_ex__'):
reduce = data.__reduce_ex__(2)
elif hasattr(data, '__reduce__'):
reduce = data.__reduce__()
else:
raise RepresenterError("cannot represent object: %r" % data)
reduce = (list(reduce)+[None]*5)[:5]
function, args, state, listitems, dictitems = reduce
args = list(args)
if state is None:
state = {}
if listitems is not None:
listitems = list(listitems)
if dictitems is not None:
dictitems = dict(dictitems)
if function.__name__ == '__newobj__':
function = args[0]
args = args[1:]
tag = u'tag:yaml.org,2002:python/object/new:'
newobj = True
else:
tag = u'tag:yaml.org,2002:python/object/apply:'
newobj = False
function_name = u'%s.%s' % (function.__module__, function.__name__)
if not args and not listitems and not dictitems \
and isinstance(state, dict) and newobj:
return self.represent_mapping(
u'tag:yaml.org,2002:python/object:'+function_name, state)
if not listitems and not dictitems \
and isinstance(state, dict) and not state:
return self.represent_sequence(tag+function_name, args)
value = {}
if args:
value['args'] = args
if state or not isinstance(state, dict):
value['state'] = state
if listitems:
value['listitems'] = listitems
if dictitems:
value['dictitems'] = dictitems
return self.represent_mapping(tag+function_name, value)
Representer.add_representer(str,
Representer.represent_str)
Representer.add_representer(unicode,
Representer.represent_unicode)
Representer.add_representer(long,
Representer.represent_long)
Representer.add_representer(complex,
Representer.represent_complex)
Representer.add_representer(tuple,
Representer.represent_tuple)
Representer.add_representer(type,
Representer.represent_name)
Representer.add_representer(types.ClassType,
Representer.represent_name)
Representer.add_representer(types.FunctionType,
Representer.represent_name)
Representer.add_representer(types.BuiltinFunctionType,
Representer.represent_name)
Representer.add_representer(types.ModuleType,
Representer.represent_module)
Representer.add_multi_representer(types.InstanceType,
Representer.represent_instance)
Representer.add_multi_representer(object,
Representer.represent_object)

224
lib/spack/external/yaml/resolver.py vendored Normal file

@ -0,0 +1,224 @@
__all__ = ['BaseResolver', 'Resolver']
from error import *
from nodes import *
import re
class ResolverError(YAMLError):
pass
class BaseResolver(object):
DEFAULT_SCALAR_TAG = u'tag:yaml.org,2002:str'
DEFAULT_SEQUENCE_TAG = u'tag:yaml.org,2002:seq'
DEFAULT_MAPPING_TAG = u'tag:yaml.org,2002:map'
yaml_implicit_resolvers = {}
yaml_path_resolvers = {}
def __init__(self):
self.resolver_exact_paths = []
self.resolver_prefix_paths = []
def add_implicit_resolver(cls, tag, regexp, first):
if not 'yaml_implicit_resolvers' in cls.__dict__:
cls.yaml_implicit_resolvers = cls.yaml_implicit_resolvers.copy()
if first is None:
first = [None]
for ch in first:
cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp))
add_implicit_resolver = classmethod(add_implicit_resolver)
def add_path_resolver(cls, tag, path, kind=None):
# Note: `add_path_resolver` is experimental. The API could be changed.
# `path` is a pattern that is matched against the path from the
# root to the node that is being considered. `path` elements are
# tuples `(node_check, index_check)`. `node_check` is a node class:
# `ScalarNode`, `SequenceNode`, `MappingNode` or `None`. `None`
# matches any kind of a node. `index_check` could be `None`, a boolean
# value, a string value, or a number. `None` and `False` match against
# any _value_ of sequence and mapping nodes. `True` matches against
# any _key_ of a mapping node. A string `index_check` matches against
# a mapping value that corresponds to a scalar key whose content is
# equal to the `index_check` value. An integer `index_check` matches
# against a sequence value with the index equal to `index_check`.
if not 'yaml_path_resolvers' in cls.__dict__:
cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy()
new_path = []
for element in path:
if isinstance(element, (list, tuple)):
if len(element) == 2:
node_check, index_check = element
elif len(element) == 1:
node_check = element[0]
index_check = True
else:
raise ResolverError("Invalid path element: %s" % element)
else:
node_check = None
index_check = element
if node_check is str:
node_check = ScalarNode
elif node_check is list:
node_check = SequenceNode
elif node_check is dict:
node_check = MappingNode
elif node_check not in [ScalarNode, SequenceNode, MappingNode] \
and not isinstance(node_check, basestring) \
and node_check is not None:
raise ResolverError("Invalid node checker: %s" % node_check)
if not isinstance(index_check, (basestring, int)) \
and index_check is not None:
raise ResolverError("Invalid index checker: %s" % index_check)
new_path.append((node_check, index_check))
if kind is str:
kind = ScalarNode
elif kind is list:
kind = SequenceNode
elif kind is dict:
kind = MappingNode
elif kind not in [ScalarNode, SequenceNode, MappingNode] \
and kind is not None:
raise ResolverError("Invalid node kind: %s" % kind)
cls.yaml_path_resolvers[tuple(new_path), kind] = tag
add_path_resolver = classmethod(add_path_resolver)
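# Illustrative sketch of the path-resolver API documented above (the
# '!employee' tag and the 'employees' key are hypothetical):
#   Resolver.add_path_resolver(u'!employee', [u'employees', None], dict)
#   # With this, any mapping that appears as an entry under a top-level
#   # 'employees' key would implicitly receive the tag '!employee'.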
def descend_resolver(self, current_node, current_index):
if not self.yaml_path_resolvers:
return
exact_paths = {}
prefix_paths = []
if current_node:
depth = len(self.resolver_prefix_paths)
for path, kind in self.resolver_prefix_paths[-1]:
if self.check_resolver_prefix(depth, path, kind,
current_node, current_index):
if len(path) > depth:
prefix_paths.append((path, kind))
else:
exact_paths[kind] = self.yaml_path_resolvers[path, kind]
else:
for path, kind in self.yaml_path_resolvers:
if not path:
exact_paths[kind] = self.yaml_path_resolvers[path, kind]
else:
prefix_paths.append((path, kind))
self.resolver_exact_paths.append(exact_paths)
self.resolver_prefix_paths.append(prefix_paths)
def ascend_resolver(self):
if not self.yaml_path_resolvers:
return
self.resolver_exact_paths.pop()
self.resolver_prefix_paths.pop()
def check_resolver_prefix(self, depth, path, kind,
current_node, current_index):
node_check, index_check = path[depth-1]
if isinstance(node_check, basestring):
if current_node.tag != node_check:
return
elif node_check is not None:
if not isinstance(current_node, node_check):
return
if index_check is True and current_index is not None:
return
if (index_check is False or index_check is None) \
and current_index is None:
return
if isinstance(index_check, basestring):
if not (isinstance(current_index, ScalarNode)
and index_check == current_index.value):
return
elif isinstance(index_check, int) and not isinstance(index_check, bool):
if index_check != current_index:
return
return True
def resolve(self, kind, value, implicit):
if kind is ScalarNode and implicit[0]:
if value == u'':
resolvers = self.yaml_implicit_resolvers.get(u'', [])
else:
resolvers = self.yaml_implicit_resolvers.get(value[0], [])
resolvers += self.yaml_implicit_resolvers.get(None, [])
for tag, regexp in resolvers:
if regexp.match(value):
return tag
implicit = implicit[1]
if self.yaml_path_resolvers:
exact_paths = self.resolver_exact_paths[-1]
if kind in exact_paths:
return exact_paths[kind]
if None in exact_paths:
return exact_paths[None]
if kind is ScalarNode:
return self.DEFAULT_SCALAR_TAG
elif kind is SequenceNode:
return self.DEFAULT_SEQUENCE_TAG
elif kind is MappingNode:
return self.DEFAULT_MAPPING_TAG
class Resolver(BaseResolver):
pass
Resolver.add_implicit_resolver(
u'tag:yaml.org,2002:bool',
re.compile(ur'''^(?:yes|Yes|YES|no|No|NO
|true|True|TRUE|false|False|FALSE
|on|On|ON|off|Off|OFF)$''', re.X),
list(u'yYnNtTfFoO'))
Resolver.add_implicit_resolver(
u'tag:yaml.org,2002:float',
re.compile(ur'''^(?:[-+]?(?:[0-9][0-9_]*)\.[0-9_]*(?:[eE][-+][0-9]+)?
|\.[0-9_]+(?:[eE][-+][0-9]+)?
|[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*
|[-+]?\.(?:inf|Inf|INF)
|\.(?:nan|NaN|NAN))$''', re.X),
list(u'-+0123456789.'))
Resolver.add_implicit_resolver(
u'tag:yaml.org,2002:int',
re.compile(ur'''^(?:[-+]?0b[0-1_]+
|[-+]?0[0-7_]+
|[-+]?(?:0|[1-9][0-9_]*)
|[-+]?0x[0-9a-fA-F_]+
|[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X),
list(u'-+0123456789'))
Resolver.add_implicit_resolver(
u'tag:yaml.org,2002:merge',
re.compile(ur'^(?:<<)$'),
[u'<'])
Resolver.add_implicit_resolver(
u'tag:yaml.org,2002:null',
re.compile(ur'''^(?: ~
|null|Null|NULL
| )$''', re.X),
[u'~', u'n', u'N', u''])
Resolver.add_implicit_resolver(
u'tag:yaml.org,2002:timestamp',
re.compile(ur'''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]
|[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]?
(?:[Tt]|[ \t]+)[0-9][0-9]?
:[0-9][0-9] :[0-9][0-9] (?:\.[0-9]*)?
(?:[ \t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X),
list(u'0123456789'))
Resolver.add_implicit_resolver(
u'tag:yaml.org,2002:value',
re.compile(ur'^(?:=)$'),
[u'='])
# The following resolver is only for documentation purposes. It cannot work
# because plain scalars cannot start with '!', '&', or '*'.
Resolver.add_implicit_resolver(
u'tag:yaml.org,2002:yaml',
re.compile(ur'^(?:!|&|\*)$'),
list(u'!&*'))
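# Illustrative sketch of how the implicit resolvers above behave for plain,
# untagged scalars (assumed inputs, not exercised by this module):
#   r = Resolver()
#   r.resolve(ScalarNode, u'yes',   (True, False))   # -> u'tag:yaml.org,2002:bool'
#   r.resolve(ScalarNode, u'1.5',   (True, False))   # -> u'tag:yaml.org,2002:float'
#   r.resolve(ScalarNode, u'hello', (True, False))   # -> u'tag:yaml.org,2002:str'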

1457
lib/spack/external/yaml/scanner.py vendored Normal file

File diff suppressed because it is too large.

111
lib/spack/external/yaml/serializer.py vendored Normal file

@ -0,0 +1,111 @@
__all__ = ['Serializer', 'SerializerError']
from error import YAMLError
from events import *
from nodes import *
class SerializerError(YAMLError):
pass
class Serializer(object):
ANCHOR_TEMPLATE = u'id%03d'
def __init__(self, encoding=None,
explicit_start=None, explicit_end=None, version=None, tags=None):
self.use_encoding = encoding
self.use_explicit_start = explicit_start
self.use_explicit_end = explicit_end
self.use_version = version
self.use_tags = tags
self.serialized_nodes = {}
self.anchors = {}
self.last_anchor_id = 0
self.closed = None
def open(self):
if self.closed is None:
self.emit(StreamStartEvent(encoding=self.use_encoding))
self.closed = False
elif self.closed:
raise SerializerError("serializer is closed")
else:
raise SerializerError("serializer is already opened")
def close(self):
if self.closed is None:
raise SerializerError("serializer is not opened")
elif not self.closed:
self.emit(StreamEndEvent())
self.closed = True
#def __del__(self):
# self.close()
def serialize(self, node):
if self.closed is None:
raise SerializerError("serializer is not opened")
elif self.closed:
raise SerializerError("serializer is closed")
self.emit(DocumentStartEvent(explicit=self.use_explicit_start,
version=self.use_version, tags=self.use_tags))
self.anchor_node(node)
self.serialize_node(node, None, None)
self.emit(DocumentEndEvent(explicit=self.use_explicit_end))
self.serialized_nodes = {}
self.anchors = {}
self.last_anchor_id = 0
def anchor_node(self, node):
if node in self.anchors:
if self.anchors[node] is None:
self.anchors[node] = self.generate_anchor(node)
else:
self.anchors[node] = None
if isinstance(node, SequenceNode):
for item in node.value:
self.anchor_node(item)
elif isinstance(node, MappingNode):
for key, value in node.value:
self.anchor_node(key)
self.anchor_node(value)
def generate_anchor(self, node):
self.last_anchor_id += 1
return self.ANCHOR_TEMPLATE % self.last_anchor_id
def serialize_node(self, node, parent, index):
alias = self.anchors[node]
if node in self.serialized_nodes:
self.emit(AliasEvent(alias))
else:
self.serialized_nodes[node] = True
self.descend_resolver(parent, index)
if isinstance(node, ScalarNode):
detected_tag = self.resolve(ScalarNode, node.value, (True, False))
default_tag = self.resolve(ScalarNode, node.value, (False, True))
implicit = (node.tag == detected_tag), (node.tag == default_tag)
self.emit(ScalarEvent(alias, node.tag, implicit, node.value,
style=node.style))
elif isinstance(node, SequenceNode):
implicit = (node.tag
== self.resolve(SequenceNode, node.value, True))
self.emit(SequenceStartEvent(alias, node.tag, implicit,
flow_style=node.flow_style))
index = 0
for item in node.value:
self.serialize_node(item, node, index)
index += 1
self.emit(SequenceEndEvent())
elif isinstance(node, MappingNode):
implicit = (node.tag
== self.resolve(MappingNode, node.value, True))
self.emit(MappingStartEvent(alias, node.tag, implicit,
flow_style=node.flow_style))
for key, value in node.value:
self.serialize_node(key, node, None)
self.serialize_node(value, node, key)
self.emit(MappingEndEvent())
self.ascend_resolver()

104
lib/spack/external/yaml/tokens.py vendored Normal file

@ -0,0 +1,104 @@
class Token(object):
def __init__(self, start_mark, end_mark):
self.start_mark = start_mark
self.end_mark = end_mark
def __repr__(self):
attributes = [key for key in self.__dict__
if not key.endswith('_mark')]
attributes.sort()
arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
for key in attributes])
return '%s(%s)' % (self.__class__.__name__, arguments)
#class BOMToken(Token):
# id = '<byte order mark>'
class DirectiveToken(Token):
id = '<directive>'
def __init__(self, name, value, start_mark, end_mark):
self.name = name
self.value = value
self.start_mark = start_mark
self.end_mark = end_mark
class DocumentStartToken(Token):
id = '<document start>'
class DocumentEndToken(Token):
id = '<document end>'
class StreamStartToken(Token):
id = '<stream start>'
def __init__(self, start_mark=None, end_mark=None,
encoding=None):
self.start_mark = start_mark
self.end_mark = end_mark
self.encoding = encoding
class StreamEndToken(Token):
id = '<stream end>'
class BlockSequenceStartToken(Token):
id = '<block sequence start>'
class BlockMappingStartToken(Token):
id = '<block mapping start>'
class BlockEndToken(Token):
id = '<block end>'
class FlowSequenceStartToken(Token):
id = '['
class FlowMappingStartToken(Token):
id = '{'
class FlowSequenceEndToken(Token):
id = ']'
class FlowMappingEndToken(Token):
id = '}'
class KeyToken(Token):
id = '?'
class ValueToken(Token):
id = ':'
class BlockEntryToken(Token):
id = '-'
class FlowEntryToken(Token):
id = ','
class AliasToken(Token):
id = '<alias>'
def __init__(self, value, start_mark, end_mark):
self.value = value
self.start_mark = start_mark
self.end_mark = end_mark
class AnchorToken(Token):
id = '<anchor>'
def __init__(self, value, start_mark, end_mark):
self.value = value
self.start_mark = start_mark
self.end_mark = end_mark
class TagToken(Token):
id = '<tag>'
def __init__(self, value, start_mark, end_mark):
self.value = value
self.start_mark = start_mark
self.end_mark = end_mark
class ScalarToken(Token):
id = '<scalar>'
def __init__(self, value, plain, start_mark, end_mark, style=None):
self.value = value
self.plain = plain
self.start_mark = start_mark
self.end_mark = end_mark
self.style = style


@ -126,22 +126,20 @@ def caller_locals():
del stack del stack
def get_calling_package_name(): def get_calling_module_name():
"""Make sure that the caller is a class definition, and return the """Make sure that the caller is a class definition, and return the
module's name. enclosing module's name.
""" """
stack = inspect.stack() stack = inspect.stack()
try: try:
# get calling function name (the relation)
relation = stack[1][3]
# Make sure locals contain __module__ # Make sure locals contain __module__
caller_locals = stack[2][0].f_locals caller_locals = stack[2][0].f_locals
finally: finally:
del stack del stack
if not '__module__' in caller_locals: if not '__module__' in caller_locals:
raise ScopeError(relation) raise RuntimeError("Must invoke get_calling_module_name() "
"from inside a class definition!")
module_name = caller_locals['__module__'] module_name = caller_locals['__module__']
base_name = module_name.split('.')[-1] base_name = module_name.split('.')[-1]
@ -322,6 +320,24 @@ def match(string):
return match return match
def DictWrapper(dictionary):
"""Returns a class that wraps a dictionary and enables it to be used
like an object."""
class wrapper(object):
def __getattr__(self, name): return dictionary[name]
def __setattr__(self, name, value): dictionary[name] = value
def setdefault(self, *args): return dictionary.setdefault(*args)
def get(self, *args): return dictionary.get(*args)
def keys(self): return dictionary.keys()
def values(self): return dictionary.values()
def items(self): return dictionary.items()
def __iter__(self): return iter(dictionary)
return wrapper()
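# A minimal usage sketch of DictWrapper (the dictionary below is
# hypothetical, not taken from this change):
#   d = {'color': 'red'}
#   w = DictWrapper(d)
#   w.color            # -> 'red'
#   w.size = 3         # also sets d['size'] = 3
#   sorted(w.keys())   # -> ['color', 'size']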
class RequiredAttributeError(ValueError): class RequiredAttributeError(ValueError):
def __init__(self, message): def __init__(self, message):
super(RequiredAttributeError, self).__init__(message) super(RequiredAttributeError, self).__init__(message)


@ -42,7 +42,8 @@
hooks_path = join_path(module_path, "hooks") hooks_path = join_path(module_path, "hooks")
var_path = join_path(prefix, "var", "spack") var_path = join_path(prefix, "var", "spack")
stage_path = join_path(var_path, "stage") stage_path = join_path(var_path, "stage")
install_path = join_path(prefix, "opt") opt_path = join_path(prefix, "opt")
install_path = join_path(opt_path, "spack")
share_path = join_path(prefix, "share", "spack") share_path = join_path(prefix, "share", "spack")
# #
@ -65,8 +66,8 @@
# This controls how spack lays out install prefixes and # This controls how spack lays out install prefixes and
# stage directories. # stage directories.
# #
from spack.directory_layout import SpecHashDirectoryLayout from spack.directory_layout import YamlDirectoryLayout
install_layout = SpecHashDirectoryLayout(install_path) install_layout = YamlDirectoryLayout(install_path)
# #
# This controls how things are concretized in spack. # This controls how things are concretized in spack.
@ -146,9 +147,9 @@
from llnl.util.filesystem import * from llnl.util.filesystem import *
__all__ += llnl.util.filesystem.__all__ __all__ += llnl.util.filesystem.__all__
import spack.relations import spack.directives
from spack.relations import * from spack.directives import *
__all__ += spack.relations.__all__ __all__ += spack.directives.__all__
import spack.util.executable import spack.util.executable
from spack.util.executable import * from spack.util.executable import *


@ -38,17 +38,11 @@ def setup_parser(subparser):
def activate(parser, args): def activate(parser, args):
# TODO: shouldn't have to concretize here. Fix DAG issues. specs = spack.cmd.parse_specs(args.spec)
specs = spack.cmd.parse_specs(args.spec, concretize=True)
if len(specs) != 1: if len(specs) != 1:
tty.die("activate requires one spec. %d given." % len(specs)) tty.die("activate requires one spec. %d given." % len(specs))
# TODO: remove this hack when DAG info is stored in dir layout.
# This ensures the ext spec is always normalized properly.
spack.db.get(specs[0])
spec = spack.cmd.disambiguate_spec(specs[0]) spec = spack.cmd.disambiguate_spec(specs[0])
if not spec.package.is_extension: if not spec.package.is_extension:
tty.die("%s is not an extension." % spec.name) tty.die("%s is not an extension." % spec.name)


@ -68,7 +68,7 @@ def compiler_add(args):
spack.compilers.add_compilers_to_config('user', *compilers) spack.compilers.add_compilers_to_config('user', *compilers)
n = len(compilers) n = len(compilers)
tty.msg("Added %d new compiler%s to %s" % ( tty.msg("Added %d new compiler%s to %s" % (
n, 's' if n > 1 else '', spack.config.get_filename('user'))) n, 's' if n > 1 else '', spack.config.get_config_scope_filename('user', 'compilers')))
colify(reversed(sorted(c.spec for c in compilers)), indent=4) colify(reversed(sorted(c.spec for c in compilers)), indent=4)
else: else:
tty.msg("Found no new compilers") tty.msg("Found no new compilers")


@ -43,42 +43,27 @@ def setup_parser(subparser):
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='config_command') sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='config_command')
set_parser = sp.add_parser('set', help='Set configuration values.') get_parser = sp.add_parser('get', help='Print configuration values.')
set_parser.add_argument('key', help="Key to set value for.") get_parser.add_argument('category', help="Configuration category to print.")
set_parser.add_argument('value', nargs='?', default=None,
help="Value to associate with key")
get_parser = sp.add_parser('get', help='Get configuration values.')
get_parser.add_argument('key', help="Key to get value for.")
edit_parser = sp.add_parser('edit', help='Edit configuration file.') edit_parser = sp.add_parser('edit', help='Edit configuration file.')
edit_parser.add_argument('category', help="Configuration category to edit")
def config_set(args):
# default scope for writing is 'user'
if not args.scope:
args.scope = 'user'
config = spack.config.get_config(args.scope)
config.set_value(args.key, args.value)
config.write()
def config_get(args): def config_get(args):
config = spack.config.get_config(args.scope) spack.config.print_category(args.category)
print config.get_value(args.key)
def config_edit(args): def config_edit(args):
if not args.scope: if not args.scope:
args.scope = 'user' args.scope = 'user'
config_file = spack.config.get_filename(args.scope) if not args.category:
args.category = None
config_file = spack.config.get_config_scope_filename(args.scope, args.category)
spack.editor(config_file) spack.editor(config_file)
def config(parser, args): def config(parser, args):
action = { 'set' : config_set, action = { 'get' : config_get,
'get' : config_get,
'edit' : config_edit } 'edit' : config_edit }
action[args.config_command](args) action[args.config_command](args)


@ -44,15 +44,10 @@ def setup_parser(subparser):
def deactivate(parser, args): def deactivate(parser, args):
# TODO: shouldn't have to concretize here. Fix DAG issues. specs = spack.cmd.parse_specs(args.spec)
specs = spack.cmd.parse_specs(args.spec, concretize=True)
if len(specs) != 1: if len(specs) != 1:
tty.die("deactivate requires one spec. %d given." % len(specs)) tty.die("deactivate requires one spec. %d given." % len(specs))
# TODO: remove this hack when DAG info is stored properly.
# This ensures the ext spec is always normalized properly.
spack.db.get(specs[0])
spec = spack.cmd.disambiguate_spec(specs[0]) spec = spack.cmd.disambiguate_spec(specs[0])
pkg = spec.package pkg = spec.package
@ -67,9 +62,6 @@ def deactivate(parser, args):
ext_pkg.do_deactivate(force=True) ext_pkg.do_deactivate(force=True)
elif pkg.is_extension: elif pkg.is_extension:
# TODO: store DAG info properly (see above)
spec.normalize()
if not args.force and not spec.package.activated: if not args.force and not spec.package.activated:
tty.die("%s is not activated." % pkg.spec.short_spec) tty.die("%s is not activated." % pkg.spec.short_spec)
@ -81,10 +73,6 @@ def deactivate(parser, args):
for name in topo_order: for name in topo_order:
espec = index[name] espec = index[name]
epkg = espec.package epkg = espec.package
# TODO: store DAG info properly (see above)
epkg.spec.normalize()
if epkg.extends(pkg.extendee_spec): if epkg.extends(pkg.extendee_spec):
if epkg.activated or args.force: if epkg.activated or args.force:


@ -40,9 +40,6 @@
def setup_parser(subparser): def setup_parser(subparser):
format_group = subparser.add_mutually_exclusive_group() format_group = subparser.add_mutually_exclusive_group()
format_group.add_argument(
'-l', '--long', action='store_const', dest='mode', const='long',
help='Show dependency hashes as well as versions.')
format_group.add_argument( format_group.add_argument(
'-p', '--paths', action='store_const', dest='mode', const='paths', '-p', '--paths', action='store_const', dest='mode', const='paths',
help='Show paths to package install directories') help='Show paths to package install directories')
@ -50,13 +47,22 @@ def setup_parser(subparser):
'-d', '--deps', action='store_const', dest='mode', const='deps', '-d', '--deps', action='store_const', dest='mode', const='deps',
help='Show full dependency DAG of installed packages') help='Show full dependency DAG of installed packages')
subparser.add_argument(
'-l', '--long', action='store_true', dest='long',
help='Show dependency hashes as well as versions.')
subparser.add_argument( subparser.add_argument(
'query_specs', nargs=argparse.REMAINDER, 'query_specs', nargs=argparse.REMAINDER,
help='optional specs to filter results') help='optional specs to filter results')
def gray_hash(spec):
return colorize('@K{[%s]}' % spec.dag_hash(7))
def display_specs(specs, **kwargs): def display_specs(specs, **kwargs):
mode = kwargs.get('mode', 'short') mode = kwargs.get('mode', 'short')
hashes = kwargs.get('long', False)
# Make a dict with specs keyed by architecture and compiler. # Make a dict with specs keyed by architecture and compiler.
index = index_by(specs, ('architecture', 'compiler')) index = index_by(specs, ('architecture', 'compiler'))
@ -85,13 +91,20 @@ def display_specs(specs, **kwargs):
elif mode == 'deps': elif mode == 'deps':
for spec in specs: for spec in specs:
print spec.tree(indent=4, format='$_$@$+$#', color=True), print spec.tree(
format='$_$@$+',
color=True,
indent=4,
prefix=(lambda s: gray_hash(s)) if hashes else None)
elif mode in ('short', 'long'): elif mode == 'short':
fmt = '$-_$@$+' def fmt(s):
if mode == 'long': string = ""
fmt += '$#' if hashes:
colify(s.format(fmt, color=True) for s in specs) string += gray_hash(s) + ' '
string += s.format('$-_$@$+', color=True)
return string
colify(fmt(s) for s in specs)
else: else:
raise ValueError( raise ValueError(
@ -125,5 +138,4 @@ def find(parser, args):
if sys.stdout.isatty(): if sys.stdout.isatty():
tty.msg("%d installed packages." % len(specs)) tty.msg("%d installed packages." % len(specs))
display_specs(specs, mode=args.mode) display_specs(specs, mode=args.mode, long=args.long)


@ -22,12 +22,22 @@
# along with this program; if not, write to the Free Software Foundation, # along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################## ##############################################################################
import textwrap
from llnl.util.tty.colify import * from llnl.util.tty.colify import *
import spack import spack
import spack.fetch_strategy as fs import spack.fetch_strategy as fs
description = "Get detailed information on a particular package" description = "Get detailed information on a particular package"
def padder(str_list, extra=0):
"""Return a function to pad elements of a list."""
length = max(len(str(s)) for s in str_list) + extra
def pad(string):
string = str(string)
padding = max(0, length - len(string))
return string + (padding * ' ')
return pad
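# A small sketch of how padder() behaves (the version strings are
# illustrative):
#   pad = padder(['2.7.8', '2.6'], 4)
#   pad('2.6')   # -> '2.6      '  (padded to len('2.7.8') + 4 == 9 characters)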
def setup_parser(subparser): def setup_parser(subparser):
subparser.add_argument('name', metavar="PACKAGE", help="Name of package to get info for.") subparser.add_argument('name', metavar="PACKAGE", help="Name of package to get info for.")
@ -42,13 +52,24 @@ def print_text_info(pkg):
print "Safe versions: " print "Safe versions: "
if not pkg.versions: if not pkg.versions:
print("None.") print("None")
else: else:
maxlen = max(len(str(v)) for v in pkg.versions) pad = padder(pkg.versions, 4)
fmt = "%%-%ss" % maxlen
for v in reversed(sorted(pkg.versions)): for v in reversed(sorted(pkg.versions)):
f = fs.for_package_version(pkg, v) f = fs.for_package_version(pkg, v)
print " " + (fmt % v) + " " + str(f) print " %s%s" % (pad(v), str(f))
print
print "Variants:"
if not pkg.variants:
print "None"
else:
pad = padder(pkg.variants, 4)
for name in sorted(pkg.variants):
v = pkg.variants[name]
print " %s%s" % (
pad(('+' if v.default else '-') + name + ':'),
"\n".join(textwrap.wrap(v.description)))
print print
print "Dependencies:" print "Dependencies:"


@ -75,27 +75,22 @@ def mirror_add(args):
if url.startswith('/'): if url.startswith('/'):
url = 'file://' + url url = 'file://' + url
config = spack.config.get_config('user') mirror_dict = { args.name : url }
config.set_value('mirror', args.name, 'url', url) spack.config.add_to_mirror_config({ args.name : url })
config.write()
def mirror_remove(args): def mirror_remove(args):
"""Remove a mirror by name.""" """Remove a mirror by name."""
config = spack.config.get_config('user')
name = args.name name = args.name
if not config.has_named_section('mirror', name): rmd_something = spack.config.remove_from_config('mirrors', name)
if not rmd_something:
tty.die("No such mirror: %s" % name) tty.die("No such mirror: %s" % name)
config.remove_named_section('mirror', name)
config.write()
def mirror_list(args): def mirror_list(args):
"""Print out available mirrors to the console.""" """Print out available mirrors to the console."""
config = spack.config.get_config() sec_names = spack.config.get_mirror_config()
sec_names = config.get_section_names('mirror')
if not sec_names: if not sec_names:
tty.msg("No mirrors configured.") tty.msg("No mirrors configured.")
return return
@ -103,8 +98,7 @@ def mirror_list(args):
max_len = max(len(s) for s in sec_names) max_len = max(len(s) for s in sec_names)
fmt = "%%-%ds%%s" % (max_len + 4) fmt = "%%-%ds%%s" % (max_len + 4)
for name in sec_names: for name, val in sec_names.iteritems():
val = config.get_value('mirror', name, 'url')
print fmt % (name, val) print fmt % (name, val)


@ -60,24 +60,25 @@ def _get_config():
first.""" first."""
# If any configuration file has compilers, just stick with the # If any configuration file has compilers, just stick with the
# ones already configured. # ones already configured.
config = spack.config.get_config() config = spack.config.get_compilers_config()
existing = [spack.spec.CompilerSpec(s) existing = [spack.spec.CompilerSpec(s)
for s in config.get_section_names('compiler')] for s in config]
if existing: if existing:
return config return config
compilers = find_compilers(*get_path('PATH')) compilers = find_compilers(*get_path('PATH'))
new_compilers = [ add_compilers_to_config('user', *compilers)
c for c in compilers if c.spec not in existing]
add_compilers_to_config('user', *new_compilers)
# After writing compilers to the user config, return a full config # After writing compilers to the user config, return a full config
# from all files. # from all files.
return spack.config.get_config(refresh=True) return spack.config.get_compilers_config()
@memoized _cached_default_compiler = None
def default_compiler(): def default_compiler():
global _cached_default_compiler
if _cached_default_compiler:
return _cached_default_compiler
versions = [] versions = []
for name in _default_order: # TODO: customize order. for name in _default_order: # TODO: customize order.
versions = find(name) versions = find(name)
@ -86,7 +87,8 @@ def default_compiler():
if not versions: if not versions:
raise NoCompilersError() raise NoCompilersError()
return sorted(versions)[-1] _cached_default_compiler = sorted(versions)[-1]
return _cached_default_compiler
def find_compilers(*path): def find_compilers(*path):
@ -122,20 +124,18 @@ def find_compilers(*path):
def add_compilers_to_config(scope, *compilers): def add_compilers_to_config(scope, *compilers):
config = spack.config.get_config(scope) compiler_config_tree = {}
for compiler in compilers: for compiler in compilers:
add_compiler(config, compiler) compiler_entry = {}
config.write() for c in _required_instance_vars:
val = getattr(compiler, c)
if not val:
val = "None"
compiler_entry[c] = val
compiler_config_tree[str(compiler.spec)] = compiler_entry
spack.config.add_to_compiler_config(compiler_config_tree, scope)
def add_compiler(config, compiler):
def setup_field(cspec, name, exe):
path = exe if exe else "None"
config.set_value('compiler', cspec, name, path)
for c in _required_instance_vars:
setup_field(compiler.spec, c, getattr(compiler, c))
def supported_compilers(): def supported_compilers():
"""Return a set of names of compilers supported by Spack. """Return a set of names of compilers supported by Spack.
@ -157,8 +157,7 @@ def all_compilers():
available to build with. These are instances of CompilerSpec. available to build with. These are instances of CompilerSpec.
""" """
configuration = _get_config() configuration = _get_config()
return [spack.spec.CompilerSpec(s) return [spack.spec.CompilerSpec(s) for s in configuration]
for s in configuration.get_section_names('compiler')]
@_auto_compiler_spec @_auto_compiler_spec
@ -176,7 +175,7 @@ def compilers_for_spec(compiler_spec):
config = _get_config() config = _get_config()
def get_compiler(cspec): def get_compiler(cspec):
items = dict((k,v) for k,v in config.items('compiler "%s"' % cspec)) items = config[str(cspec)]
if not all(n in items for n in _required_instance_vars): if not all(n in items for n in _required_instance_vars):
raise InvalidCompilerConfigurationError(cspec) raise InvalidCompilerConfigurationError(cspec)

@ -101,6 +101,16 @@ def concretize_architecture(self, spec):
spec.architecture = spack.architecture.sys_type() spec.architecture = spack.architecture.sys_type()
def concretize_variants(self, spec):
"""If the spec already has variants filled in, return. Otherwise, add
the default variants from the package specification.
"""
for name, variant in spec.package.variants.items():
if name not in spec.variants:
spec.variants[name] = spack.spec.VariantSpec(
name, variant.default)
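A toy illustration of the default-filling rule in concretize_variants, using plain dicts in place of Spec and Variant objects; the variant names are invented.

    package_variants = {'debug': False, 'shared': True}   # name -> declared default
    spec_variants    = {'debug': True}                     # user asked for +debug
    for name, default in package_variants.items():
        spec_variants.setdefault(name, default)
    print(sorted(spec_variants.items()))   # [('debug', True), ('shared', True)]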
def concretize_compiler(self, spec): def concretize_compiler(self, spec):
"""If the spec already has a compiler, we're done. If not, then take """If the spec already has a compiler, we're done. If not, then take
the compiler used for the nearest ancestor with a compiler the compiler used for the nearest ancestor with a compiler

@ -28,452 +28,315 @@
=============================== ===============================
When Spack runs, it pulls configuration data from several config When Spack runs, it pulls configuration data from several config
files, much like bash shells. In Spack, there are two configuration directories, each of which contains configuration files. In Spack,
scopes: there are two configuration scopes:
1. ``site``: Spack loads site-wide configuration options from 1. ``site``: Spack loads site-wide configuration options from
``$(prefix)/etc/spackconfig``. ``$(prefix)/etc/spack/``.
2. ``user``: Spack next loads per-user configuration options from 2. ``user``: Spack next loads per-user configuration options from
~/.spackconfig. ~/.spack/.
If user options have the same names as site options, the user options
take precedence.
Spack may read configuration files from both of these locations. When
configurations conflict, the user config options take precedence over
the site configurations. Each configuration directory may contain
several configuration files, such as compilers.yaml or mirrors.yaml.
Configuration file format Configuration file format
=============================== ===============================
Configuration files are formatted using .gitconfig syntax, which is Configuration files are formatted using YAML syntax.
much like Windows .INI format. This format is implemented by Python's This format is implemented by Python's
ConfigParser class, and it's easy to read and versatile. yaml class, and it's easy to read and versatile.
The file is divided into sections, like this ``compiler`` section:: The config files are structured as trees, like this ``compiler`` section::
[compiler] compilers:
cc = /usr/bin/gcc chaos_5_x86_64_ib:
gcc@4.4.7:
cc: /usr/bin/gcc
cxx: /usr/bin/g++
f77: /usr/bin/gfortran
fc: /usr/bin/gfortran
bgqos_0:
xlc@12.1:
cc: /usr/local/bin/mpixlc
...
In each section there are options (cc), and each option has a value In this example, entries like ''compilers'' and ''xlc@12.1'' are used to
(/usr/bin/gcc). categorize entries beneath them in the tree. At the leaves of the tree,
entries like ''cc'' and ''cxx'' are specified as name/value pairs.
Borrowing from git, we also allow named sections, e.g.: Spack returns these trees as nested dicts. The dict for the above example
would looks like:
[compiler "gcc@4.7.3"] { 'compilers' :
cc = /usr/bin/gcc { 'chaos_5_x86_64_ib' :
{ 'gcc@4.4.7' :
{ 'cc' : '/usr/bin/gcc',
'cxx' : '/usr/bin/g++'
'f77' : '/usr/bin/gfortran'
'fc' : '/usr/bin/gfortran' }
}
{ 'bgqos_0' :
{ 'cc' : '/usr/local/bin/mpixlc' }
}
}
This is a compiler section, but it's for the specific compiler, Some routines, like get_mirror_config and get_compilers_config, may strip
``gcc@4.7.3``. ``gcc@4.7.3`` is the name. off the top-levels of the tree and return subtrees.
Keys
===============================
Together, the section, name, and option, separated by periods, are
called a ``key``. Keys can be used on the command line to set
configuration options explicitly (this is also borrowed from git).
For example, to change the C compiler used by gcc@4.7.3, you could do
this:
spack config compiler.gcc@4.7.3.cc /usr/local/bin/gcc
That will create a named compiler section in the user's .spackconfig
like the one shown above.
""" """
import os import os
import re import exceptions
import inspect import sys
import ConfigParser as cp
from external.ordereddict import OrderedDict from external.ordereddict import OrderedDict
from llnl.util.lang import memoized from llnl.util.lang import memoized
import spack.error import spack.error
__all__ = [ from contextlib import closing
'SpackConfigParser', 'get_config', 'SpackConfigurationError', from external import yaml
'InvalidConfigurationScopeError', 'InvalidSectionNameError', from external.yaml.error import MarkedYAMLError
'ReadOnlySpackConfigError', 'ConfigParserError', 'NoOptionError', import llnl.util.tty as tty
'NoSectionError'] from llnl.util.filesystem import mkdirp
_named_section_re = r'([^ ]+) "([^"]+)"' _config_sections = {}
class _ConfigCategory:
name = None
filename = None
merge = True
def __init__(self, n, f, m):
self.name = n
self.filename = f
self.merge = m
self.files_read_from = []
self.result_dict = {}
_config_sections[n] = self
_ConfigCategory('compilers', 'compilers.yaml', True)
_ConfigCategory('mirrors', 'mirrors.yaml', True)
_ConfigCategory('view', 'views.yaml', True)
_ConfigCategory('order', 'orders.yaml', True)
"""Names of scopes and their corresponding configuration files.""" """Names of scopes and their corresponding configuration files."""
_scopes = OrderedDict({ config_scopes = [('site', os.path.join(spack.etc_path, 'spack')),
'site' : os.path.join(spack.etc_path, 'spackconfig'), ('user', os.path.expanduser('~/.spack'))]
'user' : os.path.expanduser('~/.spackconfig')
})
_field_regex = r'^([\w-]*)' \ _compiler_by_arch = {}
r'(?:\.(.*(?=.)))?' \ _read_config_file_result = {}
r'(?:\.([\w-]+))?$' def _read_config_file(filename):
"""Read a given YAML configuration file"""
global _read_config_file_result
if filename in _read_config_file_result:
return _read_config_file_result[filename]
_section_regex = r'^([\w-]*)\s*' \ try:
r'\"([^"]*\)\"$' with open(filename) as f:
ydict = yaml.load(f)
except MarkedYAMLError, e:
tty.die("Error parsing yaml%s: %s" % (str(e.context_mark), e.problem))
except exceptions.IOError, e:
_read_config_file_result[filename] = None
return None
_read_config_file_result[filename] = ydict
return ydict
# Cache of configs -- we memoize this for performance. def clear_config_caches():
_config = {} """Clears the caches for configuration files, which will cause them
to be re-read upon the next request"""
for key,s in _config_sections.iteritems():
s.files_read_from = []
s.result_dict = {}
spack.config._read_config_file_result = {}
spack.config._compiler_by_arch = {}
spack.compilers._cached_default_compiler = None
def get_config(scope=None, **kwargs):
"""Get a Spack configuration object, which can be used to set options.
With no arguments, this returns a SpackConfigParser with config def _merge_dicts(d1, d2):
options loaded from all config files. This is how client code """Recursively merges two configuration trees, with entries
should read Spack configuration options. in d2 taking precedence over d1"""
if not d1:
return d2.copy()
if not d2:
return d1
Optionally, a scope parameter can be provided. Valid scopes for key2, val2 in d2.iteritems():
are ``site`` and ``user``. If a scope is provided, only the if not key2 in d1:
options from that scope's configuration file are loaded. The d1[key2] = val2
caller can set or unset options, then call ``write()`` on the continue
config object to write it back out to the original config file. val1 = d1[key2]
if isinstance(val1, dict) and isinstance(val2, dict):
d1[key2] = _merge_dicts(val1, val2)
continue
if isinstance(val1, list) and isinstance(val2, list):
val1.extend(val2)
seen = set()
d1[key2] = [ x for x in val1 if not (x in seen or seen.add(x)) ]
continue
d1[key2] = val2
return d1
By default, this will cache configurations and return the last
read version of the config file. If the config file is
modified and you need to refresh, call get_config with the
refresh=True keyword argument. This will force all files to be
re-read.
"""
refresh = kwargs.get('refresh', False)
if refresh:
_config.clear()
if scope not in _config: def get_config(category_name):
if scope is None: """Get the confguration tree for the names category. Strips off the
_config[scope] = SpackConfigParser([path for path in _scopes.values()]) top-level category entry from the dict"""
elif scope not in _scopes: global config_scopes
raise UnknownConfigurationScopeError(scope) category = _config_sections[category_name]
if category.result_dict:
return category.result_dict
category.result_dict = {}
for scope, scope_path in config_scopes:
path = os.path.join(scope_path, category.filename)
result = _read_config_file(path)
if not result:
continue
if not category_name in result:
continue
category.files_read_from.insert(0, path)
result = result[category_name]
if category.merge:
category.result_dict = _merge_dicts(category.result_dict, result)
else: else:
_config[scope] = SpackConfigParser(_scopes[scope]) category.result_dict = result
return category.result_dict
return _config[scope]
def get_filename(scope): def get_compilers_config(arch=None):
"""Get the filename for a particular config scope.""" """Get the compiler configuration from config files for the given
if not scope in _scopes: architecture. Strips off the architecture component of the
raise UnknownConfigurationScopeError(scope) configuration"""
return _scopes[scope] global _compiler_by_arch
if not arch:
arch = spack.architecture.sys_type()
if arch in _compiler_by_arch:
return _compiler_by_arch[arch]
cc_config = get_config('compilers')
def _parse_key(key): if arch in cc_config and 'all' in cc_config:
"""Return the section, name, and option the field describes. arch_compiler = dict(cc_config[arch])
Values are returned in a 3-tuple. _compiler_by_arch[arch] = _merge_dicts(arch_compiler, cc_config['all'])
elif arch in cc_config:
e.g.: _compiler_by_arch[arch] = cc_config[arch]
The field name ``compiler.gcc@4.7.3.cc`` refers to the 'cc' key elif 'all' in cc_config:
in a section that looks like this: _compiler_by_arch[arch] = cc_config['all']
[compiler "gcc@4.7.3"]
cc = /usr/local/bin/gcc
* The section is ``compiler``
* The name is ``gcc@4.7.3``
* The key is ``cc``
"""
match = re.search(_field_regex, key)
if match:
return match.groups()
else: else:
raise InvalidSectionNameError(key) _compiler_by_arch[arch] = {}
return _compiler_by_arch[arch]
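A sketch of how an architecture-specific compilers.yaml section combines with an 'all' section; get_compilers_config() gives the arch-specific entries precedence. All names and paths below are illustrative.

    cc_config = {
        'all': {
            'gcc@4.4.7': {'cc': '/usr/bin/gcc',       'cxx': '/usr/bin/g++',
                          'f77': '/usr/bin/gfortran', 'fc': '/usr/bin/gfortran'},
        },
        'bgqos_0': {
            'xlc@12.1': {'cc': '/usr/local/bin/mpixlc',    'cxx': '/usr/local/bin/mpixlcxx',
                         'f77': '/usr/local/bin/mpixlf77', 'fc': '/usr/local/bin/mpixlf90'},
        },
    }
    merged = dict(cc_config['all'])
    merged.update(cc_config['bgqos_0'])   # arch-specific entries win on clashes
    print(sorted(merged))                 # ['gcc@4.4.7', 'xlc@12.1']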
def _make_section_name(section, name): def get_mirror_config():
if not name: """Get the mirror configuration from config files"""
return section return get_config('mirrors')
return '%s "%s"' % (section, name)
def _autokey(fun): def get_config_scope_dirname(scope):
"""Allow a function to be called with a string key like """For a scope return the config directory"""
'compiler.gcc.cc', or with the section, name, and option global config_scopes
separated. Function should take at least three args, e.g.: for s,p in config_scopes:
if s == scope:
fun(self, section, name, option, [...]) return p
tty.die("Unknown scope %s. Valid options are %s" %
This will allow the function above to be called normally or (scope, ", ".join([s for s,p in config_scopes])))
with a string key, e.g.:
fun(self, key, [...])
"""
argspec = inspect.getargspec(fun)
fun_nargs = len(argspec[0])
def string_key_func(*args):
nargs = len(args)
if nargs == fun_nargs - 2:
section, name, option = _parse_key(args[1])
return fun(args[0], section, name, option, *args[2:])
elif nargs == fun_nargs:
return fun(*args)
else:
raise TypeError(
"%s takes %d or %d args (found %d)."
% (fun.__name__, fun_nargs - 2, fun_nargs, len(args)))
return string_key_func
def get_config_scope_filename(scope, category_name):
class SpackConfigParser(cp.RawConfigParser): """For some scope and category, get the name of the configuration file"""
"""Slightly modified from Python's raw config file parser to accept if not category_name in _config_sections:
leading whitespace and preserve comments. tty.die("Unknown config category %s. Valid options are: %s" %
""" (category_name, ", ".join([s for s in _config_sections])))
# Slightly modify Python option expressions to allow leading whitespace return os.path.join(get_config_scope_dirname(scope), _config_sections[category_name].filename)
OPTCRE = re.compile(r'\s*' + cp.RawConfigParser.OPTCRE.pattern)
def __init__(self, file_or_files):
cp.RawConfigParser.__init__(self, dict_type=OrderedDict)
if isinstance(file_or_files, basestring):
self.read([file_or_files])
self.filename = file_or_files
else:
self.read(file_or_files)
self.filename = None
@_autokey def add_to_config(category_name, addition_dict, scope=None):
def set_value(self, section, name, option, value): """Merge a new dict into a configuration tree and write the new
"""Set the value for a key. If the key is in a section or named configuration to disk"""
section that does not yet exist, add that section. global _read_config_file_result
""" get_config(category_name)
sn = _make_section_name(section, name) category = _config_sections[category_name]
if not self.has_section(sn):
self.add_section(sn)
# Allow valueless config options to be set like this:
# spack config set mirror https://foo.bar.com
#
# Instead of this, which parses incorrectly:
# spack config set mirror.https://foo.bar.com
#
if option is None:
option = value
value = None
self.set(sn, option, value)
@_autokey
def get_value(self, section, name, option):
"""Get the value for a key. Raises NoOptionError or NoSectionError if
the key is not present."""
sn = _make_section_name(section, name)
#If scope is specified, use it. Otherwise use the last config scope that
#we successfully parsed data from.
file = None
path = None
if not scope and not category.files_read_from:
scope = 'user'
if scope:
try: try:
if not option: dir = get_config_scope_dirname(scope)
# TODO: format this better if not os.path.exists(dir):
return self.items(sn) mkdirp(dir)
path = os.path.join(dir, category.filename)
file = open(path, 'w')
except exceptions.IOError, e:
pass
else:
for p in category.files_read_from:
try:
file = open(p, 'w')
except exceptions.IOError, e:
pass
if file:
path = p
break;
if not file:
tty.die('Unable to write to config file %s' % path)
return self.get(sn, option) #Merge the new information into the existing file info, then write to disk
new_dict = _read_config_file_result[path]
if new_dict and category_name in new_dict:
new_dict = new_dict[category_name]
new_dict = _merge_dicts(new_dict, addition_dict)
new_dict = { category_name : new_dict }
_read_config_file_result[path] = new_dict
yaml.dump(new_dict, stream=file, default_flow_style=False)
file.close()
# Wrap ConfigParser exceptions in SpackExceptions #Merge the new information into the cached results
except cp.NoOptionError, e: raise NoOptionError(e) category.result_dict = _merge_dicts(category.result_dict, addition_dict)
except cp.NoSectionError, e: raise NoSectionError(e)
except cp.Error, e: raise ConfigParserError(e)
@_autokey def add_to_mirror_config(addition_dict, scope=None):
def has_value(self, section, name, option): """Add mirrors to the configuration files"""
"""Return whether the configuration file has a value for a add_to_config('mirrors', addition_dict, scope)
particular key."""
sn = _make_section_name(section, name)
return self.has_option(sn, option)
def has_named_section(self, section, name): def add_to_compiler_config(addition_dict, scope=None, arch=None):
sn = _make_section_name(section, name) """Add compilers to the configuration files"""
return self.has_section(sn) if not arch:
arch = spack.architecture.sys_type()
add_to_config('compilers', { arch : addition_dict }, scope)
clear_config_caches()
def remove_named_section(self, section, name): def remove_from_config(category_name, key_to_rm, scope=None):
sn = _make_section_name(section, name) """Remove a configuration key and write a new configuration to disk"""
self.remove_section(sn) global config_scopes
get_config(category_name)
scopes_to_rm_from = [scope] if scope else [s for s,p in config_scopes]
category = _config_sections[category_name]
rmd_something = False
for s in scopes_to_rm_from:
path = get_config_scope_filename(scope, category_name)
result = _read_config_file(path)
if not result:
continue
if not key_to_rm in result[category_name]:
continue
with closing(open(path, 'w')) as f:
result[category_name].pop(key_to_rm, None)
yaml.dump(result, stream=f, default_flow_style=False)
category.result_dict.pop(key_to_rm, None)
rmd_something = True
return rmd_something
def get_section_names(self, sectype): """Print a configuration to stdout"""
"""Get all named sections with the specified type. def print_category(category_name):
A named section looks like this: if not category_name in _config_sections:
tty.die("Unknown config category %s. Valid options are: %s" %
(category_name, ", ".join([s for s in _config_sections])))
yaml.dump(get_config(category_name), stream=sys.stdout, default_flow_style=False)
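For illustration, default_flow_style=False makes yaml.dump emit the block style used throughout these config files (PyYAML again standing in for the bundled module):

    import sys
    import yaml

    yaml.dump({'mirrors': {'lab': 'https://example.com/mirror'}},
              stream=sys.stdout, default_flow_style=False)
    # mirrors:
    #   lab: https://example.com/mirror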
[compiler "gcc@4.7"]
Names of sections are returned as a list, e.g.:
['gcc@4.7', 'intel@12.3', 'pgi@4.2']
You can get items in the sections like this:
"""
sections = []
for secname in self.sections():
match = re.match(_named_section_re, secname)
if match:
t, name = match.groups()
if t == sectype:
sections.append(name)
return sections
def write(self, path_or_fp=None):
"""Write this configuration out to a file.
If called with no arguments, this will write the
configuration out to the file from which it was read. If
this config was read from multiple files, e.g. site
configuration and then user configuration, write will
simply raise an error.
If called with a path or file object, this will write the
configuration out to the supplied path or file object.
"""
if path_or_fp is None:
if not self.filename:
raise ReadOnlySpackConfigError()
path_or_fp = self.filename
if isinstance(path_or_fp, basestring):
path_or_fp = open(path_or_fp, 'w')
self._write(path_or_fp)
def _read(self, fp, fpname):
"""This is a copy of Python 2.6's _read() method, with support for
continuation lines removed."""
cursect = None # None, or a dictionary
optname = None
comment = 0
lineno = 0
e = None # None, or an exception
while True:
line = fp.readline()
if not line:
break
lineno = lineno + 1
# comment or blank line?
if ((line.strip() == '' or line[0] in '#;') or
(line.split(None, 1)[0].lower() == 'rem' and line[0] in "rR")):
self._sections["comment-%d" % comment] = line
comment += 1
# a section header or option header?
else:
# is it a section header?
mo = self.SECTCRE.match(line)
if mo:
sectname = mo.group('header')
if sectname in self._sections:
cursect = self._sections[sectname]
elif sectname == cp.DEFAULTSECT:
cursect = self._defaults
else:
cursect = self._dict()
cursect['__name__'] = sectname
self._sections[sectname] = cursect
# So sections can't start with a continuation line
optname = None
# no section header in the file?
elif cursect is None:
raise cp.MissingSectionHeaderError(fpname, lineno, line)
# an option line?
else:
mo = self.OPTCRE.match(line)
if mo:
optname, vi, optval = mo.group('option', 'vi', 'value')
if vi in ('=', ':') and ';' in optval:
# ';' is a comment delimiter only if it follows
# a spacing character
pos = optval.find(';')
if pos != -1 and optval[pos-1].isspace():
optval = optval[:pos]
optval = optval.strip()
# allow empty values
if optval == '""':
optval = ''
optname = self.optionxform(optname.rstrip())
cursect[optname] = optval
else:
# a non-fatal parsing error occurred. set up the
# exception but keep going. the exception will be
# raised at the end of the file and will contain a
# list of all bogus lines
if not e:
e = cp.ParsingError(fpname)
e.append(lineno, repr(line))
# if any parsing errors occurred, raise an exception
if e:
raise e
def _write(self, fp):
"""Write an .ini-format representation of the configuration state.
This is taken from the default Python 2.6 source. It writes 4
spaces at the beginning of lines instead of no leading space.
"""
if self._defaults:
fp.write("[%s]\n" % cp.DEFAULTSECT)
for (key, value) in self._defaults.items():
fp.write(" %s = %s\n" % (key, str(value).replace('\n', '\n\t')))
fp.write("\n")
for section in self._sections:
# Handles comments and blank lines.
if isinstance(self._sections[section], basestring):
fp.write(self._sections[section])
continue
else:
# Allow leading whitespace
fp.write("[%s]\n" % section)
for (key, value) in self._sections[section].items():
if key != "__name__":
fp.write(" %s = %s\n" %
(key, str(value).replace('\n', '\n\t')))
class SpackConfigurationError(spack.error.SpackError):
def __init__(self, *args):
super(SpackConfigurationError, self).__init__(*args)
class InvalidConfigurationScopeError(SpackConfigurationError):
def __init__(self, scope):
super(InvalidConfigurationScopeError, self).__init__(
"Invalid configuration scope: '%s'" % scope,
"Options are: %s" % ", ".join(*_scopes.values()))
class InvalidSectionNameError(SpackConfigurationError):
"""Raised when the name for a section is invalid."""
def __init__(self, name):
super(InvalidSectionNameError, self).__init__(
"Invalid section specifier: '%s'" % name)
class ReadOnlySpackConfigError(SpackConfigurationError):
"""Raised when user attempts to write to a config read from multiple files."""
def __init__(self):
super(ReadOnlySpackConfigError, self).__init__(
"Can only write to a single-file SpackConfigParser")
class ConfigParserError(SpackConfigurationError):
"""Wrapper for the Python ConfigParser's errors"""
def __init__(self, error):
super(ConfigParserError, self).__init__(str(error))
self.error = error
class NoOptionError(ConfigParserError):
"""Wrapper for ConfigParser NoOptionError"""
def __init__(self, error):
super(NoOptionError, self).__init__(error)
class NoSectionError(ConfigParserError):
"""Wrapper for ConfigParser NoOptionError"""
def __init__(self, error):
super(NoSectionError, self).__init__(error)

@ -0,0 +1,277 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""This package contains directives that can be used within a package.
Directives are functions that can be called inside a package
definition to modify the package, for example:
class OpenMpi(Package):
depends_on("hwloc")
provides("mpi")
...
``provides`` and ``depends_on`` are spack directives.
The available directives are:
* ``version``
* ``depends_on``
* ``provides``
* ``extends``
* ``patch``
* ``variant``
"""
__all__ = [ 'depends_on', 'extends', 'provides', 'patch', 'version',
'variant' ]
import re
import inspect
from llnl.util.lang import *
import spack
import spack.spec
import spack.error
import spack.url
from spack.version import Version
from spack.patch import Patch
from spack.variant import Variant
from spack.spec import Spec, parse_anonymous_spec
#
# This is a list of all directives, built up as they are defined in
# this file.
#
directives = {}
def ensure_dicts(pkg):
"""Ensure that a package has all the dicts required by directives."""
for name, d in directives.items():
d.ensure_dicts(pkg)
class directive(object):
"""Decorator for Spack directives.
Spack directives allow you to modify a package while it is being
defined, e.g. to add version or depenency information. Directives
are one of the key pieces of Spack's package "langauge", which is
embedded in python.
Here's an example directive:
@directive(dicts='versions')
version(pkg, ...):
...
This directive allows you to write:
class Foo(Package):
version(...)
The ``@directive`` decorator handles a couple things for you:
1. Adds the class scope (pkg) as an initial parameter when
called, like a class method would. This allows you to modify
a package from within a directive, while the package is still
being defined.
2. It automatically adds a dictionary called "versions" to the
package so that you can refer to pkg.versions.
The ``(dicts='versions')`` part ensures that ALL packages in Spack
will have a ``versions`` attribute after they're constructed, and
that if no directive actually modified it, it will just be an
empty dict.
This is just a modular way to add storage attributes to the
Package class, and it's how Spack gets information from the
packages to the core.
"""
def __init__(self, dicts=None):
if isinstance(dicts, basestring):
dicts = (dicts,)
elif type(dicts) not in (list, tuple):
raise TypeError(
"dicts arg must be list, tuple, or string. Found %s."
% type(dicts))
self.dicts = dicts
def ensure_dicts(self, pkg):
"""Ensure that a package has the dicts required by this directive."""
for d in self.dicts:
if not hasattr(pkg, d):
setattr(pkg, d, {})
attr = getattr(pkg, d)
if not isinstance(attr, dict):
raise spack.error.SpackError(
"Package %s has non-dict %s attribute!" % (pkg, d))
def __call__(self, directive_function):
directives[directive_function.__name__] = self
def wrapped(*args, **kwargs):
pkg = DictWrapper(caller_locals())
self.ensure_dicts(pkg)
pkg.name = get_calling_module_name()
return directive_function(pkg, *args, **kwargs)
return wrapped
@directive('versions')
def version(pkg, ver, checksum=None, **kwargs):
"""Adds a version and metadata describing how to fetch it.
Metadata is just stored as a dict in the package's versions
dictionary. Package must turn it into a valid fetch strategy
later.
"""
# TODO: checksum vs md5 distinction is confusing -- fix this.
# special case checksum for backward compatibility
if checksum:
kwargs['md5'] = checksum
# Store kwargs for the package to later with a fetch_strategy.
pkg.versions[Version(ver)] = kwargs
def _depends_on(pkg, spec, when=None):
if when is None:
when = pkg.name
when_spec = parse_anonymous_spec(when, pkg.name)
dep_spec = Spec(spec)
if pkg.name == dep_spec.name:
raise CircularReferenceError('depends_on', pkg.name)
conditions = pkg.dependencies.setdefault(dep_spec.name, {})
if when_spec in conditions:
conditions[when_spec].constrain(dep_spec, deps=False)
else:
conditions[when_spec] = dep_spec
@directive('dependencies')
def depends_on(pkg, spec, when=None):
"""Creates a dict of deps with specs defining when they apply."""
_depends_on(pkg, spec, when=when)
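A sketch of a package file using the conditional form of depends_on; the package, variant, and dependency names are examples rather than real Spack packages, and the usual Spack package environment (from spack import *, with configure/make injected at build time) is assumed.

    from spack import *

    class Examplepkg(Package):
        """Made-up package illustrating optional dependencies."""
        homepage = "http://example.com/examplepkg"
        url      = "http://example.com/examplepkg-1.0.tar.gz"

        version('1.0', '0123456789abcdef0123456789abcdef')

        variant('mpi', default=False, description="Enable MPI support")

        depends_on('libelf')                  # always required
        depends_on('mpi',      when='+mpi')   # only when the variant is enabled
        depends_on('libdwarf', when='@1.0:')  # only for version 1.0 and newer

        def install(self, spec, prefix):
            configure('--prefix=%s' % prefix)
            make()
            make('install')

With a definition like this, asking for examplepkg+mpi pulls in an MPI implementation, while plain examplepkg does not.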
@directive(('extendees', 'dependencies'))
def extends(pkg, spec, **kwargs):
"""Same as depends_on, but dependency is symlinked into parent prefix.
This is for Python and other language modules where the module
needs to be installed into the prefix of the Python installation.
Spack handles this by installing modules into their own prefix,
but allowing ONE module version to be symlinked into a parent
Python install at a time.
keyword arguments can be passed to extends() so that extension
packages can pass parameters to the extendee's extension
mechanism.
"""
if pkg.extendees:
raise DirectiveError("Packages can extend at most one other package.")
when = kwargs.pop('when', pkg.name)
_depends_on(pkg, spec, when=when)
pkg.extendees[spec] = (Spec(spec), kwargs)
@directive('provided')
def provides(pkg, *specs, **kwargs):
"""Allows packages to provide a virtual dependency. If a package provides
'mpi', other packages can declare that they depend on "mpi", and spack
can use the providing package to satisfy the dependency.
"""
spec_string = kwargs.get('when', pkg.name)
provider_spec = parse_anonymous_spec(spec_string, pkg.name)
for string in specs:
for provided_spec in spack.spec.parse(string):
if pkg.name == provided_spec.name:
raise CircularReferenceError('depends_on', pkg.name)
pkg.provided[provided_spec] = provider_spec
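And a sketch of provides() declaring a virtual dependency, loosely modeled on an MPI implementation; the package name and version ranges are illustrative.

    from spack import *

    class Examplempi(Package):
        """Made-up MPI implementation illustrating provides()."""
        homepage = "http://example.com/examplempi"
        url      = "http://example.com/examplempi-3.1.tar.gz"

        version('3.1', 'ffeeddccbbaa99887766554433221100')

        provides('mpi@:3', when='@3:')    # newer releases cover the MPI-3 interface
        provides('mpi@:1', when='@:1.2')  # old releases only cover MPI-1

        # install() omitted for brevity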
@directive('patches')
def patch(pkg, url_or_filename, level=1, when=None):
"""Packages can declare patches to apply to source. You can
optionally provide a when spec to indicate that a particular
patch should only be applied when the package's spec meets
certain conditions (e.g. a particular version).
"""
if when is None:
when = pkg.name
when_spec = parse_anonymous_spec(when, pkg.name)
if when_spec not in pkg.patches:
pkg.patches[when_spec] = [Patch(pkg.name, url_or_filename, level)]
else:
# if this spec is identical to some other, then append this
# patch to the existing list.
pkg.patches[when_spec].append(Patch(pkg.name, url_or_filename, level))
@directive('variants')
def variant(pkg, name, default=False, description=""):
"""Define a variant for the package. Packager can specify a default
value (on or off) as well as a text description."""
default = bool(default)
description = str(description).strip()
if not re.match(spack.spec.identifier_re, name):
raise DirectiveError("Invalid variant name in %s: '%s'" % (pkg.name, name))
pkg.variants[name] = Variant(default, description)
class DirectiveError(spack.error.SpackError):
"""This is raised when something is wrong with a package directive."""
def __init__(self, directive, message):
super(DirectiveError, self).__init__(message)
self.directive = directive
class CircularReferenceError(DirectiveError):
"""This is raised when something depends on itself."""
def __init__(self, directive, package):
super(CircularReferenceError, self).__init__(
directive,
"Package '%s' cannot pass itself to %s." % (package, directive))
self.package = package

@ -27,8 +27,9 @@
import exceptions import exceptions
import hashlib import hashlib
import shutil import shutil
import glob
import tempfile import tempfile
from contextlib import closing from external import yaml
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.lang import memoized from llnl.util.lang import memoized
@ -81,7 +82,7 @@ def relative_path_for_spec(self, spec):
raise NotImplementedError() raise NotImplementedError()
def make_path_for_spec(self, spec): def create_install_directory(self, spec):
"""Creates the installation directory for a spec.""" """Creates the installation directory for a spec."""
raise NotImplementedError() raise NotImplementedError()
@ -131,7 +132,7 @@ def path_for_spec(self, spec):
return os.path.join(self.root, path) return os.path.join(self.root, path)
def remove_path_for_spec(self, spec): def remove_install_directory(self, spec):
"""Removes a prefix and any empty parent directories from the root. """Removes a prefix and any empty parent directories from the root.
Raises RemoveFailedError if something goes wrong. Raises RemoveFailedError if something goes wrong.
""" """
@ -153,94 +154,70 @@ def remove_path_for_spec(self, spec):
path = os.path.dirname(path) path = os.path.dirname(path)
def traverse_dirs_at_depth(root, depth, path_tuple=(), curdepth=0): class YamlDirectoryLayout(DirectoryLayout):
"""For each directory at <depth> within <root>, return a tuple representing
the ancestors of that directory.
"""
if curdepth == depth and curdepth != 0:
yield path_tuple
elif depth > curdepth:
for filename in os.listdir(root):
child = os.path.join(root, filename)
if os.path.isdir(child):
child_tuple = path_tuple + (filename,)
for tup in traverse_dirs_at_depth(
child, depth, child_tuple, curdepth+1):
yield tup
class SpecHashDirectoryLayout(DirectoryLayout):
"""Lays out installation directories like this:: """Lays out installation directories like this::
<install_root>/ <install root>/
<architecture>/ <architecture>/
<compiler>/ <compiler>-<compiler version>/
name@version+variant-<dependency_hash> <name>-<version>-<variants>-<hash>
Where dependency_hash is a SHA-1 hash prefix for the full package spec. The hash here is a SHA-1 hash for the full DAG plus the build
This accounts for dependencies. spec. TODO: implement the build spec.
If there is ever a hash collision, you won't be able to install a new To avoid special characters (like ~) in the directory name,
package unless you use a larger prefix. However, the full spec is stored only enabled variants are included in the install path.
in a file called .spec in each directory, so you can migrate an entire Disabled variants are omitted.
install directory to a new hash size pretty easily.
TODO: make a tool to migrate install directories to different hash sizes.
""" """
def __init__(self, root, **kwargs): def __init__(self, root, **kwargs):
"""Prefix size is number of characters in the SHA-1 prefix to use super(YamlDirectoryLayout, self).__init__(root)
to make each hash unique. self.metadata_dir = kwargs.get('metadata_dir', '.spack')
""" self.hash_len = kwargs.get('hash_len', None)
spec_file_name = kwargs.get('spec_file_name', '.spec')
extension_file_name = kwargs.get('extension_file_name', '.extensions') self.spec_file_name = 'spec.yaml'
super(SpecHashDirectoryLayout, self).__init__(root) self.extension_file_name = 'extensions.yaml'
self.spec_file_name = spec_file_name
self.extension_file_name = extension_file_name
# Cache of already written/read extension maps. # Cache of already written/read extension maps.
self._extension_maps = {} self._extension_maps = {}
@property @property
def hidden_file_paths(self): def hidden_file_paths(self):
return ('.spec', '.extensions') return (self.metadata_dir,)
def relative_path_for_spec(self, spec): def relative_path_for_spec(self, spec):
_check_concrete(spec) _check_concrete(spec)
dir_name = spec.format('$_$@$+$#') enabled_variants = (
return join_path(spec.architecture, spec.compiler, dir_name) '-' + v.name for v in spec.variants.values()
if v.enabled)
dir_name = "%s-%s%s-%s" % (
spec.name,
spec.version,
''.join(enabled_variants),
spec.dag_hash(self.hash_len))
path = join_path(
spec.architecture,
"%s-%s" % (spec.compiler.name, spec.compiler.version),
dir_name)
return path
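A toy reconstruction of that naming scheme, with hard-coded stand-ins for the spec attributes (the hash fragment is invented):

    import os

    arch          = 'chaos_5_x86_64_ib'
    compiler      = ('gcc', '4.4.7')
    name, version = 'mpileaks', '1.0'
    enabled       = ['debug']            # only variants that are switched on
    short_hash    = '7ba5jrtvpmwfdqz'

    dir_name = "%s-%s%s-%s" % (
        name, version, ''.join('-' + v for v in enabled), short_hash)
    print(os.path.join(arch, "%s-%s" % compiler, dir_name))
    # chaos_5_x86_64_ib/gcc-4.4.7/mpileaks-1.0-debug-7ba5jrtvpmwfdqz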
def write_spec(self, spec, path): def write_spec(self, spec, path):
"""Write a spec out to a file.""" """Write a spec out to a file."""
with closing(open(path, 'w')) as spec_file: _check_concrete(spec)
spec_file.write(spec.tree(ids=False, cover='nodes')) with open(path, 'w') as f:
spec.to_yaml(f)
def read_spec(self, path): def read_spec(self, path):
"""Read the contents of a file and parse them as a spec""" """Read the contents of a file and parse them as a spec"""
with closing(open(path)) as spec_file: with open(path) as f:
# Specs from files are assumed normal and concrete spec = Spec.from_yaml(f)
spec = Spec(spec_file.read().replace('\n', ''))
if all(spack.db.exists(s.name) for s in spec.traverse()): # Specs read from actual installations are always concrete
copy = spec.copy()
# TODO: It takes a lot of time to normalize every spec on read.
# TODO: Storing graph info with spec files would fix this.
copy.normalize()
if copy.concrete:
return copy # These are specs spack still understands.
# If we get here, either the spec is no longer in spack, or
# something about its dependencies has changed. So we need to
# just assume the read spec is correct. We'll lose graph
# information if we do this, but this is just for best effort
# for commands like uninstall and find. Currently Spack
# doesn't do anything that needs the graph info after install.
# TODO: store specs with full connectivity information, so
# that we don't have to normalize or reconstruct based on
# changing dependencies in the Spack tree.
spec._normal = True spec._normal = True
spec._concrete = True spec._concrete = True
return spec return spec
@ -249,10 +226,14 @@ def read_spec(self, path):
def spec_file_path(self, spec): def spec_file_path(self, spec):
"""Gets full path to spec file""" """Gets full path to spec file"""
_check_concrete(spec) _check_concrete(spec)
return join_path(self.path_for_spec(spec), self.spec_file_name) return join_path(self.metadata_path(spec), self.spec_file_name)
def make_path_for_spec(self, spec): def metadata_path(self, spec):
return join_path(self.path_for_spec(spec), self.metadata_dir)
def create_install_directory(self, spec):
_check_concrete(spec) _check_concrete(spec)
path = self.path_for_spec(spec) path = self.path_for_spec(spec)
@ -267,16 +248,13 @@ def make_path_for_spec(self, spec):
if installed_spec == self.spec: if installed_spec == self.spec:
raise InstallDirectoryAlreadyExistsError(path) raise InstallDirectoryAlreadyExistsError(path)
spec_hash = self.hash_spec(spec) if spec.dag_hash() == installed_spec.dag_hash():
installed_hash = self.hash_spec(installed_spec)
if installed_spec == spec_hash:
raise SpecHashCollisionError(installed_hash, spec_hash) raise SpecHashCollisionError(installed_spec.dag_hash(), spec.dag_hash())
else: else:
raise InconsistentInstallDirectoryError( raise InconsistentInstallDirectoryError(
'Spec file in %s does not match SHA-1 hash!' 'Spec file in %s does not match hash!' % spec_file_path)
% spec_file_path)
mkdirp(path) mkdirp(self.metadata_path(spec))
self.write_spec(spec, spec_file_path) self.write_spec(spec, spec_file_path)
@ -285,25 +263,50 @@ def all_specs(self):
if not os.path.isdir(self.root): if not os.path.isdir(self.root):
return [] return []
specs = [] pattern = join_path(
for path in traverse_dirs_at_depth(self.root, 3): self.root, '*', '*', '*', self.metadata_dir, self.spec_file_name)
arch, compiler, last_dir = path spec_files = glob.glob(pattern)
spec_file_path = join_path( return [self.read_spec(s) for s in spec_files]
self.root, arch, compiler, last_dir, self.spec_file_name)
if os.path.exists(spec_file_path):
spec = self.read_spec(spec_file_path) @memoized
specs.append(spec) def specs_by_hash(self):
return specs by_hash = {}
for spec in self.all_specs():
by_hash[spec.dag_hash()] = spec
return by_hash
def extension_file_path(self, spec): def extension_file_path(self, spec):
"""Gets full path to an installed package's extension file""" """Gets full path to an installed package's extension file"""
_check_concrete(spec) _check_concrete(spec)
return join_path(self.path_for_spec(spec), self.extension_file_name) return join_path(self.metadata_path(spec), self.extension_file_name)
def _write_extensions(self, spec, extensions):
path = self.extension_file_path(spec)
# Create a temp file in the same directory as the actual file.
dirname, basename = os.path.split(path)
tmp = tempfile.NamedTemporaryFile(
prefix=basename, dir=dirname, delete=False)
# write tmp file
with tmp:
yaml.dump({
'extensions' : [
{ ext.name : {
'hash' : ext.dag_hash(),
'path' : str(ext.prefix)
}} for ext in sorted(extensions.values())]
}, tmp, default_flow_style=False)
# Atomic update by moving tmpfile on top of old one.
os.rename(tmp.name, path)
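The resulting extensions.yaml would look roughly like the following; the hash and prefix are invented, and PyYAML stands in for the bundled yaml module. Writing to a temp file in the same directory and renaming it over the old file keeps the update atomic on POSIX filesystems.

    import sys
    import yaml

    yaml.dump({'extensions': [
        {'py-nose': {
            'hash': 'd6pmraxjxtzlmzcbgvp3h6pxjhcfjpmn',
            'path': '/opt/spack/chaos_5_x86_64_ib/gcc-4.4.7/py-nose-1.3.4-d6pmrax'}},
    ]}, stream=sys.stdout, default_flow_style=False)
    # extensions:
    # - py-nose:
    #     hash: d6pmraxjxtzlmzcbgvp3h6pxjhcfjpmn
    #     path: /opt/spack/chaos_5_x86_64_ib/gcc-4.4.7/py-nose-1.3.4-d6pmrax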
def _extension_map(self, spec): def _extension_map(self, spec):
"""Get a dict<name -> spec> for all extensions currnetly """Get a dict<name -> spec> for all extensions currently
installed for this package.""" installed for this package."""
_check_concrete(spec) _check_concrete(spec)
@ -313,16 +316,26 @@ def _extension_map(self, spec):
self._extension_maps[spec] = {} self._extension_maps[spec] = {}
else: else:
by_hash = self.specs_by_hash()
exts = {} exts = {}
with closing(open(path)) as ext_file: with open(path) as ext_file:
for line in ext_file: yaml_file = yaml.load(ext_file)
try: for entry in yaml_file['extensions']:
spec = Spec(line.strip()) name = next(iter(entry))
exts[spec.name] = spec dag_hash = entry[name]['hash']
except spack.error.SpackError, e: prefix = entry[name]['path']
# TODO: do something better here -- should be
# resilient to corrupt files. if not dag_hash in by_hash:
raise InvalidExtensionSpecError(str(e)) raise InvalidExtensionSpecError(
"Spec %s not found in %s." % (dag_hash, prefix))
ext_spec = by_hash[dag_hash]
if not prefix == ext_spec.prefix:
raise InvalidExtensionSpecError(
"Prefix %s does not match spec with hash %s: %s"
% (prefix, dag_hash, ext_spec))
exts[ext_spec.name] = ext_spec
self._extension_maps[spec] = exts self._extension_maps[spec] = exts
return self._extension_maps[spec] return self._extension_maps[spec]
@ -330,6 +343,7 @@ def _extension_map(self, spec):
def extension_map(self, spec): def extension_map(self, spec):
"""Defensive copying version of _extension_map() for external API.""" """Defensive copying version of _extension_map() for external API."""
_check_concrete(spec)
return self._extension_map(spec).copy() return self._extension_map(spec).copy()
@ -349,23 +363,6 @@ def check_activated(self, spec, ext_spec):
raise NoSuchExtensionError(spec, ext_spec) raise NoSuchExtensionError(spec, ext_spec)
def _write_extensions(self, spec, extensions):
path = self.extension_file_path(spec)
# Create a temp file in the same directory as the actual file.
dirname, basename = os.path.split(path)
tmp = tempfile.NamedTemporaryFile(
prefix=basename, dir=dirname, delete=False)
# Write temp file.
with closing(tmp):
for extension in sorted(extensions.values()):
tmp.write("%s\n" % extension)
# Atomic update by moving tmpfile on top of old one.
os.rename(tmp.name, path)
def add_extension(self, spec, ext_spec): def add_extension(self, spec, ext_spec):
_check_concrete(spec) _check_concrete(spec)
_check_concrete(ext_spec) _check_concrete(ext_spec)
@ -399,9 +396,9 @@ def __init__(self, message):
class SpecHashCollisionError(DirectoryLayoutError): class SpecHashCollisionError(DirectoryLayoutError):
"""Raised when there is a hash collision in an SpecHashDirectoryLayout.""" """Raised when there is a hash collision in an install layout."""
def __init__(self, installed_spec, new_spec): def __init__(self, installed_spec, new_spec):
super(SpecHashDirectoryLayout, self).__init__( super(SpecHashCollisionError, self).__init__(
'Specs %s and %s have the same SHA-1 prefix!' 'Specs %s and %s have the same SHA-1 prefix!'
% installed_spec, new_spec) % (installed_spec, new_spec))
@ -422,7 +419,7 @@ def __init__(self, message):
class InstallDirectoryAlreadyExistsError(DirectoryLayoutError): class InstallDirectoryAlreadyExistsError(DirectoryLayoutError):
"""Raised when make_path_for_sec is called unnecessarily.""" """Raised when create_install_directory is called unnecessarily."""
def __init__(self, path): def __init__(self, path):
super(InstallDirectoryAlreadyExistsError, self).__init__( super(InstallDirectoryAlreadyExistsError, self).__init__(
"Install path %s already exists!") "Install path %s already exists!")
@ -455,5 +452,3 @@ def __init__(self, spec, ext_spec):
super(NoSuchExtensionError, self).__init__( super(NoSuchExtensionError, self).__init__(
"%s cannot be removed from %s because it's not activated."% ( "%s cannot be removed from %s because it's not activated."% (
ext_spec.short_spec, spec.short_spec)) ext_spec.short_spec, spec.short_spec))

@ -195,7 +195,7 @@ def install(self, prefix):
""" """
class when(object): class when(object):
def __init__(self, spec): def __init__(self, spec):
pkg = get_calling_package_name() pkg = get_calling_module_name()
self.spec = parse_anonymous_spec(spec, pkg) self.spec = parse_anonymous_spec(spec, pkg)
def __call__(self, method): def __call__(self, method):

@ -50,11 +50,11 @@
from llnl.util.lang import * from llnl.util.lang import *
import spack import spack
import spack.spec
import spack.error import spack.error
import spack.compilers import spack.compilers
import spack.mirror import spack.mirror
import spack.hooks import spack.hooks
import spack.directives
import spack.build_environment as build_env import spack.build_environment as build_env
import spack.url as url import spack.url as url
import spack.fetch_strategy as fs import spack.fetch_strategy as fs
@ -301,32 +301,6 @@ class SomePackage(Package):
clean() (some of them do this), and others to provide custom behavior. clean() (some of them do this), and others to provide custom behavior.
""" """
#
# These variables are defaults for the various "relations".
#
"""Map of information about Versions of this package.
Map goes: Version -> dict of attributes"""
versions = {}
"""Specs of dependency packages, keyed by name."""
dependencies = {}
"""Specs of virtual packages provided by this package, keyed by name."""
provided = {}
"""Specs of conflicting packages, keyed by name. """
conflicted = {}
"""Patches to apply to newly expanded source, if any."""
patches = {}
"""Specs of package this one extends, or None.
Currently, packages can extend at most one other package.
"""
extendees = {}
# #
# These are default values for instance variables. # These are default values for instance variables.
# #
@ -350,20 +324,8 @@ def __init__(self, spec):
if '.' in self.name: if '.' in self.name:
self.name = self.name[self.name.rindex('.') + 1:] self.name = self.name[self.name.rindex('.') + 1:]
# Sanity check some required variables that could be # Sanity check attributes required by Spack directives.
# overridden by package authors. spack.directives.ensure_dicts(type(self))
def ensure_has_dict(attr_name):
if not hasattr(self, attr_name):
raise PackageError("Package %s must define %s" % attr_name)
attr = getattr(self, attr_name)
if not isinstance(attr, dict):
raise PackageError("Package %s has non-dict %s attribute!"
% (self.name, attr_name))
ensure_has_dict('versions')
ensure_has_dict('dependencies')
ensure_has_dict('conflicted')
ensure_has_dict('patches')
# Check versions in the versions dict. # Check versions in the versions dict.
for v in self.versions: for v in self.versions:
@ -577,41 +539,6 @@ def preorder_traversal(self, visited=None, **kwargs):
yield pkg yield pkg
def validate_dependencies(self):
"""Ensure that this package and its dependencies all have consistent
constraints on them.
NOTE that this will NOT find sanity problems through a virtual
dependency. Virtual deps complicate the problem because we
don't know in advance which ones conflict with others in the
dependency DAG. If there's more than one virtual dependency,
it's a full-on SAT problem, so hold off on this for now.
The vdeps are actually skipped in preorder_traversal, so see
that for details.
TODO: investigate validating virtual dependencies.
"""
# This algorithm just attempts to merge all the constraints on the same
# package together, loses information about the source of the conflict.
# What we'd really like to know is exactly which two constraints
# conflict, but that algorithm is more expensive, so we'll do it
# the simple, less informative way for now.
merged = spack.spec.DependencyMap()
try:
for pkg in self.preorder_traversal():
for name, spec in pkg.dependencies.iteritems():
if name not in merged:
merged[name] = spec.copy()
else:
merged[name].constrain(spec)
except spack.spec.UnsatisfiableSpecError, e:
raise InvalidPackageDependencyError(
"Package %s has inconsistent dependency constraints: %s"
% (self.name, e.message))
def provides(self, vpkg_name): def provides(self, vpkg_name):
"""True if this package provides a virtual package with the specified name.""" """True if this package provides a virtual package with the specified name."""
return vpkg_name in self.provided return vpkg_name in self.provided
@ -664,7 +591,7 @@ def url_version(self, version):
def remove_prefix(self): def remove_prefix(self):
"""Removes the prefix for a package along with any empty parent directories.""" """Removes the prefix for a package along with any empty parent directories."""
spack.install_layout.remove_path_for_spec(self.spec) spack.install_layout.remove_install_directory(self.spec)
def do_fetch(self): def do_fetch(self):
@ -820,7 +747,7 @@ def do_install(self, **kwargs):
# create the install directory. The install layout # create the install directory. The install layout
# handles this in case so that it can use whatever # handles this in case so that it can use whatever
# package naming scheme it likes. # package naming scheme it likes.
spack.install_layout.make_path_for_spec(self.spec) spack.install_layout.create_install_directory(self.spec)
def cleanup(): def cleanup():
if not keep_prefix: if not keep_prefix:
@ -841,11 +768,11 @@ def real_work():
spack.hooks.pre_install(self) spack.hooks.pre_install(self)
# Set up process's build environment before running install. # Set up process's build environment before running install.
self.stage.chdir_to_source()
if fake_install: if fake_install:
self.do_fake_install() self.do_fake_install()
else: else:
# Subclasses implement install() to do the real work. # Subclasses implement install() to do the real work.
self.stage.chdir_to_source()
self.install(self.spec, self.prefix) self.install(self.spec, self.prefix)
# Ensure that something was actually installed. # Ensure that something was actually installed.
@ -994,16 +921,13 @@ def do_activate(self, **kwargs):
self._sanity_check_extension() self._sanity_check_extension()
force = kwargs.get('force', False) force = kwargs.get('force', False)
# TODO: get rid of this normalize - DAG handling. spack.install_layout.check_extension_conflict(
self.spec.normalize() self.extendee_spec, self.spec)
spack.install_layout.check_extension_conflict(self.extendee_spec, self.spec)
# Activate any package dependencies that are also extensions.
if not force: if not force:
for spec in self.spec.traverse(root=False): for spec in self.spec.traverse(root=False):
if spec.package.extends(self.extendee_spec): if spec.package.extends(self.extendee_spec):
# TODO: fix this normalize() requirement -- revisit DAG handling.
spec.package.spec.normalize()
if not spec.package.activated: if not spec.package.activated:
spec.package.do_activate(**kwargs) spec.package.do_activate(**kwargs)
@ -1031,6 +955,7 @@ def ignore(filename):
conflict = tree.find_conflict(self.prefix, ignore=ignore) conflict = tree.find_conflict(self.prefix, ignore=ignore)
if conflict: if conflict:
raise ExtensionConflictError(conflict) raise ExtensionConflictError(conflict)
tree.merge(self.prefix, ignore=ignore) tree.merge(self.prefix, ignore=ignore)
@ -1237,13 +1162,6 @@ def __init__(self, message, long_msg=None):
super(PackageError, self).__init__(message, long_msg) super(PackageError, self).__init__(message, long_msg)
class InvalidPackageDependencyError(PackageError):
"""Raised when package specification is inconsistent with requirements of
its dependencies."""
def __init__(self, message):
super(InvalidPackageDependencyError, self).__init__(message)
class PackageVersionError(PackageError): class PackageVersionError(PackageError):
"""Raised when a version URL cannot automatically be determined.""" """Raised when a version URL cannot automatically be determined."""
def __init__(self, version): def __init__(self, version):

@ -1,215 +0,0 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""
This package contains relationships that can be defined among packages.
Relations are functions that can be called inside a package definition,
for example:
class OpenMPI(Package):
depends_on("hwloc")
provides("mpi")
...
The available relations are:
depends_on
Above, the OpenMPI package declares that it "depends on" hwloc. This means
that the hwloc package needs to be installed before OpenMPI can be
installed. When a user runs 'spack install openmpi', spack will fetch
hwloc and install it first.
provides
This is useful when more than one package can satisfy a dependence. Above,
OpenMPI declares that it "provides" mpi. Other implementations of the MPI
interface, like mvapich and mpich, also provide mpi, e.g.:
class Mvapich(Package):
provides("mpi")
...
class Mpich(Package):
provides("mpi")
...
Instead of depending on openmpi, mvapich, or mpich, another package can
declare that it depends on "mpi":
class Mpileaks(Package):
depends_on("mpi")
...
Now the user can pick which MPI they would like to build with when they
install mpileaks. For example, the user could install 3 instances of
mpileaks, one for each MPI version, by issuing these three commands:
spack install mpileaks ^openmpi
spack install mpileaks ^mvapich
spack install mpileaks ^mpich
"""
__all__ = [ 'depends_on', 'extends', 'provides', 'patch', 'version' ]
import re
import inspect
from llnl.util.lang import *
import spack
import spack.spec
import spack.error
import spack.url
from spack.version import Version
from spack.patch import Patch
from spack.spec import Spec, parse_anonymous_spec
def version(ver, checksum=None, **kwargs):
"""Adds a version and metadata describing how to fetch it.
Metadata is just stored as a dict in the package's versions
dictionary. Package must turn it into a valid fetch strategy
later.
"""
pkg = caller_locals()
versions = pkg.setdefault('versions', {})
# special case checksum for backward compatibility
if checksum:
kwargs['md5'] = checksum
# Store the kwargs for the package to use later when constructing
# a fetch strategy.
versions[Version(ver)] = kwargs
def depends_on(*specs):
"""Adds a dependencies local variable in the locals of
the calling class, based on args. """
pkg = get_calling_package_name()
clocals = caller_locals()
dependencies = clocals.setdefault('dependencies', {})
for string in specs:
for spec in spack.spec.parse(string):
if pkg == spec.name:
raise CircularReferenceError('depends_on', pkg)
dependencies[spec.name] = spec
def extends(spec, **kwargs):
"""Same as depends_on, but dependency is symlinked into parent prefix.
This is for Python and other language modules where the module
needs to be installed into the prefix of the Python installation.
Spack handles this by installing modules into their own prefix,
but allowing ONE module version to be symlinked into a parent
Python install at a time.
keyword arguments can be passed to extends() so that extension
packages can pass parameters to the extendee's extension
mechanism.
"""
pkg = get_calling_package_name()
clocals = caller_locals()
dependencies = clocals.setdefault('dependencies', {})
extendees = clocals.setdefault('extendees', {})
if extendees:
raise RelationError("Packages can extend at most one other package.")
spec = Spec(spec)
if pkg == spec.name:
raise CircularReferenceError('extends', pkg)
dependencies[spec.name] = spec
extendees[spec.name] = (spec, kwargs)
def provides(*specs, **kwargs):
"""Allows packages to provide a virtual dependency. If a package provides
'mpi', other packages can declare that they depend on "mpi", and spack
can use the providing package to satisfy the dependency.
"""
pkg = get_calling_package_name()
spec_string = kwargs.get('when', pkg)
provider_spec = parse_anonymous_spec(spec_string, pkg)
provided = caller_locals().setdefault("provided", {})
for string in specs:
for provided_spec in spack.spec.parse(string):
if pkg == provided_spec.name:
raise CircularReferenceError('provides', pkg)
provided[provided_spec] = provider_spec
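# For example (illustrative version ranges), an MPI implementation could say:
#
#     provides('mpi@:2', when='@1.6.5:')
#
# meaning it satisfies the virtual 'mpi' spec only when its own spec matches
# the 'when' condition.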
def patch(url_or_filename, **kwargs):
"""Packages can declare patches to apply to source. You can
optionally provide a when spec to indicate that a particular
patch should only be applied when the package's spec meets
certain conditions (e.g. a particular version).
"""
pkg = get_calling_package_name()
level = kwargs.get('level', 1)
when_spec = parse_anonymous_spec(kwargs.get('when', pkg), pkg)
patches = caller_locals().setdefault('patches', {})
if when_spec not in patches:
patches[when_spec] = [Patch(pkg, url_or_filename, level)]
else:
# if this spec is identical to some other, then append this
# patch to the existing list.
patches[when_spec].append(Patch(pkg, url_or_filename, level))
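# For example (illustrative file name and condition):
#
#     patch('fix-configure.patch', when='@2.0', level=0)
#
# registers the patch with level 0, applied only to specs that satisfy
# the '@2.0' condition.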
def conflicts(*specs):
"""Packages can declare conflicts with other packages.
This can be as specific as you like: use regular spec syntax.
NOT YET IMPLEMENTED.
"""
# TODO: implement conflicts
pass
class RelationError(spack.error.SpackError):
"""This is raised when something is wrong with a package relation."""
def __init__(self, relation, message):
super(RelationError, self).__init__(message)
self.relation = relation
class ScopeError(RelationError):
"""This is raised when a relation is called from outside a spack package."""
def __init__(self, relation):
super(ScopeError, self).__init__(
relation,
"Must invoke '%s' from inside a class definition!" % relation)
class CircularReferenceError(RelationError):
"""This is raised when something depends on itself."""
def __init__(self, relation, package):
super(CircularReferenceError, self).__init__(
relation,
"Package '%s' cannot pass itself to %s." % (package, relation))
self.package = package
View file
@ -1,5 +1,5 @@
############################################################################## ##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC. # Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory. # Produced at the Lawrence Livermore National Laboratory.
# #
# This file is part of Spack. # This file is part of Spack.
@ -93,8 +93,11 @@
import sys import sys
import itertools import itertools
import hashlib import hashlib
import base64
from StringIO import StringIO from StringIO import StringIO
from operator import attrgetter from operator import attrgetter
from external import yaml
from external.yaml.error import MarkedYAMLError
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.lang import * from llnl.util.lang import *
@ -110,6 +113,9 @@
from spack.util.prefix import Prefix from spack.util.prefix import Prefix
from spack.virtual import ProviderIndex from spack.virtual import ProviderIndex
# Valid pattern for an identifier in Spack
identifier_re = r'\w[\w-]*'
# Convenient names for color formats so that other things can use them # Convenient names for color formats so that other things can use them
compiler_color = '@g' compiler_color = '@g'
version_color = '@c' version_color = '@c'
@ -117,6 +123,7 @@
enabled_variant_color = '@B' enabled_variant_color = '@B'
disabled_variant_color = '@r' disabled_variant_color = '@r'
dependency_color = '@.' dependency_color = '@.'
hash_color = '@K'
"""This map determines the coloring of specs when using color output. """This map determines the coloring of specs when using color output.
We make the fields different colors to enhance readability. We make the fields different colors to enhance readability.
@ -126,7 +133,8 @@
'=' : architecture_color, '=' : architecture_color,
'+' : enabled_variant_color, '+' : enabled_variant_color,
'~' : disabled_variant_color, '~' : disabled_variant_color,
'^' : dependency_color } '^' : dependency_color,
'#' : hash_color }
"""Regex used for splitting by spec field separators.""" """Regex used for splitting by spec field separators."""
_separators = '[%s]' % ''.join(color_formats.keys()) _separators = '[%s]' % ''.join(color_formats.keys())
@ -214,20 +222,24 @@ def _autospec(self, compiler_spec_like):
return CompilerSpec(compiler_spec_like) return CompilerSpec(compiler_spec_like)
def satisfies(self, other): def satisfies(self, other, strict=False):
other = self._autospec(other) other = self._autospec(other)
return (self.name == other.name and return (self.name == other.name and
self.versions.satisfies(other.versions)) self.versions.satisfies(other.versions, strict=strict))
def constrain(self, other): def constrain(self, other):
"""Intersect self's versions with other.
Return whether the CompilerSpec changed.
"""
other = self._autospec(other) other = self._autospec(other)
# ensure that other will actually constrain this spec. # ensure that other will actually constrain this spec.
if not other.satisfies(self): if not other.satisfies(self):
raise UnsatisfiableCompilerSpecError(other, self) raise UnsatisfiableCompilerSpecError(other, self)
self.versions.intersect(other.versions) return self.versions.intersect(other.versions)
@property @property
@ -255,6 +267,18 @@ def _cmp_key(self):
return (self.name, self.versions) return (self.name, self.versions)
def to_dict(self):
d = {'name' : self.name}
d.update(self.versions.to_dict())
return { 'compiler' : d }
@staticmethod
def from_dict(d):
d = d['compiler']
return CompilerSpec(d['name'], VersionList.from_dict(d))
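# Round-trip sketch (illustrative compiler): from_dict(to_dict()) is meant to
# reconstruct an equal CompilerSpec, e.g. for cs = CompilerSpec('gcc@4.5.0'):
#     assert CompilerSpec.from_dict(cs.to_dict()) == cs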
def __str__(self): def __str__(self):
out = self.name out = self.name
if self.versions and self.versions != _any_version: if self.versions and self.versions != _any_version:
@ -267,7 +291,7 @@ def __repr__(self):
@key_ordering @key_ordering
class Variant(object): class VariantSpec(object):
"""Variants are named, build-time options for a package. Names depend """Variants are named, build-time options for a package. Names depend
on the particular package being built, and each named variant can on the particular package being built, and each named variant can
be enabled or disabled. be enabled or disabled.
@ -282,7 +306,7 @@ def _cmp_key(self):
def copy(self): def copy(self):
return Variant(self.name, self.enabled) return VariantSpec(self.name, self.enabled)
def __str__(self): def __str__(self):
@ -291,9 +315,52 @@ def __str__(self):
class VariantMap(HashableMap): class VariantMap(HashableMap):
def satisfies(self, other): def __init__(self, spec):
return all(self[key].enabled == other[key].enabled super(VariantMap, self).__init__()
for key in other if key in self) self.spec = spec
def satisfies(self, other, strict=False):
if strict or self.spec._concrete:
return all(k in self and self[k].enabled == other[k].enabled
for k in other)
else:
return all(self[k].enabled == other[k].enabled
for k in other if k in self)
def constrain(self, other):
"""Add all variants in other that aren't in self to self.
Raises an error if any common variants don't match.
Return whether the spec changed.
"""
if other.spec._concrete:
for k in self:
if k not in other:
raise UnsatisfiableVariantSpecError(self[k], '<absent>')
changed = False
for k in other:
if k in self:
if self[k].enabled != other[k].enabled:
raise UnsatisfiableVariantSpecError(self[k], other[k])
else:
self[k] = other[k].copy()
changed = True
return changed
@property
def concrete(self):
return self.spec._concrete or all(
v in self for v in self.spec.package.variants)
def copy(self):
clone = VariantMap(None)
for name, variant in self.items():
clone[name] = variant.copy()
return clone
def __str__(self): def __str__(self):
@ -340,10 +407,11 @@ def __init__(self, spec_like, *dep_like, **kwargs):
self.name = other.name self.name = other.name
self.dependents = other.dependents self.dependents = other.dependents
self.versions = other.versions self.versions = other.versions
self.variants = other.variants
self.architecture = other.architecture self.architecture = other.architecture
self.compiler = other.compiler self.compiler = other.compiler
self.dependencies = other.dependencies self.dependencies = other.dependencies
self.variants = other.variants
self.variants.spec = self
# Specs are by default not assumed to be normal, but in some # Specs are by default not assumed to be normal, but in some
# cases we've read them from a file want to assume normal. # cases we've read them from a file want to assume normal.
@ -372,7 +440,7 @@ def _add_variant(self, name, enabled):
"""Called by the parser to add a variant.""" """Called by the parser to add a variant."""
if name in self.variants: raise DuplicateVariantError( if name in self.variants: raise DuplicateVariantError(
"Cannot specify variant '%s' twice" % name) "Cannot specify variant '%s' twice" % name)
self.variants[name] = Variant(name, enabled) self.variants[name] = VariantSpec(name, enabled)
def _set_compiler(self, compiler): def _set_compiler(self, compiler):
@ -436,14 +504,15 @@ def virtual(self):
@property @property
def concrete(self): def concrete(self):
"""A spec is concrete if it can describe only ONE build of a package. """A spec is concrete if it can describe only ONE build of a package.
If any of the name, version, architecture, compiler, or dependencies If any of the name, version, architecture, compiler,
are ambiguous, then it is not concrete. variants, or dependencies are ambiguous, then it is not concrete.
""" """
if self._concrete: if self._concrete:
return True return True
self._concrete = bool(not self.virtual self._concrete = bool(not self.virtual
and self.versions.concrete and self.versions.concrete
and self.variants.concrete
and self.architecture and self.architecture
and self.compiler and self.compiler.concrete and self.compiler and self.compiler.concrete
and self.dependencies.concrete) and self.dependencies.concrete)
@ -564,18 +633,91 @@ def prefix(self):
return Prefix(spack.install_layout.path_for_spec(self)) return Prefix(spack.install_layout.path_for_spec(self))
def dep_hash(self, length=None): def dag_hash(self, length=None):
"""Return a hash representing all dependencies of this spec """Return a hash of the entire spec DAG, including connectivity."""
(direct and indirect). yaml_text = yaml.dump(
self.to_node_dict(), default_flow_style=True, width=sys.maxint)
sha = hashlib.sha1(yaml_text)
return base64.b32encode(sha.digest()).lower()[:length]
def to_node_dict(self):
d = {
'variants' : dict(
(name,v.enabled) for name, v in self.variants.items()),
'arch' : self.architecture,
'dependencies' : dict((d, self.dependencies[d].dag_hash())
for d in sorted(self.dependencies))
}
if self.compiler:
d.update(self.compiler.to_dict())
else:
d['compiler'] = None
d.update(self.versions.to_dict())
return { self.name : d }
def to_yaml(self, stream=None):
node_list = []
for s in self.traverse(order='pre'):
node = s.to_node_dict()
node[s.name]['hash'] = s.dag_hash()
node_list.append(node)
return yaml.dump({ 'spec' : node_list },
stream=stream, default_flow_style=False)
@staticmethod
def from_node_dict(node):
name = next(iter(node))
node = node[name]
spec = Spec(name)
spec.versions = VersionList.from_dict(node)
spec.architecture = node['arch']
if node['compiler'] is None:
spec.compiler = None
else:
spec.compiler = CompilerSpec.from_dict(node)
for name, enabled in node['variants'].items():
spec.variants[name] = VariantSpec(name, enabled)
return spec
@staticmethod
def from_yaml(stream):
"""Construct a spec from YAML.
Parameters:
stream -- string or file object to read from.
TODO: currently discards hashes. Include hashes when they
represent more than the DAG does.
If you want this hash to be consistent, you should
concretize the spec first so that it is not ambiguous.
""" """
sha = hashlib.sha1() deps = {}
sha.update(self.dep_string()) spec = None
full_hash = sha.hexdigest()
return full_hash[:length] try:
yfile = yaml.load(stream)
except MarkedYAMLError, e:
raise SpackYAMLError("error parsing YMAL spec:", str(e))
for node in yfile['spec']:
name = next(iter(node))
dep = Spec.from_node_dict(node)
if not spec:
spec = dep
deps[dep.name] = dep
for node in yfile['spec']:
name = next(iter(node))
for dep_name in node[name]['dependencies']:
deps[name].dependencies[dep_name] = deps[dep_name]
return spec
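# A minimal round-trip sketch (assuming a spec that concretizes cleanly):
#
#     s = Spec('libelf')
#     s.concretize()
#     text = s.to_yaml()              # one node dict per package in the DAG
#     t = Spec.from_yaml(text)        # rebuilds nodes, then reconnects deps
#     assert s.dag_hash() == t.dag_hash()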
def _concretize_helper(self, presets=None, visited=None): def _concretize_helper(self, presets=None, visited=None):
@ -604,6 +746,7 @@ def _concretize_helper(self, presets=None, visited=None):
spack.concretizer.concretize_architecture(self) spack.concretizer.concretize_architecture(self)
spack.concretizer.concretize_compiler(self) spack.concretizer.concretize_compiler(self)
spack.concretizer.concretize_version(self) spack.concretizer.concretize_version(self)
spack.concretizer.concretize_variants(self)
presets[self.name] = self presets[self.name] = self
visited.add(self.name) visited.add(self.name)
@ -736,80 +879,156 @@ def flatten(self):
self._add_dependency(dep) self._add_dependency(dep)
def _evaluate_dependency_conditions(self, name):
"""Evaluate all the conditions on a dependency with this name.
If the package depends on <name> in this configuration, return
the dependency. If no conditions are True (and we don't
depend on it), return None.
"""
pkg = spack.db.get(self.name)
conditions = pkg.dependencies[name]
# evaluate when specs to figure out constraints on the dependency.
dep = None
for when_spec, dep_spec in conditions.items():
sat = self.satisfies(when_spec, strict=True)
if sat:
if dep is None:
dep = Spec(name)
try:
dep.constrain(dep_spec)
except UnsatisfiableSpecError, e:
e.message = ("Conflicting conditional dependencies on package "
"%s for spec %s" % (self.name, self))
raise e
return dep
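# For reference, pkg.dependencies[name] maps condition ('when') specs to
# dependency specs, e.g. (illustrative):
#     { Spec('optional-dep-test+mpi') : Spec('mpi') }
# so the dependency is merged only when this spec satisfies one of the keys.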
def _find_provider(self, vdep, provider_index):
"""Find provider for a virtual spec in the provider index.
Raise an exception if there is a conflicting virtual
dependency already in this spec.
"""
assert(vdep.virtual)
providers = provider_index.providers_for(vdep)
# If there is a provider for the vpkg, then use that instead of
# the virtual package.
if providers:
# Can't have multiple providers for the same thing in one spec.
if len(providers) > 1:
raise MultipleProviderError(vdep, providers)
return providers[0]
else:
# The user might have required something insufficient for
# pkg_dep -- so we'll get a conflict. e.g., user asked for
# mpi@:1.1 but some package required mpi@2.1:.
required = provider_index.providers_for(vdep.name)
if len(required) > 1:
raise MultipleProviderError(vdep, required)
elif required:
raise UnsatisfiableProviderSpecError(required[0], vdep)
def _merge_dependency(self, dep, visited, spec_deps, provider_index):
"""Merge the dependency into this spec.
This is the core of the normalize() method. There are a few basic steps:
* If dep is virtual, evaluate whether it corresponds to an
existing concrete dependency, and merge if so.
* If it's real and it provides some virtual dep, see if it provides
what some virtual dependency wants and merge if so.
* Finally, if none of the above, merge dependency and its
constraints into this spec.
This method returns True if the spec was changed, False otherwise.
"""
changed = False
# If it's a virtual dependency, try to find a provider and
# merge that.
if dep.virtual:
visited.add(dep.name)
provider = self._find_provider(dep, provider_index)
if provider:
dep = provider
else:
# if it's a real dependency, check whether it provides
# something already required in the spec.
index = ProviderIndex([dep], restrict=True)
for vspec in (v for v in spec_deps.values() if v.virtual):
if index.providers_for(vspec):
vspec._replace_with(dep)
del spec_deps[vspec.name]
changed = True
else:
required = index.providers_for(vspec.name)
if required:
raise UnsatisfiableProviderSpecError(required[0], dep)
provider_index.update(dep)
# If the spec isn't already in the set of dependencies, clone
# it from the package description.
if dep.name not in spec_deps:
spec_deps[dep.name] = dep.copy()
# Constrain package information with spec info
try:
changed |= spec_deps[dep.name].constrain(dep)
except UnsatisfiableSpecError, e:
e.message = "Invalid spec: '%s'. "
e.message += "Package %s requires %s %s, but spec asked for %s"
e.message %= (spec_deps[dep.name], dep.name, e.constraint_type,
e.required, e.provided)
raise e
# Add merged spec to my deps and recurse
dependency = spec_deps[dep.name]
if dep.name not in self.dependencies:
self._add_dependency(dependency)
changed = True
changed |= dependency._normalize_helper(visited, spec_deps, provider_index)
return changed
def _normalize_helper(self, visited, spec_deps, provider_index): def _normalize_helper(self, visited, spec_deps, provider_index):
"""Recursive helper function for _normalize.""" """Recursive helper function for _normalize."""
if self.name in visited: if self.name in visited:
return return False
visited.add(self.name) visited.add(self.name)
# if we descend into a virtual spec, there's nothing more # if we descend into a virtual spec, there's nothing more
# to normalize. Concretize will finish resolving it later. # to normalize. Concretize will finish resolving it later.
if self.virtual: if self.virtual:
return return False
# Combine constraints from package deps with constraints from
# the spec, until nothing changes.
any_change = False
changed = True
# Combine constraints from package dependencies with
# constraints on the spec's dependencies.
pkg = spack.db.get(self.name) pkg = spack.db.get(self.name)
for name, pkg_dep in self.package.dependencies.items(): while changed:
# If it's a virtual dependency, try to find a provider changed = False
if pkg_dep.virtual: for dep_name in pkg.dependencies:
providers = provider_index.providers_for(pkg_dep) # Do we depend on dep_name? If so pkg_dep is not None.
pkg_dep = self._evaluate_dependency_conditions(dep_name)
# If there is a provider for the vpkg, then use that instead of # If pkg_dep is a dependency, merge it.
# the virtual package. if pkg_dep:
if providers: changed |= self._merge_dependency(
# Can't have multiple providers for the same thing in one spec. pkg_dep, visited, spec_deps, provider_index)
if len(providers) > 1:
raise MultipleProviderError(pkg_dep, providers)
pkg_dep = providers[0] any_change |= changed
name = pkg_dep.name
else: return any_change
# The user might have required something insufficient for
# pkg_dep -- so we'll get a conflict. e.g., user asked for
# mpi@:1.1 but some package required mpi@2.1:.
required = provider_index.providers_for(name)
if len(required) > 1:
raise MultipleProviderError(pkg_dep, required)
elif required:
raise UnsatisfiableProviderSpecError(
required[0], pkg_dep)
else:
# if it's a real dependency, check whether it provides something
# already required in the spec.
index = ProviderIndex([pkg_dep], restrict=True)
for vspec in (v for v in spec_deps.values() if v.virtual):
if index.providers_for(vspec):
vspec._replace_with(pkg_dep)
del spec_deps[vspec.name]
else:
required = index.providers_for(vspec.name)
if required:
raise UnsatisfiableProviderSpecError(
required[0], pkg_dep)
provider_index.update(pkg_dep)
if name not in spec_deps:
# If the spec doesn't reference a dependency that this package
# needs, then clone it from the package description.
spec_deps[name] = pkg_dep.copy()
try:
# Constrain package information with spec info
spec_deps[name].constrain(pkg_dep)
except UnsatisfiableSpecError, e:
e.message = "Invalid spec: '%s'. "
e.message += "Package %s requires %s %s, but spec asked for %s"
e.message %= (spec_deps[name], name, e.constraint_type,
e.required, e.provided)
raise e
# Add merged spec to my deps and recurse
dependency = spec_deps[name]
self._add_dependency(dependency)
dependency._normalize_helper(visited, spec_deps, provider_index)
def normalize(self, **kwargs): def normalize(self, **kwargs):
@ -836,19 +1055,14 @@ def normalize(self, **kwargs):
# Ensure first that all packages & compilers in the DAG exist. # Ensure first that all packages & compilers in the DAG exist.
self.validate_names() self.validate_names()
# Ensure that the package & dep descriptions are consistent & sane
if not self.virtual:
self.package.validate_dependencies()
# Get all the dependencies into one DependencyMap # Get all the dependencies into one DependencyMap
spec_deps = self.flat_dependencies(copy=False) spec_deps = self.flat_dependencies(copy=False)
# Figure out which of the user-provided deps provide virtual deps. # Initialize index of virtual dependency providers
# Remove virtual deps that are already provided by something in the spec
spec_packages = [d.package for d in spec_deps.values() if not d.virtual]
index = ProviderIndex(spec_deps.values(), restrict=True) index = ProviderIndex(spec_deps.values(), restrict=True)
# traverse the package DAG and fill out dependencies according
# to package files & their 'when' specs
visited = set() visited = set()
self._normalize_helper(visited, spec_deps, index) self._normalize_helper(visited, spec_deps, index)
@ -856,12 +1070,6 @@ def normalize(self, **kwargs):
# actually deps of this package. Raise an error. # actually deps of this package. Raise an error.
extra = set(spec_deps.keys()).difference(visited) extra = set(spec_deps.keys()).difference(visited)
# Also subtract out all the packags that provide a needed vpkg
vdeps = [v for v in self.package.virtual_dependencies()]
vpkg_providers = index.providers_for(*vdeps)
extra.difference_update(p.name for p in vpkg_providers)
# Anything left over is not a valid part of the spec. # Anything left over is not a valid part of the spec.
if extra: if extra:
raise InvalidDependencyException( raise InvalidDependencyException(
@ -893,10 +1101,18 @@ def validate_names(self):
if not compilers.supported(spec.compiler): if not compilers.supported(spec.compiler):
raise UnsupportedCompilerError(spec.compiler.name) raise UnsupportedCompilerError(spec.compiler.name)
# Ensure that variants all exist.
for vname, variant in spec.variants.items():
if vname not in spec.package.variants:
raise UnknownVariantError(spec.name, vname)
def constrain(self, other, **kwargs):
def constrain(self, other, deps=True):
"""Merge the constraints of other with self.
Returns True if the spec changed as a result, False if not.
"""
other = self._autospec(other) other = self._autospec(other)
constrain_deps = kwargs.get('deps', True)
if not self.name == other.name: if not self.name == other.name:
raise UnsatisfiableSpecNameError(self.name, other.name) raise UnsatisfiableSpecNameError(self.name, other.name)
@ -915,23 +1131,32 @@ def constrain(self, other, **kwargs):
raise UnsatisfiableArchitectureSpecError(self.architecture, raise UnsatisfiableArchitectureSpecError(self.architecture,
other.architecture) other.architecture)
changed = False
if self.compiler is not None and other.compiler is not None: if self.compiler is not None and other.compiler is not None:
self.compiler.constrain(other.compiler) changed |= self.compiler.constrain(other.compiler)
elif self.compiler is None: elif self.compiler is None:
changed |= (self.compiler != other.compiler)
self.compiler = other.compiler self.compiler = other.compiler
self.versions.intersect(other.versions) changed |= self.versions.intersect(other.versions)
self.variants.update(other.variants) changed |= self.variants.constrain(other.variants)
self.architecture = self.architecture or other.architecture
if constrain_deps: old = self.architecture
self._constrain_dependencies(other) self.architecture = self.architecture or other.architecture
changed |= (self.architecture != old)
if deps:
changed |= self._constrain_dependencies(other)
return changed
def _constrain_dependencies(self, other): def _constrain_dependencies(self, other):
"""Apply constraints of other spec's dependencies to this spec.""" """Apply constraints of other spec's dependencies to this spec."""
other = self._autospec(other)
if not self.dependencies or not other.dependencies: if not self.dependencies or not other.dependencies:
return return False
# TODO: might want more detail than this, e.g. specific deps # TODO: might want more detail than this, e.g. specific deps
# in violation. if this becomes a priority get rid of this # in violation. if this becomes a priority get rid of this
@ -940,12 +1165,17 @@ def _constrain_dependencies(self, other):
raise UnsatisfiableDependencySpecError(other, self) raise UnsatisfiableDependencySpecError(other, self)
# Handle common first-order constraints directly # Handle common first-order constraints directly
changed = False
for name in self.common_dependencies(other): for name in self.common_dependencies(other):
self[name].constrain(other[name], deps=False) changed |= self[name].constrain(other[name], deps=False)
# Update with additional constraints from other spec # Update with additional constraints from other spec
for name in other.dep_difference(self): for name in other.dep_difference(self):
self._add_dependency(other[name].copy()) self._add_dependency(other[name].copy())
changed = True
return changed
def common_dependencies(self, other): def common_dependencies(self, other):
@ -979,44 +1209,74 @@ def _autospec(self, spec_like):
return parse_anonymous_spec(spec_like, self.name) return parse_anonymous_spec(spec_like, self.name)
def satisfies(self, other, **kwargs): def satisfies(self, other, deps=True, strict=False):
"""Determine if this spec satisfies all constraints of another.
There are two senses for satisfies:
* `loose` (default): the absence of a constraint in self
implies that it *could* be satisfied by other, so we only
check that there are no conflicts with other for
constraints that this spec actually has.
* `strict`: strict means that we *must* meet all the
constraints specified on other.
"""
other = self._autospec(other) other = self._autospec(other)
satisfy_deps = kwargs.get('deps', True)
# First thing we care about is whether the name matches # First thing we care about is whether the name matches
if self.name != other.name: if self.name != other.name:
return False return False
# All these attrs have satisfies criteria of their own, if self.versions and other.versions:
# but can be None to indicate no constraints. if not self.versions.satisfies(other.versions, strict=strict):
for s, o in ((self.versions, other.versions),
(self.variants, other.variants),
(self.compiler, other.compiler)):
if s and o and not s.satisfies(o):
return False return False
elif strict and (self.versions or other.versions):
return False
# None indicates no constraints when not strict.
if self.compiler and other.compiler:
if not self.compiler.satisfies(other.compiler, strict=strict):
return False
elif strict and (other.compiler and not self.compiler):
return False
if not self.variants.satisfies(other.variants, strict=strict):
return False
# Architecture satisfaction is currently just string equality. # Architecture satisfaction is currently just string equality.
# Can be None for unconstrained, though. # If not strict, None means unconstrained.
if (self.architecture and other.architecture and if self.architecture and other.architecture:
self.architecture != other.architecture): if self.architecture != other.architecture:
return False
elif strict and (other.architecture and not self.architecture):
return False return False
# If we need to descend into dependencies, do it, otherwise we're done. # If we need to descend into dependencies, do it, otherwise we're done.
if satisfy_deps: if deps:
return self.satisfies_dependencies(other) return self.satisfies_dependencies(other, strict=strict)
else: else:
return True return True
def satisfies_dependencies(self, other): def satisfies_dependencies(self, other, strict=False):
"""This checks constraints on common dependencies against each other.""" """This checks constraints on common dependencies against each other."""
# if either spec doesn't restrict dependencies then both are compatible. other = self._autospec(other)
if not self.dependencies or not other.dependencies:
if strict:
if other.dependencies and not self.dependencies:
return False
if not all(dep in self.dependencies for dep in other.dependencies):
return False
elif not self.dependencies or not other.dependencies:
# if either spec doesn't restrict dependencies then both are compatible.
return True return True
# Handle first-order constraints directly # Handle first-order constraints directly
for name in self.common_dependencies(other): for name in self.common_dependencies(other):
if not self[name].satisfies(other[name]): if not self[name].satisfies(other[name], deps=False):
return False return False
# For virtual dependencies, we need to dig a little deeper. # For virtual dependencies, we need to dig a little deeper.
@ -1061,11 +1321,12 @@ def _dup(self, other, **kwargs):
# Local node attributes get copied first. # Local node attributes get copied first.
self.name = other.name self.name = other.name
self.versions = other.versions.copy() self.versions = other.versions.copy()
self.variants = other.variants.copy()
self.architecture = other.architecture self.architecture = other.architecture
self.compiler = other.compiler.copy() if other.compiler else None self.compiler = other.compiler.copy() if other.compiler else None
self.dependents = DependencyMap() self.dependents = DependencyMap()
self.dependencies = DependencyMap() self.dependencies = DependencyMap()
self.variants = other.variants.copy()
self.variants.spec = self
# If we copy dependencies, preserve DAG structure in the new spec # If we copy dependencies, preserve DAG structure in the new spec
if kwargs.get('deps', True): if kwargs.get('deps', True):
@ -1117,7 +1378,7 @@ def __contains__(self, spec):
""" """
spec = self._autospec(spec) spec = self._autospec(spec)
for s in self.traverse(): for s in self.traverse():
if s.satisfies(spec): if s.satisfies(spec, strict=True):
return True return True
return False return False
@ -1208,7 +1469,7 @@ def format(self, format_string='$_$@$%@$+$=', **kwargs):
$%@ Compiler & compiler version $%@ Compiler & compiler version
$+ Options $+ Options
$= Architecture $= Architecture
$# Dependencies' 8-char sha1 prefix $# 7-char prefix of DAG hash
$$ $ $$ $
Optionally you can provide a width, e.g. $20_ for a 20-wide name. Optionally you can provide a width, e.g. $20_ for a 20-wide name.
@ -1264,8 +1525,7 @@ def write(s, c):
if self.architecture: if self.architecture:
write(fmt % (c + str(self.architecture)), c) write(fmt % (c + str(self.architecture)), c)
elif c == '#': elif c == '#':
if self.dependencies: out.write('-' + fmt % (self.dag_hash(7)))
out.write(fmt % ('-' + self.dep_hash(8)))
elif c == '$': elif c == '$':
if fmt != '': if fmt != '':
raise ValueError("Can't use format width with $$.") raise ValueError("Can't use format width with $$.")
@ -1274,7 +1534,8 @@ def write(s, c):
elif compiler: elif compiler:
if c == '@': if c == '@':
if self.compiler and self.compiler.versions: if (self.compiler and self.compiler.versions and
self.compiler.versions != _any_version):
write(c + str(self.compiler.versions), '%') write(c + str(self.compiler.versions), '%')
elif c == '$': elif c == '$':
escape = True escape = True
@ -1311,12 +1572,15 @@ def tree(self, **kwargs):
cover = kwargs.pop('cover', 'nodes') cover = kwargs.pop('cover', 'nodes')
indent = kwargs.pop('indent', 0) indent = kwargs.pop('indent', 0)
fmt = kwargs.pop('format', '$_$@$%@$+$=') fmt = kwargs.pop('format', '$_$@$%@$+$=')
prefix = kwargs.pop('prefix', None)
check_kwargs(kwargs, self.tree) check_kwargs(kwargs, self.tree)
out = "" out = ""
cur_id = 0 cur_id = 0
ids = {} ids = {}
for d, node in self.traverse(order='pre', cover=cover, depth=True): for d, node in self.traverse(order='pre', cover=cover, depth=True):
if prefix is not None:
out += prefix(node)
out += " " * indent out += " " * indent
if depth: if depth:
out += "%-4d" % d out += "%-4d" % d
@ -1354,6 +1618,8 @@ def __init__(self):
(r'\~', lambda scanner, val: self.token(OFF, val)), (r'\~', lambda scanner, val: self.token(OFF, val)),
(r'\%', lambda scanner, val: self.token(PCT, val)), (r'\%', lambda scanner, val: self.token(PCT, val)),
(r'\=', lambda scanner, val: self.token(EQ, val)), (r'\=', lambda scanner, val: self.token(EQ, val)),
# This is more liberal than identifier_re (see above).
# Checked by check_identifier() for better error messages.
(r'\w[\w.-]*', lambda scanner, val: self.token(ID, val)), (r'\w[\w.-]*', lambda scanner, val: self.token(ID, val)),
(r'\s+', lambda scanner, val: None)]) (r'\s+', lambda scanner, val: None)])
@ -1399,7 +1665,7 @@ def spec(self):
spec = Spec.__new__(Spec) spec = Spec.__new__(Spec)
spec.name = self.token.value spec.name = self.token.value
spec.versions = VersionList() spec.versions = VersionList()
spec.variants = VariantMap() spec.variants = VariantMap(spec)
spec.architecture = None spec.architecture = None
spec.compiler = None spec.compiler = None
spec.dependents = DependencyMap() spec.dependents = DependencyMap()
@ -1580,6 +1846,13 @@ def __init__(self, compiler_name):
"The '%s' compiler is not yet supported." % compiler_name) "The '%s' compiler is not yet supported." % compiler_name)
class UnknownVariantError(SpecError):
"""Raised when the same variant occurs in a spec twice."""
def __init__(self, pkg, variant):
super(UnknownVariantError, self).__init__(
"Package %s has no variant %s!" % (pkg, variant))
class DuplicateArchitectureError(SpecError): class DuplicateArchitectureError(SpecError):
"""Raised when the same architecture occurs in a spec twice.""" """Raised when the same architecture occurs in a spec twice."""
def __init__(self, message): def __init__(self, message):
@ -1683,3 +1956,7 @@ class UnsatisfiableDependencySpecError(UnsatisfiableSpecError):
def __init__(self, provided, required): def __init__(self, provided, required):
super(UnsatisfiableDependencySpecError, self).__init__( super(UnsatisfiableDependencySpecError, self).__init__(
provided, required, "dependency") provided, required, "dependency")
class SpackYAMLError(spack.error.SpackError):
def __init__(self, msg, yaml_error):
super(SpackYAMLError, self).__init__(msg, str(yaml_error))
View file
@ -344,13 +344,9 @@ def destroy(self):
def _get_mirrors(): def _get_mirrors():
"""Get mirrors from spack configuration.""" """Get mirrors from spack configuration."""
config = spack.config.get_config() config = spack.config.get_mirror_config()
return [val for name, val in config.iteritems()]
mirrors = []
sec_names = config.get_section_names('mirror')
for name in sec_names:
mirrors.append(config.get_value('mirror', name, 'url'))
return mirrors
def ensure_access(file=spack.stage_path): def ensure_access(file=spack.stage_path):
View file
@ -52,7 +52,9 @@
'mirror', 'mirror',
'url_extrapolate', 'url_extrapolate',
'cc', 'cc',
'link_tree'] 'link_tree',
'spec_yaml',
'optional_deps']
def list_tests(): def list_tests():
View file
@ -35,7 +35,13 @@ def check_spec(self, abstract, concrete):
self.assertEqual(abstract.versions, concrete.versions) self.assertEqual(abstract.versions, concrete.versions)
if abstract.variants: if abstract.variants:
self.assertEqual(abstract.versions, concrete.versions) for name in abstract.variants:
avariant = abstract.variants[name]
cvariant = concrete.variants[name]
self.assertEqual(avariant.enabled, cvariant.enabled)
for name in abstract.package.variants:
self.assertTrue(name in concrete.variants)
if abstract.compiler and abstract.compiler.concrete: if abstract.compiler and abstract.compiler.concrete:
self.assertEqual(abstract.compiler, concrete.compiler) self.assertEqual(abstract.compiler, concrete.compiler)
@ -66,6 +72,12 @@ def test_concretize_dag(self):
self.check_concretize('libelf') self.check_concretize('libelf')
def test_concretize_variant(self):
self.check_concretize('mpich+debug')
self.check_concretize('mpich~debug')
self.check_concretize('mpich')
def test_concretize_with_virtual(self): def test_concretize_with_virtual(self):
self.check_concretize('mpileaks ^mpi') self.check_concretize('mpileaks ^mpi')
self.check_concretize('mpileaks ^mpi@:1.1') self.check_concretize('mpileaks ^mpi@:1.1')
View file
@ -26,44 +26,49 @@
import shutil import shutil
import os import os
from tempfile import mkdtemp from tempfile import mkdtemp
import spack
from spack.packages import PackageDB
from spack.test.mock_packages_test import *
from spack.config import * class ConfigTest(MockPackagesTest):
def setUp(self):
self.initmock()
self.tmp_dir = mkdtemp('.tmp', 'spack-config-test-')
spack.config.config_scopes = [('test_low_priority', os.path.join(self.tmp_dir, 'low')),
('test_high_priority', os.path.join(self.tmp_dir, 'high'))]
class ConfigTest(unittest.TestCase): def tearDown(self):
self.cleanmock()
shutil.rmtree(self.tmp_dir, True)
@classmethod def check_config(self, comps):
def setUp(cls): config = spack.config.get_compilers_config()
cls.tmp_dir = mkdtemp('.tmp', 'spack-config-test-') compiler_list = ['cc', 'cxx', 'f77', 'f90']
for key in comps:
for c in compiler_list:
@classmethod if comps[key][c] == '/bad':
def tearDown(cls): continue
shutil.rmtree(cls.tmp_dir, True) self.assertEqual(comps[key][c], config[key][c])
def get_path(self):
return os.path.join(ConfigTest.tmp_dir, "spackconfig")
def test_write_key(self): def test_write_key(self):
config = SpackConfigParser(self.get_path()) a_comps = {"gcc@4.7.3" : { "cc" : "/gcc473", "cxx" : "/g++473", "f77" : None, "f90" : None },
config.set_value('compiler.cc', 'a') "gcc@4.5.0" : { "cc" : "/gcc450", "cxx" : "/g++450", "f77" : "/gfortran", "f90" : "/gfortran" },
config.set_value('compiler.cxx', 'b') "clang@3.3" : { "cc" : "/bad", "cxx" : "/bad", "f77" : "/bad", "f90" : "/bad" }}
config.set_value('compiler', 'gcc@4.7.3', 'cc', 'c')
config.set_value('compiler', 'gcc@4.7.3', 'cxx', 'd')
config.write()
config = SpackConfigParser(self.get_path()) b_comps = {"icc@10.0" : { "cc" : "/icc100", "cxx" : "/icc100", "f77" : None, "f90" : None },
"icc@11.1" : { "cc" : "/icc111", "cxx" : "/icp111", "f77" : "/ifort", "f90" : "/ifort" },
"clang@3.3" : { "cc" : "/clang", "cxx" : "/clang++", "f77" : None, "f90" : None}}
self.assertEqual(config.get_value('compiler.cc'), 'a') spack.config.add_to_compiler_config(a_comps, 'test_low_priority')
self.assertEqual(config.get_value('compiler.cxx'), 'b') spack.config.add_to_compiler_config(b_comps, 'test_high_priority')
self.assertEqual(config.get_value('compiler', 'gcc@4.7.3', 'cc'), 'c')
self.assertEqual(config.get_value('compiler', 'gcc@4.7.3', 'cxx'), 'd')
self.assertEqual(config.get_value('compiler', None, 'cc'), 'a') self.check_config(a_comps)
self.assertEqual(config.get_value('compiler', None, 'cxx'), 'b') self.check_config(b_comps)
self.assertEqual(config.get_value('compiler.gcc@4.7.3.cc'), 'c')
self.assertEqual(config.get_value('compiler.gcc@4.7.3.cxx'), 'd') spack.config.clear_config_caches()
self.check_config(a_comps)
self.check_config(b_comps)
self.assertRaises(NoOptionError, config.get_value, 'compiler', None, 'fc')
View file
@ -36,7 +36,11 @@
import spack import spack
from spack.spec import Spec from spack.spec import Spec
from spack.packages import PackageDB from spack.packages import PackageDB
from spack.directory_layout import SpecHashDirectoryLayout from spack.directory_layout import YamlDirectoryLayout
# number of packages to test (to reduce test time)
max_packages = 10
class DirectoryLayoutTest(unittest.TestCase): class DirectoryLayoutTest(unittest.TestCase):
"""Tests that a directory layout works correctly and produces a """Tests that a directory layout works correctly and produces a
@ -44,11 +48,11 @@ class DirectoryLayoutTest(unittest.TestCase):
def setUp(self): def setUp(self):
self.tmpdir = tempfile.mkdtemp() self.tmpdir = tempfile.mkdtemp()
self.layout = SpecHashDirectoryLayout(self.tmpdir) self.layout = YamlDirectoryLayout(self.tmpdir)
def tearDown(self): def tearDown(self):
shutil.rmtree(self.tmpdir, ignore_errors=True) #shutil.rmtree(self.tmpdir, ignore_errors=True)
self.layout = None self.layout = None
@ -59,7 +63,9 @@ def test_read_and_write_spec(self):
finally that the directory can be removed by the directory finally that the directory can be removed by the directory
layout. layout.
""" """
for pkg in spack.db.all_packages(): packages = list(spack.db.all_packages())[:max_packages]
for pkg in packages:
spec = pkg.spec spec = pkg.spec
# If a spec fails to concretize, just skip it. If it is a # If a spec fails to concretize, just skip it. If it is a
@ -69,7 +75,7 @@ def test_read_and_write_spec(self):
except: except:
continue continue
self.layout.make_path_for_spec(spec) self.layout.create_install_directory(spec)
install_dir = self.layout.path_for_spec(spec) install_dir = self.layout.path_for_spec(spec)
spec_path = self.layout.spec_file_path(spec) spec_path = self.layout.spec_file_path(spec)
@ -90,7 +96,7 @@ def test_read_and_write_spec(self):
# Ensure that specs that come out "normal" are really normal. # Ensure that specs that come out "normal" are really normal.
with closing(open(spec_path)) as spec_file: with closing(open(spec_path)) as spec_file:
read_separately = Spec(spec_file.read()) read_separately = Spec.from_yaml(spec_file.read())
read_separately.normalize() read_separately.normalize()
self.assertEqual(read_separately, spec_from_file) self.assertEqual(read_separately, spec_from_file)
@ -98,11 +104,11 @@ def test_read_and_write_spec(self):
read_separately.concretize() read_separately.concretize()
self.assertEqual(read_separately, spec_from_file) self.assertEqual(read_separately, spec_from_file)
# Make sure the dep hash of the read-in spec is the same # Make sure the hash of the read-in spec is the same
self.assertEqual(spec.dep_hash(), spec_from_file.dep_hash()) self.assertEqual(spec.dag_hash(), spec_from_file.dag_hash())
# Ensure directories are properly removed # Ensure directories are properly removed
self.layout.remove_path_for_spec(spec) self.layout.remove_install_directory(spec)
self.assertFalse(os.path.isdir(install_dir)) self.assertFalse(os.path.isdir(install_dir))
self.assertFalse(os.path.exists(install_dir)) self.assertFalse(os.path.exists(install_dir))
@ -120,12 +126,14 @@ def test_handle_unknown_package(self):
""" """
mock_db = PackageDB(spack.mock_packages_path) mock_db = PackageDB(spack.mock_packages_path)
not_in_mock = set(spack.db.all_package_names()).difference( not_in_mock = set.difference(
set(spack.db.all_package_names()),
set(mock_db.all_package_names())) set(mock_db.all_package_names()))
packages = list(not_in_mock)[:max_packages]
# Create all the packages that are not in mock. # Create all the packages that are not in mock.
installed_specs = {} installed_specs = {}
for pkg_name in not_in_mock: for pkg_name in packages:
spec = spack.db.get(pkg_name).spec spec = spack.db.get(pkg_name).spec
# If a spec fails to concretize, just skip it. If it is a # If a spec fails to concretize, just skip it. If it is a
@ -135,7 +143,7 @@ def test_handle_unknown_package(self):
except: except:
continue continue
self.layout.make_path_for_spec(spec) self.layout.create_install_directory(spec)
installed_specs[spec] = self.layout.path_for_spec(spec) installed_specs[spec] = self.layout.path_for_spec(spec)
tmp = spack.db tmp = spack.db
@ -144,12 +152,29 @@ def test_handle_unknown_package(self):
# Now check that even without the package files, we know # Now check that even without the package files, we know
# enough to read a spec from the spec file. # enough to read a spec from the spec file.
for spec, path in installed_specs.items(): for spec, path in installed_specs.items():
spec_from_file = self.layout.read_spec(join_path(path, '.spec')) spec_from_file = self.layout.read_spec(
join_path(path, '.spack', 'spec.yaml'))
# To satisfy these conditions, directory layouts need to # To satisfy these conditions, directory layouts need to
# read in concrete specs from their install dirs somehow. # read in concrete specs from their install dirs somehow.
self.assertEqual(path, self.layout.path_for_spec(spec_from_file)) self.assertEqual(path, self.layout.path_for_spec(spec_from_file))
self.assertEqual(spec, spec_from_file) self.assertEqual(spec, spec_from_file)
self.assertEqual(spec.dep_hash(), spec_from_file.dep_hash()) self.assertTrue(spec.eq_dag(spec_from_file))
self.assertEqual(spec.dag_hash(), spec_from_file.dag_hash())
spack.db = tmp spack.db = tmp
def test_find(self):
"""Test that finding specs within an install layout works."""
packages = list(spack.db.all_packages())[:max_packages]
installed_specs = {}
for pkg in packages:
spec = pkg.spec.concretized()
installed_specs[spec.name] = spec
self.layout.create_install_directory(spec)
found_specs = dict((s.name, s) for s in self.layout.all_specs())
for name, spec in found_specs.items():
self.assertTrue(name in found_specs)
self.assertTrue(found_specs[name].eq_dag(spec))
View file
@ -33,7 +33,7 @@
import spack import spack
from spack.stage import Stage from spack.stage import Stage
from spack.fetch_strategy import URLFetchStrategy from spack.fetch_strategy import URLFetchStrategy
from spack.directory_layout import SpecHashDirectoryLayout from spack.directory_layout import YamlDirectoryLayout
from spack.util.executable import which from spack.util.executable import which
from spack.test.mock_packages_test import * from spack.test.mock_packages_test import *
from spack.test.mock_repo import MockArchive from spack.test.mock_repo import MockArchive
@ -55,7 +55,7 @@ def setUp(self):
# installed pkgs and mock packages. # installed pkgs and mock packages.
self.tmpdir = tempfile.mkdtemp() self.tmpdir = tempfile.mkdtemp()
self.orig_layout = spack.install_layout self.orig_layout = spack.install_layout
spack.install_layout = SpecHashDirectoryLayout(self.tmpdir) spack.install_layout = YamlDirectoryLayout(self.tmpdir)
def tearDown(self): def tearDown(self):
View file
@ -31,29 +31,40 @@
def set_pkg_dep(pkg, spec): def set_pkg_dep(pkg, spec):
"""Alters dependence information for a pacakge. """Alters dependence information for a package.
Use this to mock up constraints. Use this to mock up constraints.
""" """
spec = Spec(spec) spec = Spec(spec)
spack.db.get(pkg).dependencies[spec.name] = spec spack.db.get(pkg).dependencies[spec.name] = { Spec(pkg) : spec }
class MockPackagesTest(unittest.TestCase): class MockPackagesTest(unittest.TestCase):
def setUp(self): def initmock(self):
# Use the mock packages database for these tests. This allows # Use the mock packages database for these tests. This allows
# us to set up contrived packages that don't interfere with # us to set up contrived packages that don't interfere with
# real ones. # real ones.
self.real_db = spack.db self.real_db = spack.db
spack.db = PackageDB(spack.mock_packages_path) spack.db = PackageDB(spack.mock_packages_path)
self.real_scopes = spack.config._scopes spack.config.clear_config_caches()
spack.config._scopes = { self.real_scopes = spack.config.config_scopes
'site' : spack.mock_site_config, spack.config.config_scopes = [
'user' : spack.mock_user_config } ('site', spack.mock_site_config),
('user', spack.mock_user_config)]
def cleanmock(self):
"""Restore the real packages path after any test."""
spack.db = self.real_db
spack.config.config_scopes = self.real_scopes
spack.config.clear_config_caches()
def setUp(self):
self.initmock()
def tearDown(self): def tearDown(self):
"""Restore the real packages path after any test.""" self.cleanmock()
spack.db = self.real_db
spack.config._scopes = self.real_scopes
View file
@ -0,0 +1,94 @@
##############################################################################
# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import unittest
import spack
from spack.spec import Spec, CompilerSpec
from spack.test.mock_packages_test import *
class ConcretizeTest(MockPackagesTest):
def check_normalize(self, spec_string, expected):
spec = Spec(spec_string)
spec.normalize()
self.assertEqual(spec, expected)
self.assertTrue(spec.eq_dag(expected))
def test_normalize_simple_conditionals(self):
self.check_normalize('optional-dep-test', Spec('optional-dep-test'))
self.check_normalize('optional-dep-test~a', Spec('optional-dep-test~a'))
self.check_normalize('optional-dep-test+a',
Spec('optional-dep-test+a', Spec('a')))
self.check_normalize('optional-dep-test@1.1',
Spec('optional-dep-test@1.1', Spec('b')))
self.check_normalize('optional-dep-test%intel',
Spec('optional-dep-test%intel', Spec('c')))
self.check_normalize('optional-dep-test%intel@64.1',
Spec('optional-dep-test%intel@64.1', Spec('c'), Spec('d')))
self.check_normalize('optional-dep-test%intel@64.1.2',
Spec('optional-dep-test%intel@64.1.2', Spec('c'), Spec('d')))
self.check_normalize('optional-dep-test%clang@35',
Spec('optional-dep-test%clang@35', Spec('e')))
def test_multiple_conditionals(self):
self.check_normalize('optional-dep-test+a@1.1',
Spec('optional-dep-test+a@1.1', Spec('a'), Spec('b')))
self.check_normalize('optional-dep-test+a%intel',
Spec('optional-dep-test+a%intel', Spec('a'), Spec('c')))
self.check_normalize('optional-dep-test@1.1%intel',
Spec('optional-dep-test@1.1%intel', Spec('b'), Spec('c')))
self.check_normalize('optional-dep-test@1.1%intel@64.1.2+a',
Spec('optional-dep-test@1.1%intel@64.1.2+a',
Spec('b'), Spec('a'), Spec('c'), Spec('d')))
self.check_normalize('optional-dep-test@1.1%clang@36.5+a',
Spec('optional-dep-test@1.1%clang@36.5+a',
Spec('b'), Spec('a'), Spec('e')))
def test_chained_mpi(self):
self.check_normalize('optional-dep-test-2+mpi',
Spec('optional-dep-test-2+mpi',
Spec('optional-dep-test+mpi',
Spec('mpi'))))
def test_transitive_chain(self):
# Each of these dependencies comes from a conditional
# dependency on another. This requires iterating to evaluate
# the whole chain.
self.check_normalize('optional-dep-test+f',
Spec('optional-dep-test+f', Spec('f'), Spec('g'), Spec('mpi')))
View file
@ -44,8 +44,11 @@ def test_conflicting_package_constraints(self):
set_pkg_dep('callpath', 'mpich@2.0') set_pkg_dep('callpath', 'mpich@2.0')
spec = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf ^libdwarf') spec = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf ^libdwarf')
self.assertRaises(spack.package.InvalidPackageDependencyError,
spec.package.validate_dependencies) # TODO: try to do something to show that the issue was with
# TODO: the user's input or with package inconsistencies.
self.assertRaises(spack.spec.UnsatisfiableVersionSpecError,
spec.normalize)
def test_preorder_node_traversal(self): def test_preorder_node_traversal(self):
@ -140,11 +143,6 @@ def test_postorder_path_traversal(self):
def test_conflicting_spec_constraints(self): def test_conflicting_spec_constraints(self):
mpileaks = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf ^libdwarf') mpileaks = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf ^libdwarf')
try:
mpileaks.package.validate_dependencies()
except spack.package.InvalidPackageDependencyError, e:
self.fail("validate_dependencies raised an exception: %s"
% e.message)
# Normalize then add conflicting constraints to the DAG (this is an # Normalize then add conflicting constraints to the DAG (this is an
# extremely unlikely scenario, but we test for it anyway) # extremely unlikely scenario, but we test for it anyway)
@ -242,12 +240,6 @@ def test_unsatisfiable_compiler_version(self):
self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError, spec.normalize) self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError, spec.normalize)
def test_unsatisfiable_variant(self):
set_pkg_dep('mpileaks', 'mpich+debug')
spec = Spec('mpileaks ^mpich~debug ^callpath ^dyninst ^libelf ^libdwarf')
self.assertRaises(spack.spec.UnsatisfiableVariantSpecError, spec.normalize)
def test_unsatisfiable_architecture(self): def test_unsatisfiable_architecture(self):
set_pkg_dep('mpileaks', 'mpich=bgqos_0') set_pkg_dep('mpileaks', 'mpich=bgqos_0')
spec = Spec('mpileaks ^mpich=sles_10_ppc64 ^callpath ^dyninst ^libelf ^libdwarf') spec = Spec('mpileaks ^mpich=sles_10_ppc64 ^callpath ^dyninst ^libelf ^libdwarf')
View file
@ -33,8 +33,8 @@ class SpecSematicsTest(MockPackagesTest):
# ================================================================================ # ================================================================================
# Utility functions to set everything up. # Utility functions to set everything up.
# ================================================================================ # ================================================================================
def check_satisfies(self, spec, anon_spec): def check_satisfies(self, spec, anon_spec, concrete=False):
left = Spec(spec) left = Spec(spec, concrete=concrete)
right = parse_anonymous_spec(anon_spec, left.name) right = parse_anonymous_spec(anon_spec, left.name)
# Satisfies is one-directional. # Satisfies is one-directional.
@ -46,8 +46,8 @@ def check_satisfies(self, spec, anon_spec):
right.copy().constrain(left) right.copy().constrain(left)
def check_unsatisfiable(self, spec, anon_spec): def check_unsatisfiable(self, spec, anon_spec, concrete=False):
left = Spec(spec) left = Spec(spec, concrete=concrete)
right = parse_anonymous_spec(anon_spec, left.name) right = parse_anonymous_spec(anon_spec, left.name)
self.assertFalse(left.satisfies(right)) self.assertFalse(left.satisfies(right))
@ -64,6 +64,16 @@ def check_constrain(self, expected, spec, constraint):
self.assertEqual(exp, spec) self.assertEqual(exp, spec)
def check_constrain_changed(self, spec, constraint):
spec = Spec(spec)
self.assertTrue(spec.constrain(constraint))
def check_constrain_not_changed(self, spec, constraint):
spec = Spec(spec)
self.assertFalse(spec.constrain(constraint))
def check_invalid_constraint(self, spec, constraint): def check_invalid_constraint(self, spec, constraint):
spec = Spec(spec) spec = Spec(spec)
constraint = Spec(constraint) constraint = Spec(constraint)
@ -71,7 +81,7 @@ def check_invalid_constraint(self, spec, constraint):
# ================================================================================ # ================================================================================
# Satisfiability and constraints # Satisfiability
# ================================================================================ # ================================================================================
def test_satisfies(self): def test_satisfies(self):
self.check_satisfies('libelf@0.8.13', '@0:1') self.check_satisfies('libelf@0.8.13', '@0:1')
@ -96,6 +106,9 @@ def test_satisfies_compiler_version(self):
self.check_unsatisfiable('foo@4.0%pgi', '@1:3%pgi') self.check_unsatisfiable('foo@4.0%pgi', '@1:3%pgi')
self.check_unsatisfiable('foo@4.0%pgi@4.5', '@1:3%pgi@4.4:4.6') self.check_unsatisfiable('foo@4.0%pgi@4.5', '@1:3%pgi@4.4:4.6')
self.check_satisfies('foo %gcc@4.7.3', '%gcc@4.7')
self.check_unsatisfiable('foo %gcc@4.7', '%gcc@4.7.3')
def test_satisfies_architecture(self): def test_satisfies_architecture(self):
self.check_satisfies('foo=chaos_5_x86_64_ib', '=chaos_5_x86_64_ib') self.check_satisfies('foo=chaos_5_x86_64_ib', '=chaos_5_x86_64_ib')
@ -147,7 +160,40 @@ def test_satisfies_virtual_dependency_versions(self):
self.check_unsatisfiable('mpileaks^mpi@3:', '^mpich@1.0') self.check_unsatisfiable('mpileaks^mpi@3:', '^mpich@1.0')
def test_constrain(self): def test_satisfies_matching_variant(self):
self.check_satisfies('mpich+foo', 'mpich+foo')
self.check_satisfies('mpich~foo', 'mpich~foo')
def test_satisfies_unconstrained_variant(self):
# only asked for mpich, no constraints. Either will do.
self.check_satisfies('mpich+foo', 'mpich')
self.check_satisfies('mpich~foo', 'mpich')
def test_unsatisfiable_variants(self):
# This case is different depending on whether the specs are concrete.
# 'mpich' is not concrete:
self.check_satisfies('mpich', 'mpich+foo', False)
self.check_satisfies('mpich', 'mpich~foo', False)
# 'mpich' is concrete:
self.check_unsatisfiable('mpich', 'mpich+foo', True)
self.check_unsatisfiable('mpich', 'mpich~foo', True)
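As an aside, the distinction tested here comes from the new concrete flag on Spec (the same flag the helpers above pass through). A minimal sketch, assuming the mock 'mpich' package:

    from spack.spec import Spec

    other = Spec('mpich+foo')
    Spec('mpich').satisfies(other)                  # True: abstract spec could still be constrained to +foo
    Spec('mpich', concrete=True).satisfies(other)   # False: a concrete spec can no longer gain the variant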
def test_unsatisfiable_variant_mismatch(self):
# No match in specs
self.check_unsatisfiable('mpich~foo', 'mpich+foo')
self.check_unsatisfiable('mpich+foo', 'mpich~foo')
# ================================================================================
# Constraints
# ================================================================================
def test_constrain_variants(self):
self.check_constrain('libelf@2.1:2.5', 'libelf@0:2.5', 'libelf@2.1:3') self.check_constrain('libelf@2.1:2.5', 'libelf@0:2.5', 'libelf@2.1:3')
self.check_constrain('libelf@2.1:2.5%gcc@4.5:4.6', self.check_constrain('libelf@2.1:2.5%gcc@4.5:4.6',
'libelf@0:2.5%gcc@2:4.6', 'libelf@2.1:3%gcc@4.5:4.7') 'libelf@0:2.5%gcc@2:4.6', 'libelf@2.1:3%gcc@4.5:4.7')
@ -158,6 +204,13 @@ def test_constrain(self):
self.check_constrain('libelf+debug~foo', 'libelf+debug', 'libelf~foo') self.check_constrain('libelf+debug~foo', 'libelf+debug', 'libelf~foo')
self.check_constrain('libelf+debug~foo', 'libelf+debug', 'libelf+debug~foo') self.check_constrain('libelf+debug~foo', 'libelf+debug', 'libelf+debug~foo')
def test_constrain_arch(self):
self.check_constrain('libelf=bgqos_0', 'libelf=bgqos_0', 'libelf=bgqos_0')
self.check_constrain('libelf=bgqos_0', 'libelf', 'libelf=bgqos_0')
def test_constrain_compiler(self):
self.check_constrain('libelf=bgqos_0', 'libelf=bgqos_0', 'libelf=bgqos_0') self.check_constrain('libelf=bgqos_0', 'libelf=bgqos_0', 'libelf=bgqos_0')
self.check_constrain('libelf=bgqos_0', 'libelf', 'libelf=bgqos_0') self.check_constrain('libelf=bgqos_0', 'libelf', 'libelf=bgqos_0')
@ -172,6 +225,45 @@ def test_invalid_constraint(self):
self.check_invalid_constraint('libelf=bgqos_0', 'libelf=x86_54') self.check_invalid_constraint('libelf=bgqos_0', 'libelf=x86_54')
def test_compiler_satisfies(self): def test_constrain_changed(self):
self.check_satisfies('foo %gcc@4.7.3', '%gcc@4.7') self.check_constrain_changed('libelf', '@1.0')
self.check_unsatisfiable('foo %gcc@4.7', '%gcc@4.7.3') self.check_constrain_changed('libelf', '@1.0:5.0')
self.check_constrain_changed('libelf', '%gcc')
self.check_constrain_changed('libelf%gcc', '%gcc@4.5')
self.check_constrain_changed('libelf', '+debug')
self.check_constrain_changed('libelf', '~debug')
self.check_constrain_changed('libelf', '=bgqos_0')
def test_constrain_not_changed(self):
self.check_constrain_not_changed('libelf', 'libelf')
self.check_constrain_not_changed('libelf@1.0', '@1.0')
self.check_constrain_not_changed('libelf@1.0:5.0', '@1.0:5.0')
self.check_constrain_not_changed('libelf%gcc', '%gcc')
self.check_constrain_not_changed('libelf%gcc@4.5', '%gcc@4.5')
self.check_constrain_not_changed('libelf+debug', '+debug')
self.check_constrain_not_changed('libelf~debug', '~debug')
self.check_constrain_not_changed('libelf=bgqos_0', '=bgqos_0')
self.check_constrain_not_changed('libelf^foo', 'libelf^foo')
self.check_constrain_not_changed('libelf^foo^bar', 'libelf^foo^bar')
def test_constrain_dependency_changed(self):
self.check_constrain_changed('libelf^foo', 'libelf^foo@1.0')
self.check_constrain_changed('libelf^foo', 'libelf^foo@1.0:5.0')
self.check_constrain_changed('libelf^foo', 'libelf^foo%gcc')
self.check_constrain_changed('libelf^foo%gcc', 'libelf^foo%gcc@4.5')
self.check_constrain_changed('libelf^foo', 'libelf^foo+debug')
self.check_constrain_changed('libelf^foo', 'libelf^foo~debug')
self.check_constrain_changed('libelf^foo', 'libelf^foo=bgqos_0')
def test_constrain_dependency_not_changed(self):
self.check_constrain_not_changed('libelf^foo@1.0', 'libelf^foo@1.0')
self.check_constrain_not_changed('libelf^foo@1.0:5.0', 'libelf^foo@1.0:5.0')
self.check_constrain_not_changed('libelf^foo%gcc', 'libelf^foo%gcc')
self.check_constrain_not_changed('libelf^foo%gcc@4.5', 'libelf^foo%gcc@4.5')
self.check_constrain_not_changed('libelf^foo+debug', 'libelf^foo+debug')
self.check_constrain_not_changed('libelf^foo~debug', 'libelf^foo~debug')
self.check_constrain_not_changed('libelf^foo=bgqos_0', 'libelf^foo=bgqos_0')
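For reference, the check_constrain_changed/check_constrain_not_changed helpers above rely on Spec.constrain() now reporting whether the constraint narrowed the spec. A minimal sketch of that behavior, using the mock 'libelf' package:

    from spack.spec import Spec

    s = Spec('libelf')
    s.constrain('@1.0:5.0')   # adds a version constraint  -> True
    s.constrain('@1.0:5.0')   # constraint already present -> False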

View file

@ -0,0 +1,71 @@
##############################################################################
# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""Test YAML serialization for specs.
YAML format preserves DAG information in the spec.
"""
from spack.spec import Spec
from spack.test.mock_packages_test import *
class SpecDagTest(MockPackagesTest):
def check_yaml_round_trip(self, spec):
yaml_text = spec.to_yaml()
spec_from_yaml = Spec.from_yaml(yaml_text)
self.assertTrue(spec.eq_dag(spec_from_yaml))
def test_simple_spec(self):
spec = Spec('mpileaks')
self.check_yaml_round_trip(spec)
def test_normal_spec(self):
spec = Spec('mpileaks+debug~opt')
spec.normalize()
self.check_yaml_round_trip(spec)
def test_ambiguous_version_spec(self):
spec = Spec('mpileaks@1.0:5.0,6.1,7.3+debug~opt')
spec.normalize()
self.check_yaml_round_trip(spec)
def test_concrete_spec(self):
spec = Spec('mpileaks+debug~opt')
spec.concretize()
self.check_yaml_round_trip(spec)
def test_yaml_subdag(self):
spec = Spec('mpileaks^mpich+debug')
spec.concretize()
yaml_spec = Spec.from_yaml(spec.to_yaml())
for dep in ('callpath', 'mpich', 'dyninst', 'libdwarf', 'libelf'):
self.assertTrue(spec[dep].eq_dag(yaml_spec[dep]))
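In interactive use, the round trip exercised by these tests looks roughly like this (mock packages assumed):

    from spack.spec import Spec

    spec = Spec('mpileaks^mpich+debug')
    spec.concretize()
    text = spec.to_yaml()          # YAML string that records the full DAG
    clone = Spec.from_yaml(text)
    assert spec.eq_dag(clone)      # nodes and edges survive the round trip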

View file

@ -0,0 +1,52 @@
##############################################################################
# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""Debug signal handler: prints a stack trace and enters interpreter.
``register_interrupt_handler()`` enables a ctrl-C handler that prints
a stack trace and drops the user into an interpreter.
"""
import os
import code
import traceback
import signal
def debug_handler(sig, frame):
"""Interrupt running process, and provide a python prompt for
interactive debugging."""
d = {'_frame':frame} # Allow access to frame object.
d.update(frame.f_globals) # Unless shadowed by global
d.update(frame.f_locals)
i = code.InteractiveConsole(d)
message = "Signal received : entering python shell.\nTraceback:\n"
message += ''.join(traceback.format_stack(frame))
i.interact(message)
os._exit(1) # Use os._exit to avoid test harness.
def register_interrupt_handler():
"""Register a handler to print a stack trace and enter an interpreter on Ctrl-C"""
signal.signal(signal.SIGINT, debug_handler)
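A sketch of how a command-line entry point might opt in; once registered, Ctrl-C prints the stack trace and drops into the InteractiveConsole defined above:

    import spack.util.debug as debug

    debug.register_interrupt_handler()
    # ... long-running work; press Ctrl-C to inspect frame locals interactively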

View file

@ -0,0 +1,36 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""Variant is a class describing flags on builds, or "variants".
Could be generalized later to describe arbitrary parameters, but
currently variants are just flags.
"""
class Variant(object):
"""Represents a variant on a build. Can be either on or off."""
def __init__(self, default, description):
self.default = bool(default)
self.description = str(description)
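A small usage sketch of the class above (values are illustrative only):

    v = Variant(default=False, description='Compile with debug flags.')
    v.default        # False (coerced to bool)
    v.description    # 'Compile with debug flags.'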

View file

@ -93,12 +93,12 @@ def check_type(t):
def coerced(method): def coerced(method):
"""Decorator that ensures that argument types of a method are coerced.""" """Decorator that ensures that argument types of a method are coerced."""
@wraps(method) @wraps(method)
def coercing_method(a, b): def coercing_method(a, b, *args, **kwargs):
if type(a) == type(b) or a is None or b is None: if type(a) == type(b) or a is None or b is None:
return method(a, b) return method(a, b, *args, **kwargs)
else: else:
ca, cb = coerce_versions(a, b) ca, cb = coerce_versions(a, b)
return getattr(ca, method.__name__)(cb) return getattr(ca, method.__name__)(cb, *args, **kwargs)
return coercing_method return coercing_method
@ -587,16 +587,42 @@ def overlaps(self, other):
return False return False
def to_dict(self):
"""Generate human-readable dict for YAML."""
if self.concrete:
return { 'version' : str(self[0]) }
else:
return { 'versions' : [str(v) for v in self] }
@staticmethod
def from_dict(dictionary):
"""Parse dict from to_dict."""
if 'versions' in dictionary:
return VersionList(dictionary['versions'])
elif 'version' in dictionary:
return VersionList([dictionary['version']])
else:
raise ValueError("Dict must have 'version' or 'versions' in it.")
@coerced @coerced
def satisfies(self, other): def satisfies(self, other, strict=False):
"""A VersionList satisfies another if some version in the list would """A VersionList satisfies another if some version in the list
would satisfy some version in the other list. This uses essentially would satisfy some version in the other list. This uses
the same algorithm as overlaps() does for VersionList, but it calls essentially the same algorithm as overlaps() does for
satisfies() on member Versions and VersionRanges. VersionList, but it calls satisfies() on member Versions
and VersionRanges.
If strict is specified, this version list must lie entirely
*within* the other in order to satisfy it.
""" """
if not other or not self: if not other or not self:
return False return False
if strict:
return self in other
s = o = 0 s = o = 0
while s < len(self) and o < len(other): while s < len(self) and o < len(other):
if self[s].satisfies(other[o]): if self[s].satisfies(other[o]):
@ -633,9 +659,14 @@ def intersection(self, other):
@coerced @coerced
def intersect(self, other): def intersect(self, other):
isection = self.intersection(other) """Intersect this version list with other.
self.versions = isection.versions
Return True if the list changed as a result; False otherwise
"""
isection = self.intersection(other)
changed = (isection.versions != self.versions)
self.versions = isection.versions
return changed
@coerced @coerced
def __contains__(self, other): def __contains__(self, other):

View file

@ -1,12 +0,0 @@
[compiler "gcc@4.5.0"]
cc = /path/to/gcc
cxx = /path/to/g++
f77 = /path/to/gfortran
fc = /path/to/gfortran
[compiler "clang@3.3"]
cc = /path/to/clang
cxx = /path/to/clang++
f77 = None
fc = None

View file

@ -0,0 +1,12 @@
compilers:
all:
clang@3.3:
cc: /path/to/clang
cxx: /path/to/clang++
f77: None
fc: None
gcc@4.5.0:
cc: /path/to/gcc
cxx: /path/to/g++
f77: /path/to/gfortran
fc: /path/to/gfortran
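The new format is plain YAML, so it can be read with the vendored yaml module; a minimal sketch that parses the snippet above from a string (rather than from the real config file under ~/.spack):

    import yaml

    text = '''
    compilers:
      all:
        gcc@4.5.0:
          cc: /path/to/gcc
          cxx: /path/to/g++
    '''
    conf = yaml.load(text)
    conf['compilers']['all']['gcc@4.5.0']['cc']   # '/path/to/gcc'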

View file

@ -0,0 +1,12 @@
from spack import *
class A(Package):
"""Simple package with no dependencies"""
homepage = "http://www.example.com"
url = "http://www.example.com/a-1.0.tar.gz"
version('1.0', '0123456789abcdef0123456789abcdef')
def install(self, spec, prefix):
pass

View file

@ -0,0 +1,12 @@
from spack import *
class B(Package):
"""Simple package with no dependencies"""
homepage = "http://www.example.com"
url = "http://www.example.com/b-1.0.tar.gz"
version('1.0', '0123456789abcdef0123456789abcdef')
def install(self, spec, prefix):
pass

View file

@ -0,0 +1,12 @@
from spack import *
class C(Package):
"""Simple package with no dependencies"""
homepage = "http://www.example.com"
url = "http://www.example.com/c-1.0.tar.gz"
version('1.0', '0123456789abcdef0123456789abcdef')
def install(self, spec, prefix):
pass

View file

@ -0,0 +1,12 @@
from spack import *
class E(Package):
"""Simple package with no dependencies"""
homepage = "http://www.example.com"
url = "http://www.example.com/e-1.0.tar.gz"
version('1.0', '0123456789abcdef0123456789abcdef')
def install(self, spec, prefix):
pass

View file

@ -30,6 +30,9 @@ class Mpich(Package):
list_url = "http://www.mpich.org/static/downloads/" list_url = "http://www.mpich.org/static/downloads/"
list_depth = 2 list_depth = 2
variant('debug', default=False,
description="Compile MPICH with debug flags.")
version('3.0.4', '9c5d5d4fe1e17dd12153f40bc5b6dbc0') version('3.0.4', '9c5d5d4fe1e17dd12153f40bc5b6dbc0')
version('3.0.3', 'foobarbaz') version('3.0.3', 'foobarbaz')
version('3.0.2', 'foobarbaz') version('3.0.2', 'foobarbaz')

View file

@ -33,6 +33,9 @@ class Mpileaks(Package):
version(2.2, 'foobarbaz') version(2.2, 'foobarbaz')
version(2.3, 'foobarbaz') version(2.3, 'foobarbaz')
variant('debug', default=False, description='Debug variant')
variant('opt', default=False, description='Optimized variant')
depends_on("mpi") depends_on("mpi")
depends_on("callpath") depends_on("callpath")

View file

@ -0,0 +1,18 @@
from spack import *
class OptionalDepTest2(Package):
"""Depends on the optional-dep-test package"""
homepage = "http://www.example.com"
url = "http://www.example.com/optional-dep-test-2-1.0.tar.gz"
version('1.0', '0123456789abcdef0123456789abcdef')
variant('odt', default=False)
variant('mpi', default=False)
depends_on('optional-dep-test', when='+odt')
depends_on('optional-dep-test+mpi', when='+mpi')
def install(self, spec, prefix):
pass

View file

@ -0,0 +1,29 @@
from spack import *
class OptionalDepTest(Package):
"""Description"""
homepage = "http://www.example.com"
url = "http://www.example.com/optional_dep_test-1.0.tar.gz"
version('1.0', '0123456789abcdef0123456789abcdef')
version('1.1', '0123456789abcdef0123456789abcdef')
variant('a', default=False)
variant('f', default=False)
variant('mpi', default=False)
depends_on('a', when='+a')
depends_on('b', when='@1.1')
depends_on('c', when='%intel')
depends_on('d', when='%intel@64.1')
depends_on('e', when='%clang@34:40')
depends_on('f', when='+f')
depends_on('g', when='^f')
depends_on('mpi', when='^g')
depends_on('mpi', when='+mpi')
def install(self, spec, prefix):
pass
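With conditional depends_on(..., when=...) declarations like these, a spec's dependency set is decided at concretization time. A hedged sketch of the effect, using the mock packages:

    from spack.spec import Spec

    s = Spec('optional-dep-test+mpi')
    s.concretize()
    s['mpi']        # present: the +mpi variant activated the conditional dependency
    # with '~mpi' (and no other trigger), no mpi node appears in the DAG at all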

View file

@ -139,7 +139,9 @@ def write_easy_install_pth(self, exts):
def activate(self, ext_pkg, **args): def activate(self, ext_pkg, **args):
args.update(ignore=self.python_ignore(ext_pkg, args)) ignore=self.python_ignore(ext_pkg, args)
args.update(ignore=ignore)
super(Python, self).activate(ext_pkg, **args) super(Python, self).activate(ext_pkg, **args)
exts = spack.install_layout.extension_map(self.spec) exts = spack.install_layout.extension_map(self.spec)