Merge branch 'mplegendre-multi_pkgsrc_roots' into develop
- This moves var/spack/packages to var/spack/repos/builtin/packages.
- Packages that did not exist in the source branch, or were changed in
  develop, were moved into var/spack/repos/builtin/packages as part of
  the integration.

Conflicts:
	lib/spack/spack/test/unit_install.py
	var/spack/repos/builtin/packages/clang/package.py
commit 8d6342c53d
445 changed files with 6902 additions and 840 deletions
1 .gitignore (vendored)

@@ -8,3 +8,4 @@
 /etc/spackconfig
 /share/spack/dotkit
 /share/spack/modules
+/TAGS
2 .mailmap

@@ -9,3 +9,5 @@ Saravan Pantham <saravan.pantham@gmail.com> Saravan Pantham <pantham1@su
 Tom Scogland <tscogland@llnl.gov> Tom Scogland <scogland1@llnl.gov>
 Tom Scogland <tscogland@llnl.gov> Tom Scogland <tom.scogland@gmail.com>
 Joachim Protze <protze@rz.rwth-aachen.de> jprotze <protze@rz.rwth-aachen.de>
+Gregory L. Lee <lee218@llnl.gov> Gregory L. Lee <lee218@surface86.llnl.gov>
+Gregory L. Lee <lee218@llnl.gov> Gregory Lee <lee218@llnl.gov>
@@ -140,8 +140,8 @@ def main():
 
     spack.spack_working_dir = working_dir
     if args.mock:
-        from spack.packages import PackageDB
-        spack.db = PackageDB(spack.mock_packages_path)
+        from spack.repository import RepoPath
+        spack.repo.swap(RepoPath(spack.mock_packages_path))
 
     # If the user asked for it, don't check ssl certs.
     if args.insecure:
8 etc/spack/repos.yaml (new file)

@@ -0,0 +1,8 @@
+# -------------------------------------------------------------------------
+# This is the default spack repository configuration.
+#
+# Changes to this file will affect all users of this spack install,
+# although users can override these settings in their ~/.spack/repos.yaml.
+# -------------------------------------------------------------------------
+repos:
+  - $spack/var/spack/repos/builtin
22 lib/spack/external/argparse.py (vendored)

@@ -1067,9 +1067,13 @@ class _SubParsersAction(Action):
 
     class _ChoicesPseudoAction(Action):
 
-        def __init__(self, name, help):
+        def __init__(self, name, aliases, help):
+            metavar = dest = name
+            if aliases:
+                metavar += ' (%s)' % ', '.join(aliases)
             sup = super(_SubParsersAction._ChoicesPseudoAction, self)
-            sup.__init__(option_strings=[], dest=name, help=help)
+            sup.__init__(option_strings=[], dest=dest, help=help,
+                         metavar=metavar)
 
     def __init__(self,
                  option_strings,
@@ -1097,15 +1101,22 @@ def add_parser(self, name, **kwargs):
         if kwargs.get('prog') is None:
             kwargs['prog'] = '%s %s' % (self._prog_prefix, name)
 
+        aliases = kwargs.pop('aliases', ())
+
         # create a pseudo-action to hold the choice help
         if 'help' in kwargs:
             help = kwargs.pop('help')
-            choice_action = self._ChoicesPseudoAction(name, help)
+            choice_action = self._ChoicesPseudoAction(name, aliases, help)
             self._choices_actions.append(choice_action)
 
         # create the parser and add it to the map
         parser = self._parser_class(**kwargs)
         self._name_parser_map[name] = parser
 
+        # make parser available under aliases also
+        for alias in aliases:
+            self._name_parser_map[alias] = parser
+
         return parser
 
     def _get_subactions(self):
@@ -1123,8 +1134,9 @@ def __call__(self, parser, namespace, values, option_string=None):
         try:
             parser = self._name_parser_map[parser_name]
         except KeyError:
-            tup = parser_name, ', '.join(self._name_parser_map)
-            msg = _('unknown parser %r (choices: %s)' % tup)
+            args = {'parser_name': parser_name,
+                    'choices': ', '.join(self._name_parser_map)}
+            msg = _('unknown parser %(parser_name)r (choices: %(choices)s)') % args
             raise ArgumentError(self, msg)
 
         # parse all the remaining options into the namespace
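
The aliases support added above lets one subparser answer to several names. A minimal usage sketch (not part of the diff; the 'remove'/'rm' names are illustrative), assuming lib/spack/external precedes the stdlib on sys.path so this vendored argparse is the module imported:

    import argparse

    parser = argparse.ArgumentParser(prog='spack')
    subparsers = parser.add_subparsers(dest='command')
    # 'remove' is registered under both names; the pseudo-action renders
    # its help entry as "remove (rm)".
    subparsers.add_parser('remove', aliases=['rm'], help='remove packages')

    args = parser.parse_args(['rm'])
    assert args.command == 'rm'   # the alias maps to the same parser object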
19 lib/spack/external/jsonschema/COPYING (vendored, new file)

@@ -0,0 +1,19 @@
+Copyright (c) 2013 Julian Berman
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
104 lib/spack/external/jsonschema/README.rst (vendored, new file)

@@ -0,0 +1,104 @@
+==========
+jsonschema
+==========
+
+``jsonschema`` is an implementation of `JSON Schema <http://json-schema.org>`_
+for Python (supporting 2.6+ including Python 3).
+
+.. code-block:: python
+
+    >>> from jsonschema import validate
+
+    >>> # A sample schema, like what we'd get from json.load()
+    >>> schema = {
+    ...     "type" : "object",
+    ...     "properties" : {
+    ...         "price" : {"type" : "number"},
+    ...         "name" : {"type" : "string"},
+    ...     },
+    ... }
+
+    >>> # If no exception is raised by validate(), the instance is valid.
+    >>> validate({"name" : "Eggs", "price" : 34.99}, schema)
+
+    >>> validate(
+    ...     {"name" : "Eggs", "price" : "Invalid"}, schema
+    ... )  # doctest: +IGNORE_EXCEPTION_DETAIL
+    Traceback (most recent call last):
+        ...
+    ValidationError: 'Invalid' is not of type 'number'
+
+
+Features
+--------
+
+* Full support for
+  `Draft 3 <https://python-jsonschema.readthedocs.org/en/latest/validate/#jsonschema.Draft3Validator>`_
+  **and** `Draft 4 <https://python-jsonschema.readthedocs.org/en/latest/validate/#jsonschema.Draft4Validator>`_
+  of the schema.
+
+* `Lazy validation <https://python-jsonschema.readthedocs.org/en/latest/validate/#jsonschema.IValidator.iter_errors>`_
+  that can iteratively report *all* validation errors.
+
+* Small and extensible
+
+* `Programmatic querying <https://python-jsonschema.readthedocs.org/en/latest/errors/#module-jsonschema>`_
+  of which properties or items failed validation.
+
+
+Release Notes
+-------------
+
+* A simple CLI was added for validation
+* Validation errors now keep full absolute paths and absolute schema paths in
+  their ``absolute_path`` and ``absolute_schema_path`` attributes. The ``path``
+  and ``schema_path`` attributes are deprecated in favor of ``relative_path``
+  and ``relative_schema_path``\ .
+
+*Note:* Support for Python 3.2 was dropped in this release, and installation
+now uses setuptools.
+
+
+Running the Test Suite
+----------------------
+
+``jsonschema`` uses the wonderful `Tox <http://tox.readthedocs.org>`_ for its
+test suite. (It really is wonderful, if for some reason you haven't heard of
+it, you really should use it for your projects).
+
+Assuming you have ``tox`` installed (perhaps via ``pip install tox`` or your
+package manager), just run ``tox`` in the directory of your source checkout to
+run ``jsonschema``'s test suite on all of the versions of Python ``jsonschema``
+supports. Note that you'll need to have all of those versions installed in
+order to run the tests on each of them, otherwise ``tox`` will skip (and fail)
+the tests on that version.
+
+Of course you're also free to just run the tests on a single version with your
+favorite test runner. The tests live in the ``jsonschema.tests`` package.
+
+
+Community
+---------
+
+There's a `mailing list <https://groups.google.com/forum/#!forum/jsonschema>`_
+for this implementation on Google Groups.
+
+Please join, and feel free to send questions there.
+
+
+Contributing
+------------
+
+I'm Julian Berman.
+
+``jsonschema`` is on `GitHub <http://github.com/Julian/jsonschema>`_.
+
+Get in touch, via GitHub or otherwise, if you've got something to contribute,
+it'd be most welcome!
+
+You can also generally find me on Freenode (nick: ``tos9``) in various
+channels, including ``#python``.
+
+If you feel overwhelmingly grateful, you can woo me with beer money on
+`Gittip <https://www.gittip.com/Julian/>`_ or via Google Wallet with the email
+in my GitHub profile.
26 lib/spack/external/jsonschema/__init__.py (vendored, new file)

@@ -0,0 +1,26 @@
+"""
+An implementation of JSON Schema for Python
+
+The main functionality is provided by the validator classes for each of the
+supported JSON Schema versions.
+
+Most commonly, :func:`validate` is the quickest way to simply validate a given
+instance under a schema, and will create a validator for you.
+
+"""
+
+from jsonschema.exceptions import (
+    ErrorTree, FormatError, RefResolutionError, SchemaError, ValidationError
+)
+from jsonschema._format import (
+    FormatChecker, draft3_format_checker, draft4_format_checker,
+)
+from jsonschema.validators import (
+    Draft3Validator, Draft4Validator, RefResolver, validate
+)
+
+
+__version__ = "2.4.0"
+
+
+# flake8: noqa
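
The public surface exported above is enough for the lazy validation the README advertises; a short sketch (not part of the vendored files):

    from jsonschema import Draft4Validator

    schema = {"type": "object", "properties": {"price": {"type": "number"}}}
    validator = Draft4Validator(schema)
    # iter_errors() yields every failure instead of raising on the first one.
    for error in validator.iter_errors({"price": "ten"}):
        print(error.message)   # -> 'ten' is not of type 'number'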
2 lib/spack/external/jsonschema/__main__.py (vendored, new file)

@@ -0,0 +1,2 @@
+from jsonschema.cli import main
+main()
240 lib/spack/external/jsonschema/_format.py (vendored, new file)

@@ -0,0 +1,240 @@
+import datetime
+import re
+import socket
+
+from jsonschema.compat import str_types
+from jsonschema.exceptions import FormatError
+
+
+class FormatChecker(object):
+    """
+    A ``format`` property checker.
+
+    JSON Schema does not mandate that the ``format`` property actually do any
+    validation. If validation is desired however, instances of this class can
+    be hooked into validators to enable format validation.
+
+    :class:`FormatChecker` objects always return ``True`` when asked about
+    formats that they do not know how to validate.
+
+    To check a custom format using a function that takes an instance and
+    returns a ``bool``, use the :meth:`FormatChecker.checks` or
+    :meth:`FormatChecker.cls_checks` decorators.
+
+    :argument iterable formats: the known formats to validate. This argument
+        can be used to limit which formats will be used during validation.
+
+    """
+
+    checkers = {}
+
+    def __init__(self, formats=None):
+        if formats is None:
+            self.checkers = self.checkers.copy()
+        else:
+            self.checkers = dict((k, self.checkers[k]) for k in formats)
+
+    def checks(self, format, raises=()):
+        """
+        Register a decorated function as validating a new format.
+
+        :argument str format: the format that the decorated function will check
+        :argument Exception raises: the exception(s) raised by the decorated
+            function when an invalid instance is found. The exception object
+            will be accessible as the :attr:`ValidationError.cause` attribute
+            of the resulting validation error.
+
+        """
+
+        def _checks(func):
+            self.checkers[format] = (func, raises)
+            return func
+        return _checks
+
+    cls_checks = classmethod(checks)
+
+    def check(self, instance, format):
+        """
+        Check whether the instance conforms to the given format.
+
+        :argument instance: the instance to check
+        :type: any primitive type (str, number, bool)
+        :argument str format: the format that instance should conform to
+        :raises: :exc:`FormatError` if instance does not conform to format
+
+        """
+
+        if format not in self.checkers:
+            return
+
+        func, raises = self.checkers[format]
+        result, cause = None, None
+        try:
+            result = func(instance)
+        except raises as e:
+            cause = e
+        if not result:
+            raise FormatError(
+                "%r is not a %r" % (instance, format), cause=cause,
+            )
+
+    def conforms(self, instance, format):
+        """
+        Check whether the instance conforms to the given format.
+
+        :argument instance: the instance to check
+        :type: any primitive type (str, number, bool)
+        :argument str format: the format that instance should conform to
+        :rtype: bool
+
+        """
+
+        try:
+            self.check(instance, format)
+        except FormatError:
+            return False
+        else:
+            return True
+
+
+_draft_checkers = {"draft3": [], "draft4": []}
+
+
+def _checks_drafts(both=None, draft3=None, draft4=None, raises=()):
+    draft3 = draft3 or both
+    draft4 = draft4 or both
+
+    def wrap(func):
+        if draft3:
+            _draft_checkers["draft3"].append(draft3)
+            func = FormatChecker.cls_checks(draft3, raises)(func)
+        if draft4:
+            _draft_checkers["draft4"].append(draft4)
+            func = FormatChecker.cls_checks(draft4, raises)(func)
+        return func
+    return wrap
+
+
+@_checks_drafts("email")
+def is_email(instance):
+    if not isinstance(instance, str_types):
+        return True
+    return "@" in instance
+
+
+_ipv4_re = re.compile(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$")
+
+
+@_checks_drafts(draft3="ip-address", draft4="ipv4")
+def is_ipv4(instance):
+    if not isinstance(instance, str_types):
+        return True
+    if not _ipv4_re.match(instance):
+        return False
+    return all(0 <= int(component) <= 255 for component in instance.split("."))
+
+
+if hasattr(socket, "inet_pton"):
+    @_checks_drafts("ipv6", raises=socket.error)
+    def is_ipv6(instance):
+        if not isinstance(instance, str_types):
+            return True
+        return socket.inet_pton(socket.AF_INET6, instance)
+
+
+_host_name_re = re.compile(r"^[A-Za-z0-9][A-Za-z0-9\.\-]{1,255}$")
+
+
+@_checks_drafts(draft3="host-name", draft4="hostname")
+def is_host_name(instance):
+    if not isinstance(instance, str_types):
+        return True
+    if not _host_name_re.match(instance):
+        return False
+    components = instance.split(".")
+    for component in components:
+        if len(component) > 63:
+            return False
+    return True
+
+
+try:
+    import rfc3987
+except ImportError:
+    pass
+else:
+    @_checks_drafts("uri", raises=ValueError)
+    def is_uri(instance):
+        if not isinstance(instance, str_types):
+            return True
+        return rfc3987.parse(instance, rule="URI")
+
+
+try:
+    import strict_rfc3339
+except ImportError:
+    try:
+        import isodate
+    except ImportError:
+        pass
+    else:
+        @_checks_drafts("date-time", raises=(ValueError, isodate.ISO8601Error))
+        def is_date(instance):
+            if not isinstance(instance, str_types):
+                return True
+            return isodate.parse_datetime(instance)
+else:
+    @_checks_drafts("date-time")
+    def is_date(instance):
+        if not isinstance(instance, str_types):
+            return True
+        return strict_rfc3339.validate_rfc3339(instance)
+
+
+@_checks_drafts("regex", raises=re.error)
+def is_regex(instance):
+    if not isinstance(instance, str_types):
+        return True
+    return re.compile(instance)
+
+
+@_checks_drafts(draft3="date", raises=ValueError)
+def is_date(instance):
+    if not isinstance(instance, str_types):
+        return True
+    return datetime.datetime.strptime(instance, "%Y-%m-%d")
+
+
+@_checks_drafts(draft3="time", raises=ValueError)
+def is_time(instance):
+    if not isinstance(instance, str_types):
+        return True
+    return datetime.datetime.strptime(instance, "%H:%M:%S")
+
+
+try:
+    import webcolors
+except ImportError:
+    pass
+else:
+    def is_css_color_code(instance):
+        return webcolors.normalize_hex(instance)
+
+    @_checks_drafts(draft3="color", raises=(ValueError, TypeError))
+    def is_css21_color(instance):
+        if (
+            not isinstance(instance, str_types) or
+            instance.lower() in webcolors.css21_names_to_hex
+        ):
+            return True
+        return is_css_color_code(instance)
+
+    def is_css3_color(instance):
+        if instance.lower() in webcolors.css3_names_to_hex:
+            return True
+        return is_css_color_code(instance)
+
+
+draft3_format_checker = FormatChecker(_draft_checkers["draft3"])
+draft4_format_checker = FormatChecker(_draft_checkers["draft4"])
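
FormatChecker's checks() decorator, defined above, is the extension hook for custom formats; a hedged sketch (the "even" format name is made up for illustration):

    from jsonschema import FormatChecker, validate

    checker = FormatChecker()

    @checker.checks("even")
    def is_even(instance):
        # Per the class's convention, non-target types are vacuously valid.
        return not isinstance(instance, int) or instance % 2 == 0

    validate(2, {"format": "even"}, format_checker=checker)  # passes
    # validate(3, ...) would raise a ValidationError built from FormatError.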
155 lib/spack/external/jsonschema/_reflect.py (vendored, new file)

@@ -0,0 +1,155 @@
+# -*- test-case-name: twisted.test.test_reflect -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Standardized versions of various cool and/or strange things that you can do
+with Python's reflection capabilities.
+"""
+
+import sys
+
+from jsonschema.compat import PY3
+
+
+class _NoModuleFound(Exception):
+    """
+    No module was found because none exists.
+    """
+
+
+class InvalidName(ValueError):
+    """
+    The given name is not a dot-separated list of Python objects.
+    """
+
+
+class ModuleNotFound(InvalidName):
+    """
+    The module associated with the given name doesn't exist and it can't be
+    imported.
+    """
+
+
+class ObjectNotFound(InvalidName):
+    """
+    The object associated with the given name doesn't exist and it can't be
+    imported.
+    """
+
+
+if PY3:
+    def reraise(exception, traceback):
+        raise exception.with_traceback(traceback)
+else:
+    exec("""def reraise(exception, traceback):
+        raise exception.__class__, exception, traceback""")
+
+reraise.__doc__ = """
+Re-raise an exception, with an optional traceback, in a way that is compatible
+with both Python 2 and Python 3.
+
+Note that on Python 3, re-raised exceptions will be mutated, with their
+C{__traceback__} attribute being set.
+
+@param exception: The exception instance.
+@param traceback: The traceback to use, or C{None} indicating a new traceback.
+"""
+
+
+def _importAndCheckStack(importName):
+    """
+    Import the given name as a module, then walk the stack to determine whether
+    the failure was the module not existing, or some code in the module (for
+    example a dependent import) failing. This can be helpful to determine
+    whether any actual application code was run. For example, to distiguish
+    administrative error (entering the wrong module name), from programmer
+    error (writing buggy code in a module that fails to import).
+
+    @param importName: The name of the module to import.
+    @type importName: C{str}
+    @raise Exception: if something bad happens. This can be any type of
+        exception, since nobody knows what loading some arbitrary code might
+        do.
+    @raise _NoModuleFound: if no module was found.
+    """
+    try:
+        return __import__(importName)
+    except ImportError:
+        excType, excValue, excTraceback = sys.exc_info()
+        while excTraceback:
+            execName = excTraceback.tb_frame.f_globals["__name__"]
+            # in Python 2 execName is None when an ImportError is encountered,
+            # where in Python 3 execName is equal to the importName.
+            if execName is None or execName == importName:
+                reraise(excValue, excTraceback)
+            excTraceback = excTraceback.tb_next
+        raise _NoModuleFound()
+
+
+def namedAny(name):
+    """
+    Retrieve a Python object by its fully qualified name from the global Python
+    module namespace. The first part of the name, that describes a module,
+    will be discovered and imported. Each subsequent part of the name is
+    treated as the name of an attribute of the object specified by all of the
+    name which came before it. For example, the fully-qualified name of this
+    object is 'twisted.python.reflect.namedAny'.
+
+    @type name: L{str}
+    @param name: The name of the object to return.
+
+    @raise InvalidName: If the name is an empty string, starts or ends with
+        a '.', or is otherwise syntactically incorrect.
+
+    @raise ModuleNotFound: If the name is syntactically correct but the
+        module it specifies cannot be imported because it does not appear to
+        exist.
+
+    @raise ObjectNotFound: If the name is syntactically correct, includes at
+        least one '.', but the module it specifies cannot be imported because
+        it does not appear to exist.
+
+    @raise AttributeError: If an attribute of an object along the way cannot be
+        accessed, or a module along the way is not found.
+
+    @return: the Python object identified by 'name'.
+    """
+    if not name:
+        raise InvalidName('Empty module name')
+
+    names = name.split('.')
+
+    # if the name starts or ends with a '.' or contains '..', the __import__
+    # will raise an 'Empty module name' error. This will provide a better error
+    # message.
+    if '' in names:
+        raise InvalidName(
+            "name must be a string giving a '.'-separated list of Python "
+            "identifiers, not %r" % (name,))
+
+    topLevelPackage = None
+    moduleNames = names[:]
+    while not topLevelPackage:
+        if moduleNames:
+            trialname = '.'.join(moduleNames)
+            try:
+                topLevelPackage = _importAndCheckStack(trialname)
+            except _NoModuleFound:
+                moduleNames.pop()
+        else:
+            if len(names) == 1:
+                raise ModuleNotFound("No module named %r" % (name,))
+            else:
+                raise ObjectNotFound('%r does not name an object' % (name,))
+
+    obj = topLevelPackage
+    for n in names[1:]:
+        obj = getattr(obj, n)
+
+    return obj
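
namedAny(), the piece of this Twisted-derived module that the jsonschema CLI actually uses, resolves a dotted name to an object; a quick sketch (not part of the diff):

    from jsonschema._reflect import namedAny

    cls = namedAny("jsonschema.Draft4Validator")
    assert cls.__name__ == "Draft4Validator"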
213 lib/spack/external/jsonschema/_utils.py (vendored, new file)

@@ -0,0 +1,213 @@
+import itertools
+import json
+import pkgutil
+import re
+
+from jsonschema.compat import str_types, MutableMapping, urlsplit
+
+
+class URIDict(MutableMapping):
+    """
+    Dictionary which uses normalized URIs as keys.
+
+    """
+
+    def normalize(self, uri):
+        return urlsplit(uri).geturl()
+
+    def __init__(self, *args, **kwargs):
+        self.store = dict()
+        self.store.update(*args, **kwargs)
+
+    def __getitem__(self, uri):
+        return self.store[self.normalize(uri)]
+
+    def __setitem__(self, uri, value):
+        self.store[self.normalize(uri)] = value
+
+    def __delitem__(self, uri):
+        del self.store[self.normalize(uri)]
+
+    def __iter__(self):
+        return iter(self.store)
+
+    def __len__(self):
+        return len(self.store)
+
+    def __repr__(self):
+        return repr(self.store)
+
+
+class Unset(object):
+    """
+    An as-of-yet unset attribute or unprovided default parameter.
+
+    """
+
+    def __repr__(self):
+        return "<unset>"
+
+
+def load_schema(name):
+    """
+    Load a schema from ./schemas/``name``.json and return it.
+
+    """
+
+    data = pkgutil.get_data(__package__, "schemas/{0}.json".format(name))
+    return json.loads(data.decode("utf-8"))
+
+
+def indent(string, times=1):
+    """
+    A dumb version of :func:`textwrap.indent` from Python 3.3.
+
+    """
+
+    return "\n".join(" " * (4 * times) + line for line in string.splitlines())
+
+
+def format_as_index(indices):
+    """
+    Construct a single string containing indexing operations for the indices.
+
+    For example, [1, 2, "foo"] -> [1][2]["foo"]
+
+    :type indices: sequence
+
+    """
+
+    if not indices:
+        return ""
+    return "[%s]" % "][".join(repr(index) for index in indices)
+
+
+def find_additional_properties(instance, schema):
+    """
+    Return the set of additional properties for the given ``instance``.
+
+    Weeds out properties that should have been validated by ``properties`` and
+    / or ``patternProperties``.
+
+    Assumes ``instance`` is dict-like already.
+
+    """
+
+    properties = schema.get("properties", {})
+    patterns = "|".join(schema.get("patternProperties", {}))
+    for property in instance:
+        if property not in properties:
+            if patterns and re.search(patterns, property):
+                continue
+            yield property
+
+
+def extras_msg(extras):
+    """
+    Create an error message for extra items or properties.
+
+    """
+
+    if len(extras) == 1:
+        verb = "was"
+    else:
+        verb = "were"
+    return ", ".join(repr(extra) for extra in extras), verb
+
+
+def types_msg(instance, types):
+    """
+    Create an error message for a failure to match the given types.
+
+    If the ``instance`` is an object and contains a ``name`` property, it will
+    be considered to be a description of that object and used as its type.
+
+    Otherwise the message is simply the reprs of the given ``types``.
+
+    """
+
+    reprs = []
+    for type in types:
+        try:
+            reprs.append(repr(type["name"]))
+        except Exception:
+            reprs.append(repr(type))
+    return "%r is not of type %s" % (instance, ", ".join(reprs))
+
+
+def flatten(suitable_for_isinstance):
+    """
+    isinstance() can accept a bunch of really annoying different types:
+        * a single type
+        * a tuple of types
+        * an arbitrary nested tree of tuples
+
+    Return a flattened tuple of the given argument.
+
+    """
+
+    types = set()
+
+    if not isinstance(suitable_for_isinstance, tuple):
+        suitable_for_isinstance = (suitable_for_isinstance,)
+    for thing in suitable_for_isinstance:
+        if isinstance(thing, tuple):
+            types.update(flatten(thing))
+        else:
+            types.add(thing)
+    return tuple(types)
+
+
+def ensure_list(thing):
+    """
+    Wrap ``thing`` in a list if it's a single str.
+
+    Otherwise, return it unchanged.
+
+    """
+
+    if isinstance(thing, str_types):
+        return [thing]
+    return thing
+
+
+def unbool(element, true=object(), false=object()):
+    """
+    A hack to make True and 1 and False and 0 unique for ``uniq``.
+
+    """
+
+    if element is True:
+        return true
+    elif element is False:
+        return false
+    return element
+
+
+def uniq(container):
+    """
+    Check if all of a container's elements are unique.
+
+    Successively tries first to rely that the elements are hashable, then
+    falls back on them being sortable, and finally falls back on brute
+    force.
+
+    """
+
+    try:
+        return len(set(unbool(i) for i in container)) == len(container)
+    except TypeError:
+        try:
+            sort = sorted(unbool(i) for i in container)
+            sliced = itertools.islice(sort, 1, None)
+            for i, j in zip(sort, sliced):
+                if i == j:
+                    return False
+        except (NotImplementedError, TypeError):
+            seen = []
+            for e in container:
+                e = unbool(e)
+                if e in seen:
+                    return False
+                seen.append(e)
+    return True
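
A few of the helpers above, demonstrated against their docstrings (a sketch, not part of the diff):

    from jsonschema import _utils

    assert _utils.format_as_index([1, 2, "foo"]) == "[1][2]['foo']"
    assert _utils.uniq([1, 2, 3])
    # True == 1 in Python, but unbool() keeps them distinct for uniq():
    assert _utils.uniq([True, 1])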
358 lib/spack/external/jsonschema/_validators.py (vendored, new file)

@@ -0,0 +1,358 @@
+import re
+
+from jsonschema import _utils
+from jsonschema.exceptions import FormatError, ValidationError
+from jsonschema.compat import iteritems
+
+
+FLOAT_TOLERANCE = 10 ** -15
+
+
+def patternProperties(validator, patternProperties, instance, schema):
+    if not validator.is_type(instance, "object"):
+        return
+
+    for pattern, subschema in iteritems(patternProperties):
+        for k, v in iteritems(instance):
+            if re.search(pattern, k):
+                for error in validator.descend(
+                    v, subschema, path=k, schema_path=pattern,
+                ):
+                    yield error
+
+
+def additionalProperties(validator, aP, instance, schema):
+    if not validator.is_type(instance, "object"):
+        return
+
+    extras = set(_utils.find_additional_properties(instance, schema))
+
+    if validator.is_type(aP, "object"):
+        for extra in extras:
+            for error in validator.descend(instance[extra], aP, path=extra):
+                yield error
+    elif not aP and extras:
+        error = "Additional properties are not allowed (%s %s unexpected)"
+        yield ValidationError(error % _utils.extras_msg(extras))
+
+
+def items(validator, items, instance, schema):
+    if not validator.is_type(instance, "array"):
+        return
+
+    if validator.is_type(items, "object"):
+        for index, item in enumerate(instance):
+            for error in validator.descend(item, items, path=index):
+                yield error
+    else:
+        for (index, item), subschema in zip(enumerate(instance), items):
+            for error in validator.descend(
+                item, subschema, path=index, schema_path=index,
+            ):
+                yield error
+
+
+def additionalItems(validator, aI, instance, schema):
+    if (
+        not validator.is_type(instance, "array") or
+        validator.is_type(schema.get("items", {}), "object")
+    ):
+        return
+
+    len_items = len(schema.get("items", []))
+    if validator.is_type(aI, "object"):
+        for index, item in enumerate(instance[len_items:], start=len_items):
+            for error in validator.descend(item, aI, path=index):
+                yield error
+    elif not aI and len(instance) > len(schema.get("items", [])):
+        error = "Additional items are not allowed (%s %s unexpected)"
+        yield ValidationError(
+            error %
+            _utils.extras_msg(instance[len(schema.get("items", [])):])
+        )
+
+
+def minimum(validator, minimum, instance, schema):
+    if not validator.is_type(instance, "number"):
+        return
+
+    if schema.get("exclusiveMinimum", False):
+        failed = float(instance) <= minimum
+        cmp = "less than or equal to"
+    else:
+        failed = float(instance) < minimum
+        cmp = "less than"
+
+    if failed:
+        yield ValidationError(
+            "%r is %s the minimum of %r" % (instance, cmp, minimum)
+        )
+
+
+def maximum(validator, maximum, instance, schema):
+    if not validator.is_type(instance, "number"):
+        return
+
+    if schema.get("exclusiveMaximum", False):
+        failed = instance >= maximum
+        cmp = "greater than or equal to"
+    else:
+        failed = instance > maximum
+        cmp = "greater than"
+
+    if failed:
+        yield ValidationError(
+            "%r is %s the maximum of %r" % (instance, cmp, maximum)
+        )
+
+
+def multipleOf(validator, dB, instance, schema):
+    if not validator.is_type(instance, "number"):
+        return
+
+    if isinstance(dB, float):
+        mod = instance % dB
+        failed = (mod > FLOAT_TOLERANCE) and (dB - mod) > FLOAT_TOLERANCE
+    else:
+        failed = instance % dB
+
+    if failed:
+        yield ValidationError("%r is not a multiple of %r" % (instance, dB))
+
+
+def minItems(validator, mI, instance, schema):
+    if validator.is_type(instance, "array") and len(instance) < mI:
+        yield ValidationError("%r is too short" % (instance,))
+
+
+def maxItems(validator, mI, instance, schema):
+    if validator.is_type(instance, "array") and len(instance) > mI:
+        yield ValidationError("%r is too long" % (instance,))
+
+
+def uniqueItems(validator, uI, instance, schema):
+    if (
+        uI and
+        validator.is_type(instance, "array") and
+        not _utils.uniq(instance)
+    ):
+        yield ValidationError("%r has non-unique elements" % instance)
+
+
+def pattern(validator, patrn, instance, schema):
+    if (
+        validator.is_type(instance, "string") and
+        not re.search(patrn, instance)
+    ):
+        yield ValidationError("%r does not match %r" % (instance, patrn))
+
+
+def format(validator, format, instance, schema):
+    if validator.format_checker is not None:
+        try:
+            validator.format_checker.check(instance, format)
+        except FormatError as error:
+            yield ValidationError(error.message, cause=error.cause)
+
+
+def minLength(validator, mL, instance, schema):
+    if validator.is_type(instance, "string") and len(instance) < mL:
+        yield ValidationError("%r is too short" % (instance,))
+
+
+def maxLength(validator, mL, instance, schema):
+    if validator.is_type(instance, "string") and len(instance) > mL:
+        yield ValidationError("%r is too long" % (instance,))
+
+
+def dependencies(validator, dependencies, instance, schema):
+    if not validator.is_type(instance, "object"):
+        return
+
+    for property, dependency in iteritems(dependencies):
+        if property not in instance:
+            continue
+
+        if validator.is_type(dependency, "object"):
+            for error in validator.descend(
+                instance, dependency, schema_path=property,
+            ):
+                yield error
+        else:
+            dependencies = _utils.ensure_list(dependency)
+            for dependency in dependencies:
+                if dependency not in instance:
+                    yield ValidationError(
+                        "%r is a dependency of %r" % (dependency, property)
+                    )
+
+
+def enum(validator, enums, instance, schema):
+    if instance not in enums:
+        yield ValidationError("%r is not one of %r" % (instance, enums))
+
+
+def ref(validator, ref, instance, schema):
+    with validator.resolver.resolving(ref) as resolved:
+        for error in validator.descend(instance, resolved):
+            yield error
+
+
+def type_draft3(validator, types, instance, schema):
+    types = _utils.ensure_list(types)
+
+    all_errors = []
+    for index, type in enumerate(types):
+        if type == "any":
+            return
+        if validator.is_type(type, "object"):
+            errors = list(validator.descend(instance, type, schema_path=index))
+            if not errors:
+                return
+            all_errors.extend(errors)
+        else:
+            if validator.is_type(instance, type):
+                return
+    else:
+        yield ValidationError(
+            _utils.types_msg(instance, types), context=all_errors,
+        )
+
+
+def properties_draft3(validator, properties, instance, schema):
+    if not validator.is_type(instance, "object"):
+        return
+
+    for property, subschema in iteritems(properties):
+        if property in instance:
+            for error in validator.descend(
+                instance[property],
+                subschema,
+                path=property,
+                schema_path=property,
+            ):
+                yield error
+        elif subschema.get("required", False):
+            error = ValidationError("%r is a required property" % property)
+            error._set(
+                validator="required",
+                validator_value=subschema["required"],
+                instance=instance,
+                schema=schema,
+            )
+            error.path.appendleft(property)
+            error.schema_path.extend([property, "required"])
+            yield error
+
+
+def disallow_draft3(validator, disallow, instance, schema):
+    for disallowed in _utils.ensure_list(disallow):
+        if validator.is_valid(instance, {"type" : [disallowed]}):
+            yield ValidationError(
+                "%r is disallowed for %r" % (disallowed, instance)
+            )
+
+
+def extends_draft3(validator, extends, instance, schema):
+    if validator.is_type(extends, "object"):
+        for error in validator.descend(instance, extends):
+            yield error
+        return
+    for index, subschema in enumerate(extends):
+        for error in validator.descend(instance, subschema, schema_path=index):
+            yield error
+
+
+def type_draft4(validator, types, instance, schema):
+    types = _utils.ensure_list(types)
+
+    if not any(validator.is_type(instance, type) for type in types):
+        yield ValidationError(_utils.types_msg(instance, types))
+
+
+def properties_draft4(validator, properties, instance, schema):
+    if not validator.is_type(instance, "object"):
+        return
+
+    for property, subschema in iteritems(properties):
+        if property in instance:
+            for error in validator.descend(
+                instance[property],
+                subschema,
+                path=property,
+                schema_path=property,
+            ):
+                yield error
+
+
+def required_draft4(validator, required, instance, schema):
+    if not validator.is_type(instance, "object"):
+        return
+    for property in required:
+        if property not in instance:
+            yield ValidationError("%r is a required property" % property)
+
+
+def minProperties_draft4(validator, mP, instance, schema):
+    if validator.is_type(instance, "object") and len(instance) < mP:
+        yield ValidationError(
+            "%r does not have enough properties" % (instance,)
+        )
+
+
+def maxProperties_draft4(validator, mP, instance, schema):
+    if not validator.is_type(instance, "object"):
+        return
+    if validator.is_type(instance, "object") and len(instance) > mP:
+        yield ValidationError("%r has too many properties" % (instance,))
+
+
+def allOf_draft4(validator, allOf, instance, schema):
+    for index, subschema in enumerate(allOf):
+        for error in validator.descend(instance, subschema, schema_path=index):
+            yield error
+
+
+def oneOf_draft4(validator, oneOf, instance, schema):
+    subschemas = enumerate(oneOf)
+    all_errors = []
+    for index, subschema in subschemas:
+        errs = list(validator.descend(instance, subschema, schema_path=index))
+        if not errs:
+            first_valid = subschema
+            break
+        all_errors.extend(errs)
+    else:
+        yield ValidationError(
+            "%r is not valid under any of the given schemas" % (instance,),
+            context=all_errors,
+        )
+
+    more_valid = [s for i, s in subschemas if validator.is_valid(instance, s)]
+    if more_valid:
+        more_valid.append(first_valid)
+        reprs = ", ".join(repr(schema) for schema in more_valid)
+        yield ValidationError(
+            "%r is valid under each of %s" % (instance, reprs)
+        )
+
+
+def anyOf_draft4(validator, anyOf, instance, schema):
+    all_errors = []
+    for index, subschema in enumerate(anyOf):
+        errs = list(validator.descend(instance, subschema, schema_path=index))
+        if not errs:
+            break
+        all_errors.extend(errs)
+    else:
+        yield ValidationError(
+            "%r is not valid under any of the given schemas" % (instance,),
+            context=all_errors,
+        )
+
+
+def not_draft4(validator, not_schema, instance, schema):
+    if validator.is_valid(instance, not_schema):
+        yield ValidationError(
+            "%r is not allowed for %r" % (not_schema, instance)
+        )
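
Every function above follows one contract: a generator taking (validator, keyword_value, instance, schema) and yielding ValidationErrors. A hedged sketch of plugging a custom keyword in through jsonschema.validators.extend (the "isEven" keyword is invented for illustration):

    from jsonschema import Draft4Validator, validators
    from jsonschema.exceptions import ValidationError

    def isEven(validator, value, instance, schema):
        # Mirror the pattern above: ignore non-matching types, yield errors.
        if value and validator.is_type(instance, "number") and instance % 2:
            yield ValidationError("%r is not even" % (instance,))

    EvenValidator = validators.extend(Draft4Validator, {"isEven": isEven})
    assert list(EvenValidator({"isEven": True}).iter_errors(3))       # one error
    assert not list(EvenValidator({"isEven": True}).iter_errors(4))   # valid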
72 lib/spack/external/jsonschema/cli.py (vendored, new file)

@@ -0,0 +1,72 @@
+from __future__ import absolute_import
+import argparse
+import json
+import sys
+
+from jsonschema._reflect import namedAny
+from jsonschema.validators import validator_for
+
+
+def _namedAnyWithDefault(name):
+    if "." not in name:
+        name = "jsonschema." + name
+    return namedAny(name)
+
+
+def _json_file(path):
+    with open(path) as file:
+        return json.load(file)
+
+
+parser = argparse.ArgumentParser(
+    description="JSON Schema Validation CLI",
+)
+parser.add_argument(
+    "-i", "--instance",
+    action="append",
+    dest="instances",
+    type=_json_file,
+    help="a path to a JSON instance to validate "
+         "(may be specified multiple times)",
+)
+parser.add_argument(
+    "-F", "--error-format",
+    default="{error.instance}: {error.message}\n",
+    help="the format to use for each error output message, specified in "
+         "a form suitable for passing to str.format, which will be called "
+         "with 'error' for each error",
+)
+parser.add_argument(
+    "-V", "--validator",
+    type=_namedAnyWithDefault,
+    help="the fully qualified object name of a validator to use, or, for "
+         "validators that are registered with jsonschema, simply the name "
+         "of the class.",
+)
+parser.add_argument(
+    "schema",
+    help="the JSON Schema to validate with",
+    type=_json_file,
+)
+
+
+def parse_args(args):
+    arguments = vars(parser.parse_args(args=args or ["--help"]))
+    if arguments["validator"] is None:
+        arguments["validator"] = validator_for(arguments["schema"])
+    return arguments
+
+
+def main(args=sys.argv[1:]):
+    sys.exit(run(arguments=parse_args(args=args)))
+
+
+def run(arguments, stdout=sys.stdout, stderr=sys.stderr):
+    error_format = arguments["error_format"]
+    validator = arguments["validator"](schema=arguments["schema"])
+    errored = False
+    for instance in arguments["instances"] or ():
+        for error in validator.iter_errors(instance):
+            stderr.write(error_format.format(error=error))
+            errored = True
+    return errored
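
Besides `python -m jsonschema` (see __main__.py above), the CLI can be driven programmatically; a sketch assuming hypothetical schema.json and instance.json files exist on disk:

    from jsonschema import cli

    arguments = cli.parse_args(["-i", "instance.json", "schema.json"])
    errored = cli.run(arguments)   # truthy when any instance failed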
53 lib/spack/external/jsonschema/compat.py (vendored, new file)

@@ -0,0 +1,53 @@
+from __future__ import unicode_literals
+import sys
+import operator
+
+try:
+    from collections import MutableMapping, Sequence  # noqa
+except ImportError:
+    from collections.abc import MutableMapping, Sequence  # noqa
+
+PY3 = sys.version_info[0] >= 3
+
+if PY3:
+    zip = zip
+    from io import StringIO
+    from urllib.parse import (
+        unquote, urljoin, urlunsplit, SplitResult, urlsplit as _urlsplit
+    )
+    from urllib.request import urlopen
+    str_types = str,
+    int_types = int,
+    iteritems = operator.methodcaller("items")
+else:
+    from itertools import izip as zip  # noqa
+    from StringIO import StringIO
+    from urlparse import (
+        urljoin, urlunsplit, SplitResult, urlsplit as _urlsplit  # noqa
+    )
+    from urllib import unquote  # noqa
+    from urllib2 import urlopen  # noqa
+    str_types = basestring
+    int_types = int, long
+    iteritems = operator.methodcaller("iteritems")
+
+
+# On python < 3.3 fragments are not handled properly with unknown schemes
+def urlsplit(url):
+    scheme, netloc, path, query, fragment = _urlsplit(url)
+    if "#" in path:
+        path, fragment = path.split("#", 1)
+    return SplitResult(scheme, netloc, path, query, fragment)
+
+
+def urldefrag(url):
+    if "#" in url:
+        s, n, p, q, frag = urlsplit(url)
+        defrag = urlunsplit((s, n, p, q, ''))
+    else:
+        defrag = url
+        frag = ''
+    return defrag, frag
+
+
+# flake8: noqa
264 lib/spack/external/jsonschema/exceptions.py (vendored, new file)

@@ -0,0 +1,264 @@
+from collections import defaultdict, deque
+import itertools
+import pprint
+import textwrap
+
+from jsonschema import _utils
+from jsonschema.compat import PY3, iteritems
+
+
+WEAK_MATCHES = frozenset(["anyOf", "oneOf"])
+STRONG_MATCHES = frozenset()
+
+_unset = _utils.Unset()
+
+
+class _Error(Exception):
+    def __init__(
+        self,
+        message,
+        validator=_unset,
+        path=(),
+        cause=None,
+        context=(),
+        validator_value=_unset,
+        instance=_unset,
+        schema=_unset,
+        schema_path=(),
+        parent=None,
+    ):
+        self.message = message
+        self.path = self.relative_path = deque(path)
+        self.schema_path = self.relative_schema_path = deque(schema_path)
+        self.context = list(context)
+        self.cause = self.__cause__ = cause
+        self.validator = validator
+        self.validator_value = validator_value
+        self.instance = instance
+        self.schema = schema
+        self.parent = parent
+
+        for error in context:
+            error.parent = self
+
+    def __repr__(self):
+        return "<%s: %r>" % (self.__class__.__name__, self.message)
+
+    def __str__(self):
+        return unicode(self).encode("utf-8")
+
+    def __unicode__(self):
+        essential_for_verbose = (
+            self.validator, self.validator_value, self.instance, self.schema,
+        )
+        if any(m is _unset for m in essential_for_verbose):
+            return self.message
+
+        pschema = pprint.pformat(self.schema, width=72)
+        pinstance = pprint.pformat(self.instance, width=72)
+        return self.message + textwrap.dedent("""
+
+            Failed validating %r in schema%s:
+            %s
+
+            On instance%s:
+            %s
+            """.rstrip()
+        ) % (
+            self.validator,
+            _utils.format_as_index(list(self.relative_schema_path)[:-1]),
+            _utils.indent(pschema),
+            _utils.format_as_index(self.relative_path),
+            _utils.indent(pinstance),
+        )
+
+    if PY3:
+        __str__ = __unicode__
+
+    @classmethod
+    def create_from(cls, other):
+        return cls(**other._contents())
+
+    @property
+    def absolute_path(self):
+        parent = self.parent
+        if parent is None:
+            return self.relative_path
+
+        path = deque(self.relative_path)
+        path.extendleft(parent.absolute_path)
+        return path
+
+    @property
+    def absolute_schema_path(self):
+        parent = self.parent
+        if parent is None:
+            return self.relative_schema_path
+
+        path = deque(self.relative_schema_path)
+        path.extendleft(parent.absolute_schema_path)
+        return path
+
+    def _set(self, **kwargs):
+        for k, v in iteritems(kwargs):
+            if getattr(self, k) is _unset:
+                setattr(self, k, v)
+
+    def _contents(self):
+        attrs = (
+            "message", "cause", "context", "validator", "validator_value",
+            "path", "schema_path", "instance", "schema", "parent",
+        )
+        return dict((attr, getattr(self, attr)) for attr in attrs)
+
+
+class ValidationError(_Error):
+    pass
+
+
+class SchemaError(_Error):
+    pass
+
+
+class RefResolutionError(Exception):
+    pass
+
+
+class UnknownType(Exception):
+    def __init__(self, type, instance, schema):
+        self.type = type
+        self.instance = instance
+        self.schema = schema
+
+    def __str__(self):
+        return unicode(self).encode("utf-8")
+
+    def __unicode__(self):
+        pschema = pprint.pformat(self.schema, width=72)
+        pinstance = pprint.pformat(self.instance, width=72)
+        return textwrap.dedent("""
+            Unknown type %r for validator with schema:
+            %s
+
+            While checking instance:
+            %s
+            """.rstrip()
+        ) % (self.type, _utils.indent(pschema), _utils.indent(pinstance))
+
+    if PY3:
+        __str__ = __unicode__
+
+
+class FormatError(Exception):
+    def __init__(self, message, cause=None):
+        super(FormatError, self).__init__(message, cause)
+        self.message = message
+        self.cause = self.__cause__ = cause
+
+    def __str__(self):
+        return self.message.encode("utf-8")
+
+    def __unicode__(self):
+        return self.message
+
+    if PY3:
+        __str__ = __unicode__
+
+
+class ErrorTree(object):
+    """
+    ErrorTrees make it easier to check which validations failed.
+
+    """
+
+    _instance = _unset
+
+    def __init__(self, errors=()):
+        self.errors = {}
+        self._contents = defaultdict(self.__class__)
+
+        for error in errors:
+            container = self
+            for element in error.path:
+                container = container[element]
+            container.errors[error.validator] = error
+
+            self._instance = error.instance
+
+    def __contains__(self, index):
+        """
+        Check whether ``instance[index]`` has any errors.
+
+        """
+
+        return index in self._contents
+
+    def __getitem__(self, index):
+        """
+        Retrieve the child tree one level down at the given ``index``.
+
+        If the index is not in the instance that this tree corresponds to and
+        is not known by this tree, whatever error would be raised by
+        ``instance.__getitem__`` will be propagated (usually this is some
+        subclass of :class:`LookupError`.
+
+        """
+
+        if self._instance is not _unset and index not in self:
+            self._instance[index]
+        return self._contents[index]
+
+    def __setitem__(self, index, value):
+        self._contents[index] = value
+
+    def __iter__(self):
+        """
+        Iterate (non-recursively) over the indices in the instance with errors.
+
+        """
+
+        return iter(self._contents)
+
+    def __len__(self):
+        """
+        Same as :attr:`total_errors`.
+
+        """
+
+        return self.total_errors
+
+    def __repr__(self):
+        return "<%s (%s total errors)>" % (self.__class__.__name__, len(self))
+
+    @property
+    def total_errors(self):
+        """
+        The total number of errors in the entire tree, including children.
+
+        """
+
+        child_errors = sum(len(tree) for _, tree in iteritems(self._contents))
+        return len(self.errors) + child_errors
+
+
+def by_relevance(weak=WEAK_MATCHES, strong=STRONG_MATCHES):
+    def relevance(error):
+        validator = error.validator
+        return -len(error.path), validator not in weak, validator in strong
+    return relevance
+
+
+relevance = by_relevance()
+
+
+def best_match(errors, key=relevance):
+    errors = iter(errors)
+    best = next(errors, None)
+    if best is None:
+        return
+    best = max(itertools.chain([best], errors), key=key)
+
+    while best.context:
+        best = min(best.context, key=key)
+    return best
|
||||||
|
return best
|
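Note: the exceptions module above is easiest to understand from its two entry points, best_match() and ErrorTree. A minimal usage sketch follows (an editor's illustration, not part of this commit; the schema and instance are invented):

# Sketch only: best_match() picks one most-relevant error to show a user,
# while ErrorTree indexes every error by its path into the instance.
from jsonschema import Draft4Validator
from jsonschema.exceptions import ErrorTree, best_match

validator = Draft4Validator({"properties": {"size": {"type": "integer"}}})
instance = {"size": "huge"}

error = best_match(validator.iter_errors(instance))
if error is not None:
    print(error.message)            # e.g. "'huge' is not of type 'integer'"

tree = ErrorTree(validator.iter_errors(instance))
print("size" in tree)               # True: this property had an error
print(sorted(tree["size"].errors))  # validators that failed there, e.g. ['type']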
201 lib/spack/external/jsonschema/schemas/draft3.json vendored Normal file
@@ -0,0 +1,201 @@
{
    "$schema": "http://json-schema.org/draft-03/schema#",
    "dependencies": {
        "exclusiveMaximum": "maximum",
        "exclusiveMinimum": "minimum"
    },
    "id": "http://json-schema.org/draft-03/schema#",
    "properties": {
        "$ref": {
            "format": "uri",
            "type": "string"
        },
        "$schema": {
            "format": "uri",
            "type": "string"
        },
        "additionalItems": {
            "default": {},
            "type": [
                {
                    "$ref": "#"
                },
                "boolean"
            ]
        },
        "additionalProperties": {
            "default": {},
            "type": [
                {
                    "$ref": "#"
                },
                "boolean"
            ]
        },
        "default": {
            "type": "any"
        },
        "dependencies": {
            "additionalProperties": {
                "items": {
                    "type": "string"
                },
                "type": [
                    "string",
                    "array",
                    {
                        "$ref": "#"
                    }
                ]
            },
            "default": {},
            "type": [
                "string",
                "array",
                "object"
            ]
        },
        "description": {
            "type": "string"
        },
        "disallow": {
            "items": {
                "type": [
                    "string",
                    {
                        "$ref": "#"
                    }
                ]
            },
            "type": [
                "string",
                "array"
            ],
            "uniqueItems": true
        },
        "divisibleBy": {
            "default": 1,
            "exclusiveMinimum": true,
            "minimum": 0,
            "type": "number"
        },
        "enum": {
            "minItems": 1,
            "type": "array",
            "uniqueItems": true
        },
        "exclusiveMaximum": {
            "default": false,
            "type": "boolean"
        },
        "exclusiveMinimum": {
            "default": false,
            "type": "boolean"
        },
        "extends": {
            "default": {},
            "items": {
                "$ref": "#"
            },
            "type": [
                {
                    "$ref": "#"
                },
                "array"
            ]
        },
        "format": {
            "type": "string"
        },
        "id": {
            "format": "uri",
            "type": "string"
        },
        "items": {
            "default": {},
            "items": {
                "$ref": "#"
            },
            "type": [
                {
                    "$ref": "#"
                },
                "array"
            ]
        },
        "maxDecimal": {
            "minimum": 0,
            "type": "number"
        },
        "maxItems": {
            "minimum": 0,
            "type": "integer"
        },
        "maxLength": {
            "type": "integer"
        },
        "maximum": {
            "type": "number"
        },
        "minItems": {
            "default": 0,
            "minimum": 0,
            "type": "integer"
        },
        "minLength": {
            "default": 0,
            "minimum": 0,
            "type": "integer"
        },
        "minimum": {
            "type": "number"
        },
        "pattern": {
            "format": "regex",
            "type": "string"
        },
        "patternProperties": {
            "additionalProperties": {
                "$ref": "#"
            },
            "default": {},
            "type": "object"
        },
        "properties": {
            "additionalProperties": {
                "$ref": "#",
                "type": "object"
            },
            "default": {},
            "type": "object"
        },
        "required": {
            "default": false,
            "type": "boolean"
        },
        "title": {
            "type": "string"
        },
        "type": {
            "default": "any",
            "items": {
                "type": [
                    "string",
                    {
                        "$ref": "#"
                    }
                ]
            },
            "type": [
                "string",
                "array"
            ],
            "uniqueItems": true
        },
        "uniqueItems": {
            "default": false,
            "type": "boolean"
        }
    },
    "type": "object"
}
221 lib/spack/external/jsonschema/schemas/draft4.json vendored Normal file
@@ -0,0 +1,221 @@
{
    "$schema": "http://json-schema.org/draft-04/schema#",
    "default": {},
    "definitions": {
        "positiveInteger": {
            "minimum": 0,
            "type": "integer"
        },
        "positiveIntegerDefault0": {
            "allOf": [
                {
                    "$ref": "#/definitions/positiveInteger"
                },
                {
                    "default": 0
                }
            ]
        },
        "schemaArray": {
            "items": {
                "$ref": "#"
            },
            "minItems": 1,
            "type": "array"
        },
        "simpleTypes": {
            "enum": [
                "array",
                "boolean",
                "integer",
                "null",
                "number",
                "object",
                "string"
            ]
        },
        "stringArray": {
            "items": {
                "type": "string"
            },
            "minItems": 1,
            "type": "array",
            "uniqueItems": true
        }
    },
    "dependencies": {
        "exclusiveMaximum": [
            "maximum"
        ],
        "exclusiveMinimum": [
            "minimum"
        ]
    },
    "description": "Core schema meta-schema",
    "id": "http://json-schema.org/draft-04/schema#",
    "properties": {
        "$schema": {
            "format": "uri",
            "type": "string"
        },
        "additionalItems": {
            "anyOf": [
                {
                    "type": "boolean"
                },
                {
                    "$ref": "#"
                }
            ],
            "default": {}
        },
        "additionalProperties": {
            "anyOf": [
                {
                    "type": "boolean"
                },
                {
                    "$ref": "#"
                }
            ],
            "default": {}
        },
        "allOf": {
            "$ref": "#/definitions/schemaArray"
        },
        "anyOf": {
            "$ref": "#/definitions/schemaArray"
        },
        "default": {},
        "definitions": {
            "additionalProperties": {
                "$ref": "#"
            },
            "default": {},
            "type": "object"
        },
        "dependencies": {
            "additionalProperties": {
                "anyOf": [
                    {
                        "$ref": "#"
                    },
                    {
                        "$ref": "#/definitions/stringArray"
                    }
                ]
            },
            "type": "object"
        },
        "description": {
            "type": "string"
        },
        "enum": {
            "minItems": 1,
            "type": "array",
            "uniqueItems": true
        },
        "exclusiveMaximum": {
            "default": false,
            "type": "boolean"
        },
        "exclusiveMinimum": {
            "default": false,
            "type": "boolean"
        },
        "id": {
            "format": "uri",
            "type": "string"
        },
        "items": {
            "anyOf": [
                {
                    "$ref": "#"
                },
                {
                    "$ref": "#/definitions/schemaArray"
                }
            ],
            "default": {}
        },
        "maxItems": {
            "$ref": "#/definitions/positiveInteger"
        },
        "maxLength": {
            "$ref": "#/definitions/positiveInteger"
        },
        "maxProperties": {
            "$ref": "#/definitions/positiveInteger"
        },
        "maximum": {
            "type": "number"
        },
        "minItems": {
            "$ref": "#/definitions/positiveIntegerDefault0"
        },
        "minLength": {
            "$ref": "#/definitions/positiveIntegerDefault0"
        },
        "minProperties": {
            "$ref": "#/definitions/positiveIntegerDefault0"
        },
        "minimum": {
            "type": "number"
        },
        "multipleOf": {
            "exclusiveMinimum": true,
            "minimum": 0,
            "type": "number"
        },
        "not": {
            "$ref": "#"
        },
        "oneOf": {
            "$ref": "#/definitions/schemaArray"
        },
        "pattern": {
            "format": "regex",
            "type": "string"
        },
        "patternProperties": {
            "additionalProperties": {
                "$ref": "#"
            },
            "default": {},
            "type": "object"
        },
        "properties": {
            "additionalProperties": {
                "$ref": "#"
            },
            "default": {},
            "type": "object"
        },
        "required": {
            "$ref": "#/definitions/stringArray"
        },
        "title": {
            "type": "string"
        },
        "type": {
            "anyOf": [
                {
                    "$ref": "#/definitions/simpleTypes"
                },
                {
                    "items": {
                        "$ref": "#/definitions/simpleTypes"
                    },
                    "minItems": 1,
                    "type": "array",
                    "uniqueItems": true
                }
            ]
        },
        "uniqueItems": {
            "default": false,
            "type": "boolean"
        }
    },
    "type": "object"
}
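Note: the two JSON documents above are the draft-3 and draft-4 meta-schemas, that is, the schemas against which user-supplied schemas are themselves validated. A sketch of that use (an editor's illustration, assuming the standard check_schema() classmethod; the invalid schema is invented):

# Sketch: check_schema() validates a schema against the meta-schema above.
from jsonschema import Draft4Validator, SchemaError

Draft4Validator.check_schema({"type": "object"})     # valid, returns None

try:
    Draft4Validator.check_schema({"minItems": "1"})  # minItems must be an integer
except SchemaError as err:
    print(err.message)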
0 lib/spack/external/jsonschema/tests/__init__.py vendored Normal file
15 lib/spack/external/jsonschema/tests/compat.py vendored Normal file
@@ -0,0 +1,15 @@
import sys


if sys.version_info[:2] < (2, 7):  # pragma: no cover
    import unittest2 as unittest
else:
    import unittest

try:
    from unittest import mock
except ImportError:
    import mock


# flake8: noqa
110 lib/spack/external/jsonschema/tests/test_cli.py vendored Normal file
@@ -0,0 +1,110 @@
from jsonschema import Draft4Validator, ValidationError, cli
from jsonschema.compat import StringIO
from jsonschema.tests.compat import mock, unittest


def fake_validator(*errors):
    errors = list(reversed(errors))

    class FakeValidator(object):
        def __init__(self, *args, **kwargs):
            pass

        def iter_errors(self, instance):
            if errors:
                return errors.pop()
            return []
    return FakeValidator


class TestParser(unittest.TestCase):
    FakeValidator = fake_validator()

    def setUp(self):
        mock_open = mock.mock_open()
        patch_open = mock.patch.object(cli, "open", mock_open, create=True)
        patch_open.start()
        self.addCleanup(patch_open.stop)

        mock_json_load = mock.Mock()
        mock_json_load.return_value = {}
        patch_json_load = mock.patch("json.load")
        patch_json_load.start()
        self.addCleanup(patch_json_load.stop)

    def test_find_validator_by_fully_qualified_object_name(self):
        arguments = cli.parse_args(
            [
                "--validator",
                "jsonschema.tests.test_cli.TestParser.FakeValidator",
                "--instance", "foo.json",
                "schema.json",
            ]
        )
        self.assertIs(arguments["validator"], self.FakeValidator)

    def test_find_validator_in_jsonschema(self):
        arguments = cli.parse_args(
            [
                "--validator", "Draft4Validator",
                "--instance", "foo.json",
                "schema.json",
            ]
        )
        self.assertIs(arguments["validator"], Draft4Validator)


class TestCLI(unittest.TestCase):
    def test_successful_validation(self):
        stdout, stderr = StringIO(), StringIO()
        exit_code = cli.run(
            {
                "validator": fake_validator(),
                "schema": {},
                "instances": [1],
                "error_format": "{error.message}",
            },
            stdout=stdout,
            stderr=stderr,
        )
        self.assertFalse(stdout.getvalue())
        self.assertFalse(stderr.getvalue())
        self.assertEqual(exit_code, 0)

    def test_unsuccessful_validation(self):
        error = ValidationError("I am an error!", instance=1)
        stdout, stderr = StringIO(), StringIO()
        exit_code = cli.run(
            {
                "validator": fake_validator([error]),
                "schema": {},
                "instances": [1],
                "error_format": "{error.instance} - {error.message}",
            },
            stdout=stdout,
            stderr=stderr,
        )
        self.assertFalse(stdout.getvalue())
        self.assertEqual(stderr.getvalue(), "1 - I am an error!")
        self.assertEqual(exit_code, 1)

    def test_unsuccessful_validation_multiple_instances(self):
        first_errors = [
            ValidationError("9", instance=1),
            ValidationError("8", instance=1),
        ]
        second_errors = [ValidationError("7", instance=2)]
        stdout, stderr = StringIO(), StringIO()
        exit_code = cli.run(
            {
                "validator": fake_validator(first_errors, second_errors),
                "schema": {},
                "instances": [1, 2],
                "error_format": "{error.instance} - {error.message}\t",
            },
            stdout=stdout,
            stderr=stderr,
        )
        self.assertFalse(stdout.getvalue())
        self.assertEqual(stderr.getvalue(), "1 - 9\t1 - 8\t2 - 7\t")
        self.assertEqual(exit_code, 1)
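Note: an editor's sketch of the cli.run() calling convention exercised by TestCLI above; the schema and instances are invented, and run() is called directly with in-memory instances rather than through parse_args():

# Sketch mirroring TestCLI: run() returns 0 when every instance validates,
# 1 otherwise, writing errors formatted by error_format to stderr.
import sys
from jsonschema import Draft4Validator, cli

exit_code = cli.run(
    {
        "validator": Draft4Validator,
        "schema": {"type": "integer"},
        "instances": [12, "not an integer"],
        "error_format": "{error.instance}: {error.message}\n",
    },
    stdout=sys.stdout,
    stderr=sys.stderr,
)
print(exit_code)  # 1, because the second instance fails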
382 lib/spack/external/jsonschema/tests/test_exceptions.py vendored Normal file
@@ -0,0 +1,382 @@
import textwrap

from jsonschema import Draft4Validator, exceptions
from jsonschema.compat import PY3
from jsonschema.tests.compat import mock, unittest


class TestBestMatch(unittest.TestCase):
    def best_match(self, errors):
        errors = list(errors)
        best = exceptions.best_match(errors)
        reversed_best = exceptions.best_match(reversed(errors))
        self.assertEqual(
            best,
            reversed_best,
            msg="Didn't return a consistent best match!\n"
                "Got: {0}\n\nThen: {1}".format(best, reversed_best),
        )
        return best

    def test_shallower_errors_are_better_matches(self):
        validator = Draft4Validator(
            {
                "properties" : {
                    "foo" : {
                        "minProperties" : 2,
                        "properties" : {"bar" : {"type" : "object"}},
                    }
                }
            }
        )
        best = self.best_match(validator.iter_errors({"foo" : {"bar" : []}}))
        self.assertEqual(best.validator, "minProperties")

    def test_oneOf_and_anyOf_are_weak_matches(self):
        """
        A property you *must* match is probably better than one you have to
        match a part of.

        """

        validator = Draft4Validator(
            {
                "minProperties" : 2,
                "anyOf" : [{"type" : "string"}, {"type" : "number"}],
                "oneOf" : [{"type" : "string"}, {"type" : "number"}],
            }
        )
        best = self.best_match(validator.iter_errors({}))
        self.assertEqual(best.validator, "minProperties")

    def test_if_the_most_relevant_error_is_anyOf_it_is_traversed(self):
        """
        If the most relevant error is an anyOf, then we traverse its context
        and select the otherwise *least* relevant error, since in this case
        that means the most specific, deep, error inside the instance.

        I.e. since only one of the schemas must match, we look for the most
        relevant one.

        """

        validator = Draft4Validator(
            {
                "properties" : {
                    "foo" : {
                        "anyOf" : [
                            {"type" : "string"},
                            {"properties" : {"bar" : {"type" : "array"}}},
                        ],
                    },
                },
            },
        )
        best = self.best_match(validator.iter_errors({"foo" : {"bar" : 12}}))
        self.assertEqual(best.validator_value, "array")

    def test_if_the_most_relevant_error_is_oneOf_it_is_traversed(self):
        """
        If the most relevant error is an oneOf, then we traverse its context
        and select the otherwise *least* relevant error, since in this case
        that means the most specific, deep, error inside the instance.

        I.e. since only one of the schemas must match, we look for the most
        relevant one.

        """

        validator = Draft4Validator(
            {
                "properties" : {
                    "foo" : {
                        "oneOf" : [
                            {"type" : "string"},
                            {"properties" : {"bar" : {"type" : "array"}}},
                        ],
                    },
                },
            },
        )
        best = self.best_match(validator.iter_errors({"foo" : {"bar" : 12}}))
        self.assertEqual(best.validator_value, "array")

    def test_if_the_most_relevant_error_is_allOf_it_is_traversed(self):
        """
        Now, if the error is allOf, we traverse but select the *most* relevant
        error from the context, because all schemas here must match anyways.

        """

        validator = Draft4Validator(
            {
                "properties" : {
                    "foo" : {
                        "allOf" : [
                            {"type" : "string"},
                            {"properties" : {"bar" : {"type" : "array"}}},
                        ],
                    },
                },
            },
        )
        best = self.best_match(validator.iter_errors({"foo" : {"bar" : 12}}))
        self.assertEqual(best.validator_value, "string")

    def test_nested_context_for_oneOf(self):
        validator = Draft4Validator(
            {
                "properties" : {
                    "foo" : {
                        "oneOf" : [
                            {"type" : "string"},
                            {
                                "oneOf" : [
                                    {"type" : "string"},
                                    {
                                        "properties" : {
                                            "bar" : {"type" : "array"}
                                        },
                                    },
                                ],
                            },
                        ],
                    },
                },
            },
        )
        best = self.best_match(validator.iter_errors({"foo" : {"bar" : 12}}))
        self.assertEqual(best.validator_value, "array")

    def test_one_error(self):
        validator = Draft4Validator({"minProperties" : 2})
        error, = validator.iter_errors({})
        self.assertEqual(
            exceptions.best_match(validator.iter_errors({})).validator,
            "minProperties",
        )

    def test_no_errors(self):
        validator = Draft4Validator({})
        self.assertIsNone(exceptions.best_match(validator.iter_errors({})))


class TestByRelevance(unittest.TestCase):
    def test_short_paths_are_better_matches(self):
        shallow = exceptions.ValidationError("Oh no!", path=["baz"])
        deep = exceptions.ValidationError("Oh yes!", path=["foo", "bar"])
        match = max([shallow, deep], key=exceptions.relevance)
        self.assertIs(match, shallow)

        match = max([deep, shallow], key=exceptions.relevance)
        self.assertIs(match, shallow)

    def test_global_errors_are_even_better_matches(self):
        shallow = exceptions.ValidationError("Oh no!", path=[])
        deep = exceptions.ValidationError("Oh yes!", path=["foo"])

        errors = sorted([shallow, deep], key=exceptions.relevance)
        self.assertEqual(
            [list(error.path) for error in errors],
            [["foo"], []],
        )

        errors = sorted([deep, shallow], key=exceptions.relevance)
        self.assertEqual(
            [list(error.path) for error in errors],
            [["foo"], []],
        )

    def test_weak_validators_are_lower_priority(self):
        weak = exceptions.ValidationError("Oh no!", path=[], validator="a")
        normal = exceptions.ValidationError("Oh yes!", path=[], validator="b")

        best_match = exceptions.by_relevance(weak="a")

        match = max([weak, normal], key=best_match)
        self.assertIs(match, normal)

        match = max([normal, weak], key=best_match)
        self.assertIs(match, normal)

    def test_strong_validators_are_higher_priority(self):
        weak = exceptions.ValidationError("Oh no!", path=[], validator="a")
        normal = exceptions.ValidationError("Oh yes!", path=[], validator="b")
        strong = exceptions.ValidationError("Oh fine!", path=[], validator="c")

        best_match = exceptions.by_relevance(weak="a", strong="c")

        match = max([weak, normal, strong], key=best_match)
        self.assertIs(match, strong)

        match = max([strong, normal, weak], key=best_match)
        self.assertIs(match, strong)


class TestErrorTree(unittest.TestCase):
    def test_it_knows_how_many_total_errors_it_contains(self):
        errors = [mock.MagicMock() for _ in range(8)]
        tree = exceptions.ErrorTree(errors)
        self.assertEqual(tree.total_errors, 8)

    def test_it_contains_an_item_if_the_item_had_an_error(self):
        errors = [exceptions.ValidationError("a message", path=["bar"])]
        tree = exceptions.ErrorTree(errors)
        self.assertIn("bar", tree)

    def test_it_does_not_contain_an_item_if_the_item_had_no_error(self):
        errors = [exceptions.ValidationError("a message", path=["bar"])]
        tree = exceptions.ErrorTree(errors)
        self.assertNotIn("foo", tree)

    def test_validators_that_failed_appear_in_errors_dict(self):
        error = exceptions.ValidationError("a message", validator="foo")
        tree = exceptions.ErrorTree([error])
        self.assertEqual(tree.errors, {"foo" : error})

    def test_it_creates_a_child_tree_for_each_nested_path(self):
        errors = [
            exceptions.ValidationError("a bar message", path=["bar"]),
            exceptions.ValidationError("a bar -> 0 message", path=["bar", 0]),
        ]
        tree = exceptions.ErrorTree(errors)
        self.assertIn(0, tree["bar"])
        self.assertNotIn(1, tree["bar"])

    def test_children_have_their_errors_dicts_built(self):
        e1, e2 = (
            exceptions.ValidationError("1", validator="foo", path=["bar", 0]),
            exceptions.ValidationError("2", validator="quux", path=["bar", 0]),
        )
        tree = exceptions.ErrorTree([e1, e2])
        self.assertEqual(tree["bar"][0].errors, {"foo" : e1, "quux" : e2})

    def test_it_does_not_contain_subtrees_that_are_not_in_the_instance(self):
        error = exceptions.ValidationError("123", validator="foo", instance=[])
        tree = exceptions.ErrorTree([error])

        with self.assertRaises(IndexError):
            tree[0]

    def test_if_its_in_the_tree_anyhow_it_does_not_raise_an_error(self):
        """
        If a validator is dumb (like :validator:`required` in draft 3) and
        refers to a path that isn't in the instance, the tree still properly
        returns a subtree for that path.

        """

        error = exceptions.ValidationError(
            "a message", validator="foo", instance={}, path=["foo"],
        )
        tree = exceptions.ErrorTree([error])
        self.assertIsInstance(tree["foo"], exceptions.ErrorTree)


class TestErrorReprStr(unittest.TestCase):
    def make_error(self, **kwargs):
        defaults = dict(
            message=u"hello",
            validator=u"type",
            validator_value=u"string",
            instance=5,
            schema={u"type": u"string"},
        )
        defaults.update(kwargs)
        return exceptions.ValidationError(**defaults)

    def assertShows(self, expected, **kwargs):
        if PY3:
            expected = expected.replace("u'", "'")
        expected = textwrap.dedent(expected).rstrip("\n")

        error = self.make_error(**kwargs)
        message_line, _, rest = str(error).partition("\n")
        self.assertEqual(message_line, error.message)
        self.assertEqual(rest, expected)

    def test_repr(self):
        self.assertEqual(
            repr(exceptions.ValidationError(message="Hello!")),
            "<ValidationError: %r>" % "Hello!",
        )

    def test_unset_error(self):
        error = exceptions.ValidationError("message")
        self.assertEqual(str(error), "message")

        kwargs = {
            "validator": "type",
            "validator_value": "string",
            "instance": 5,
            "schema": {"type": "string"}
        }
        # Just the message should show if any of the attributes are unset
        for attr in kwargs:
            k = dict(kwargs)
            del k[attr]
            error = exceptions.ValidationError("message", **k)
            self.assertEqual(str(error), "message")

    def test_empty_paths(self):
        self.assertShows(
            """
            Failed validating u'type' in schema:
                {u'type': u'string'}

            On instance:
                5
            """,
            path=[],
            schema_path=[],
        )

    def test_one_item_paths(self):
        self.assertShows(
            """
            Failed validating u'type' in schema:
                {u'type': u'string'}

            On instance[0]:
                5
            """,
            path=[0],
            schema_path=["items"],
        )

    def test_multiple_item_paths(self):
        self.assertShows(
            """
            Failed validating u'type' in schema[u'items'][0]:
                {u'type': u'string'}

            On instance[0][u'a']:
                5
            """,
            path=[0, u"a"],
            schema_path=[u"items", 0, 1],
        )

    def test_uses_pprint(self):
        with mock.patch("pprint.pformat") as pformat:
            str(self.make_error())
            self.assertEqual(pformat.call_count, 2)  # schema + instance

    def test_str_works_with_instances_having_overriden_eq_operator(self):
        """
        Check for https://github.com/Julian/jsonschema/issues/164 which
        rendered exceptions unusable when a `ValidationError` involved
        instances with an `__eq__` method that returned truthy values.

        """

        instance = mock.MagicMock()
        error = exceptions.ValidationError(
            "a message",
            validator="foo",
            instance=instance,
            validator_value="some",
            schema="schema",
        )
        str(error)
        self.assertFalse(instance.__eq__.called)
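Note: TestByRelevance above shows that by_relevance() builds a sort key from an error's depth plus weak/strong validator sets. An editor's sketch of a custom ranking follows (the validator names are chosen purely for illustration):

# Sketch: rank "type" failures above "anyOf" failures when picking a match.
from jsonschema import exceptions

errors = [
    exceptions.ValidationError("weak", path=[], validator="anyOf"),
    exceptions.ValidationError("strong", path=[], validator="type"),
]
key = exceptions.by_relevance(weak="anyOf", strong="type")
print(max(errors, key=key).message)  # "strong"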
63 lib/spack/external/jsonschema/tests/test_format.py vendored Normal file
@@ -0,0 +1,63 @@
"""
|
||||||
|
Tests for the parts of jsonschema related to the :validator:`format` property.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from jsonschema.tests.compat import mock, unittest
|
||||||
|
|
||||||
|
from jsonschema import FormatError, ValidationError, FormatChecker
|
||||||
|
from jsonschema.validators import Draft4Validator
|
||||||
|
|
||||||
|
|
||||||
|
class TestFormatChecker(unittest.TestCase):
|
||||||
|
def setUp(self):
|
||||||
|
self.fn = mock.Mock()
|
||||||
|
|
||||||
|
def test_it_can_validate_no_formats(self):
|
||||||
|
checker = FormatChecker(formats=())
|
||||||
|
self.assertFalse(checker.checkers)
|
||||||
|
|
||||||
|
def test_it_raises_a_key_error_for_unknown_formats(self):
|
||||||
|
with self.assertRaises(KeyError):
|
||||||
|
FormatChecker(formats=["o noes"])
|
||||||
|
|
||||||
|
def test_it_can_register_cls_checkers(self):
|
||||||
|
with mock.patch.dict(FormatChecker.checkers, clear=True):
|
||||||
|
FormatChecker.cls_checks("new")(self.fn)
|
||||||
|
self.assertEqual(FormatChecker.checkers, {"new" : (self.fn, ())})
|
||||||
|
|
||||||
|
def test_it_can_register_checkers(self):
|
||||||
|
checker = FormatChecker()
|
||||||
|
checker.checks("new")(self.fn)
|
||||||
|
self.assertEqual(
|
||||||
|
checker.checkers,
|
||||||
|
dict(FormatChecker.checkers, new=(self.fn, ()))
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_it_catches_registered_errors(self):
|
||||||
|
checker = FormatChecker()
|
||||||
|
cause = self.fn.side_effect = ValueError()
|
||||||
|
|
||||||
|
checker.checks("foo", raises=ValueError)(self.fn)
|
||||||
|
|
||||||
|
with self.assertRaises(FormatError) as cm:
|
||||||
|
checker.check("bar", "foo")
|
||||||
|
|
||||||
|
self.assertIs(cm.exception.cause, cause)
|
||||||
|
self.assertIs(cm.exception.__cause__, cause)
|
||||||
|
|
||||||
|
# Unregistered errors should not be caught
|
||||||
|
self.fn.side_effect = AttributeError
|
||||||
|
with self.assertRaises(AttributeError):
|
||||||
|
checker.check("bar", "foo")
|
||||||
|
|
||||||
|
def test_format_error_causes_become_validation_error_causes(self):
|
||||||
|
checker = FormatChecker()
|
||||||
|
checker.checks("foo", raises=ValueError)(self.fn)
|
||||||
|
cause = self.fn.side_effect = ValueError()
|
||||||
|
validator = Draft4Validator({"format" : "foo"}, format_checker=checker)
|
||||||
|
|
||||||
|
with self.assertRaises(ValidationError) as cm:
|
||||||
|
validator.validate("bar")
|
||||||
|
|
||||||
|
self.assertIs(cm.exception.__cause__, cause)
|
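Note: an editor's sketch of the FormatChecker registration pattern the tests above exercise; the "even" format name and its checker function are invented:

# Sketch: checks() registers a format on one checker instance; raises= turns
# the named exception into a FormatError, which surfaces as a ValidationError.
from jsonschema import Draft4Validator, FormatChecker, ValidationError

checker = FormatChecker()

@checker.checks("even", raises=ValueError)
def is_even(value):
    return int(value) % 2 == 0

validator = Draft4Validator({"format": "even"}, format_checker=checker)
validator.validate(4)      # passes
try:
    validator.validate(3)  # fails: 3 is not a valid "even"
except ValidationError as err:
    print(err.message)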
290 lib/spack/external/jsonschema/tests/test_jsonschema_test_suite.py vendored Normal file
@@ -0,0 +1,290 @@
"""
|
||||||
|
Test runner for the JSON Schema official test suite
|
||||||
|
|
||||||
|
Tests comprehensive correctness of each draft's validator.
|
||||||
|
|
||||||
|
See https://github.com/json-schema/JSON-Schema-Test-Suite for details.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from contextlib import closing
|
||||||
|
from decimal import Decimal
|
||||||
|
import glob
|
||||||
|
import json
|
||||||
|
import io
|
||||||
|
import itertools
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
|
||||||
|
try:
|
||||||
|
from sys import pypy_version_info
|
||||||
|
except ImportError:
|
||||||
|
pypy_version_info = None
|
||||||
|
|
||||||
|
from jsonschema import (
|
||||||
|
FormatError, SchemaError, ValidationError, Draft3Validator,
|
||||||
|
Draft4Validator, FormatChecker, draft3_format_checker,
|
||||||
|
draft4_format_checker, validate,
|
||||||
|
)
|
||||||
|
from jsonschema.compat import PY3
|
||||||
|
from jsonschema.tests.compat import mock, unittest
|
||||||
|
import jsonschema
|
||||||
|
|
||||||
|
|
||||||
|
REPO_ROOT = os.path.join(os.path.dirname(jsonschema.__file__), os.path.pardir)
|
||||||
|
SUITE = os.getenv("JSON_SCHEMA_TEST_SUITE", os.path.join(REPO_ROOT, "json"))
|
||||||
|
|
||||||
|
if not os.path.isdir(SUITE):
|
||||||
|
raise ValueError(
|
||||||
|
"Can't find the JSON-Schema-Test-Suite directory. Set the "
|
||||||
|
"'JSON_SCHEMA_TEST_SUITE' environment variable or run the tests from "
|
||||||
|
"alongside a checkout of the suite."
|
||||||
|
)
|
||||||
|
|
||||||
|
TESTS_DIR = os.path.join(SUITE, "tests")
|
||||||
|
JSONSCHEMA_SUITE = os.path.join(SUITE, "bin", "jsonschema_suite")
|
||||||
|
|
||||||
|
remotes_stdout = subprocess.Popen(
|
||||||
|
["python", JSONSCHEMA_SUITE, "remotes"], stdout=subprocess.PIPE,
|
||||||
|
).stdout
|
||||||
|
|
||||||
|
with closing(remotes_stdout):
|
||||||
|
if PY3:
|
||||||
|
remotes_stdout = io.TextIOWrapper(remotes_stdout)
|
||||||
|
REMOTES = json.load(remotes_stdout)
|
||||||
|
|
||||||
|
|
||||||
|
def make_case(schema, data, valid, name):
|
||||||
|
if valid:
|
||||||
|
def test_case(self):
|
||||||
|
kwargs = getattr(self, "validator_kwargs", {})
|
||||||
|
validate(data, schema, cls=self.validator_class, **kwargs)
|
||||||
|
else:
|
||||||
|
def test_case(self):
|
||||||
|
kwargs = getattr(self, "validator_kwargs", {})
|
||||||
|
with self.assertRaises(ValidationError):
|
||||||
|
validate(data, schema, cls=self.validator_class, **kwargs)
|
||||||
|
|
||||||
|
if not PY3:
|
||||||
|
name = name.encode("utf-8")
|
||||||
|
test_case.__name__ = name
|
||||||
|
|
||||||
|
return test_case
|
||||||
|
|
||||||
|
|
||||||
|
def maybe_skip(skip, test_case, case, test):
|
||||||
|
if skip is not None:
|
||||||
|
reason = skip(case, test)
|
||||||
|
if reason is not None:
|
||||||
|
test_case = unittest.skip(reason)(test_case)
|
||||||
|
return test_case
|
||||||
|
|
||||||
|
|
||||||
|
def load_json_cases(tests_glob, ignore_glob="", basedir=TESTS_DIR, skip=None):
|
||||||
|
if ignore_glob:
|
||||||
|
ignore_glob = os.path.join(basedir, ignore_glob)
|
||||||
|
|
||||||
|
def add_test_methods(test_class):
|
||||||
|
ignored = set(glob.iglob(ignore_glob))
|
||||||
|
|
||||||
|
for filename in glob.iglob(os.path.join(basedir, tests_glob)):
|
||||||
|
if filename in ignored:
|
||||||
|
continue
|
||||||
|
|
||||||
|
validating, _ = os.path.splitext(os.path.basename(filename))
|
||||||
|
id = itertools.count(1)
|
||||||
|
|
||||||
|
with open(filename) as test_file:
|
||||||
|
for case in json.load(test_file):
|
||||||
|
for test in case["tests"]:
|
||||||
|
name = "test_%s_%s_%s" % (
|
||||||
|
validating,
|
||||||
|
next(id),
|
||||||
|
re.sub(r"[\W ]+", "_", test["description"]),
|
||||||
|
)
|
||||||
|
assert not hasattr(test_class, name), name
|
||||||
|
|
||||||
|
test_case = make_case(
|
||||||
|
data=test["data"],
|
||||||
|
schema=case["schema"],
|
||||||
|
valid=test["valid"],
|
||||||
|
name=name,
|
||||||
|
)
|
||||||
|
test_case = maybe_skip(skip, test_case, case, test)
|
||||||
|
setattr(test_class, name, test_case)
|
||||||
|
|
||||||
|
return test_class
|
||||||
|
return add_test_methods
|
||||||
|
|
||||||
|
|
||||||
|
class TypesMixin(object):
|
||||||
|
@unittest.skipIf(PY3, "In Python 3 json.load always produces unicode")
|
||||||
|
def test_string_a_bytestring_is_a_string(self):
|
||||||
|
self.validator_class({"type" : "string"}).validate(b"foo")
|
||||||
|
|
||||||
|
|
||||||
|
class DecimalMixin(object):
|
||||||
|
def test_it_can_validate_with_decimals(self):
|
||||||
|
schema = {"type" : "number"}
|
||||||
|
validator = self.validator_class(
|
||||||
|
schema, types={"number" : (int, float, Decimal)}
|
||||||
|
)
|
||||||
|
|
||||||
|
for valid in [1, 1.1, Decimal(1) / Decimal(8)]:
|
||||||
|
validator.validate(valid)
|
||||||
|
|
||||||
|
for invalid in ["foo", {}, [], True, None]:
|
||||||
|
with self.assertRaises(ValidationError):
|
||||||
|
validator.validate(invalid)
|
||||||
|
|
||||||
|
|
||||||
|
def missing_format(checker):
|
||||||
|
def missing_format(case, test):
|
||||||
|
format = case["schema"].get("format")
|
||||||
|
if format not in checker.checkers:
|
||||||
|
return "Format checker {0!r} not found.".format(format)
|
||||||
|
elif (
|
||||||
|
format == "date-time" and
|
||||||
|
pypy_version_info is not None and
|
||||||
|
pypy_version_info[:2] <= (1, 9)
|
||||||
|
):
|
||||||
|
# datetime.datetime is overzealous about typechecking in <=1.9
|
||||||
|
return "datetime.datetime is broken on this version of PyPy."
|
||||||
|
return missing_format
|
||||||
|
|
||||||
|
|
||||||
|
class FormatMixin(object):
|
||||||
|
def test_it_returns_true_for_formats_it_does_not_know_about(self):
|
||||||
|
validator = self.validator_class(
|
||||||
|
{"format" : "carrot"}, format_checker=FormatChecker(),
|
||||||
|
)
|
||||||
|
validator.validate("bugs")
|
||||||
|
|
||||||
|
def test_it_does_not_validate_formats_by_default(self):
|
||||||
|
validator = self.validator_class({})
|
||||||
|
self.assertIsNone(validator.format_checker)
|
||||||
|
|
||||||
|
def test_it_validates_formats_if_a_checker_is_provided(self):
|
||||||
|
checker = mock.Mock(spec=FormatChecker)
|
||||||
|
validator = self.validator_class(
|
||||||
|
{"format" : "foo"}, format_checker=checker,
|
||||||
|
)
|
||||||
|
|
||||||
|
validator.validate("bar")
|
||||||
|
|
||||||
|
checker.check.assert_called_once_with("bar", "foo")
|
||||||
|
|
||||||
|
cause = ValueError()
|
||||||
|
checker.check.side_effect = FormatError('aoeu', cause=cause)
|
||||||
|
|
||||||
|
with self.assertRaises(ValidationError) as cm:
|
||||||
|
validator.validate("bar")
|
||||||
|
# Make sure original cause is attached
|
||||||
|
self.assertIs(cm.exception.cause, cause)
|
||||||
|
|
||||||
|
def test_it_validates_formats_of_any_type(self):
|
||||||
|
checker = mock.Mock(spec=FormatChecker)
|
||||||
|
validator = self.validator_class(
|
||||||
|
{"format" : "foo"}, format_checker=checker,
|
||||||
|
)
|
||||||
|
|
||||||
|
validator.validate([1, 2, 3])
|
||||||
|
|
||||||
|
checker.check.assert_called_once_with([1, 2, 3], "foo")
|
||||||
|
|
||||||
|
cause = ValueError()
|
||||||
|
checker.check.side_effect = FormatError('aoeu', cause=cause)
|
||||||
|
|
||||||
|
with self.assertRaises(ValidationError) as cm:
|
||||||
|
validator.validate([1, 2, 3])
|
||||||
|
# Make sure original cause is attached
|
||||||
|
self.assertIs(cm.exception.cause, cause)
|
||||||
|
|
||||||
|
|
||||||
|
if sys.maxunicode == 2 ** 16 - 1: # This is a narrow build.
|
||||||
|
def narrow_unicode_build(case, test):
|
||||||
|
if "supplementary Unicode" in test["description"]:
|
||||||
|
return "Not running surrogate Unicode case, this Python is narrow."
|
||||||
|
else:
|
||||||
|
def narrow_unicode_build(case, test): # This isn't, skip nothing.
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
@load_json_cases(
|
||||||
|
"draft3/*.json",
|
||||||
|
skip=narrow_unicode_build,
|
||||||
|
ignore_glob="draft3/refRemote.json",
|
||||||
|
)
|
||||||
|
@load_json_cases(
|
||||||
|
"draft3/optional/format.json", skip=missing_format(draft3_format_checker)
|
||||||
|
)
|
||||||
|
@load_json_cases("draft3/optional/bignum.json")
|
||||||
|
@load_json_cases("draft3/optional/zeroTerminatedFloats.json")
|
||||||
|
class TestDraft3(unittest.TestCase, TypesMixin, DecimalMixin, FormatMixin):
|
||||||
|
validator_class = Draft3Validator
|
||||||
|
validator_kwargs = {"format_checker" : draft3_format_checker}
|
||||||
|
|
||||||
|
def test_any_type_is_valid_for_type_any(self):
|
||||||
|
validator = self.validator_class({"type" : "any"})
|
||||||
|
validator.validate(mock.Mock())
|
||||||
|
|
||||||
|
# TODO: we're in need of more meta schema tests
|
||||||
|
def test_invalid_properties(self):
|
||||||
|
with self.assertRaises(SchemaError):
|
||||||
|
validate({}, {"properties": {"test": True}},
|
||||||
|
cls=self.validator_class)
|
||||||
|
|
||||||
|
def test_minItems_invalid_string(self):
|
||||||
|
with self.assertRaises(SchemaError):
|
||||||
|
# needs to be an integer
|
||||||
|
validate([1], {"minItems" : "1"}, cls=self.validator_class)
|
||||||
|
|
||||||
|
|
||||||
|
@load_json_cases(
|
||||||
|
"draft4/*.json",
|
||||||
|
skip=narrow_unicode_build,
|
||||||
|
ignore_glob="draft4/refRemote.json",
|
||||||
|
)
|
||||||
|
@load_json_cases(
|
||||||
|
"draft4/optional/format.json", skip=missing_format(draft4_format_checker)
|
||||||
|
)
|
||||||
|
@load_json_cases("draft4/optional/bignum.json")
|
||||||
|
@load_json_cases("draft4/optional/zeroTerminatedFloats.json")
|
||||||
|
class TestDraft4(unittest.TestCase, TypesMixin, DecimalMixin, FormatMixin):
|
||||||
|
validator_class = Draft4Validator
|
||||||
|
validator_kwargs = {"format_checker" : draft4_format_checker}
|
||||||
|
|
||||||
|
# TODO: we're in need of more meta schema tests
|
||||||
|
def test_invalid_properties(self):
|
||||||
|
with self.assertRaises(SchemaError):
|
||||||
|
validate({}, {"properties": {"test": True}},
|
||||||
|
cls=self.validator_class)
|
||||||
|
|
||||||
|
def test_minItems_invalid_string(self):
|
||||||
|
with self.assertRaises(SchemaError):
|
||||||
|
# needs to be an integer
|
||||||
|
validate([1], {"minItems" : "1"}, cls=self.validator_class)
|
||||||
|
|
||||||
|
|
||||||
|
class RemoteRefResolutionMixin(object):
|
||||||
|
def setUp(self):
|
||||||
|
patch = mock.patch("jsonschema.validators.requests")
|
||||||
|
requests = patch.start()
|
||||||
|
requests.get.side_effect = self.resolve
|
||||||
|
self.addCleanup(patch.stop)
|
||||||
|
|
||||||
|
def resolve(self, reference):
|
||||||
|
_, _, reference = reference.partition("http://localhost:1234/")
|
||||||
|
return mock.Mock(**{"json.return_value" : REMOTES.get(reference)})
|
||||||
|
|
||||||
|
|
||||||
|
@load_json_cases("draft3/refRemote.json")
|
||||||
|
class Draft3RemoteResolution(RemoteRefResolutionMixin, unittest.TestCase):
|
||||||
|
validator_class = Draft3Validator
|
||||||
|
|
||||||
|
|
||||||
|
@load_json_cases("draft4/refRemote.json")
|
||||||
|
class Draft4RemoteResolution(RemoteRefResolutionMixin, unittest.TestCase):
|
||||||
|
validator_class = Draft4Validator
|
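Note: load_json_cases() above is a class decorator that stamps one generated test method onto the class per suite case. The same pattern in miniature (an editor's illustration with invented case data, not part of this commit):

# Sketch of the decorator pattern: generate test_* methods from data.
import unittest

CASES = [("one_is_int", 1, int), ("s_is_str", "s", str)]

def add_cases(cls):
    for name, value, expected_type in CASES:
        # Default arguments bind each case's values at definition time.
        def test(self, value=value, expected_type=expected_type):
            self.assertIsInstance(value, expected_type)
        test.__name__ = "test_%s" % name
        setattr(cls, test.__name__, test)
    return cls

@add_cases
class TestGenerated(unittest.TestCase):
    pass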
786 lib/spack/external/jsonschema/tests/test_validators.py vendored Normal file
@@ -0,0 +1,786 @@
from collections import deque
from contextlib import contextmanager
import json

from jsonschema import FormatChecker, ValidationError
from jsonschema.tests.compat import mock, unittest
from jsonschema.validators import (
    RefResolutionError, UnknownType, Draft3Validator,
    Draft4Validator, RefResolver, create, extend, validator_for, validate,
)


class TestCreateAndExtend(unittest.TestCase):
    def setUp(self):
        self.meta_schema = {u"properties" : {u"smelly" : {}}}
        self.smelly = mock.MagicMock()
        self.validators = {u"smelly" : self.smelly}
        self.types = {u"dict" : dict}
        self.Validator = create(
            meta_schema=self.meta_schema,
            validators=self.validators,
            default_types=self.types,
        )

        self.validator_value = 12
        self.schema = {u"smelly" : self.validator_value}
        self.validator = self.Validator(self.schema)

    def test_attrs(self):
        self.assertEqual(self.Validator.VALIDATORS, self.validators)
        self.assertEqual(self.Validator.META_SCHEMA, self.meta_schema)
        self.assertEqual(self.Validator.DEFAULT_TYPES, self.types)

    def test_init(self):
        self.assertEqual(self.validator.schema, self.schema)

    def test_iter_errors(self):
        instance = "hello"

        self.smelly.return_value = []
        self.assertEqual(list(self.validator.iter_errors(instance)), [])

        error = mock.Mock()
        self.smelly.return_value = [error]
        self.assertEqual(list(self.validator.iter_errors(instance)), [error])

        self.smelly.assert_called_with(
            self.validator, self.validator_value, instance, self.schema,
        )

    def test_if_a_version_is_provided_it_is_registered(self):
        with mock.patch("jsonschema.validators.validates") as validates:
            validates.side_effect = lambda version : lambda cls : cls
            Validator = create(meta_schema={u"id" : ""}, version="my version")
        validates.assert_called_once_with("my version")
        self.assertEqual(Validator.__name__, "MyVersionValidator")

    def test_if_a_version_is_not_provided_it_is_not_registered(self):
        with mock.patch("jsonschema.validators.validates") as validates:
            create(meta_schema={u"id" : "id"})
        self.assertFalse(validates.called)

    def test_extend(self):
        validators = dict(self.Validator.VALIDATORS)
        new = mock.Mock()

        Extended = extend(self.Validator, validators={u"a new one" : new})

        validators.update([(u"a new one", new)])
        self.assertEqual(Extended.VALIDATORS, validators)
        self.assertNotIn(u"a new one", self.Validator.VALIDATORS)

        self.assertEqual(Extended.META_SCHEMA, self.Validator.META_SCHEMA)
        self.assertEqual(Extended.DEFAULT_TYPES, self.Validator.DEFAULT_TYPES)


class TestIterErrors(unittest.TestCase):
    def setUp(self):
        self.validator = Draft3Validator({})

    def test_iter_errors(self):
        instance = [1, 2]
        schema = {
            u"disallow" : u"array",
            u"enum" : [["a", "b", "c"], ["d", "e", "f"]],
            u"minItems" : 3
        }

        got = (e.message for e in self.validator.iter_errors(instance, schema))
        expected = [
            "%r is disallowed for [1, 2]" % (schema["disallow"],),
            "[1, 2] is too short",
            "[1, 2] is not one of %r" % (schema["enum"],),
        ]
        self.assertEqual(sorted(got), sorted(expected))

    def test_iter_errors_multiple_failures_one_validator(self):
        instance = {"foo" : 2, "bar" : [1], "baz" : 15, "quux" : "spam"}
        schema = {
            u"properties" : {
                "foo" : {u"type" : "string"},
                "bar" : {u"minItems" : 2},
                "baz" : {u"maximum" : 10, u"enum" : [2, 4, 6, 8]},
            }
        }

        errors = list(self.validator.iter_errors(instance, schema))
        self.assertEqual(len(errors), 4)


class TestValidationErrorMessages(unittest.TestCase):
    def message_for(self, instance, schema, *args, **kwargs):
        kwargs.setdefault("cls", Draft3Validator)
        with self.assertRaises(ValidationError) as e:
            validate(instance, schema, *args, **kwargs)
        return e.exception.message

    def test_single_type_failure(self):
        message = self.message_for(instance=1, schema={u"type" : u"string"})
        self.assertEqual(message, "1 is not of type %r" % u"string")

    def test_single_type_list_failure(self):
        message = self.message_for(instance=1, schema={u"type" : [u"string"]})
        self.assertEqual(message, "1 is not of type %r" % u"string")

    def test_multiple_type_failure(self):
        types = u"string", u"object"
        message = self.message_for(instance=1, schema={u"type" : list(types)})
        self.assertEqual(message, "1 is not of type %r, %r" % types)

    def test_object_without_title_type_failure(self):
        type = {u"type" : [{u"minimum" : 3}]}
        message = self.message_for(instance=1, schema={u"type" : [type]})
        self.assertEqual(message, "1 is not of type %r" % (type,))

    def test_object_with_name_type_failure(self):
        name = "Foo"
        schema = {u"type" : [{u"name" : name, u"minimum" : 3}]}
        message = self.message_for(instance=1, schema=schema)
        self.assertEqual(message, "1 is not of type %r" % (name,))

    def test_minimum(self):
        message = self.message_for(instance=1, schema={"minimum" : 2})
        self.assertEqual(message, "1 is less than the minimum of 2")

    def test_maximum(self):
        message = self.message_for(instance=1, schema={"maximum" : 0})
        self.assertEqual(message, "1 is greater than the maximum of 0")

    def test_dependencies_failure_has_single_element_not_list(self):
        depend, on = "bar", "foo"
        schema = {u"dependencies" : {depend : on}}
        message = self.message_for({"bar" : 2}, schema)
        self.assertEqual(message, "%r is a dependency of %r" % (on, depend))

    def test_additionalItems_single_failure(self):
        message = self.message_for(
            [2], {u"items" : [], u"additionalItems" : False},
        )
        self.assertIn("(2 was unexpected)", message)

    def test_additionalItems_multiple_failures(self):
        message = self.message_for(
            [1, 2, 3], {u"items" : [], u"additionalItems" : False}
        )
        self.assertIn("(1, 2, 3 were unexpected)", message)

    def test_additionalProperties_single_failure(self):
        additional = "foo"
        schema = {u"additionalProperties" : False}
        message = self.message_for({additional : 2}, schema)
        self.assertIn("(%r was unexpected)" % (additional,), message)

    def test_additionalProperties_multiple_failures(self):
        schema = {u"additionalProperties" : False}
        message = self.message_for(dict.fromkeys(["foo", "bar"]), schema)

        self.assertIn(repr("foo"), message)
        self.assertIn(repr("bar"), message)
        self.assertIn("were unexpected)", message)

    def test_invalid_format_default_message(self):
        checker = FormatChecker(formats=())
        check_fn = mock.Mock(return_value=False)
        checker.checks(u"thing")(check_fn)

        schema = {u"format" : u"thing"}
        message = self.message_for("bla", schema, format_checker=checker)

        self.assertIn(repr("bla"), message)
        self.assertIn(repr("thing"), message)
        self.assertIn("is not a", message)


class TestValidationErrorDetails(unittest.TestCase):
    # TODO: These really need unit tests for each individual validator, rather
    # than just these higher level tests.
    def test_anyOf(self):
        instance = 5
        schema = {
            "anyOf": [
                {"minimum": 20},
                {"type": "string"}
            ]
        }

        validator = Draft4Validator(schema)
        errors = list(validator.iter_errors(instance))
        self.assertEqual(len(errors), 1)
        e = errors[0]

        self.assertEqual(e.validator, "anyOf")
        self.assertEqual(e.validator_value, schema["anyOf"])
        self.assertEqual(e.instance, instance)
        self.assertEqual(e.schema, schema)
        self.assertIsNone(e.parent)

        self.assertEqual(e.path, deque([]))
        self.assertEqual(e.relative_path, deque([]))
        self.assertEqual(e.absolute_path, deque([]))

        self.assertEqual(e.schema_path, deque(["anyOf"]))
        self.assertEqual(e.relative_schema_path, deque(["anyOf"]))
        self.assertEqual(e.absolute_schema_path, deque(["anyOf"]))

        self.assertEqual(len(e.context), 2)

        e1, e2 = sorted_errors(e.context)

        self.assertEqual(e1.validator, "minimum")
        self.assertEqual(e1.validator_value, schema["anyOf"][0]["minimum"])
        self.assertEqual(e1.instance, instance)
        self.assertEqual(e1.schema, schema["anyOf"][0])
        self.assertIs(e1.parent, e)

        self.assertEqual(e1.path, deque([]))
        self.assertEqual(e1.absolute_path, deque([]))
        self.assertEqual(e1.relative_path, deque([]))

        self.assertEqual(e1.schema_path, deque([0, "minimum"]))
        self.assertEqual(e1.relative_schema_path, deque([0, "minimum"]))
        self.assertEqual(
            e1.absolute_schema_path, deque(["anyOf", 0, "minimum"]),
        )

        self.assertFalse(e1.context)

        self.assertEqual(e2.validator, "type")
        self.assertEqual(e2.validator_value, schema["anyOf"][1]["type"])
        self.assertEqual(e2.instance, instance)
        self.assertEqual(e2.schema, schema["anyOf"][1])
        self.assertIs(e2.parent, e)

        self.assertEqual(e2.path, deque([]))
        self.assertEqual(e2.relative_path, deque([]))
        self.assertEqual(e2.absolute_path, deque([]))

        self.assertEqual(e2.schema_path, deque([1, "type"]))
        self.assertEqual(e2.relative_schema_path, deque([1, "type"]))
        self.assertEqual(e2.absolute_schema_path, deque(["anyOf", 1, "type"]))

        self.assertEqual(len(e2.context), 0)

    def test_type(self):
        instance = {"foo": 1}
        schema = {
            "type": [
                {"type": "integer"},
                {
                    "type": "object",
                    "properties": {
                        "foo": {"enum": [2]}
                    }
                }
            ]
        }

        validator = Draft3Validator(schema)
        errors = list(validator.iter_errors(instance))
        self.assertEqual(len(errors), 1)
        e = errors[0]

        self.assertEqual(e.validator, "type")
        self.assertEqual(e.validator_value, schema["type"])
        self.assertEqual(e.instance, instance)
        self.assertEqual(e.schema, schema)
        self.assertIsNone(e.parent)

        self.assertEqual(e.path, deque([]))
        self.assertEqual(e.relative_path, deque([]))
        self.assertEqual(e.absolute_path, deque([]))

        self.assertEqual(e.schema_path, deque(["type"]))
        self.assertEqual(e.relative_schema_path, deque(["type"]))
        self.assertEqual(e.absolute_schema_path, deque(["type"]))

        self.assertEqual(len(e.context), 2)

        e1, e2 = sorted_errors(e.context)

        self.assertEqual(e1.validator, "type")
        self.assertEqual(e1.validator_value, schema["type"][0]["type"])
        self.assertEqual(e1.instance, instance)
        self.assertEqual(e1.schema, schema["type"][0])
        self.assertIs(e1.parent, e)

        self.assertEqual(e1.path, deque([]))
        self.assertEqual(e1.relative_path, deque([]))
        self.assertEqual(e1.absolute_path, deque([]))

        self.assertEqual(e1.schema_path, deque([0, "type"]))
        self.assertEqual(e1.relative_schema_path, deque([0, "type"]))
        self.assertEqual(e1.absolute_schema_path, deque(["type", 0, "type"]))

        self.assertFalse(e1.context)

        self.assertEqual(e2.validator, "enum")
        self.assertEqual(e2.validator_value, [2])
        self.assertEqual(e2.instance, 1)
        self.assertEqual(e2.schema, {u"enum" : [2]})
        self.assertIs(e2.parent, e)

        self.assertEqual(e2.path, deque(["foo"]))
        self.assertEqual(e2.relative_path, deque(["foo"]))
        self.assertEqual(e2.absolute_path, deque(["foo"]))
|
self.assertEqual(
|
||||||
|
e2.schema_path, deque([1, "properties", "foo", "enum"]),
|
||||||
|
)
|
||||||
|
self.assertEqual(
|
||||||
|
e2.relative_schema_path, deque([1, "properties", "foo", "enum"]),
|
||||||
|
)
|
||||||
|
self.assertEqual(
|
||||||
|
e2.absolute_schema_path,
|
||||||
|
deque(["type", 1, "properties", "foo", "enum"]),
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertFalse(e2.context)
|
||||||
|
|
||||||
|
def test_single_nesting(self):
|
||||||
|
instance = {"foo" : 2, "bar" : [1], "baz" : 15, "quux" : "spam"}
|
||||||
|
schema = {
|
||||||
|
"properties" : {
|
||||||
|
"foo" : {"type" : "string"},
|
||||||
|
"bar" : {"minItems" : 2},
|
||||||
|
"baz" : {"maximum" : 10, "enum" : [2, 4, 6, 8]},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
validator = Draft3Validator(schema)
|
||||||
|
errors = validator.iter_errors(instance)
|
||||||
|
e1, e2, e3, e4 = sorted_errors(errors)
|
||||||
|
|
||||||
|
self.assertEqual(e1.path, deque(["bar"]))
|
||||||
|
self.assertEqual(e2.path, deque(["baz"]))
|
||||||
|
self.assertEqual(e3.path, deque(["baz"]))
|
||||||
|
self.assertEqual(e4.path, deque(["foo"]))
|
||||||
|
|
||||||
|
self.assertEqual(e1.relative_path, deque(["bar"]))
|
||||||
|
self.assertEqual(e2.relative_path, deque(["baz"]))
|
||||||
|
self.assertEqual(e3.relative_path, deque(["baz"]))
|
||||||
|
self.assertEqual(e4.relative_path, deque(["foo"]))
|
||||||
|
|
||||||
|
self.assertEqual(e1.absolute_path, deque(["bar"]))
|
||||||
|
self.assertEqual(e2.absolute_path, deque(["baz"]))
|
||||||
|
self.assertEqual(e3.absolute_path, deque(["baz"]))
|
||||||
|
self.assertEqual(e4.absolute_path, deque(["foo"]))
|
||||||
|
|
||||||
|
self.assertEqual(e1.validator, "minItems")
|
||||||
|
self.assertEqual(e2.validator, "enum")
|
||||||
|
self.assertEqual(e3.validator, "maximum")
|
||||||
|
self.assertEqual(e4.validator, "type")
|
||||||
|
|
||||||
|
def test_multiple_nesting(self):
|
||||||
|
instance = [1, {"foo" : 2, "bar" : {"baz" : [1]}}, "quux"]
|
||||||
|
schema = {
|
||||||
|
"type" : "string",
|
||||||
|
"items" : {
|
||||||
|
"type" : ["string", "object"],
|
||||||
|
"properties" : {
|
||||||
|
"foo" : {"enum" : [1, 3]},
|
||||||
|
"bar" : {
|
||||||
|
"type" : "array",
|
||||||
|
"properties" : {
|
||||||
|
"bar" : {"required" : True},
|
||||||
|
"baz" : {"minItems" : 2},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
validator = Draft3Validator(schema)
|
||||||
|
errors = validator.iter_errors(instance)
|
||||||
|
e1, e2, e3, e4, e5, e6 = sorted_errors(errors)
|
||||||
|
|
||||||
|
self.assertEqual(e1.path, deque([]))
|
||||||
|
self.assertEqual(e2.path, deque([0]))
|
||||||
|
self.assertEqual(e3.path, deque([1, "bar"]))
|
||||||
|
self.assertEqual(e4.path, deque([1, "bar", "bar"]))
|
||||||
|
self.assertEqual(e5.path, deque([1, "bar", "baz"]))
|
||||||
|
self.assertEqual(e6.path, deque([1, "foo"]))
|
||||||
|
|
||||||
|
self.assertEqual(e1.schema_path, deque(["type"]))
|
||||||
|
self.assertEqual(e2.schema_path, deque(["items", "type"]))
|
||||||
|
self.assertEqual(
|
||||||
|
list(e3.schema_path), ["items", "properties", "bar", "type"],
|
||||||
|
)
|
||||||
|
self.assertEqual(
|
||||||
|
list(e4.schema_path),
|
||||||
|
["items", "properties", "bar", "properties", "bar", "required"],
|
||||||
|
)
|
||||||
|
self.assertEqual(
|
||||||
|
list(e5.schema_path),
|
||||||
|
["items", "properties", "bar", "properties", "baz", "minItems"]
|
||||||
|
)
|
||||||
|
self.assertEqual(
|
||||||
|
list(e6.schema_path), ["items", "properties", "foo", "enum"],
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(e1.validator, "type")
|
||||||
|
self.assertEqual(e2.validator, "type")
|
||||||
|
self.assertEqual(e3.validator, "type")
|
||||||
|
self.assertEqual(e4.validator, "required")
|
||||||
|
self.assertEqual(e5.validator, "minItems")
|
||||||
|
self.assertEqual(e6.validator, "enum")
|
||||||
|
|
||||||
|
def test_additionalProperties(self):
|
||||||
|
instance = {"bar": "bar", "foo": 2}
|
||||||
|
schema = {
|
||||||
|
"additionalProperties" : {"type": "integer", "minimum": 5}
|
||||||
|
}
|
||||||
|
|
||||||
|
validator = Draft3Validator(schema)
|
||||||
|
errors = validator.iter_errors(instance)
|
||||||
|
e1, e2 = sorted_errors(errors)
|
||||||
|
|
||||||
|
self.assertEqual(e1.path, deque(["bar"]))
|
||||||
|
self.assertEqual(e2.path, deque(["foo"]))
|
||||||
|
|
||||||
|
self.assertEqual(e1.validator, "type")
|
||||||
|
self.assertEqual(e2.validator, "minimum")
|
||||||
|
|
||||||
|
def test_patternProperties(self):
|
||||||
|
instance = {"bar": 1, "foo": 2}
|
||||||
|
schema = {
|
||||||
|
"patternProperties" : {
|
||||||
|
"bar": {"type": "string"},
|
||||||
|
"foo": {"minimum": 5}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
validator = Draft3Validator(schema)
|
||||||
|
errors = validator.iter_errors(instance)
|
||||||
|
e1, e2 = sorted_errors(errors)
|
||||||
|
|
||||||
|
self.assertEqual(e1.path, deque(["bar"]))
|
||||||
|
self.assertEqual(e2.path, deque(["foo"]))
|
||||||
|
|
||||||
|
self.assertEqual(e1.validator, "type")
|
||||||
|
self.assertEqual(e2.validator, "minimum")
|
||||||
|
|
||||||
|
def test_additionalItems(self):
|
||||||
|
instance = ["foo", 1]
|
||||||
|
schema = {
|
||||||
|
"items": [],
|
||||||
|
"additionalItems" : {"type": "integer", "minimum": 5}
|
||||||
|
}
|
||||||
|
|
||||||
|
validator = Draft3Validator(schema)
|
||||||
|
errors = validator.iter_errors(instance)
|
||||||
|
e1, e2 = sorted_errors(errors)
|
||||||
|
|
||||||
|
self.assertEqual(e1.path, deque([0]))
|
||||||
|
self.assertEqual(e2.path, deque([1]))
|
||||||
|
|
||||||
|
self.assertEqual(e1.validator, "type")
|
||||||
|
self.assertEqual(e2.validator, "minimum")
|
||||||
|
|
||||||
|
def test_additionalItems_with_items(self):
|
||||||
|
instance = ["foo", "bar", 1]
|
||||||
|
schema = {
|
||||||
|
"items": [{}],
|
||||||
|
"additionalItems" : {"type": "integer", "minimum": 5}
|
||||||
|
}
|
||||||
|
|
||||||
|
validator = Draft3Validator(schema)
|
||||||
|
errors = validator.iter_errors(instance)
|
||||||
|
e1, e2 = sorted_errors(errors)
|
||||||
|
|
||||||
|
self.assertEqual(e1.path, deque([1]))
|
||||||
|
self.assertEqual(e2.path, deque([2]))
|
||||||
|
|
||||||
|
self.assertEqual(e1.validator, "type")
|
||||||
|
self.assertEqual(e2.validator, "minimum")
|
||||||
|
|
||||||
|
|
||||||
|
class ValidatorTestMixin(object):
|
||||||
|
def setUp(self):
|
||||||
|
self.instance = mock.Mock()
|
||||||
|
self.schema = {}
|
||||||
|
self.resolver = mock.Mock()
|
||||||
|
self.validator = self.validator_class(self.schema)
|
||||||
|
|
||||||
|
def test_valid_instances_are_valid(self):
|
||||||
|
errors = iter([])
|
||||||
|
|
||||||
|
with mock.patch.object(
|
||||||
|
self.validator, "iter_errors", return_value=errors,
|
||||||
|
):
|
||||||
|
self.assertTrue(
|
||||||
|
self.validator.is_valid(self.instance, self.schema)
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_invalid_instances_are_not_valid(self):
|
||||||
|
errors = iter([mock.Mock()])
|
||||||
|
|
||||||
|
with mock.patch.object(
|
||||||
|
self.validator, "iter_errors", return_value=errors,
|
||||||
|
):
|
||||||
|
self.assertFalse(
|
||||||
|
self.validator.is_valid(self.instance, self.schema)
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_non_existent_properties_are_ignored(self):
|
||||||
|
instance, my_property, my_value = mock.Mock(), mock.Mock(), mock.Mock()
|
||||||
|
validate(instance=instance, schema={my_property : my_value})
|
||||||
|
|
||||||
|
def test_it_creates_a_ref_resolver_if_not_provided(self):
|
||||||
|
self.assertIsInstance(self.validator.resolver, RefResolver)
|
||||||
|
|
||||||
|
def test_it_delegates_to_a_ref_resolver(self):
|
||||||
|
resolver = RefResolver("", {})
|
||||||
|
schema = {"$ref" : mock.Mock()}
|
||||||
|
|
||||||
|
@contextmanager
|
||||||
|
def resolving():
|
||||||
|
yield {"type": "integer"}
|
||||||
|
|
||||||
|
with mock.patch.object(resolver, "resolving") as resolve:
|
||||||
|
resolve.return_value = resolving()
|
||||||
|
with self.assertRaises(ValidationError):
|
||||||
|
self.validator_class(schema, resolver=resolver).validate(None)
|
||||||
|
|
||||||
|
resolve.assert_called_once_with(schema["$ref"])
|
||||||
|
|
||||||
|
def test_is_type_is_true_for_valid_type(self):
|
||||||
|
self.assertTrue(self.validator.is_type("foo", "string"))
|
||||||
|
|
||||||
|
def test_is_type_is_false_for_invalid_type(self):
|
||||||
|
self.assertFalse(self.validator.is_type("foo", "array"))
|
||||||
|
|
||||||
|
def test_is_type_evades_bool_inheriting_from_int(self):
|
||||||
|
self.assertFalse(self.validator.is_type(True, "integer"))
|
||||||
|
self.assertFalse(self.validator.is_type(True, "number"))
|
||||||
|
|
||||||
|
def test_is_type_raises_exception_for_unknown_type(self):
|
||||||
|
with self.assertRaises(UnknownType):
|
||||||
|
self.validator.is_type("foo", object())
|
||||||
|
|
||||||
|
|
||||||
|
class TestDraft3Validator(ValidatorTestMixin, unittest.TestCase):
|
||||||
|
validator_class = Draft3Validator
|
||||||
|
|
||||||
|
def test_is_type_is_true_for_any_type(self):
|
||||||
|
self.assertTrue(self.validator.is_valid(mock.Mock(), {"type": "any"}))
|
||||||
|
|
||||||
|
def test_is_type_does_not_evade_bool_if_it_is_being_tested(self):
|
||||||
|
self.assertTrue(self.validator.is_type(True, "boolean"))
|
||||||
|
self.assertTrue(self.validator.is_valid(True, {"type": "any"}))
|
||||||
|
|
||||||
|
def test_non_string_custom_types(self):
|
||||||
|
schema = {'type': [None]}
|
||||||
|
cls = self.validator_class(schema, types={None: type(None)})
|
||||||
|
cls.validate(None, schema)
|
||||||
|
|
||||||
|
|
||||||
|
class TestDraft4Validator(ValidatorTestMixin, unittest.TestCase):
|
||||||
|
validator_class = Draft4Validator
|
||||||
|
|
||||||
|
|
||||||
|
class TestBuiltinFormats(unittest.TestCase):
|
||||||
|
"""
|
||||||
|
The built-in (specification-defined) formats do not raise type errors.
|
||||||
|
|
||||||
|
If an instance or value is not a string, it should be ignored.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
for format in FormatChecker.checkers:
|
||||||
|
def test(self, format=format):
|
||||||
|
v = Draft4Validator({"format": format}, format_checker=FormatChecker())
|
||||||
|
v.validate(123)
|
||||||
|
|
||||||
|
name = "test_{0}_ignores_non_strings".format(format)
|
||||||
|
test.__name__ = name
|
||||||
|
setattr(TestBuiltinFormats, name, test)
|
||||||
|
del test # Ugh py.test. Stop discovering top level tests.
|
||||||
|
|
||||||
|
|
||||||
|
class TestValidatorFor(unittest.TestCase):
|
||||||
|
def test_draft_3(self):
|
||||||
|
schema = {"$schema" : "http://json-schema.org/draft-03/schema"}
|
||||||
|
self.assertIs(validator_for(schema), Draft3Validator)
|
||||||
|
|
||||||
|
schema = {"$schema" : "http://json-schema.org/draft-03/schema#"}
|
||||||
|
self.assertIs(validator_for(schema), Draft3Validator)
|
||||||
|
|
||||||
|
def test_draft_4(self):
|
||||||
|
schema = {"$schema" : "http://json-schema.org/draft-04/schema"}
|
||||||
|
self.assertIs(validator_for(schema), Draft4Validator)
|
||||||
|
|
||||||
|
schema = {"$schema" : "http://json-schema.org/draft-04/schema#"}
|
||||||
|
self.assertIs(validator_for(schema), Draft4Validator)
|
||||||
|
|
||||||
|
def test_custom_validator(self):
|
||||||
|
Validator = create(meta_schema={"id" : "meta schema id"}, version="12")
|
||||||
|
schema = {"$schema" : "meta schema id"}
|
||||||
|
self.assertIs(validator_for(schema), Validator)
|
||||||
|
|
||||||
|
def test_validator_for_jsonschema_default(self):
|
||||||
|
self.assertIs(validator_for({}), Draft4Validator)
|
||||||
|
|
||||||
|
def test_validator_for_custom_default(self):
|
||||||
|
self.assertIs(validator_for({}, default=None), None)
|
||||||
|
|
||||||
|
|
||||||
|
class TestValidate(unittest.TestCase):
|
||||||
|
def test_draft3_validator_is_chosen(self):
|
||||||
|
schema = {"$schema" : "http://json-schema.org/draft-03/schema#"}
|
||||||
|
with mock.patch.object(Draft3Validator, "check_schema") as chk_schema:
|
||||||
|
validate({}, schema)
|
||||||
|
chk_schema.assert_called_once_with(schema)
|
||||||
|
# Make sure it works without the empty fragment
|
||||||
|
schema = {"$schema" : "http://json-schema.org/draft-03/schema"}
|
||||||
|
with mock.patch.object(Draft3Validator, "check_schema") as chk_schema:
|
||||||
|
validate({}, schema)
|
||||||
|
chk_schema.assert_called_once_with(schema)
|
||||||
|
|
||||||
|
def test_draft4_validator_is_chosen(self):
|
||||||
|
schema = {"$schema" : "http://json-schema.org/draft-04/schema#"}
|
||||||
|
with mock.patch.object(Draft4Validator, "check_schema") as chk_schema:
|
||||||
|
validate({}, schema)
|
||||||
|
chk_schema.assert_called_once_with(schema)
|
||||||
|
|
||||||
|
def test_draft4_validator_is_the_default(self):
|
||||||
|
with mock.patch.object(Draft4Validator, "check_schema") as chk_schema:
|
||||||
|
validate({}, {})
|
||||||
|
chk_schema.assert_called_once_with({})
|
||||||
|
|
||||||
|
|
||||||
|
class TestRefResolver(unittest.TestCase):
|
||||||
|
|
||||||
|
base_uri = ""
|
||||||
|
stored_uri = "foo://stored"
|
||||||
|
stored_schema = {"stored" : "schema"}
|
||||||
|
|
||||||
|
def setUp(self):
|
||||||
|
self.referrer = {}
|
||||||
|
self.store = {self.stored_uri : self.stored_schema}
|
||||||
|
self.resolver = RefResolver(self.base_uri, self.referrer, self.store)
|
||||||
|
|
||||||
|
def test_it_does_not_retrieve_schema_urls_from_the_network(self):
|
||||||
|
ref = Draft3Validator.META_SCHEMA["id"]
|
||||||
|
with mock.patch.object(self.resolver, "resolve_remote") as remote:
|
||||||
|
with self.resolver.resolving(ref) as resolved:
|
||||||
|
self.assertEqual(resolved, Draft3Validator.META_SCHEMA)
|
||||||
|
self.assertFalse(remote.called)
|
||||||
|
|
||||||
|
def test_it_resolves_local_refs(self):
|
||||||
|
ref = "#/properties/foo"
|
||||||
|
self.referrer["properties"] = {"foo" : object()}
|
||||||
|
with self.resolver.resolving(ref) as resolved:
|
||||||
|
self.assertEqual(resolved, self.referrer["properties"]["foo"])
|
||||||
|
|
||||||
|
def test_it_resolves_local_refs_with_id(self):
|
||||||
|
schema = {"id": "foo://bar/schema#", "a": {"foo": "bar"}}
|
||||||
|
resolver = RefResolver.from_schema(schema)
|
||||||
|
with resolver.resolving("#/a") as resolved:
|
||||||
|
self.assertEqual(resolved, schema["a"])
|
||||||
|
with resolver.resolving("foo://bar/schema#/a") as resolved:
|
||||||
|
self.assertEqual(resolved, schema["a"])
|
||||||
|
|
||||||
|
def test_it_retrieves_stored_refs(self):
|
||||||
|
with self.resolver.resolving(self.stored_uri) as resolved:
|
||||||
|
self.assertIs(resolved, self.stored_schema)
|
||||||
|
|
||||||
|
self.resolver.store["cached_ref"] = {"foo" : 12}
|
||||||
|
with self.resolver.resolving("cached_ref#/foo") as resolved:
|
||||||
|
self.assertEqual(resolved, 12)
|
||||||
|
|
||||||
|
def test_it_retrieves_unstored_refs_via_requests(self):
|
||||||
|
ref = "http://bar#baz"
|
||||||
|
schema = {"baz" : 12}
|
||||||
|
|
||||||
|
with mock.patch("jsonschema.validators.requests") as requests:
|
||||||
|
requests.get.return_value.json.return_value = schema
|
||||||
|
with self.resolver.resolving(ref) as resolved:
|
||||||
|
self.assertEqual(resolved, 12)
|
||||||
|
requests.get.assert_called_once_with("http://bar")
|
||||||
|
|
||||||
|
def test_it_retrieves_unstored_refs_via_urlopen(self):
|
||||||
|
ref = "http://bar#baz"
|
||||||
|
schema = {"baz" : 12}
|
||||||
|
|
||||||
|
with mock.patch("jsonschema.validators.requests", None):
|
||||||
|
with mock.patch("jsonschema.validators.urlopen") as urlopen:
|
||||||
|
urlopen.return_value.read.return_value = (
|
||||||
|
json.dumps(schema).encode("utf8"))
|
||||||
|
with self.resolver.resolving(ref) as resolved:
|
||||||
|
self.assertEqual(resolved, 12)
|
||||||
|
urlopen.assert_called_once_with("http://bar")
|
||||||
|
|
||||||
|
def test_it_can_construct_a_base_uri_from_a_schema(self):
|
||||||
|
schema = {"id" : "foo"}
|
||||||
|
resolver = RefResolver.from_schema(schema)
|
||||||
|
self.assertEqual(resolver.base_uri, "foo")
|
||||||
|
with resolver.resolving("") as resolved:
|
||||||
|
self.assertEqual(resolved, schema)
|
||||||
|
with resolver.resolving("#") as resolved:
|
||||||
|
self.assertEqual(resolved, schema)
|
||||||
|
with resolver.resolving("foo") as resolved:
|
||||||
|
self.assertEqual(resolved, schema)
|
||||||
|
with resolver.resolving("foo#") as resolved:
|
||||||
|
self.assertEqual(resolved, schema)
|
||||||
|
|
||||||
|
def test_it_can_construct_a_base_uri_from_a_schema_without_id(self):
|
||||||
|
schema = {}
|
||||||
|
resolver = RefResolver.from_schema(schema)
|
||||||
|
self.assertEqual(resolver.base_uri, "")
|
||||||
|
with resolver.resolving("") as resolved:
|
||||||
|
self.assertEqual(resolved, schema)
|
||||||
|
with resolver.resolving("#") as resolved:
|
||||||
|
self.assertEqual(resolved, schema)
|
||||||
|
|
||||||
|
def test_custom_uri_scheme_handlers(self):
|
||||||
|
schema = {"foo": "bar"}
|
||||||
|
ref = "foo://bar"
|
||||||
|
foo_handler = mock.Mock(return_value=schema)
|
||||||
|
resolver = RefResolver("", {}, handlers={"foo": foo_handler})
|
||||||
|
with resolver.resolving(ref) as resolved:
|
||||||
|
self.assertEqual(resolved, schema)
|
||||||
|
foo_handler.assert_called_once_with(ref)
|
||||||
|
|
||||||
|
def test_cache_remote_on(self):
|
||||||
|
ref = "foo://bar"
|
||||||
|
foo_handler = mock.Mock()
|
||||||
|
resolver = RefResolver(
|
||||||
|
"", {}, cache_remote=True, handlers={"foo" : foo_handler},
|
||||||
|
)
|
||||||
|
with resolver.resolving(ref):
|
||||||
|
pass
|
||||||
|
with resolver.resolving(ref):
|
||||||
|
pass
|
||||||
|
foo_handler.assert_called_once_with(ref)
|
||||||
|
|
||||||
|
def test_cache_remote_off(self):
|
||||||
|
ref = "foo://bar"
|
||||||
|
foo_handler = mock.Mock()
|
||||||
|
resolver = RefResolver(
|
||||||
|
"", {}, cache_remote=False, handlers={"foo" : foo_handler},
|
||||||
|
)
|
||||||
|
with resolver.resolving(ref):
|
||||||
|
pass
|
||||||
|
with resolver.resolving(ref):
|
||||||
|
pass
|
||||||
|
self.assertEqual(foo_handler.call_count, 2)
|
||||||
|
|
||||||
|
def test_if_you_give_it_junk_you_get_a_resolution_error(self):
|
||||||
|
ref = "foo://bar"
|
||||||
|
foo_handler = mock.Mock(side_effect=ValueError("Oh no! What's this?"))
|
||||||
|
resolver = RefResolver("", {}, handlers={"foo" : foo_handler})
|
||||||
|
with self.assertRaises(RefResolutionError) as err:
|
||||||
|
with resolver.resolving(ref):
|
||||||
|
pass
|
||||||
|
self.assertEqual(str(err.exception), "Oh no! What's this?")
|
||||||
|
|
||||||
|
|
||||||
|
def sorted_errors(errors):
|
||||||
|
def key(error):
|
||||||
|
return (
|
||||||
|
[str(e) for e in error.path],
|
||||||
|
[str(e) for e in error.schema_path]
|
||||||
|
)
|
||||||
|
return sorted(errors, key=key)
|
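Taken together, these tests pin down the vendored error-introspection API. A minimal sketch of the same surface from the caller's side (the values mirror test_anyOf above; the print is illustrative):

    from collections import deque
    from jsonschema import Draft4Validator

    validator = Draft4Validator({"anyOf": [{"minimum": 20}, {"type": "string"}]})
    error = next(validator.iter_errors(5))       # the single top-level error
    assert error.validator == "anyOf"
    assert error.schema_path == deque(["anyOf"])
    for sub in error.context:                    # one suberror per failed branch
        print(list(sub.absolute_schema_path), sub.message)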
428 lib/spack/external/jsonschema/validators.py vendored Normal file
@@ -0,0 +1,428 @@
from __future__ import division

import contextlib
import json
import numbers

try:
    import requests
except ImportError:
    requests = None

from jsonschema import _utils, _validators
from jsonschema.compat import (
    Sequence, urljoin, urlsplit, urldefrag, unquote, urlopen,
    str_types, int_types, iteritems,
)
from jsonschema.exceptions import ErrorTree  # Backwards compatibility  # noqa
from jsonschema.exceptions import RefResolutionError, SchemaError, UnknownType


_unset = _utils.Unset()

validators = {}
meta_schemas = _utils.URIDict()


def validates(version):
    """
    Register the decorated validator for a ``version`` of the specification.

    Registered validators and their meta schemas will be considered when
    parsing ``$schema`` properties' URIs.

    :argument str version: an identifier to use as the version's name
    :returns: a class decorator to decorate the validator with the version

    """

    def _validates(cls):
        validators[version] = cls
        if u"id" in cls.META_SCHEMA:
            meta_schemas[cls.META_SCHEMA[u"id"]] = cls
        return cls
    return _validates


def create(meta_schema, validators=(), version=None, default_types=None):  # noqa
    if default_types is None:
        default_types = {
            u"array" : list, u"boolean" : bool, u"integer" : int_types,
            u"null" : type(None), u"number" : numbers.Number, u"object" : dict,
            u"string" : str_types,
        }

    class Validator(object):
        VALIDATORS = dict(validators)
        META_SCHEMA = dict(meta_schema)
        DEFAULT_TYPES = dict(default_types)

        def __init__(
            self, schema, types=(), resolver=None, format_checker=None,
        ):
            self._types = dict(self.DEFAULT_TYPES)
            self._types.update(types)

            if resolver is None:
                resolver = RefResolver.from_schema(schema)

            self.resolver = resolver
            self.format_checker = format_checker
            self.schema = schema

        @classmethod
        def check_schema(cls, schema):
            for error in cls(cls.META_SCHEMA).iter_errors(schema):
                raise SchemaError.create_from(error)

        def iter_errors(self, instance, _schema=None):
            if _schema is None:
                _schema = self.schema

            with self.resolver.in_scope(_schema.get(u"id", u"")):
                ref = _schema.get(u"$ref")
                if ref is not None:
                    validators = [(u"$ref", ref)]
                else:
                    validators = iteritems(_schema)

                for k, v in validators:
                    validator = self.VALIDATORS.get(k)
                    if validator is None:
                        continue

                    errors = validator(self, v, instance, _schema) or ()
                    for error in errors:
                        # set details if not already set by the called fn
                        error._set(
                            validator=k,
                            validator_value=v,
                            instance=instance,
                            schema=_schema,
                        )
                        if k != u"$ref":
                            error.schema_path.appendleft(k)
                        yield error

        def descend(self, instance, schema, path=None, schema_path=None):
            for error in self.iter_errors(instance, schema):
                if path is not None:
                    error.path.appendleft(path)
                if schema_path is not None:
                    error.schema_path.appendleft(schema_path)
                yield error

        def validate(self, *args, **kwargs):
            for error in self.iter_errors(*args, **kwargs):
                raise error

        def is_type(self, instance, type):
            if type not in self._types:
                raise UnknownType(type, instance, self.schema)
            pytypes = self._types[type]

            # bool inherits from int, so ensure bools aren't reported as ints
            if isinstance(instance, bool):
                pytypes = _utils.flatten(pytypes)
                is_number = any(
                    issubclass(pytype, numbers.Number) for pytype in pytypes
                )
                if is_number and bool not in pytypes:
                    return False
            return isinstance(instance, pytypes)

        def is_valid(self, instance, _schema=None):
            error = next(self.iter_errors(instance, _schema), None)
            return error is None

    if version is not None:
        Validator = validates(version)(Validator)
        Validator.__name__ = version.title().replace(" ", "") + "Validator"

    return Validator


def extend(validator, validators, version=None):
    all_validators = dict(validator.VALIDATORS)
    all_validators.update(validators)
    return create(
        meta_schema=validator.META_SCHEMA,
        validators=all_validators,
        version=version,
        default_types=validator.DEFAULT_TYPES,
    )


Draft3Validator = create(
    meta_schema=_utils.load_schema("draft3"),
    validators={
        u"$ref" : _validators.ref,
        u"additionalItems" : _validators.additionalItems,
        u"additionalProperties" : _validators.additionalProperties,
        u"dependencies" : _validators.dependencies,
        u"disallow" : _validators.disallow_draft3,
        u"divisibleBy" : _validators.multipleOf,
        u"enum" : _validators.enum,
        u"extends" : _validators.extends_draft3,
        u"format" : _validators.format,
        u"items" : _validators.items,
        u"maxItems" : _validators.maxItems,
        u"maxLength" : _validators.maxLength,
        u"maximum" : _validators.maximum,
        u"minItems" : _validators.minItems,
        u"minLength" : _validators.minLength,
        u"minimum" : _validators.minimum,
        u"multipleOf" : _validators.multipleOf,
        u"pattern" : _validators.pattern,
        u"patternProperties" : _validators.patternProperties,
        u"properties" : _validators.properties_draft3,
        u"type" : _validators.type_draft3,
        u"uniqueItems" : _validators.uniqueItems,
    },
    version="draft3",
)

Draft4Validator = create(
    meta_schema=_utils.load_schema("draft4"),
    validators={
        u"$ref" : _validators.ref,
        u"additionalItems" : _validators.additionalItems,
        u"additionalProperties" : _validators.additionalProperties,
        u"allOf" : _validators.allOf_draft4,
        u"anyOf" : _validators.anyOf_draft4,
        u"dependencies" : _validators.dependencies,
        u"enum" : _validators.enum,
        u"format" : _validators.format,
        u"items" : _validators.items,
        u"maxItems" : _validators.maxItems,
        u"maxLength" : _validators.maxLength,
        u"maxProperties" : _validators.maxProperties_draft4,
        u"maximum" : _validators.maximum,
        u"minItems" : _validators.minItems,
        u"minLength" : _validators.minLength,
        u"minProperties" : _validators.minProperties_draft4,
        u"minimum" : _validators.minimum,
        u"multipleOf" : _validators.multipleOf,
        u"not" : _validators.not_draft4,
        u"oneOf" : _validators.oneOf_draft4,
        u"pattern" : _validators.pattern,
        u"patternProperties" : _validators.patternProperties,
        u"properties" : _validators.properties_draft4,
        u"required" : _validators.required_draft4,
        u"type" : _validators.type_draft4,
        u"uniqueItems" : _validators.uniqueItems,
    },
    version="draft4",
)


class RefResolver(object):
    """
    Resolve JSON References.

    :argument str base_uri: URI of the referring document
    :argument referrer: the actual referring document
    :argument dict store: a mapping from URIs to documents to cache
    :argument bool cache_remote: whether remote refs should be cached after
        first resolution
    :argument dict handlers: a mapping from URI schemes to functions that
        should be used to retrieve them

    """

    def __init__(
        self, base_uri, referrer, store=(), cache_remote=True, handlers=(),
    ):
        self.base_uri = base_uri
        self.resolution_scope = base_uri
        # This attribute is not used, it is for backwards compatibility
        self.referrer = referrer
        self.cache_remote = cache_remote
        self.handlers = dict(handlers)

        self.store = _utils.URIDict(
            (id, validator.META_SCHEMA)
            for id, validator in iteritems(meta_schemas)
        )
        self.store.update(store)
        self.store[base_uri] = referrer

    @classmethod
    def from_schema(cls, schema, *args, **kwargs):
        """
        Construct a resolver from a JSON schema object.

        :argument schema schema: the referring schema
        :rtype: :class:`RefResolver`

        """

        return cls(schema.get(u"id", u""), schema, *args, **kwargs)

    @contextlib.contextmanager
    def in_scope(self, scope):
        old_scope = self.resolution_scope
        self.resolution_scope = urljoin(old_scope, scope)
        try:
            yield
        finally:
            self.resolution_scope = old_scope

    @contextlib.contextmanager
    def resolving(self, ref):
        """
        Context manager which resolves a JSON ``ref`` and enters the
        resolution scope of this ref.

        :argument str ref: reference to resolve

        """

        full_uri = urljoin(self.resolution_scope, ref)
        uri, fragment = urldefrag(full_uri)
        if not uri:
            uri = self.base_uri

        if uri in self.store:
            document = self.store[uri]
        else:
            try:
                document = self.resolve_remote(uri)
            except Exception as exc:
                raise RefResolutionError(exc)

        old_base_uri, self.base_uri = self.base_uri, uri
        try:
            with self.in_scope(uri):
                yield self.resolve_fragment(document, fragment)
        finally:
            self.base_uri = old_base_uri

    def resolve_fragment(self, document, fragment):
        """
        Resolve a ``fragment`` within the referenced ``document``.

        :argument document: the referrant document
        :argument str fragment: a URI fragment to resolve within it

        """

        fragment = fragment.lstrip(u"/")
        parts = unquote(fragment).split(u"/") if fragment else []

        for part in parts:
            part = part.replace(u"~1", u"/").replace(u"~0", u"~")

            if isinstance(document, Sequence):
                # Array indexes should be turned into integers
                try:
                    part = int(part)
                except ValueError:
                    pass
            try:
                document = document[part]
            except (TypeError, LookupError):
                raise RefResolutionError(
                    "Unresolvable JSON pointer: %r" % fragment
                )

        return document

    def resolve_remote(self, uri):
        """
        Resolve a remote ``uri``.

        Does not check the store first, but stores the retrieved document in
        the store if :attr:`RefResolver.cache_remote` is True.

        .. note::

            If the requests_ library is present, ``jsonschema`` will use it to
            request the remote ``uri``, so that the correct encoding is
            detected and used.

            If it isn't, or if the scheme of the ``uri`` is not ``http`` or
            ``https``, UTF-8 is assumed.

        :argument str uri: the URI to resolve
        :returns: the retrieved document

        .. _requests: http://pypi.python.org/pypi/requests/

        """

        scheme = urlsplit(uri).scheme

        if scheme in self.handlers:
            result = self.handlers[scheme](uri)
        elif (
            scheme in [u"http", u"https"] and
            requests and
            getattr(requests.Response, "json", None) is not None
        ):
            # Requests has support for detecting the correct encoding of
            # json over http
            if callable(requests.Response.json):
                result = requests.get(uri).json()
            else:
                result = requests.get(uri).json
        else:
            # Otherwise, pass off to urllib and assume utf-8
            result = json.loads(urlopen(uri).read().decode("utf-8"))

        if self.cache_remote:
            self.store[uri] = result
        return result


def validator_for(schema, default=_unset):
    if default is _unset:
        default = Draft4Validator
    return meta_schemas.get(schema.get(u"$schema", u""), default)


def validate(instance, schema, cls=None, *args, **kwargs):
    """
    Validate an instance under the given schema.

        >>> validate([2, 3, 4], {"maxItems" : 2})
        Traceback (most recent call last):
            ...
        ValidationError: [2, 3, 4] is too long

    :func:`validate` will first verify that the provided schema is itself
    valid, since not doing so can lead to less obvious error messages and fail
    in less obvious or consistent ways. If you know you have a valid schema
    already or don't care, you might prefer using the
    :meth:`~IValidator.validate` method directly on a specific validator
    (e.g. :meth:`Draft4Validator.validate`).


    :argument instance: the instance to validate
    :argument schema: the schema to validate with
    :argument cls: an :class:`IValidator` class that will be used to validate
                   the instance.

    If the ``cls`` argument is not provided, two things will happen in
    accordance with the specification. First, if the schema has a
    :validator:`$schema` property containing a known meta-schema [#]_ then the
    proper validator will be used. The specification recommends that all
    schemas contain :validator:`$schema` properties for this reason. If no
    :validator:`$schema` property is found, the default validator class is
    :class:`Draft4Validator`.

    Any other provided positional and keyword arguments will be passed on when
    instantiating the ``cls``.

    :raises:
        :exc:`ValidationError` if the instance is invalid

        :exc:`SchemaError` if the schema itself is invalid

    .. rubric:: Footnotes
    .. [#] known by a validator registered with :func:`validates`
    """
    if cls is None:
        cls = validator_for(schema)
    cls.check_schema(schema)
    cls(schema, *args, **kwargs).validate(instance)
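As a quick illustration of the factory functions above (not part of the vendored file), extend() derives a validator class with one keyword swapped out, and validator_for() dispatches on $schema through the meta_schemas registry:

    from jsonschema.validators import Draft4Validator, extend, validator_for

    # A validation function that yields no errors effectively disables "minimum".
    LaxValidator = extend(Draft4Validator, {u"minimum": lambda *args: None})
    LaxValidator({"minimum": 10}).validate(5)   # passes; stock Draft4 would raise

    # validator_for() maps a known $schema URI to its registered class.
    cls = validator_for({"$schema": "http://json-schema.org/draft-03/schema#"})
    assert cls.__name__ == "Draft3Validator"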
lib/spack/llnl/util/lang.py
@@ -26,6 +26,7 @@
 import re
 import sys
 import functools
+import collections
 import inspect

 # Ignore emacs backups when listing modules
@@ -167,16 +168,32 @@ def has_method(cls, name):
     return False


-def memoized(obj):
+class memoized(object):
     """Decorator that caches the results of a function, storing them
     in an attribute of that function."""
-    cache = obj.cache = {}
-    @functools.wraps(obj)
-    def memoizer(*args, **kwargs):
-        if args not in cache:
-            cache[args] = obj(*args, **kwargs)
-        return cache[args]
-    return memoizer
+    def __init__(self, func):
+        self.func = func
+        self.cache = {}
+
+    def __call__(self, *args):
+        if not isinstance(args, collections.Hashable):
+            # Not hashable, so just call the function.
+            return self.func(*args)
+
+        if args not in self.cache:
+            self.cache[args] = self.func(*args)
+        return self.cache[args]
+
+    def __get__(self, obj, objtype):
+        """Support instance methods."""
+        return functools.partial(self.__call__, obj)
+
+    def clear(self):
+        """Expunge cache so that self.func will be called again."""
+        self.cache.clear()


 def list_modules(directory, **kwargs):
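A short usage sketch for the rewritten decorator (the squared function is invented for illustration):

    from llnl.util.lang import memoized

    @memoized
    def squared(n):
        print("computing %d" % n)      # side effect shows when the body runs
        return n * n

    squared(4)        # prints "computing 4" and caches 16
    squared(4)        # cache hit: returns 16 without printing
    squared.clear()   # expunge the cache; the next call recomputes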
lib/spack/llnl/util/tty/__init__.py
@@ -63,35 +63,46 @@ def msg(message, *args):
 def info(message, *args, **kwargs):
     format = kwargs.get('format', '*b')
     stream = kwargs.get('stream', sys.stdout)
+    wrap = kwargs.get('wrap', False)
+
     cprint("@%s{==>} %s" % (format, cescape(str(message))), stream=stream)
     for arg in args:
-        lines = textwrap.wrap(
-            str(arg), initial_indent=indent, subsequent_indent=indent)
-        for line in lines:
-            stream.write(line + '\n')
+        if wrap:
+            lines = textwrap.wrap(
+                str(arg), initial_indent=indent, subsequent_indent=indent)
+            for line in lines:
+                stream.write(line + '\n')
+        else:
+            stream.write(indent + str(arg) + '\n')


-def verbose(message, *args):
+def verbose(message, *args, **kwargs):
     if _verbose:
-        info(message, *args, format='c')
+        kwargs.setdefault('format', 'c')
+        info(message, *args, **kwargs)


-def debug(message, *args):
+def debug(message, *args, **kwargs):
     if _debug:
-        info(message, *args, format='g', stream=sys.stderr)
+        kwargs.setdefault('format', 'g')
+        kwargs.setdefault('stream', sys.stderr)
+        info(message, *args, **kwargs)


-def error(message, *args):
-    info("Error: " + str(message), *args, format='*r', stream=sys.stderr)
+def error(message, *args, **kwargs):
+    kwargs.setdefault('format', '*r')
+    kwargs.setdefault('stream', sys.stderr)
+    info("Error: " + str(message), *args, **kwargs)


-def warn(message, *args):
-    info("Warning: " + str(message), *args, format='*Y', stream=sys.stderr)
+def warn(message, *args, **kwargs):
+    kwargs.setdefault('format', '*Y')
+    kwargs.setdefault('stream', sys.stderr)
+    info("Warning: " + str(message), *args, **kwargs)


-def die(message, *args):
-    error(message, *args)
+def die(message, *args, **kwargs):
+    error(message, *args, **kwargs)
     sys.exit(1)
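The net effect of the tty changes, sketched (message text invented): extra positional arguments now print one per indented line unless wrap=True is passed, and the wrappers forward arbitrary keyword arguments down to info():

    import llnl.util.tty as tty

    tty.info("Fetching sources", "short detail line, printed as-is")
    tty.info("Fetching sources", "a very long explanation ...", wrap=True)
    tty.warn("deprecated flag", "details still land on stderr by default")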
lib/spack/llnl/util/tty/colify.py
@@ -210,6 +210,13 @@ def colify(elts, **options):


 def colify_table(table, **options):
+    """Version of colify() for data expressed in rows, (list of lists).
+
+    Same as regular colify but takes a list of lists, where each
+    sub-list must be the same length, and each is interpreted as a
+    row in a table.  Regular colify displays a sequential list of
+    values in columns.
+    """
     if table is None:
         raise TypeError("Can't call colify_table on NoneType")
     elif not table or not table[0]:
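A minimal example of the row-oriented variant documented above (the table data is invented):

    from llnl.util.tty.colify import colify_table

    rows = [["libelf", "0.8.13"],
            ["mpich",  "3.0.4"]]
    colify_table(rows)   # each sub-list is one row; columns stay aligned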
lib/spack/spack/__init__.py
@@ -23,9 +23,11 @@
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 import os
+import sys
 import tempfile
 import getpass
 from llnl.util.filesystem import *
+import llnl.util.tty as tty

 # This lives in $prefix/lib/spack/spack/__file__
 spack_root = ancestor(__file__, 4)
@@ -42,6 +44,7 @@
 hooks_path   = join_path(module_path, "hooks")
 var_path     = join_path(spack_root, "var", "spack")
 stage_path   = join_path(var_path, "stage")
+repos_path   = join_path(var_path, "repos")
 share_path   = join_path(spack_root, "share", "spack")

 prefix = spack_root
@@ -50,11 +53,14 @@
 etc_path     = join_path(prefix, "etc")

 #
-# Set up the packages database.
+# Set up the default packages database.
 #
-from spack.packages import PackageDB
-packages_path = join_path(var_path, "packages")
-db = PackageDB(packages_path)
+import spack.repository
+try:
+    repo = spack.repository.RepoPath()
+    sys.meta_path.append(repo)
+except spack.error.SpackError, e:
+    tty.die('while initializing Spack RepoPath:', e.message)

 #
 # Set up the installed packages database
@@ -63,13 +69,10 @@
 installed_db = Database(install_path)

 #
-# Paths to mock files for testing.
+# Paths to built-in Spack repositories.
 #
-mock_packages_path = join_path(var_path, "mock_packages")
-mock_config_path   = join_path(var_path, "mock_configs")
-mock_site_config   = join_path(mock_config_path, "site_spackconfig")
-mock_user_config   = join_path(mock_config_path, "user_spackconfig")
+packages_path      = join_path(repos_path, "builtin")
+mock_packages_path = join_path(repos_path, "builtin.mock")

 #
 # This controls how spack lays out install prefixes and
@@ -149,7 +152,7 @@
 # When packages call 'from spack import *', this extra stuff is brought in.
 #
 # Spack internal code should call 'import spack' and accesses other
-# variables (spack.db, paths, etc.) directly.
+# variables (spack.repo, paths, etc.) directly.
 #
 # TODO: maybe this should be separated out and should go in build_environment.py?
 # TODO: it's not clear where all the stuff that needs to be included in packages
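With a RepoPath on sys.meta_path, package lookups go through spack.repo instead of the old spack.db; a hedged sketch (the package name, and especially the spack.pkg module path, are assumptions for illustration):

    import spack

    pkg = spack.repo.get("libelf")    # replaces the old spack.db.get("libelf")

    # The finder on sys.meta_path can also expose repository packages as
    # importable modules (namespace assumed):
    import spack.pkg.builtin.libelf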
lib/spack/spack/build_environment.py
@@ -173,7 +173,7 @@ def add_env_path(path):
     path_set("PKG_CONFIG_PATH", pkg_config_dirs)


-def set_module_variables_for_package(pkg):
+def set_module_variables_for_package(pkg, m):
     """Populate the module scope of install() with some useful functions.
        This makes things easier for package writers.
     """
@@ -244,11 +244,32 @@ def get_rpaths(pkg):
     return rpaths


+def parent_class_modules(cls):
+    """Get list of super class modules that are all descend from spack.Package"""
+    if not issubclass(cls, spack.Package) or issubclass(spack.Package, cls):
+        return []
+    result = []
+    module = sys.modules.get(cls.__module__)
+    if module:
+        result = [ module ]
+    for c in cls.__bases__:
+        result.extend(parent_class_modules(c))
+    return result
+
+
 def setup_package(pkg):
     """Execute all environment setup routines."""
     set_compiler_environment_variables(pkg)
     set_build_environment_variables(pkg)
-    set_module_variables_for_package(pkg)
+
+    # If a user makes their own package repo, e.g.
+    # spack.repos.mystuff.libelf.Libelf, and they inherit from
+    # an existing class like spack.repos.original.libelf.Libelf,
+    # then set the module variables for both classes so the
+    # parent class can still use them if it gets called.
+    modules = parent_class_modules(pkg.__class__)
+    for mod in modules:
+        set_module_variables_for_package(pkg, mod)

     # Allow dependencies to set up environment as well.
     for dep_spec in pkg.spec.traverse(root=False):
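Why setup_package() now loops: a package class may live in one repository and inherit from a class in another, and parent_class_modules() collects the defining module of every ancestor up to (but not including) spack.Package. A toy sketch with invented classes:

    import spack
    from spack.build_environment import parent_class_modules

    class Libelf(spack.Package):      # imagine this in the 'original' repo
        pass

    class MyLibelf(Libelf):           # ...and this override in a user repo
        pass

    mods = parent_class_modules(MyLibelf)
    # -> the modules defining MyLibelf and Libelf; spack.Package is excluded,
    #    so build-time globals get injected into both package modules.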
lib/spack/spack/cmd/__init__.py
@@ -31,6 +31,15 @@

 import spack
 import spack.spec
+import spack.config
+
+#
+# Settings for commands that modify configuration
+#
+# Commands that modify configuration by default modify the *highest* priority scope.
+default_modify_scope = spack.config.highest_precedence_scope().name
+# Commands that list configuration list *all* scopes by default.
+default_list_scope = None

 # cmd has a submodule called "list" so preserve the python list module
 python_list = list
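A sketch of how command parsers consume these defaults (the same wiring the compiler command uses below; the standalone parser here is for illustration only):

    import argparse
    import spack.cmd
    import spack.config

    parser = argparse.ArgumentParser()
    # Modifying commands default to the highest-precedence scope; listing
    # commands would pass default=spack.cmd.default_list_scope (None) instead.
    parser.add_argument('--scope', choices=spack.config.config_scopes,
                        default=spack.cmd.default_modify_scope,
                        help="Configuration scope to modify.")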
lib/spack/spack/cmd/checksum.py
@@ -81,7 +81,7 @@ def get_checksums(versions, urls, **kwargs):

 def checksum(parser, args):
     # get the package we're going to generate checksums for
-    pkg = spack.db.get(args.package)
+    pkg = spack.repo.get(args.package)

     # If the user asked for specific versions, use those.
     if args.versions:
lib/spack/spack/cmd/clean.py
@@ -42,5 +42,5 @@ def clean(parser, args):

     specs = spack.cmd.parse_specs(args.packages, concretize=True)
     for spec in specs:
-        package = spack.db.get(spec)
+        package = spack.repo.get(spec)
         package.do_clean()
@ -22,6 +22,7 @@
|
||||||
# along with this program; if not, write to the Free Software Foundation,
|
# along with this program; if not, write to the Free Software Foundation,
|
||||||
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||||
##############################################################################
|
##############################################################################
|
||||||
|
import sys
|
||||||
import argparse
|
import argparse
|
||||||
|
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
|
@ -41,17 +42,32 @@ def setup_parser(subparser):
|
||||||
sp = subparser.add_subparsers(
|
sp = subparser.add_subparsers(
|
||||||
metavar='SUBCOMMAND', dest='compiler_command')
|
metavar='SUBCOMMAND', dest='compiler_command')
|
||||||
|
|
||||||
update_parser = sp.add_parser(
|
scopes = spack.config.config_scopes
|
||||||
'add', help='Add compilers to the Spack configuration.')
|
|
||||||
update_parser.add_argument('add_paths', nargs=argparse.REMAINDER)
|
|
||||||
|
|
||||||
remove_parser = sp.add_parser('remove', help='remove compiler')
|
# Add
|
||||||
remove_parser.add_argument('path')
|
add_parser = sp.add_parser('add', help='Add compilers to the Spack configuration.')
|
||||||
|
add_parser.add_argument('add_paths', nargs=argparse.REMAINDER)
|
||||||
|
add_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope,
|
||||||
|
help="Configuration scope to modify.")
|
||||||
|
|
||||||
list_parser = sp.add_parser('list', help='list available compilers')
|
# Remove
|
||||||
|
remove_parser = sp.add_parser('remove', aliases=['rm'], help='Remove compiler by spec.')
|
||||||
|
remove_parser.add_argument(
|
||||||
|
'-a', '--all', action='store_true', help='Remove ALL compilers that match spec.')
|
||||||
|
remove_parser.add_argument('compiler_spec')
|
||||||
|
remove_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope,
|
||||||
|
help="Configuration scope to modify.")
|
||||||
|
|
||||||
info_parser = sp.add_parser('info', help='Show compiler paths.')
|
# List
|
||||||
|
     list_parser = sp.add_parser('list', help='list available compilers')
+    list_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_list_scope,
+                             help="Configuration scope to read from.")

     # Info
     info_parser = sp.add_parser('info', help='Show compiler paths.')
     info_parser.add_argument('compiler_spec')
+    info_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_list_scope,
+                             help="Configuration scope to read from.")

@@ -62,26 +78,40 @@ def compiler_add(args):
     paths = get_path('PATH')
     compilers = [c for c in spack.compilers.find_compilers(*args.add_paths)
-                 if c.spec not in spack.compilers.all_compilers()]
+                 if c.spec not in spack.compilers.all_compilers(scope=args.scope)]

     if compilers:
-        spack.compilers.add_compilers_to_config('user', *compilers)
+        spack.compilers.add_compilers_to_config(compilers, scope=args.scope)
         n = len(compilers)
-        tty.msg("Added %d new compiler%s to %s" % (
-            n, 's' if n > 1 else '', spack.config.get_config_scope_filename('user', 'compilers')))
+        s = 's' if n > 1 else ''
+        filename = spack.config.get_config_filename(args.scope, 'compilers')
+        tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
         colify(reversed(sorted(c.spec for c in compilers)), indent=4)
     else:
         tty.msg("Found no new compilers")


 def compiler_remove(args):
-    pass
+    cspec = CompilerSpec(args.compiler_spec)
+    compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)
+
+    if not compilers:
+        tty.die("No compilers match spec %s." % cspec)
+    elif not args.all and len(compilers) > 1:
+        tty.error("Multiple compilers match spec %s. Choose one:" % cspec)
+        colify(reversed(sorted([c.spec for c in compilers])), indent=4)
+        tty.msg("Or, you can use `spack compiler remove -a` to remove all of them.")
+        sys.exit(1)
+
+    for compiler in compilers:
+        spack.compilers.remove_compiler_from_config(compiler.spec, scope=args.scope)
+        tty.msg("Removed compiler %s." % compiler.spec)


 def compiler_info(args):
     """Print info about all compilers matching a spec."""
     cspec = CompilerSpec(args.compiler_spec)
-    compilers = spack.compilers.compilers_for_spec(cspec)
+    compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)

     if not compilers:
         tty.error("No compilers match spec %s." % cspec)

@@ -96,7 +126,7 @@ def compiler_info(args):
 def compiler_list(args):
     tty.msg("Available compilers")
-    index = index_by(spack.compilers.all_compilers(), 'name')
+    index = index_by(spack.compilers.all_compilers(scope=args.scope), 'name')
     for i, (name, compilers) in enumerate(index.items()):
         if i >= 1: print

@@ -108,6 +138,7 @@ def compiler_list(args):
 def compiler(parser, args):
     action = { 'add'    : compiler_add,
                'remove' : compiler_remove,
+               'rm'     : compiler_remove,
                'info'   : compiler_info,
                'list'   : compiler_list }
     action[args.compiler_command](args)
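For readers skimming the diff: the `action` dictionary above is the dispatch idiom Spack's command modules use with argparse subparsers, and the new `'rm'` entry is just a second key mapping to the same handler. A minimal self-contained sketch of the same pattern (hypothetical command names, not Spack code):

import argparse

def add(args):
    print("adding %s" % args.name)

def remove(args):
    print("removing %s" % args.name)

parser = argparse.ArgumentParser(prog='demo')
sp = parser.add_subparsers(metavar='SUBCOMMAND', dest='command')
for cmd in ('add', 'remove', 'rm'):
    p = sp.add_parser(cmd)
    p.add_argument('name')

# 'rm' is an alias: both keys dispatch to the same function, just as
# 'remove' and 'rm' both map to compiler_remove above.
action = {'add': add, 'remove': remove, 'rm': remove}

args = parser.parse_args(['rm', 'gcc@4.8'])
action[args.command](args)   # prints: removing gcc@4.8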
@@ -26,9 +26,14 @@
 from llnl.util.tty.colify import colify
 from llnl.util.lang import index_by

+import spack
 from spack.cmd.compiler import compiler_list

 description = "List available compilers. Same as 'spack compiler list'."

+def setup_parser(subparser):
+    subparser.add_argument('--scope', choices=spack.config.config_scopes,
+                           help="Configuration scope to read/modify.")
+
 def compilers(parser, args):
     compiler_list(args)
@@ -44,22 +44,22 @@ def setup_parser(subparser):
     sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='config_command')

     get_parser = sp.add_parser('get', help='Print configuration values.')
-    get_parser.add_argument('category', help="Configuration category to print.")
+    get_parser.add_argument('section', help="Configuration section to print.")

     edit_parser = sp.add_parser('edit', help='Edit configuration file.')
-    edit_parser.add_argument('category', help="Configuration category to edit")
+    edit_parser.add_argument('section', help="Configuration section to edit")


 def config_get(args):
-    spack.config.print_category(args.category)
+    spack.config.print_section(args.section)


 def config_edit(args):
     if not args.scope:
         args.scope = 'user'
-    if not args.category:
-        args.category = None
-    config_file = spack.config.get_config_scope_filename(args.scope, args.category)
+    if not args.section:
+        args.section = None
+    config_file = spack.config.get_config_filename(args.scope, args.section)
     spack.editor(config_file)
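The `category` to `section` rename goes with the new scoped configuration: each section (mirrors, repos, compilers) can live in several scope files that get merged, with later scopes overriding earlier ones. A rough standalone sketch of that merge idea (illustrative only; not the actual `spack.config` implementation, and the scope contents here are made up):

# Hypothetical two-scope config merge: user settings override site settings.
site_scope = {'mirrors': {'local': 'file:///mirror'}, 'repos': ['/site/repo']}
user_scope = {'mirrors': {'remote': 'http://example.com/mirror'}}

def merge_section(section, scopes):
    """Merge one config section across scopes, later scopes winning."""
    merged = {}
    for scope in scopes:
        value = scope.get(section)
        if isinstance(value, dict):
            merged.update(value)
    return merged

print(merge_section('mirrors', [site_scope, user_scope]))
# {'local': 'file:///mirror', 'remote': 'http://example.com/mirror'}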
@@ -36,7 +36,9 @@
 import spack.cmd.checksum
 import spack.url
 import spack.util.web
+from spack.spec import Spec
 from spack.util.naming import *
+from spack.repository import Repo, RepoError
 import spack.util.crypto as crypto

 from spack.util.executable import which

@@ -85,18 +87,34 @@ def install(self, spec, prefix):
 """)


+def make_version_calls(ver_hash_tuples):
+    """Adds a version() call to the package for each version found."""
+    max_len = max(len(str(v)) for v, h in ver_hash_tuples)
+    format = "    version(%%-%ds, '%%s')" % (max_len + 2)
+    return '\n'.join(format % ("'%s'" % v, h) for v, h in ver_hash_tuples)
+
+
 def setup_parser(subparser):
     subparser.add_argument('url', nargs='?', help="url of package archive")
     subparser.add_argument(
-        '--keep-stage', action='store_true', dest='keep_stage',
+        '--keep-stage', action='store_true',
         help="Don't clean up staging area when command completes.")
     subparser.add_argument(
-        '-n', '--name', dest='alternate_name', default=None,
+        '-n', '--name', dest='alternate_name', default=None, metavar='NAME',
         help="Override the autodetected name for the created package.")
+    subparser.add_argument(
+        '-r', '--repo', default=None,
+        help="Path to a repository where the package should be created.")
+    subparser.add_argument(
+        '-N', '--namespace',
+        help="Specify a namespace for the package. Must be the namespace of "
+        "a repository registered with Spack.")
     subparser.add_argument(
         '-f', '--force', action='store_true', dest='force',
         help="Overwrite any existing package file with the same name.")

+    setup_parser.subparser = subparser
+

 class ConfigureGuesser(object):
     def __call__(self, stage):

@@ -134,16 +152,7 @@ def __call__(self, stage):
         self.build_system = build_system


-def make_version_calls(ver_hash_tuples):
-    """Adds a version() call to the package for each version found."""
-    max_len = max(len(str(v)) for v, h in ver_hash_tuples)
-    format = "    version(%%-%ds, '%%s')" % (max_len + 2)
-    return '\n'.join(format % ("'%s'" % v, h) for v, h in ver_hash_tuples)
-
-
-def create(parser, args):
-    url = args.url
-
+def guess_name_and_version(url, args):
     # Try to deduce name and version of the new package from the URL
     version = spack.url.parse_version(url)
     if not version:

@@ -160,12 +169,52 @@ def create(parser, args):
         tty.die("Couldn't guess a name for this package. Try running:", "",
                 "spack create --name <name> <url>")

-    if not valid_module_name(name):
+    if not valid_fully_qualified_module_name(name):
         tty.die("Package name can only contain A-Z, a-z, 0-9, '_' and '-'")

-    tty.msg("This looks like a URL for %s version %s." % (name, version))
-    tty.msg("Creating template for package %s" % name)
+    return name, version
+
+
+def find_repository(spec, args):
+    # figure out namespace for spec
+    if spec.namespace and args.namespace and spec.namespace != args.namespace:
+        tty.die("Namespaces '%s' and '%s' do not match." % (spec.namespace, args.namespace))
+
+    if not spec.namespace and args.namespace:
+        spec.namespace = args.namespace
+
+    # Figure out where the new package should live.
+    repo_path = args.repo
+    if repo_path is not None:
+        try:
+            repo = Repo(repo_path)
+            if spec.namespace and spec.namespace != repo.namespace:
+                tty.die("Can't create package with namespace %s in repo with namespace %s."
+                        % (spec.namespace, repo.namespace))
+        except RepoError as e:
+            tty.die(str(e))
+    else:
+        if spec.namespace:
+            repo = spack.repo.get_repo(spec.namespace, None)
+            if not repo:
+                tty.die("Unknown namespace: %s" % spec.namespace)
+        else:
+            repo = spack.repo.first_repo()
+
+    # Set the namespace on the spec if it's not there already
+    if not spec.namespace:
+        spec.namespace = repo.namespace
+
+    return repo
+
+
+def fetch_tarballs(url, name, args):
+    """Try to find versions of the supplied archive by scraping the web.
+
+    Prompts the user to select how many to download if many are found.
+
+    """
     versions = spack.util.web.find_versions_of_archive(url)
     rkeys = sorted(versions.keys(), reverse=True)
     versions = OrderedDict(zip(rkeys, (versions[v] for v in rkeys)))

@@ -184,13 +233,35 @@ def create(parser, args):
                 default=5, abort='q')

     if not archives_to_fetch:
-        tty.msg("Aborted.")
-        return
+        tty.die("Aborted.")
+
+    sorted_versions = sorted(versions.keys(), reverse=True)
+    sorted_urls = [versions[v] for v in sorted_versions]
+    return sorted_versions[:archives_to_fetch], sorted_urls[:archives_to_fetch]
+
+
+def create(parser, args):
+    url = args.url
+    if not url:
+        setup_parser.subparser.print_help()
+        return
+
+    # Figure out a name and repo for the package.
+    name, version = guess_name_and_version(url, args)
+    spec = Spec(name)
+    name = spec.name  # factors out namespace, if any
+    repo = find_repository(spec, args)
+
+    tty.msg("This looks like a URL for %s version %s." % (name, version))
+    tty.msg("Creating template for package %s" % name)

+    # Fetch tarballs (prompting user if necessary)
+    versions, urls = fetch_tarballs(url, name, args)
+
+    # Try to guess what configure system is used.
     guesser = ConfigureGuesser()
     ver_hash_tuples = spack.cmd.checksum.get_checksums(
-        versions.keys()[:archives_to_fetch],
-        [versions[v] for v in versions.keys()[:archives_to_fetch]],
+        versions, urls,
         first_stage_function=guesser,
         keep_stage=args.keep_stage)

@@ -202,7 +273,7 @@ def create(parser, args):
         name = 'py-%s' % name

     # Create a directory for the new package.
-    pkg_path = spack.db.filename_for_package_name(name)
+    pkg_path = repo.filename_for_package_name(name)
     if os.path.exists(pkg_path) and not args.force:
         tty.die("%s already exists." % pkg_path)
     else:
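`make_version_calls`, moved above `setup_parser` in this hunk, simply pads the version strings so the generated `version()` calls line up in the package template. Run standalone with sample data (the version/hash tuples here are made up):

def make_version_calls(ver_hash_tuples):
    """Adds a version() call to the package for each version found."""
    max_len = max(len(str(v)) for v, h in ver_hash_tuples)
    format = "    version(%%-%ds, '%%s')" % (max_len + 2)
    return '\n'.join(format % ("'%s'" % v, h) for v, h in ver_hash_tuples)

print(make_version_calls([('2.1.0', 'aaa111'), ('2.0', 'bbb222')]))
# Output (shorter versions are padded so the hashes align):
#     version('2.1.0', 'aaa111')
#     version('2.0'  , 'bbb222')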
@@ -61,7 +61,7 @@ def diy(self, args):
     # Take a write lock before checking for existence.
     with spack.installed_db.write_transaction():
         spec = specs[0]
-        if not spack.db.exists(spec.name):
+        if not spack.repo.exists(spec.name):
             tty.warn("No such package: %s" % spec.name)
             create = tty.get_yes_or_no("Create this package?", default=False)
             if not create:

@@ -69,14 +69,14 @@ def diy(self, args):
                 sys.exit(1)
             else:
                 tty.msg("Running 'spack edit -f %s'" % spec.name)
-                edit_package(spec.name, True)
+                edit_package(spec.name, spack.repo.first_repo(), None, True)
                 return

         if not spec.version.concrete:
             tty.die("spack diy spec must have a single, concrete version.")

         spec.concretize()
-        package = spack.db.get(spec)
+        package = spack.repo.get(spec)

         if package.installed:
             tty.error("Already installed in %s" % package.prefix)
@@ -30,6 +30,8 @@
 import spack
 import spack.cmd
+from spack.spec import Spec
+from spack.repository import Repo
 from spack.util.naming import mod_to_class

 description = "Open package files in $EDITOR"

@@ -53,9 +55,16 @@ def install(self, spec, prefix):
 """)


-def edit_package(name, force=False):
-    path = spack.db.filename_for_package_name(name)
+def edit_package(name, repo_path, namespace, force=False):
+    if repo_path:
+        repo = Repo(repo_path)
+    elif namespace:
+        repo = spack.repo.get_repo(namespace)
+    else:
+        repo = spack.repo
+    path = repo.filename_for_package_name(name)

+    spec = Spec(name)
     if os.path.exists(path):
         if not os.path.isfile(path):
             tty.die("Something's wrong.  '%s' is not a file!" % path)

@@ -63,13 +72,13 @@ def edit_package(name, force=False):
             tty.die("Insufficient permissions on '%s'!" % path)
     elif not force:
         tty.die("No package '%s'.  Use spack create, or supply -f/--force "
-                "to edit a new file." % name)
+                "to edit a new file." % spec.name)
     else:
         mkdirp(os.path.dirname(path))
         with open(path, "w") as pkg_file:
             pkg_file.write(
                 package_template.substitute(
-                    name=name, class_name=mod_to_class(name)))
+                    name=spec.name, class_name=mod_to_class(spec.name)))

     spack.editor(path)

@@ -78,9 +87,26 @@ def setup_parser(subparser):
     subparser.add_argument(
         '-f', '--force', dest='force', action='store_true',
         help="Open a new file in $EDITOR even if package doesn't exist.")
-    subparser.add_argument(
-        '-c', '--command', dest='edit_command', action='store_true',
-        help="Edit the command with the supplied name instead of a package.")
+
+    excl_args = subparser.add_mutually_exclusive_group()
+
+    # Various filetypes you can edit directly from the cmd line.
+    excl_args.add_argument(
+        '-c', '--command', dest='path', action='store_const',
+        const=spack.cmd.command_path, help="Edit the command with the supplied name.")
+    excl_args.add_argument(
+        '-t', '--test', dest='path', action='store_const',
+        const=spack.test_path, help="Edit the test with the supplied name.")
+    excl_args.add_argument(
+        '-m', '--module', dest='path', action='store_const',
+        const=spack.module_path, help="Edit the main spack module with the supplied name.")
+
+    # Options for editing packages
+    excl_args.add_argument(
+        '-r', '--repo', default=None, help="Path to repo to edit package in.")
+    excl_args.add_argument(
+        '-N', '--namespace', default=None, help="Namespace of package to edit.")
+
     subparser.add_argument(
         'name', nargs='?', default=None, help="name of package to edit")

@@ -88,19 +114,17 @@ def setup_parser(subparser):
 def edit(parser, args):
     name = args.name

-    if args.edit_command:
-        if not name:
-            path = spack.cmd.command_path
-        else:
-            path = join_path(spack.cmd.command_path, name + ".py")
-            if not os.path.exists(path):
+    path = spack.packages_path
+    if args.path:
+        path = args.path
+        if name:
+            path = join_path(path, name + ".py")
+            if not args.force and not os.path.exists(path):
                 tty.die("No command named '%s'." % name)
         spack.editor(path)

+    elif name:
+        edit_package(name, args.repo, args.namespace, args.force)
     else:
         # By default open the directory where packages or commands live.
-        if not name:
-            path = spack.packages_path
-            spack.editor(path)
-        else:
-            edit_package(name, args.force)
+        spack.editor(path)
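The rewritten `setup_parser` relies on an argparse feature worth calling out: mutually exclusive flags that share a single `dest` and each store a different base directory via `action='store_const'`. A self-contained sketch of just that mechanism (the paths here are made up):

import argparse

parser = argparse.ArgumentParser(prog='edit-demo')
excl = parser.add_mutually_exclusive_group()
# Each flag stores a different base directory into the same 'path' dest,
# so the handler only has to look at args.path.
excl.add_argument('-c', '--command', dest='path', action='store_const',
                  const='/spack/lib/spack/spack/cmd')
excl.add_argument('-t', '--test', dest='path', action='store_const',
                  const='/spack/lib/spack/spack/test')
parser.add_argument('name', nargs='?')

args = parser.parse_args(['-t', 'unit_install'])
print("%s %s" % (args.path, args.name))
# /spack/lib/spack/spack/test unit_install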
@@ -74,8 +74,7 @@ def extensions(parser, args):
     #
     # List package names of extensions
     #
-    extensions = spack.db.extensions_for(spec)
+    extensions = spack.repo.extensions_for(spec)
     if not extensions:
         tty.msg("%s has no extensions." % spec.cshort_spec)
         return
@@ -52,10 +52,10 @@ def fetch(parser, args):
     if args.missing or args.dependencies:
         to_fetch = set()
         for s in spec.traverse():
-            package = spack.db.get(s)
+            package = spack.repo.get(s)
             if args.missing and package.installed:
                 continue
             package.do_fetch()

-    package = spack.db.get(spec)
+    package = spack.repo.get(spec)
     package.do_fetch()
@@ -40,6 +40,9 @@
 def setup_parser(subparser):
     format_group = subparser.add_mutually_exclusive_group()
+    format_group.add_argument(
+        '-s', '--short', action='store_const', dest='mode', const='short',
+        help='Show only specs (default)')
     format_group.add_argument(
         '-p', '--paths', action='store_const', dest='mode', const='paths',
         help='Show paths to package install directories')

@@ -48,21 +51,24 @@ def setup_parser(subparser):
         help='Show full dependency DAG of installed packages')

     subparser.add_argument(
-        '-l', '--long', action='store_true', dest='long',
+        '-l', '--long', action='store_true',
         help='Show dependency hashes as well as versions.')
     subparser.add_argument(
-        '-L', '--very-long', action='store_true', dest='very_long',
+        '-L', '--very-long', action='store_true',
         help='Show dependency hashes as well as versions.')

     subparser.add_argument(
-        '-u', '--unknown', action='store_true', dest='unknown',
+        '-u', '--unknown', action='store_true',
         help='Show only specs Spack does not have a package for.')
     subparser.add_argument(
-        '-m', '--missing', action='store_true', dest='missing',
+        '-m', '--missing', action='store_true',
         help='Show missing dependencies as well as installed specs.')
     subparser.add_argument(
-        '-M', '--only-missing', action='store_true', dest='only_missing',
+        '-M', '--only-missing', action='store_true',
         help='Show only missing dependencies.')
+    subparser.add_argument(
+        '-N', '--namespace', action='store_true',
+        help='Show fully qualified package names.')

     subparser.add_argument(
         'query_specs', nargs=argparse.REMAINDER,

@@ -76,6 +82,7 @@ def gray_hash(spec, length):
 def display_specs(specs, **kwargs):
     mode = kwargs.get('mode', 'short')
     hashes = kwargs.get('long', False)
+    namespace = kwargs.get('namespace', False)

     hlen = 7
     if kwargs.get('very_long', False):

@@ -97,7 +104,8 @@ def display_specs(specs, **kwargs):
         specs = index[(architecture,compiler)]
         specs.sort()

-        abbreviated = [s.format('$_$@$+', color=True) for s in specs]
+        nfmt = '.' if namespace else '_'
+        abbreviated = [s.format('$%s$@$+' % nfmt, color=True) for s in specs]
         if mode == 'paths':
             # Print one spec per line along with prefix path
             width = max(len(s) for s in abbreviated)

@@ -112,7 +120,7 @@ def display_specs(specs, **kwargs):
         elif mode == 'deps':
             for spec in specs:
                 print spec.tree(
-                    format='$_$@$+',
+                    format='$%s$@$+' % nfmt,
                     color=True,
                     indent=4,
                     prefix=(lambda s: gray_hash(s, hlen)) if hashes else None)

@@ -122,7 +130,7 @@ def fmt(s):
                 string = ""
                 if hashes:
                     string += gray_hash(s, hlen) + ' '
-                string += s.format('$-_$@$+', color=True)
+                string += s.format('$-%s$@$+' % nfmt, color=True)

                 return string
             colify(fmt(s) for s in specs)

@@ -137,7 +145,7 @@ def find(parser, args):
     # Filter out specs that don't exist.
     query_specs = spack.cmd.parse_specs(args.query_specs)
     query_specs, nonexisting = partition_list(
-        query_specs, lambda s: spack.db.exists(s.name))
+        query_specs, lambda s: spack.repo.exists(s.name))

     if nonexisting:
         msg = "No such package%s: " % ('s' if len(nonexisting) > 1 else '')

@@ -171,4 +179,5 @@ def find(parser, args):
     tty.msg("%d installed packages." % len(specs))
     display_specs(specs, mode=args.mode,
                   long=args.long,
-                  very_long=args.very_long)
+                  very_long=args.very_long,
+                  namespace=args.namespace)
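The `nfmt` toggle above switches the spec format string between `$_` (plain package name) and `$.` (namespace-qualified name) when `--namespace` is given. Sketched without Spack's `Spec.format` (the namespace and package name below are just examples), the effect is simply:

def format_name(namespace, name, qualified):
    """Mimic the '$.' vs '$_' toggle: qualified shows namespace.name."""
    return '%s.%s' % (namespace, name) if qualified else name

print(format_name('builtin', 'mpich', qualified=False))  # mpich
print(format_name('builtin', 'mpich', qualified=True))   # builtin.mpich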
@@ -105,5 +105,5 @@ def print_text_info(pkg):


 def info(parser, args):
-    pkg = spack.db.get(args.name)
+    pkg = spack.repo.get(args.name)
     print_text_info(pkg)
@@ -70,7 +70,7 @@ def install(parser, args):

     specs = spack.cmd.parse_specs(args.packages, concretize=True)
     for spec in specs:
-        package = spack.db.get(spec)
+        package = spack.repo.get(spec)
         with spack.installed_db.write_transaction():
             package.do_install(
                 keep_prefix=args.keep_prefix,
@@ -43,7 +43,7 @@ def setup_parser(subparser):

 def list(parser, args):
     # Start with all package names.
-    pkgs = spack.db.all_package_names()
+    pkgs = spack.repo.all_package_names()

     # filter if a filter arg was provided
     if args.filter:
@@ -72,7 +72,7 @@ def location(parser, args):
         print spack.prefix

     elif args.packages:
-        print spack.db.root
+        print spack.repo.root

     elif args.stages:
         print spack.stage_path

@@ -94,12 +94,12 @@ def location(parser, args):
         if args.package_dir:
             # This one just needs the spec name.
-            print join_path(spack.db.root, spec.name)
+            print join_path(spack.repo.root, spec.name)

         else:
             # These versions need concretized specs.
             spec.concretize()
-            pkg = spack.db.get(spec)
+            pkg = spack.repo.get(spec)

             if args.stage_dir:
                 print pkg.stage.path
@@ -36,6 +36,7 @@
 import spack.mirror
 from spack.spec import Spec
 from spack.error import SpackError
+from spack.util.spack_yaml import syaml_dict

 description = "Manage mirrors."

@@ -47,6 +48,7 @@ def setup_parser(subparser):
     sp = subparser.add_subparsers(
         metavar='SUBCOMMAND', dest='mirror_command')

+    # Create
     create_parser = sp.add_parser('create', help=mirror_create.__doc__)
     create_parser.add_argument('-d', '--directory', default=None,
                                help="Directory in which to create mirror.")

@@ -60,15 +62,29 @@ def setup_parser(subparser):
         '-o', '--one-version-per-spec', action='store_const', const=1, default=0,
         help="Only fetch one 'preferred' version per spec, not all known versions.")

+    scopes = spack.config.config_scopes
+
+    # Add
     add_parser = sp.add_parser('add', help=mirror_add.__doc__)
     add_parser.add_argument('name', help="Mnemonic name for mirror.")
     add_parser.add_argument(
         'url', help="URL of mirror directory created by 'spack mirror create'.")
+    add_parser.add_argument(
+        '--scope', choices=scopes, default=spack.cmd.default_modify_scope,
+        help="Configuration scope to modify.")

-    remove_parser = sp.add_parser('remove', help=mirror_remove.__doc__)
+    # Remove
+    remove_parser = sp.add_parser('remove', aliases=['rm'], help=mirror_remove.__doc__)
     remove_parser.add_argument('name')
+    remove_parser.add_argument(
+        '--scope', choices=scopes, default=spack.cmd.default_modify_scope,
+        help="Configuration scope to modify.")

+    # List
     list_parser = sp.add_parser('list', help=mirror_list.__doc__)
+    list_parser.add_argument(
+        '--scope', choices=scopes, default=spack.cmd.default_list_scope,
+        help="Configuration scope to read from.")


 def mirror_add(args):

@@ -77,31 +93,51 @@ def mirror_add(args):
     if url.startswith('/'):
         url = 'file://' + url

-    mirror_dict = { args.name : url }
-    spack.config.add_to_mirror_config({ args.name : url })
+    mirrors = spack.config.get_config('mirrors', scope=args.scope)
+    if not mirrors:
+        mirrors = syaml_dict()
+
+    for name, u in mirrors.items():
+        if name == args.name:
+            tty.die("Mirror with name %s already exists." % name)
+        if u == url:
+            tty.die("Mirror with url %s already exists." % url)
+    # should only be one item per mirror dict.
+
+    items = [(n,u) for n,u in mirrors.items()]
+    items.insert(0, (args.name, url))
+    mirrors = syaml_dict(items)
+    spack.config.update_config('mirrors', mirrors, scope=args.scope)


 def mirror_remove(args):
     """Remove a mirror by name."""
     name = args.name

-    rmd_something = spack.config.remove_from_config('mirrors', name)
-    if not rmd_something:
-        tty.die("No such mirror: %s" % name)
+    mirrors = spack.config.get_config('mirrors', scope=args.scope)
+    if not mirrors:
+        mirrors = syaml_dict()
+
+    if not name in mirrors:
+        tty.die("No mirror with name %s" % name)
+
+    old_value = mirrors.pop(name)
+    spack.config.update_config('mirrors', mirrors, scope=args.scope)
+    tty.msg("Removed mirror %s with url %s." % (name, old_value))


 def mirror_list(args):
     """Print out available mirrors to the console."""
-    sec_names = spack.config.get_mirror_config()
-    if not sec_names:
+    mirrors = spack.config.get_config('mirrors', scope=args.scope)
+    if not mirrors:
         tty.msg("No mirrors configured.")
         return

-    max_len = max(len(s) for s in sec_names)
+    max_len = max(len(n) for n in mirrors.keys())
     fmt = "%%-%ds%%s" % (max_len + 4)

-    for name, val in sec_names.iteritems():
-        print fmt % (name, val)
+    for name in mirrors:
+        print fmt % (name, mirrors[name])


 def _read_specs_from_file(filename):

@@ -130,7 +166,7 @@ def mirror_create(args):

     # If nothing is passed, use all packages.
     if not specs:
-        specs = [Spec(n) for n in spack.db.all_package_names()]
+        specs = [Spec(n) for n in spack.repo.all_package_names()]
         specs.sort(key=lambda s: s.format("$_$@").lower())

     if args.dependencies:

@@ -175,6 +211,7 @@ def mirror(parser, args):
     action = { 'create' : mirror_create,
                'add'    : mirror_add,
                'remove' : mirror_remove,
+               'rm'     : mirror_remove,
                'list'   : mirror_list }

     action[args.mirror_command](args)
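`mirror_add` rebuilds the mapping from an item list so the new mirror lands first; `syaml_dict` is an ordered mapping, so that order is exactly what gets written back to YAML. The same front-insertion trick, shown with the standard library's `OrderedDict` (the mirror names and URLs are made up):

from collections import OrderedDict

mirrors = OrderedDict([('remote', 'http://example.com/mirror')])

# Insert the new mirror at position 0 so it is consulted first.
items = list(mirrors.items())
items.insert(0, ('local', 'file:///tmp/mirror'))
mirrors = OrderedDict(items)

print(list(mirrors.keys()))  # ['local', 'remote']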
@@ -48,7 +48,7 @@ def rst_table(elts):

 def print_rst_package_list():
     """Print out information on all packages in restructured text."""
-    pkgs = sorted(spack.db.all_packages(), key=lambda s:s.name.lower())
+    pkgs = sorted(spack.repo.all_packages(), key=lambda s:s.name.lower())

     print ".. _package-list:"
     print
@@ -47,5 +47,5 @@ def patch(parser, args):

     specs = spack.cmd.parse_specs(args.packages, concretize=True)
     for spec in specs:
-        package = spack.db.get(spec)
+        package = spack.repo.get(spec)
         package.do_patch()
@@ -85,7 +85,7 @@ def list_packages(rev):

 def pkg_add(args):
     for pkg_name in args.packages:
-        filename = spack.db.filename_for_package_name(pkg_name)
+        filename = spack.repo.filename_for_package_name(pkg_name)
         if not os.path.isfile(filename):
             tty.die("No such package: %s.  Path does not exist:" % pkg_name, filename)
@@ -39,4 +39,4 @@ def setup_parser(subparser):

 def providers(parser, args):
     for spec in spack.cmd.parse_specs(args.vpkg_spec):
-        colify(sorted(spack.db.providers_for(spec)), indent=4)
+        colify(sorted(spack.repo.providers_for(spec)), indent=4)
lib/spack/spack/cmd/repo.py (new file, 218 lines)
@@ -0,0 +1,218 @@
##############################################################################
# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import re
import shutil

from external import argparse
import llnl.util.tty as tty
from llnl.util.filesystem import join_path, mkdirp

import spack.spec
import spack.config
from spack.util.environment import get_path
from spack.repository import *

description = "Manage package source repositories."

def setup_parser(subparser):
    sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='repo_command')
    scopes = spack.config.config_scopes

    # Create
    create_parser = sp.add_parser('create', help=repo_create.__doc__)
    create_parser.add_argument(
        'directory', help="Directory to create the repo in.")
    create_parser.add_argument(
        'namespace', help="Namespace to identify packages in the repository. "
        "Defaults to the directory name.", nargs='?')

    # List
    list_parser = sp.add_parser('list', help=repo_list.__doc__)
    list_parser.add_argument(
        '--scope', choices=scopes, default=spack.cmd.default_list_scope,
        help="Configuration scope to read from.")

    # Add
    add_parser = sp.add_parser('add', help=repo_add.__doc__)
    add_parser.add_argument('path', help="Path to a Spack package repository directory.")
    add_parser.add_argument(
        '--scope', choices=scopes, default=spack.cmd.default_modify_scope,
        help="Configuration scope to modify.")

    # Remove
    remove_parser = sp.add_parser('remove', help=repo_remove.__doc__, aliases=['rm'])
    remove_parser.add_argument(
        'path_or_namespace',
        help="Path or namespace of a Spack package repository.")
    remove_parser.add_argument(
        '--scope', choices=scopes, default=spack.cmd.default_modify_scope,
        help="Configuration scope to modify.")


def repo_create(args):
    """Create a new package repository."""
    root = canonicalize_path(args.directory)
    namespace = args.namespace

    if not args.namespace:
        namespace = os.path.basename(root)

    if not re.match(r'\w[\.\w-]*', namespace):
        tty.die("'%s' is not a valid namespace." % namespace)

    existed = False
    if os.path.exists(root):
        if os.path.isfile(root):
            tty.die('File %s already exists and is not a directory' % root)
        elif os.path.isdir(root):
            if not os.access(root, os.R_OK | os.W_OK):
                tty.die('Cannot create new repo in %s: cannot access directory.' % root)
            if os.listdir(root):
                tty.die('Cannot create new repo in %s: directory is not empty.' % root)
        existed = True

    full_path = os.path.realpath(root)
    parent = os.path.dirname(full_path)
    if not os.access(parent, os.R_OK | os.W_OK):
        tty.die("Cannot create repository in %s: can't access parent!" % root)

    try:
        config_path = os.path.join(root, repo_config_name)
        packages_path = os.path.join(root, packages_dir_name)

        mkdirp(packages_path)
        with open(config_path, 'w') as config:
            config.write("repo:\n")
            config.write("  namespace: '%s'\n" % namespace)

    except (IOError, OSError) as e:
        tty.die('Failed to create new repository in %s.' % root,
                "Caused by %s: %s" % (type(e), e))

        # try to clean up.
        if existed:
            shutil.rmtree(config_path, ignore_errors=True)
            shutil.rmtree(packages_path, ignore_errors=True)
        else:
            shutil.rmtree(root, ignore_errors=True)

    tty.msg("Created repo with namespace '%s'." % namespace)
    tty.msg("To register it with spack, run this command:",
            'spack repo add %s' % full_path)


def repo_add(args):
    """Add a package source to Spack's configuration."""
    path = args.path

    # real_path is absolute and handles substitution.
    canon_path = canonicalize_path(path)

    # check if the path exists
    if not os.path.exists(canon_path):
        tty.die("No such file or directory: '%s'." % path)

    # Make sure the path is a directory.
    if not os.path.isdir(canon_path):
        tty.die("Not a Spack repository: '%s'." % path)

    # Make sure it's actually a spack repository by constructing it.
    repo = Repo(canon_path)

    # If that succeeds, finally add it to the configuration.
    repos = spack.config.get_config('repos', args.scope)
    if not repos: repos = []

    if repo.root in repos or path in repos:
        tty.die("Repository is already registered with Spack: '%s'" % path)

    repos.insert(0, canon_path)
    spack.config.update_config('repos', repos, args.scope)
    tty.msg("Created repo with namespace '%s'." % repo.namespace)


def repo_remove(args):
    """Remove a repository from Spack's configuration."""
    repos = spack.config.get_config('repos', args.scope)
    path_or_namespace = args.path_or_namespace

    # If the argument is a path, remove that repository from config.
    canon_path = canonicalize_path(path_or_namespace)
    for repo_path in repos:
        repo_canon_path = canonicalize_path(repo_path)
        if canon_path == repo_canon_path:
            repos.remove(repo_path)
            spack.config.update_config('repos', repos, args.scope)
            tty.msg("Removed repository '%s'." % repo_path)
            return

    # If it is a namespace, remove corresponding repo
    for path in repos:
        try:
            repo = Repo(path)
            if repo.namespace == path_or_namespace:
                repos.remove(path)
                spack.config.update_config('repos', repos, args.scope)
                tty.msg("Removed repository '%s' with namespace %s."
                        % (repo.root, repo.namespace))
                return
        except RepoError as e:
            continue

    tty.die("No repository with path or namespace: '%s'"
            % path_or_namespace)


def repo_list(args):
    """Show registered repositories and their namespaces."""
    roots = spack.config.get_config('repos', args.scope)
    repos = []
    for r in roots:
        try:
            repos.append(Repo(r))
        except RepoError as e:
            continue

    msg = "%d package repositor" % len(repos)
    msg += "y." if len(repos) == 1 else "ies."
    tty.msg(msg)

    if not repos:
        return

    max_ns_len = max(len(r.namespace) for r in repos)
    for repo in repos:
        fmt = "%%-%ds%%s" % (max_ns_len + 4)
        print fmt % (repo.namespace, repo.root)


def repo(parser, args):
    action = { 'create' : repo_create,
               'list'   : repo_list,
               'add'    : repo_add,
               'remove' : repo_remove,
               'rm'     : repo_remove}
    action[args.repo_command](args)
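What `repo_create` leaves on disk is tiny: an empty `packages/` directory plus a two-line `repo.yaml` naming the namespace. A sketch of producing and reading back that layout (assumes PyYAML is available; the `/tmp/myrepo` path is hypothetical, and `repo.yaml`/`packages` correspond to the `repo_config_name`/`packages_dir_name` constants used above):

import os
import yaml  # PyYAML, assumed available

root = '/tmp/myrepo'  # hypothetical repo root
# (sketch: assumes the directory does not already exist)
os.makedirs(os.path.join(root, 'packages'))

with open(os.path.join(root, 'repo.yaml'), 'w') as config:
    config.write("repo:\n")
    config.write("  namespace: 'myrepo'\n")

with open(os.path.join(root, 'repo.yaml')) as config:
    print(yaml.safe_load(config))  # {'repo': {'namespace': 'myrepo'}}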
@@ -42,5 +42,5 @@ def restage(parser, args):

     specs = spack.cmd.parse_specs(args.packages, concretize=True)
     for spec in specs:
-        package = spack.db.get(spec)
+        package = spack.repo.get(spec)
         package.do_restage()
@@ -49,5 +49,5 @@ def stage(parser, args):

     specs = spack.cmd.parse_specs(args.specs, concretize=True)
     for spec in specs:
-        package = spack.db.get(spec)
+        package = spack.repo.get(spec)
         package.do_stage()
@@ -115,7 +115,7 @@ def fetch_log(path):

 def failed_dependencies(spec):
     return set(childSpec for childSpec in spec.dependencies.itervalues() if not
-               spack.db.get(childSpec).installed)
+               spack.repo.get(childSpec).installed)


 def create_test_output(topSpec, newInstalls, output, getLogFunc=fetch_log):

@@ -126,7 +126,7 @@ def create_test_output(topSpec, newInstalls, output, getLogFunc=fetch_log):
             continue

         failedDeps = failed_dependencies(spec)
-        package = spack.db.get(spec)
+        package = spack.repo.get(spec)
         if failedDeps:
             result = TestResult.SKIPPED
             dep = iter(failedDeps).next()

@@ -171,7 +171,7 @@ def test_install(parser, args):

     newInstalls = set()
     for spec in topSpec.traverse():
-        package = spack.db.get(spec)
+        package = spack.repo.get(spec)
         if not package.installed:
             newInstalls.add(spec)

@@ -188,7 +188,7 @@ def test_install(parser, args):
         # Calling do_install for the top-level package would be sufficient but
         # this attempts to keep going if any package fails (other packages which
         # are not dependents may succeed)
-        package = spack.db.get(spec)
+        package = spack.repo.get(spec)
         if (not failed_dependencies(spec)) and (not package.installed):
             try:
                 package.do_install(
@@ -30,7 +30,7 @@

 import spack
 import spack.cmd
-import spack.packages
+import spack.repository
 from spack.cmd.find import display_specs
 from spack.package import PackageStillNeededError

@@ -79,10 +79,9 @@ def uninstall(parser, args):
             try:
                 # should work if package is known to spack
                 pkgs.append(s.package)
-            except spack.packages.UnknownPackageError, e:
-                # The package.py file has gone away -- but still want to
-                # uninstall.
+            except spack.repository.UnknownPackageError, e:
+                # The package.py file has gone away -- but still
+                # want to uninstall.
                 spack.Package(s).do_uninstall(force=True)

     # Sort packages to be uninstalled by the number of installed dependents
@@ -41,7 +41,7 @@ def setup_parser(subparser):

 def urls(parser, args):
     urls = set()
-    for pkg in spack.db.all_packages():
+    for pkg in spack.repo.all_packages():
         url = getattr(pkg.__class__, 'url', None)
         if url:
             urls.add(url)
@@ -34,7 +34,7 @@ def setup_parser(subparser):


 def versions(parser, args):
-    pkg = spack.db.get(args.package)
+    pkg = spack.repo.get(args.package)

     safe_versions = pkg.versions
     fetched_versions = pkg.fetch_remote_versions()
@ -27,6 +27,7 @@
|
||||||
"""
|
"""
|
||||||
import imp
|
import imp
|
||||||
import os
|
import os
|
||||||
|
import platform
|
||||||
|
|
||||||
from llnl.util.lang import memoized, list_modules
|
from llnl.util.lang import memoized, list_modules
|
||||||
from llnl.util.filesystem import join_path
|
from llnl.util.filesystem import join_path
|
||||||
|
@ -35,6 +36,7 @@
|
||||||
import spack.error
|
import spack.error
|
||||||
import spack.spec
|
import spack.spec
|
||||||
import spack.config
|
import spack.config
|
||||||
|
import spack.architecture
|
||||||
|
|
||||||
from spack.util.multiproc import parmap
|
from spack.util.multiproc import parmap
|
||||||
from spack.compiler import Compiler
|
from spack.compiler import Compiler
|
||||||
|
@ -45,50 +47,128 @@
|
||||||
_imported_compilers_module = 'spack.compilers'
|
_imported_compilers_module = 'spack.compilers'
|
||||||
_required_instance_vars = ['cc', 'cxx', 'f77', 'fc']
|
_required_instance_vars = ['cc', 'cxx', 'f77', 'fc']
|
||||||
|
|
||||||
_default_order = ['gcc', 'intel', 'pgi', 'clang', 'xlc']
|
# TODO: customize order in config file
|
||||||
|
if platform.system() == 'Darwin':
|
||||||
|
_default_order = ['clang', 'gcc', 'intel']
|
||||||
|
else:
|
||||||
|
_default_order = ['gcc', 'intel', 'pgi', 'clang', 'xlc']
|
||||||
|
|
||||||
|
|
||||||
def _auto_compiler_spec(function):
|
def _auto_compiler_spec(function):
|
||||||
def converter(cspec_like):
|
def converter(cspec_like, *args, **kwargs):
|
||||||
if not isinstance(cspec_like, spack.spec.CompilerSpec):
|
if not isinstance(cspec_like, spack.spec.CompilerSpec):
|
||||||
cspec_like = spack.spec.CompilerSpec(cspec_like)
|
cspec_like = spack.spec.CompilerSpec(cspec_like)
|
||||||
return function(cspec_like)
|
return function(cspec_like, *args, **kwargs)
|
||||||
return converter
|
return converter
|
||||||
|
|
||||||
|
|
||||||
def _get_config():
|
def _to_dict(compiler):
|
||||||
"""Get a Spack config, but make sure it has compiler configuration
|
"""Return a dict version of compiler suitable to insert in YAML."""
|
||||||
first."""
|
return {
|
||||||
|
str(compiler.spec) : dict(
|
||||||
|
(attr, getattr(compiler, attr, None))
|
||||||
|
for attr in _required_instance_vars)
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def get_compiler_config(arch=None, scope=None):
|
||||||
|
"""Return the compiler configuration for the specified architecture.
|
||||||
|
"""
|
||||||
# If any configuration file has compilers, just stick with the
|
# If any configuration file has compilers, just stick with the
|
||||||
# ones already configured.
|
# ones already configured.
|
||||||
config = spack.config.get_compilers_config()
|
config = spack.config.get_config('compilers', scope=scope)
|
||||||
existing = [spack.spec.CompilerSpec(s)
|
|
||||||
for s in config]
|
|
||||||
if existing:
|
|
||||||
return config
|
|
||||||
|
|
||||||
compilers = find_compilers(*get_path('PATH'))
|
my_arch = spack.architecture.sys_type()
|
||||||
add_compilers_to_config('user', *compilers)
|
if arch is None:
|
||||||
|
arch = my_arch
|
||||||
|
|
||||||
# After writing compilers to the user config, return a full config
|
if arch in config:
|
||||||
# from all files.
|
return config[arch]
|
||||||
return spack.config.get_compilers_config()
|
|
||||||
|
# Only for the current arch in *highest* scope: automatically try to
|
||||||
|
# find compilers if none are configured yet.
|
||||||
|
if arch == my_arch and scope == 'user':
|
||||||
|
config[arch] = {}
|
||||||
|
compilers = find_compilers(*get_path('PATH'))
|
||||||
|
for compiler in compilers:
|
||||||
|
config[arch].update(_to_dict(compiler))
|
||||||
|
spack.config.update_config('compilers', config, scope=scope)
|
||||||
|
return config[arch]
|
||||||
|
|
||||||
|
return {}
|
||||||
|
|
||||||
|
|
||||||
|
def add_compilers_to_config(compilers, arch=None, scope=None):
|
||||||
|
"""Add compilers to the config for the specified architecture.
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
- compilers: a list of Compiler objects.
|
||||||
|
- arch: arch to add compilers for.
|
||||||
|
- scope: configuration scope to modify.
|
||||||
|
"""
|
||||||
|
if arch is None:
|
||||||
|
arch = spack.architecture.sys_type()
|
||||||
|
|
||||||
|
compiler_config = get_compiler_config(arch, scope)
|
||||||
|
for compiler in compilers:
|
||||||
|
compiler_config[str(compiler.spec)] = dict(
|
||||||
|
(c, getattr(compiler, c, "None"))
|
||||||
|
for c in _required_instance_vars)
|
||||||
|
|
||||||
|
update = { arch : compiler_config }
|
||||||
|
spack.config.update_config('compilers', update, scope)
|
||||||
|
|
||||||
|
|
||||||
|
@_auto_compiler_spec
|
||||||
|
def remove_compiler_from_config(compiler_spec, arch=None, scope=None):
|
||||||
|
"""Remove compilers from the config, by spec.
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
- compiler_specs: a list of CompilerSpec objects.
|
||||||
|
- arch: arch to add compilers for.
|
||||||
|
- scope: configuration scope to modify.
|
||||||
|
"""
|
||||||
|
if arch is None:
|
||||||
|
arch = spack.architecture.sys_type()
|
||||||
|
|
||||||
|
compiler_config = get_compiler_config(arch, scope)
|
||||||
|
del compiler_config[str(compiler_spec)]
|
||||||
|
update = { arch : compiler_config }
|
||||||
|
|
||||||
|
spack.config.update_config('compilers', update, scope)
|
||||||
|
|
||||||
|
|
||||||
|
def all_compilers_config(arch=None, scope=None):
|
||||||
|
"""Return a set of specs for all the compiler versions currently
|
||||||
|
available to build with. These are instances of CompilerSpec.
|
||||||
|
"""
|
||||||
|
# Get compilers for this architecture.
|
||||||
|
arch_config = get_compiler_config(arch, scope)
|
||||||
|
|
||||||
|
# Merge 'all' compilers with arch-specific ones.
|
||||||
|
# Arch-specific compilers have higher precedence.
|
||||||
|
merged_config = get_compiler_config('all', scope=scope)
|
||||||
|
merged_config = spack.config._merge_yaml(merged_config, arch_config)
|
||||||
|
|
||||||
|
return merged_config
|
||||||
|
|
||||||
|
|
||||||
|
def all_compilers(arch=None, scope=None):
|
||||||
|
# Return compiler specs from the merged config.
|
||||||
|
return [spack.spec.CompilerSpec(s)
|
||||||
|
for s in all_compilers_config(arch, scope)]
|
||||||
|
|
||||||
|
|
||||||
-_cached_default_compiler = None
 def default_compiler():
-    global _cached_default_compiler
-    if _cached_default_compiler:
-        return _cached_default_compiler
     versions = []
-    for name in _default_order: # TODO: customize order.
+    for name in _default_order:
         versions = find(name)
-        if versions: break
-    if not versions:
-        raise NoCompilersError()
+        if versions:
+            break
+    else:
+        raise NoCompilersError()

-    _cached_default_compiler = sorted(versions)[-1]
-    return _cached_default_compiler
+    return sorted(versions)[-1]


 def find_compilers(*path):
@@ -123,20 +203,6 @@ def find_compilers(*path):
     return clist


-def add_compilers_to_config(scope, *compilers):
-    compiler_config_tree = {}
-    for compiler in compilers:
-        compiler_entry = {}
-        for c in _required_instance_vars:
-            val = getattr(compiler, c)
-            if not val:
-                val = "None"
-            compiler_entry[c] = val
-        compiler_config_tree[str(compiler.spec)] = compiler_entry
-    spack.config.add_to_compiler_config(compiler_config_tree, scope)
-
-
 def supported_compilers():
     """Return a set of names of compilers supported by Spack.

@@ -152,27 +218,19 @@ def supported(compiler_spec):
     return compiler_spec.name in supported_compilers()


-def all_compilers():
-    """Return a set of specs for all the compiler versions currently
-       available to build with.  These are instances of CompilerSpec.
-    """
-    configuration = _get_config()
-    return [spack.spec.CompilerSpec(s) for s in configuration]
-
-
 @_auto_compiler_spec
-def find(compiler_spec):
+def find(compiler_spec, arch=None, scope=None):
     """Return specs of available compilers that match the supplied
        compiler spec.  Return an list if nothing found."""
-    return [c for c in all_compilers() if c.satisfies(compiler_spec)]
+    return [c for c in all_compilers(arch, scope) if c.satisfies(compiler_spec)]


 @_auto_compiler_spec
-def compilers_for_spec(compiler_spec):
+def compilers_for_spec(compiler_spec, arch=None, scope=None):
     """This gets all compilers that satisfy the supplied CompilerSpec.
        Returns an empty list if none are found.
     """
-    config = _get_config()
+    config = all_compilers_config(arch, scope)

     def get_compiler(cspec):
         items = config[str(cspec)]
@@ -191,7 +249,7 @@ def get_compiler(cspec):

         return cls(cspec, *compiler_paths)

-    matches = find(compiler_spec)
+    matches = find(compiler_spec, arch, scope)
     return [get_compiler(cspec) for cspec in matches]
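The rewritten ``default_compiler`` also drops the module-level cache and switches to Python's ``for``/``else``: the ``else`` clause runs only when the loop finishes without hitting ``break``. The idiom in isolation:

    def first_match(names, find):
        for name in names:
            versions = find(name)
            if versions:
                break
        else:                      # no break: nothing matched
            raise ValueError("no versions found")
        return sorted(versions)[-1]

    # first_match(['gcc', 'clang'], find) returns the newest matching
    # version for the first name that yields any results, and raises
    # only when every candidate comes back empty.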
@@ -1,5 +1,5 @@
 ##############################################################################
-# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
 # Produced at the Lawrence Livermore National Laboratory.
 #
 # This file is part of Spack.
@@ -45,11 +45,11 @@
 Configuration file format
 ===============================

-Configuration files are formatted using YAML syntax.
-This format is implemented by Python's
-yaml class, and it's easy to read and versatile.
+Configuration files are formatted using YAML syntax.  This format is
+implemented by libyaml (included with Spack as an external module),
+and it's easy to read and versatile.

-The config files are structured as trees, like this ``compiler`` section::
+Config files are structured as trees, like this ``compiler`` section::

   compilers:
     chaos_5_x86_64_ib:
@@ -67,274 +67,475 @@
 categorize entries beneath them in the tree.  At the root of the tree,
 entries like ''cc'' and ''cxx'' are specified as name/value pairs.

-Spack returns these trees as nested dicts.  The dict for the above example
-would looks like:
+``config.get_config()`` returns these trees as nested dicts, but it
+strips the first level off.  So, ``config.get_config('compilers')``
+would return something like this for the above example:

-   { 'compilers' :
-     { 'chaos_5_x86_64_ib' :
-       { 'gcc@4.4.7' :
-         { 'cc' : '/usr/bin/gcc',
-           'cxx' : '/usr/bin/g++'
-           'f77' : '/usr/bin/gfortran'
-           'fc' : '/usr/bin/gfortran' }
-       }
-     { 'bgqos_0' :
-       { 'cc' : '/usr/local/bin/mpixlc' }
-     }
-   }
+   { 'chaos_5_x86_64_ib' :
+     { 'gcc@4.4.7' :
+       { 'cc' : '/usr/bin/gcc',
+         'cxx' : '/usr/bin/g++'
+         'f77' : '/usr/bin/gfortran'
+         'fc' : '/usr/bin/gfortran' }
+     }
+   { 'bgqos_0' :
+      { 'cc' : '/usr/local/bin/mpixlc' } }
+
+Likewise, the ``mirrors.yaml`` file's first line must be ``mirrors:``,
+but ``get_config()`` strips that off too.
+
+Precedence
+===============================
+
+``config.py`` routines attempt to recursively merge configuration
+across scopes.  So if there are ``compilers.py`` files in both the
+site scope and the user scope, ``get_config('compilers')`` will return
+merged dictionaries of *all* the compilers available.  If a user
+compiler conflicts with a site compiler, Spack will overwrite the site
+configuration wtih the user configuration.  If both the user and site
+``mirrors.yaml`` files contain lists of mirrors, then ``get_config()``
+will return a concatenated list of mirrors, with the user config items
+first.
+
+Sometimes, it is useful to *completely* override a site setting with a
+user one.  To accomplish this, you can use *two* colons at the end of
+a key in a configuration file.  For example, this:
+
+   compilers::
+     chaos_5_x86_64_ib:
+        gcc@4.4.7:
+          cc: /usr/bin/gcc
+          cxx: /usr/bin/g++
+          f77: /usr/bin/gfortran
+          fc: /usr/bin/gfortran
+     bgqos_0:
+        xlc@12.1:
+          cc: /usr/local/bin/mpixlc
+          ...
+
+Will make Spack take compilers *only* from the user configuration, and
+the site configuration will be ignored.

-Some routines, like get_mirrors_config and get_compilers_config may strip
-off the top-levels of the tree and return subtrees.
 """
 import os
-import exceptions
+import re
 import sys
+import copy
+import jsonschema
+from jsonschema import Draft4Validator, validators

-from ordereddict_backport import OrderedDict
-from llnl.util.lang import memoized
-import spack.error

 import yaml
 from yaml.error import MarkedYAMLError
+from ordereddict_backport import OrderedDict

 import llnl.util.tty as tty
 from llnl.util.filesystem import mkdirp

-_config_sections = {}
-class _ConfigCategory:
-    name = None
-    filename = None
-    merge = True
-    def __init__(self, n, f, m):
-        self.name = n
-        self.filename = f
-        self.merge = m
-        self.files_read_from = []
-        self.result_dict = {}
-        _config_sections[n] = self
+import spack
+from spack.error import SpackError

-_ConfigCategory('compilers', 'compilers.yaml', True)
-_ConfigCategory('mirrors', 'mirrors.yaml', True)
-_ConfigCategory('view', 'views.yaml', True)
-_ConfigCategory('order', 'orders.yaml', True)
+# Hacked yaml for configuration files preserves line numbers.
+import spack.util.spack_yaml as syaml

-"""Names of scopes and their corresponding configuration files."""
-config_scopes = [('site', os.path.join(spack.etc_path, 'spack')),
-                 ('user', os.path.expanduser('~/.spack'))]
-_compiler_by_arch = {}
-_read_config_file_result = {}
-def _read_config_file(filename):
-    """Read a given YAML configuration file"""
-    global _read_config_file_result
-    if filename in _read_config_file_result:
-        return _read_config_file_result[filename]
+"""Dict from section names -> schema for that section."""
+section_schemas = {
+    'compilers': {
+        '$schema': 'http://json-schema.org/schema#',
+        'title': 'Spack compiler configuration file schema',
+        'type': 'object',
+        'additionalProperties': False,
+        'patternProperties': {
+            'compilers:?': {  # optional colon for overriding site config.
+                'type': 'object',
+                'default': {},
+                'additionalProperties': False,
+                'patternProperties': {
+                    r'\w[\w-]*': {               # architecture
+                        'type': 'object',
+                        'additionalProperties': False,
+                        'patternProperties': {
+                            r'\w[\w-]*@\w[\w-]*': {   # compiler spec
+                                'type': 'object',
+                                'additionalProperties': False,
+                                'required': ['cc', 'cxx', 'f77', 'fc'],
+                                'properties': {
+                                    'cc':  { 'anyOf': [ {'type' : 'string' },
+                                                        {'type' : 'null' }]},
+                                    'cxx': { 'anyOf': [ {'type' : 'string' },
+                                                        {'type' : 'null' }]},
+                                    'f77': { 'anyOf': [ {'type' : 'string' },
+                                                        {'type' : 'null' }]},
+                                    'fc':  { 'anyOf': [ {'type' : 'string' },
+                                                        {'type' : 'null' }]},
+                                },},},},},},},},
+
+    'mirrors': {
+        '$schema': 'http://json-schema.org/schema#',
+        'title': 'Spack mirror configuration file schema',
+        'type': 'object',
+        'additionalProperties': False,
+        'patternProperties': {
+            r'mirrors:?': {
+                'type': 'object',
+                'default': {},
+                'additionalProperties': False,
+                'patternProperties': {
+                    r'\w[\w-]*': {
+                        'type': 'string'},},},},},
+
+    'repos': {
+        '$schema': 'http://json-schema.org/schema#',
+        'title': 'Spack repository configuration file schema',
+        'type': 'object',
+        'additionalProperties': False,
+        'patternProperties': {
+            r'repos:?': {
+                'type': 'array',
+                'default': [],
+                'items': {
+                    'type': 'string'},},},},
+}
+
+"""OrderedDict of config scopes keyed by name.
+   Later scopes will override earlier scopes.
+"""
+config_scopes = OrderedDict()
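Any of these schemas can be exercised directly with stock ``jsonschema``; a sketch, assuming ``section_schemas`` is importable from ``spack.config`` (the data below is illustrative):

    import jsonschema
    from jsonschema import Draft4Validator
    from spack.config import section_schemas

    data = {
        'compilers': {
            'chaos_5_x86_64_ib': {
                'gcc@4.4.7': {'cc':  '/usr/bin/gcc',
                              'cxx': '/usr/bin/g++',
                              'f77': '/usr/bin/gfortran',
                              'fc':  '/usr/bin/gfortran'}}}}

    # Raises jsonschema.ValidationError if the structure is malformed.
    Draft4Validator(section_schemas['compilers']).validate(data)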
+def validate_section_name(section):
+    """Raise a ValueError if the section is not a valid section."""
+    if section not in section_schemas:
+        raise ValueError("Invalid config section: '%s'.  Options are %s."
+                         % (section, section_schemas))
+
+
+def extend_with_default(validator_class):
+    """Add support for the 'default' attribute for properties and patternProperties.
+
+       jsonschema does not handle this out of the box -- it only
+       validates.  This allows us to set default values for configs
+       where certain fields are `None` b/c they're deleted or
+       commented out.
+
+    """
+    validate_properties = validator_class.VALIDATORS["properties"]
+    validate_pattern_properties = validator_class.VALIDATORS["patternProperties"]
+
+    def set_defaults(validator, properties, instance, schema):
+        for property, subschema in properties.iteritems():
+            if "default" in subschema:
+                instance.setdefault(property, subschema["default"])
+        for err in validate_properties(validator, properties, instance, schema):
+            yield err
+
+    def set_pp_defaults(validator, properties, instance, schema):
+        for property, subschema in properties.iteritems():
+            if "default" in subschema:
+                if isinstance(instance, dict):
+                    for key, val in instance.iteritems():
+                        if re.match(property, key) and val is None:
+                            instance[key] = subschema["default"]
+
+        for err in validate_pattern_properties(validator, properties, instance, schema):
+            yield err
+
+    return validators.extend(validator_class, {
+        "properties" : set_defaults,
+        "patternProperties" : set_pp_defaults
+    })
+
+
+DefaultSettingValidator = extend_with_default(Draft4Validator)
+
+
+def validate_section(data, schema):
+    """Validate data read in from a Spack YAML file.
+
+       This leverages the line information (start_mark, end_mark) stored
+       on Spack YAML structures.
+
+    """
+    try:
+        DefaultSettingValidator(schema).validate(data)
+    except jsonschema.ValidationError as e:
+        raise ConfigFormatError(e, data)
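What the default-injecting validator buys: validating a document also fills in missing values that the schema declares via ``default``. A small sketch with an illustrative schema:

    from jsonschema import Draft4Validator

    MyValidator = extend_with_default(Draft4Validator)

    schema = {'type': 'object',
              'properties': {
                  'mirrors': {'type': 'object', 'default': {}}}}

    data = {}
    MyValidator(schema).validate(data)
    print data    # {'mirrors': {}} -- default injected during validation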
+class ConfigScope(object):
+    """This class represents a configuration scope.
+
+       A scope is one directory containing named configuration files.
+       Each file is a config "section" (e.g., mirrors, compilers, etc).
+    """
+
+    def __init__(self, name, path):
+        self.name = name         # scope name.
+        self.path = path         # path to directory containing configs.
+        self.sections = {}       # sections read from config files.
+
+        # Register in a dict of all ConfigScopes
+        # TODO: make this cleaner.  Mocking up for testing is brittle.
+        global config_scopes
+        config_scopes[name] = self
+
+    def get_section_filename(self, section):
+        validate_section_name(section)
+        return os.path.join(self.path, "%s.yaml" % section)
+
+    def get_section(self, section):
+        if not section in self.sections:
+            path   = self.get_section_filename(section)
+            schema = section_schemas[section]
+            data   = _read_config_file(path, schema)
+            self.sections[section] = data
+        return self.sections[section]
+
+    def write_section(self, section):
+        filename = self.get_section_filename(section)
+        data = self.get_section(section)
+        try:
+            mkdirp(self.path)
+            with open(filename, 'w') as f:
+                validate_section(data, section_schemas[section])
+                syaml.dump(data, stream=f, default_flow_style=False)
+        except jsonschema.ValidationError as e:
+            raise ConfigSanityError(e, data)
+        except (yaml.YAMLError, IOError) as e:
+            raise ConfigFileError("Error writing to config file: '%s'" % str(e))
+
+    def clear(self):
+        """Empty cached config information."""
+        self.sections = {}
+
+
+ConfigScope('site', os.path.join(spack.etc_path, 'spack')),
+ConfigScope('user', os.path.expanduser('~/.spack'))
+def highest_precedence_scope():
+    """Get the scope with highest precedence (prefs will override others)."""
+    return config_scopes.values()[-1]
+
+
+def validate_scope(scope):
+    """Ensure that scope is valid, and return a valid scope if it is None.
+
+       This should be used by routines in ``config.py`` to validate
+       scope name arguments, and to determine a default scope where no
+       scope is specified.
+
+    """
+    if scope is None:
+        # default to the scope with highest precedence.
+        return highest_precedence_scope()
+
+    elif scope in config_scopes:
+        return config_scopes[scope]
+
+    else:
+        raise ValueError("Invalid config scope: '%s'.  Must be one of %s."
+                         % (scope, config_scopes.keys()))
+
+
+def _read_config_file(filename, schema):
+    """Read a YAML configuration file."""
+    # Ignore nonexisting files.
+    if not os.path.exists(filename):
+        return None
+
+    elif not os.path.isfile(filename):
+        raise ConfigFileError(
+            "Invlaid configuration. %s exists but is not a file." % filename)
+
+    elif not os.access(filename, os.R_OK):
+        raise ConfigFileError("Config file is not readable: %s." % filename)
+
     try:
+        tty.debug("Reading config file %s" % filename)
         with open(filename) as f:
-            ydict = yaml.load(f)
+            data = syaml.load(f)
+
+        validate_section(data, schema)
+
+        return data
+
     except MarkedYAMLError, e:
-        tty.die("Error parsing yaml%s: %s" % (str(e.context_mark), e.problem))
-    except exceptions.IOError, e:
-        _read_config_file_result[filename] = None
-        return None
-    _read_config_file_result[filename] = ydict
-    return ydict
+        raise ConfigFileError(
+            "Error parsing yaml%s: %s" % (str(e.context_mark), e.problem))
+
+    except IOError, e:
+        raise ConfigFileError(
+            "Error reading configuration file %s: %s" % (filename, str(e)))
 def clear_config_caches():
     """Clears the caches for configuration files, which will cause them
        to be re-read upon the next request"""
-    for key,s in _config_sections.iteritems():
-        s.files_read_from = []
-        s.result_dict = {}
-    spack.config._read_config_file_result = {}
-    spack.config._compiler_by_arch = {}
-    spack.compilers._cached_default_compiler = None
+    for scope in config_scopes.values():
+        scope.clear()


-def _merge_dicts(d1, d2):
-    """Recursively merges two configuration trees, with entries
-       in d2 taking precedence over d1"""
-    if not d1:
-        return d2.copy()
-    if not d2:
-        return d1
-
-    for key2, val2 in d2.iteritems():
-        if not key2 in d1:
-            d1[key2] = val2
-            continue
-        val1 = d1[key2]
-        if isinstance(val1, dict) and isinstance(val2, dict):
-            d1[key2] = _merge_dicts(val1, val2)
-            continue
-        if isinstance(val1, list) and isinstance(val2, list):
-            val1.extend(val2)
-            seen = set()
-            d1[key2] = [ x for x in val1 if not (x in seen or seen.add(x)) ]
-            continue
-        d1[key2] = val2
-    return d1
+def _merge_yaml(dest, source):
+    """Merges source into dest; entries in source take precedence over dest.
+
+       This routine may modify dest and should be assigned to dest, in
+       case dest was None to begin with, e.g.:
+
+          dest = _merge_yaml(dest, source)
+
+       Config file authors can optionally end any attribute in a dict
+       with `::` instead of `:`, and the key will override that of the
+       parent instead of merging.
+
+    """
+    def they_are(t):
+        return isinstance(dest, t) and isinstance(source, t)
+
+    # If both are None, handle specially and return None.
+    if source is None and dest is None:
+        return None
+
+    # If source is None, overwrite with source.
+    elif source is None:
+        return None
+
+    # Source list is prepended (for precedence)
+    if they_are(list):
+        seen = set(source)
+        dest[:] = source + [x for x in dest if x not in seen]
+        return dest
+
+    # Source dict is merged into dest.
+    elif they_are(dict):
+        for sk, sv in source.iteritems():
+            if not sk in dest:
+                dest[sk] = copy.copy(sv)
+            else:
+                dest[sk] = _merge_yaml(dest[sk], source[sk])
+        return dest
+
+    # In any other case, overwrite with a copy of the source value.
+    else:
+        return copy.copy(source)
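The merge semantics in ``_merge_yaml`` differ by type: dicts merge recursively with ``source`` winning on conflicts, lists concatenate with ``source`` items first, and everything else is overwritten. For example:

    dest   = {'gcc@4.4.7': {'cc': '/usr/bin/gcc'}}
    source = {'gcc@4.4.7': {'cc': '/opt/bin/gcc'}}
    dest = _merge_yaml(dest, source)
    print dest['gcc@4.4.7']['cc']                 # /opt/bin/gcc (source wins)

    print _merge_yaml(['site-mirror'], ['user-mirror'])
    # ['user-mirror', 'site-mirror'] -- higher-precedence items come first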
-def get_config(category_name):
-    """Get the confguration tree for the names category.  Strips off the
-       top-level category entry from the dict"""
-    global config_scopes
-    category = _config_sections[category_name]
-    if category.result_dict:
-        return category.result_dict
-
-    category.result_dict = {}
-    for scope, scope_path in config_scopes:
-        path = os.path.join(scope_path, category.filename)
-        result = _read_config_file(path)
-        if not result:
-            continue
-        if not category_name in result:
-            continue
-        category.files_read_from.insert(0, path)
-        result = result[category_name]
-        if category.merge:
-            category.result_dict = _merge_dicts(category.result_dict, result)
-        else:
-            category.result_dict = result
-    return category.result_dict
+def get_config(section, scope=None):
+    """Get configuration settings for a section.
+
+       Strips off the top-level section name from the YAML dict.
+    """
+    validate_section_name(section)
+    merged_section = syaml.syaml_dict()
+
+    if scope is None:
+        scopes = config_scopes.values()
+    else:
+        scopes = [validate_scope(scope)]
+
+    for scope in scopes:
+        # read potentially cached data from the scope.
+        data = scope.get_section(section)
+
+        # Skip empty configs
+        if not data or not isinstance(data, dict):
+            continue
+
+        # Allow complete override of site config with '<section>::'
+        override_key = section + ':'
+        if not (section in data or override_key in data):
+            tty.warn("Skipping bad configuration file: '%s'" % scope.path)
+            continue
+
+        if override_key in data:
+            merged_section = data[override_key]
+        else:
+            merged_section = _merge_yaml(merged_section, data[section])
+
+    return merged_section
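The ``override_key`` branch is what implements the double-colon rule from the module docstring. The same logic on plain dicts (scope data here is illustrative):

    site_data = {'mirrors': {'remote': 'https://example.com/site-mirror'}}
    user_data = {'mirrors:': {'local': '/tmp/mirror'}}   # 'mirrors::' in YAML

    section = 'mirrors'
    override_key = section + ':'
    merged = {}
    for data in [site_data, user_data]:          # low to high precedence
        if override_key in data:
            merged = data[override_key]           # wholesale replacement
        else:
            merged = _merge_yaml(merged, data[section])
    print merged    # {'local': '/tmp/mirror'} -- site mirror dropped entirely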
-def get_compilers_config(arch=None):
-    """Get the compiler configuration from config files for the given
-       architecture.  Strips off the architecture component of the
-       configuration"""
-    global _compiler_by_arch
-    if not arch:
-        arch = spack.architecture.sys_type()
-    if arch in _compiler_by_arch:
-        return _compiler_by_arch[arch]
-
-    cc_config = get_config('compilers')
-    if arch in cc_config and 'all' in cc_config:
-        arch_compiler = dict(cc_config[arch])
-        _compiler_by_arch[arch] = _merge_dict(arch_compiler, cc_config['all'])
-    elif arch in cc_config:
-        _compiler_by_arch[arch] = cc_config[arch]
-    elif 'all' in cc_config:
-        _compiler_by_arch[arch] = cc_config['all']
-    else:
-        _compiler_by_arch[arch] = {}
-    return _compiler_by_arch[arch]
-
-
-def get_mirror_config():
-    """Get the mirror configuration from config files"""
-    return get_config('mirrors')
-
-
-def get_config_scope_dirname(scope):
-    """For a scope return the config directory"""
-    global config_scopes
-    for s,p in config_scopes:
-        if s == scope:
-            return p
-    tty.die("Unknown scope %s.  Valid options are %s" %
-            (scope, ", ".join([s for s,p in config_scopes])))
-
-
-def get_config_scope_filename(scope, category_name):
-    """For some scope and category, get the name of the configuration file"""
-    if not category_name in _config_sections:
-        tty.die("Unknown config category %s.  Valid options are: %s" %
-                (category_name, ", ".join([s for s in _config_sections])))
-    return os.path.join(get_config_scope_dirname(scope), _config_sections[category_name].filename)
-
-
-def add_to_config(category_name, addition_dict, scope=None):
-    """Merge a new dict into a configuration tree and write the new
-       configuration to disk"""
-    global _read_config_file_result
-    get_config(category_name)
-    category = _config_sections[category_name]
-
-    #If scope is specified, use it.  Otherwise use the last config scope that
-    #we successfully parsed data from.
-    file = None
-    path = None
-    if not scope and not category.files_read_from:
-        scope = 'user'
-    if scope:
-        try:
-            dir = get_config_scope_dirname(scope)
-            if not os.path.exists(dir):
-                mkdirp(dir)
-            path = os.path.join(dir, category.filename)
-            file = open(path, 'w')
-        except exceptions.IOError, e:
-            pass
-    else:
-        for p in category.files_read_from:
-            try:
-                file = open(p, 'w')
-            except exceptions.IOError, e:
-                pass
-            if file:
-                path = p
-                break;
-    if not file:
-        tty.die('Unable to write to config file %s' % path)
-
-    #Merge the new information into the existing file info, then write to disk
-    new_dict = _read_config_file_result[path]
-    if new_dict and category_name in new_dict:
-        new_dict = new_dict[category_name]
-    new_dict = _merge_dicts(new_dict, addition_dict)
-    new_dict = { category_name : new_dict }
-    _read_config_file_result[path] = new_dict
-    yaml.dump(new_dict, stream=file, default_flow_style=False)
-    file.close()
-
-    #Merge the new information into the cached results
-    category.result_dict = _merge_dicts(category.result_dict, addition_dict)
-
-
-def add_to_mirror_config(addition_dict, scope=None):
-    """Add mirrors to the configuration files"""
-    add_to_config('mirrors', addition_dict, scope)
-
-
-def add_to_compiler_config(addition_dict, scope=None, arch=None):
-    """Add compilerss to the configuration files"""
-    if not arch:
-        arch = spack.architecture.sys_type()
-    add_to_config('compilers', { arch : addition_dict }, scope)
-    clear_config_caches()
-
-
-def remove_from_config(category_name, key_to_rm, scope=None):
-    """Remove a configuration key and write a new configuration to disk"""
-    global config_scopes
-    get_config(category_name)
-    scopes_to_rm_from = [scope] if scope else [s for s,p in config_scopes]
-    category = _config_sections[category_name]
-
-    rmd_something = False
-    for s in scopes_to_rm_from:
-        path = get_config_scope_filename(scope, category_name)
-        result = _read_config_file(path)
-        if not result:
-            continue
-        if not key_to_rm in result[category_name]:
-            continue
-        with open(path, 'w') as f:
-            result[category_name].pop(key_to_rm, None)
-            yaml.dump(result, stream=f, default_flow_style=False)
-            category.result_dict.pop(key_to_rm, None)
-            rmd_something = True
-    return rmd_something
-
-
-"""Print a configuration to stdout"""
-def print_category(category_name):
-    if not category_name in _config_sections:
-        tty.die("Unknown config category %s.  Valid options are: %s" %
-                (category_name, ", ".join([s for s in _config_sections])))
-    yaml.dump(get_config(category_name), stream=sys.stdout, default_flow_style=False)
+def get_config_filename(scope, section):
+    """For some scope and section, get the name of the configuration file"""
+    scope = validate_scope(scope)
+    return scope.get_section_filename(section)
+
+
+def update_config(section, update_data, scope=None):
+    """Update the configuration file for a particular scope.
+
+       Overwrites contents of a section in a scope with update_data,
+       then writes out the config file.
+
+       update_data should have the top-level section name stripped off
+       (it will be re-added).  Data itself can be a list, dict, or any
+       other yaml-ish structure.
+
+    """
+    # read in the config to ensure we've got current data
+    get_config(section)
+
+    validate_section_name(section)    # validate section name
+    scope = validate_scope(scope)     # get ConfigScope object from string.
+
+    # read only the requested section's data.
+    scope.sections[section] = { section : update_data }
+    scope.write_section(section)
+
+
+def print_section(section):
+    """Print a configuration to stdout."""
+    try:
+        data = syaml.syaml_dict()
+        data[section] = get_config(section)
+        syaml.dump(data, stream=sys.stdout, default_flow_style=False)
+    except (yaml.YAMLError, IOError) as e:
+        raise ConfigError("Error reading configuration: %s" % section)
+
+
+class ConfigError(SpackError): pass
+class ConfigFileError(ConfigError): pass
+
+
+def get_path(path, data):
+    if path:
+        return get_path(path[1:], data[path[0]])
+    else:
+        return data
+
+
+class ConfigFormatError(ConfigError):
+    """Raised when a configuration format does not match its schema."""
+    def __init__(self, validation_error, data):
+        # Try to get line number from erroneous instance and its parent
+        instance_mark = getattr(validation_error.instance, '_start_mark', None)
+        parent_mark = getattr(validation_error.parent, '_start_mark', None)
+        path = getattr(validation_error, 'path', None)
+
+        # Try really hard to get the parent (which sometimes is not
+        # set)  This digs it out of the validated structure if it's not
+        # on the validation_error.
+        if not parent_mark:
+            parent_path = list(path)[:-1]
+            parent = get_path(parent_path, data)
+            if path[-1] in parent:
+                if isinstance(parent, dict):
+                    keylist = parent.keys()
+                elif isinstance(parent, list):
+                    keylist = parent
+                idx = keylist.index(path[-1])
+                parent_mark = getattr(keylist[idx], '_start_mark', None)
+
+        if instance_mark:
+            location = '%s:%d' % (instance_mark.name, instance_mark.line + 1)
+        elif parent_mark:
+            location = '%s:%d' % (parent_mark.name, parent_mark.line + 1)
+        elif path:
+            location = 'At ' + ':'.join(path)
+        else:
+            location = '<unknown line>'
+
+        message = '%s: %s' % (location, validation_error.message)
+        super(ConfigError, self).__init__(message)
+
+
+class ConfigSanityError(ConfigFormatError):
+    """Same as ConfigFormatError, raised when config is written by Spack."""
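Taken together, ``get_config``/``update_config`` give a read-modify-write cycle for any section; a sketch (the repository path is hypothetical):

    repos = get_config('repos')                  # merged view across scopes
    repos.insert(0, '/path/to/my/repo')          # hypothetical repo root
    update_config('repos', repos, scope='user')  # validated, then written out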
@@ -54,7 +54,7 @@
 from spack.version import Version
 from spack.spec import Spec
 from spack.error import SpackError
-from spack.packages import UnknownPackageError
+from spack.repository import UnknownPackageError

 # DB goes in this directory underneath the root
 _db_dirname = '.spack-db'
@@ -215,7 +215,6 @@ def _read_spec_from_yaml(self, hash_key, installs, parent_key=None):
         # Specs from the database need to be marked concrete because
         # they represent actual installations.
         spec._mark_concrete()
-
         return spec
@@ -554,7 +553,7 @@ def query(self, query_spec=any, known=any, installed=True):
         for key, rec in self._data.items():
             if installed is not any and rec.installed != installed:
                 continue
-            if known is not any and spack.db.exists(rec.spec.name) != known:
+            if known is not any and spack.repo.exists(rec.spec.name) != known:
                 continue
             if query_spec is any or rec.spec.satisfies(query_spec):
                 results.append(rec.spec)
@@ -244,11 +244,10 @@ def patch(pkg, url_or_filename, level=1, when=None):
     if when is None:
         when = pkg.name
     when_spec = parse_anonymous_spec(when, pkg.name)

     cur_patches = pkg.patches.setdefault(when_spec, [])
     # if this spec is identical to some other, then append this
     # patch to the existing list.
-    cur_patches.append(Patch(pkg.name, url_or_filename, level))
+    cur_patches.append(Patch(pkg, url_or_filename, level))


 @directive('variants')
@@ -213,7 +213,6 @@ def read_spec(self, path):

         # Specs read from actual installations are always concrete
         spec._mark_concrete()
-
         return spec
@@ -523,7 +523,7 @@ def quote(string):
     return '"%s"' % string

     if not specs:
-        specs = [p.name for p in spack.db.all_packages()]
+        specs = [p.name for p in spack.repo.all_packages()]
     else:
         roots = specs
         specs = set()
@@ -374,7 +374,7 @@ def __init__(self, spec):
         self._total_time = 0.0

         if self.is_extension:
-            spack.db.get(self.extendee_spec)._check_extendable()
+            spack.repo.get(self.extendee_spec)._check_extendable()


     @property
@@ -566,7 +566,7 @@ def preorder_traversal(self, visited=None, **kwargs):
             yield spec
             continue

-        for pkg in spack.db.get(name).preorder_traversal(visited, **kwargs):
+        for pkg in spack.repo.get(name).preorder_traversal(visited, **kwargs):
             yield pkg

@@ -808,6 +808,12 @@ def do_patch(self):
             touch(no_patches_file)


+    @property
+    def namespace(self):
+        namespace, dot, module = self.__module__.rpartition('.')
+        return namespace
+
+
     def do_fake_install(self):
         """Make a fake install directory contaiing a 'fake' file in bin."""
         mkdirp(self.prefix.bin)
@@ -886,7 +892,7 @@ def cleanup():
             tty.warn("Keeping install prefix in place despite error.",
                      "Spack will think this package is installed." +
                      "Manually remove this directory to fix:",
-                     self.prefix)
+                     self.prefix, wrap=True)


     def real_work():
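The new ``namespace`` property relies only on ``str.rpartition``: a package class imported as ``spack.pkg.builtin.mpich`` reports the namespace ``spack.pkg.builtin``. In isolation:

    namespace, dot, module = 'spack.pkg.builtin.mpich'.rpartition('.')
    print namespace    # spack.pkg.builtin
    print module       # mpich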
@@ -1,210 +0,0 @@
-##############################################################################
-# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
-# Produced at the Lawrence Livermore National Laboratory.
-#
-# This file is part of Spack.
-# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
-# LLNL-CODE-647188
-#
-# For details, see https://github.com/llnl/spack
-# Please also see the LICENSE file for our notice and the LGPL.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License (as published by
-# the Free Software Foundation) version 2.1 dated February 1999.
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
-# conditions of the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-##############################################################################
-import os
-import sys
-import inspect
-import glob
-import imp
-
-import llnl.util.tty as tty
-from llnl.util.filesystem import join_path
-from llnl.util.lang import *
-
-import spack.error
-import spack.spec
-from spack.virtual import ProviderIndex
-from spack.util.naming import mod_to_class, validate_module_name
-
-# Name of module under which packages are imported
-_imported_packages_module = 'spack.packages'
-
-# Name of the package file inside a package directory
-_package_file_name = 'package.py'
-
-
-def _autospec(function):
-    """Decorator that automatically converts the argument of a single-arg
-       function to a Spec."""
-    def converter(self, spec_like, **kwargs):
-        if not isinstance(spec_like, spack.spec.Spec):
-            spec_like = spack.spec.Spec(spec_like)
-        return function(self, spec_like, **kwargs)
-    return converter
-
-
-class PackageDB(object):
-    def __init__(self, root):
-        """Construct a new package database from a root directory."""
-        self.root = root
-        self.instances = {}
-        self.provider_index = None
-
-
-    @_autospec
-    def get(self, spec, **kwargs):
-        if spec.virtual:
-            raise UnknownPackageError(spec.name)
-
-        key = hash(spec)
-        if kwargs.get('new', False):
-            if key in self.instances:
-                del self.instances[key]
-
-        if not key in self.instances:
-            package_class = self.get_class_for_package_name(spec.name)
-            try:
-                copy = spec.copy() # defensive copy.  Package owns its spec.
-                self.instances[key] = package_class(copy)
-            except Exception, e:
-                if spack.debug:
-                    sys.excepthook(*sys.exc_info())
-                raise FailedConstructorError(spec.name, e)
-
-        return self.instances[key]
-
-
-    @_autospec
-    def delete(self, spec):
-        """Force a package to be recreated."""
-        del self.instances[spec.dag_hash()]
-
-
-    def purge(self):
-        """Clear entire package instance cache."""
-        self.instances.clear()
-
-
-    @_autospec
-    def providers_for(self, vpkg_spec):
-        if self.provider_index is None:
-            self.provider_index = ProviderIndex(self.all_package_names())
-
-        providers = self.provider_index.providers_for(vpkg_spec)
-        if not providers:
-            raise UnknownPackageError(vpkg_spec.name)
-        return providers
-
-
-    @_autospec
-    def extensions_for(self, extendee_spec):
-        return [p for p in self.all_packages() if p.extends(extendee_spec)]
-
-
-    def dirname_for_package_name(self, pkg_name):
-        """Get the directory name for a particular package.  This is the
-           directory that contains its package.py file."""
-        return join_path(self.root, pkg_name)
-
-
-    def filename_for_package_name(self, pkg_name):
-        """Get the filename for the module we should load for a particular
-           package.  Packages for a pacakge DB live in
-           ``$root/<package_name>/package.py``
-
-           This will return a proper package.py path even if the
-           package doesn't exist yet, so callers will need to ensure
-           the package exists before importing.
-        """
-        validate_module_name(pkg_name)
-        pkg_dir = self.dirname_for_package_name(pkg_name)
-        return join_path(pkg_dir, _package_file_name)
-
-
-    @memoized
-    def all_package_names(self):
-        """Generator function for all packages.  This looks for
-           ``<pkg_name>/package.py`` files within the root direcotry"""
-        all_package_names = []
-        for pkg_name in os.listdir(self.root):
-            pkg_dir  = join_path(self.root, pkg_name)
-            pkg_file = join_path(pkg_dir, _package_file_name)
-            if os.path.isfile(pkg_file):
-                all_package_names.append(pkg_name)
-        all_package_names.sort()
-        return all_package_names
-
-
-    def all_packages(self):
-        for name in self.all_package_names():
-            yield self.get(name)
-
-
-    @memoized
-    def exists(self, pkg_name):
-        """Whether a package with the supplied name exists ."""
-        return os.path.exists(self.filename_for_package_name(pkg_name))
-
-
-    @memoized
-    def get_class_for_package_name(self, pkg_name):
-        """Get an instance of the class for a particular package.
-
-           This method uses Python's ``imp`` package to load python
-           source from a Spack package's ``package.py`` file.  A
-           normal python import would only load each package once, but
-           because we do this dynamically, the method needs to be
-           memoized to ensure there is only ONE package class
-           instance, per package, per database.
-        """
-        file_path = self.filename_for_package_name(pkg_name)
-
-        if os.path.exists(file_path):
-            if not os.path.isfile(file_path):
-                tty.die("Something's wrong. '%s' is not a file!" % file_path)
-            if not os.access(file_path, os.R_OK):
-                tty.die("Cannot read '%s'!" % file_path)
-        else:
-            raise UnknownPackageError(pkg_name)
-
-        class_name = mod_to_class(pkg_name)
-        try:
-            module_name = _imported_packages_module + '.' + pkg_name
-            module = imp.load_source(module_name, file_path)
-
-        except ImportError, e:
-            tty.die("Error while importing %s from %s:\n%s" % (
-                pkg_name, file_path, e.message))
-
-        cls = getattr(module, class_name)
-        if not inspect.isclass(cls):
-            tty.die("%s.%s is not a class" % (pkg_name, class_name))
-
-        return cls
-
-
-class UnknownPackageError(spack.error.SpackError):
-    """Raised when we encounter a package spack doesn't have."""
-    def __init__(self, name):
-        super(UnknownPackageError, self).__init__("Package '%s' not found." % name)
-        self.name = name
-
-
-class FailedConstructorError(spack.error.SpackError):
-    """Raised when a package's class constructor fails."""
-    def __init__(self, name, reason):
-        super(FailedConstructorError, self).__init__(
-            "Class constructor failed for package '%s'." % name,
-            str(reason))
-        self.name = name
@@ -41,8 +41,8 @@ class Patch(object):
     """This class describes a patch to be applied to some expanded
        source code."""

-    def __init__(self, pkg_name, path_or_url, level):
-        self.pkg_name = pkg_name
+    def __init__(self, pkg, path_or_url, level):
+        self.pkg_name = pkg.name
         self.path_or_url = path_or_url
         self.path = None
         self.url = None
@@ -54,7 +54,7 @@ def __init__(self, pkg_name, path_or_url, level):
         if '://' in path_or_url:
             self.url = path_or_url
         else:
-            pkg_dir = spack.db.dirname_for_package_name(pkg_name)
+            pkg_dir = spack.repo.dirname_for_package_name(self.pkg_name)
             self.path = join_path(pkg_dir, path_or_url)
             if not os.path.isfile(self.path):
                 raise NoSuchPatchFileError(pkg_name, self.path)
747 lib/spack/spack/repository.py Normal file
@@ -0,0 +1,747 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import os
+import exceptions
+import sys
+import inspect
+import imp
+import re
+import traceback
+from bisect import bisect_left
+from external import yaml
+
+import llnl.util.tty as tty
+from llnl.util.filesystem import join_path
+
+import spack.error
+import spack.config
+import spack.spec
+from spack.virtual import ProviderIndex
+from spack.util.naming import *
+
+#
+# Super-namespace for all packages.
+# Package modules are imported as spack.pkg.<namespace>.<pkg-name>.
+#
+repo_namespace     = 'spack.pkg'
+
+#
+# These names describe how repos should be laid out in the filesystem.
+#
+repo_config_name   = 'repo.yaml'   # Top-level filename for repo config.
+packages_dir_name  = 'packages'    # Top-level repo directory containing pkgs.
+package_file_name  = 'package.py'  # Filename for packages in a repository.
+
+# Guaranteed unused default value for some functions.
+NOT_PROVIDED = object()
+def _autospec(function):
+    """Decorator that automatically converts the argument of a single-arg
+       function to a Spec."""
+    def converter(self, spec_like, *args, **kwargs):
+        if not isinstance(spec_like, spack.spec.Spec):
+            spec_like = spack.spec.Spec(spec_like)
+        return function(self, spec_like, *args, **kwargs)
+    return converter
+
+
+def _make_namespace_module(ns):
+    module = imp.new_module(ns)
+    module.__file__ = "(spack namespace)"
+    module.__path__ = []
+    module.__package__ = ns
+    return module
+
+
+def substitute_spack_prefix(path):
+    """Replaces instances of $spack with Spack's prefix."""
+    return re.sub(r'^\$spack', spack.prefix, path)
+
+
+def canonicalize_path(path):
+    """Substitute $spack, expand user home, take abspath."""
+    path = substitute_spack_prefix(path)
+    path = os.path.expanduser(path)
+    path = os.path.abspath(path)
+    return path
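Note that ``substitute_spack_prefix`` anchors the pattern, so ``$spack`` is only replaced at the start of a path. Assuming a Spack prefix of ``/opt/spack``:

    print canonicalize_path('$spack/var/spack/repos/builtin')
    # /opt/spack/var/spack/repos/builtin

    print canonicalize_path('~/my-repos/custom')
    # e.g. /home/someuser/my-repos/custom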
+class RepoPath(object):
+    """A RepoPath is a list of repos that function as one.
+
+       It functions exactly like a Repo, but it operates on the
+       combined results of the Repos in its list instead of on a
+       single package repository.
+    """
+    def __init__(self, *repo_dirs, **kwargs):
+        # super-namespace for all packages in the RepoPath
+        self.super_namespace = kwargs.get('namespace', repo_namespace)
+
+        self.repos = []
+        self.by_namespace = NamespaceTrie()
+        self.by_path = {}
+
+        self._all_package_names = []
+        self._provider_index = None
+
+        # If repo_dirs is empty, just use the configuration
+        if not repo_dirs:
+            repo_dirs = spack.config.get_config('repos')
+            if not repo_dirs:
+                raise NoRepoConfiguredError(
+                    "Spack configuration contains no package repositories.")
+
+        # Add each repo to this path.
+        for root in repo_dirs:
+            try:
+                repo = Repo(root, self.super_namespace)
+                self.put_last(repo)
+            except RepoError as e:
+                tty.warn("Failed to initialize repository at '%s'." % root,
+                         e.message,
+                         "To remove the bad repository, run this command:",
+                         "    spack repo rm %s" % root)
+
+
+    def swap(self, other):
+        """Convenience function to make swapping repostiories easier.
+
+           This is currently used by mock tests.
+           TODO: Maybe there is a cleaner way.
+
+        """
+        attrs = ['repos',
+                 'by_namespace',
+                 'by_path',
+                 '_all_package_names',
+                 '_provider_index']
+        for attr in attrs:
+            tmp = getattr(self, attr)
+            setattr(self, attr, getattr(other, attr))
+            setattr(other, attr, tmp)
+
+
+    def _add(self, repo):
+        """Add a repository to the namespace and path indexes.
+
+           Checks for duplicates -- two repos can't have the same root
+           directory, and they provide have the same namespace.
+
+        """
+        if repo.root in self.by_path:
+            raise DuplicateRepoError("Duplicate repository: '%s'" % repo.root)
+
+        if repo.namespace in self.by_namespace:
+            raise DuplicateRepoError(
+                "Package repos '%s' and '%s' both provide namespace %s."
+                % (repo.root, self.by_namespace[repo.namespace].root, repo.namespace))
+
+        # Add repo to the pkg indexes
+        self.by_namespace[repo.full_namespace] = repo
+        self.by_path[repo.root] = repo
+
+        # add names to the cached name list
+        new_pkgs = set(repo.all_package_names())
+        new_pkgs.update(set(self._all_package_names))
+        self._all_package_names = sorted(new_pkgs, key=lambda n:n.lower())
+
+
+    def put_first(self, repo):
+        """Add repo first in the search path."""
+        self._add(repo)
+        self.repos.insert(0, repo)
+
+
+    def put_last(self, repo):
+        """Add repo last in the search path."""
+        self._add(repo)
+        self.repos.append(repo)
+
+
+    def remove(self, repo):
+        """Remove a repo from the search path."""
+        if repo in self.repos:
+            self.repos.remove(repo)
+
+
+    def get_repo(self, namespace, default=NOT_PROVIDED):
+        """Get a repository by namespace.
+           Arguments
+             namespace
+               Look up this namespace in the RepoPath, and return
+               it if found.
+
+           Optional Arguments
+             default
+               If default is provided, return it when the namespace
+               isn't found.  If not, raise an UnknownNamespaceError.
+        """
+        fullspace = '%s.%s' % (self.super_namespace, namespace)
+        if fullspace not in self.by_namespace:
+            if default == NOT_PROVIDED:
+                raise UnknownNamespaceError(namespace)
+            return default
+        return self.by_namespace[fullspace]
+
+
+    def first_repo(self):
+        """Get the first repo in precedence order."""
+        return self.repos[0] if self.repos else None
+
+
+    def all_package_names(self):
+        """Return all unique package names in all repositories."""
+        return self._all_package_names
+
+
+    def all_packages(self):
+        for name in self.all_package_names():
+            yield self.get(name)
+
+
+    @_autospec
+    def providers_for(self, vpkg_spec):
+        if self._provider_index is None:
+            self._provider_index = ProviderIndex(self.all_package_names())
+
+        providers = self._provider_index.providers_for(vpkg_spec)
+        if not providers:
+            raise UnknownPackageError(vpkg_spec.name)
+        return providers
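A sketch of how the class is meant to be driven (the explicit path is illustrative); with no arguments the roots come from ``get_config('repos')``, i.e. ``repos.yaml``:

    repos = RepoPath('/opt/spack/var/spack/repos/builtin')   # explicit root
    print repos.all_package_names()[:5]
    pkg = repos.get('mpich')      # string converted to a Spec by @_autospec

    default_path = RepoPath()     # roots read from the 'repos' config section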
def find_module(self, fullname, path=None):
|
||||||
|
"""Implements precedence for overlaid namespaces.
|
||||||
|
|
||||||
|
Loop checks each namespace in self.repos for packages, and
|
||||||
|
also handles loading empty containing namespaces.
|
||||||
|
|
||||||
|
"""
|
||||||
|
# namespaces are added to repo, and package modules are leaves.
|
||||||
|
namespace, dot, module_name = fullname.rpartition('.')
|
||||||
|
|
||||||
|
# If it's a module in some repo, or if it is the repo's
|
||||||
|
# namespace, let the repo handle it.
|
||||||
|
for repo in self.repos:
|
||||||
|
if namespace == repo.full_namespace:
|
||||||
|
if repo.real_name(module_name):
|
||||||
|
return repo
|
||||||
|
elif fullname == repo.full_namespace:
|
||||||
|
return repo
|
||||||
|
|
||||||
|
# No repo provides the namespace, but it is a valid prefix of
|
||||||
|
# something in the RepoPath.
|
||||||
|
if self.by_namespace.is_prefix(fullname):
|
||||||
|
return self
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def load_module(self, fullname):
|
||||||
|
"""Handles loading container namespaces when necessary.
|
||||||
|
|
||||||
|
See ``Repo`` for how actual package modules are loaded.
|
||||||
|
"""
|
||||||
|
if fullname in sys.modules:
|
||||||
|
return sys.modules[fullname]
|
||||||
|
|
||||||
|
# partition fullname into prefix and module name.
|
||||||
|
namespace, dot, module_name = fullname.rpartition('.')
|
||||||
|
|
||||||
|
if not self.by_namespace.is_prefix(fullname):
|
||||||
|
raise ImportError("No such Spack repo: %s" % fullname)
|
||||||
|
|
||||||
|
module = _make_namespace_module(namespace)
|
||||||
|
module.__loader__ = self
|
||||||
|
sys.modules[fullname] = module
|
||||||
|
return module
|
||||||
|
|
||||||
|
|
||||||
|
    @_autospec
    def repo_for_pkg(self, spec):
        """Given a spec, get the repository for its package."""
        # If the spec already has a namespace, then return the
        # corresponding repo if we know about it.
        if spec.namespace:
            fullspace = '%s.%s' % (self.super_namespace, spec.namespace)
            if fullspace not in self.by_namespace:
                raise UnknownNamespaceError(spec.namespace)
            return self.by_namespace[fullspace]

        # If there's no namespace, search in the RepoPath.
        for repo in self.repos:
            if spec.name in repo:
                return repo
        else:
            raise UnknownPackageError(spec.name)


    @_autospec
    def get(self, spec, new=False):
        """Find a repo that contains the supplied spec's package.

        Raises UnknownPackageError if not found.
        """
        return self.repo_for_pkg(spec).get(spec)


    def dirname_for_package_name(self, pkg_name):
        return self.repo_for_pkg(pkg_name).dirname_for_package_name(pkg_name)


    def filename_for_package_name(self, pkg_name):
        return self.repo_for_pkg(pkg_name).filename_for_package_name(pkg_name)


    def exists(self, pkg_name):
        return any(repo.exists(pkg_name) for repo in self.repos)


    def __contains__(self, pkg_name):
        return self.exists(pkg_name)
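
# A usage sketch (not part of this module).  Given two repositories that both
# define an 'mpich' package, the RepoPath returns the higher-precedence one.
# The paths, namespaces, and the no-argument RepoPath() constructor are
# illustrative assumptions:
#
#     repos = RepoPath()
#     repos.put_last(Repo('$spack/var/spack/repos/builtin'))   # namespace 'builtin'
#     repos.put_first(Repo('~/my_repo'))                       # namespace 'mine'
#
#     pkg = repos.get('mpich')              # from 'mine', which shadows 'builtin'
#     builtin = repos.get_repo('builtin')   # look a repo up by namespace
#     builtin.get('mpich')                  # bypass precedence explicitly
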
class Repo(object):
    """Class representing a package repository in the filesystem.

    Each package repository must have a top-level configuration file
    called `repo.yaml`.

    Currently, `repo.yaml` must define:

    `namespace`:
        A Python namespace where the repository's packages should live.
    """
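
    # For illustration, a minimal repo.yaml satisfying the checks in
    # _read_config() below (the 'builtin' value is just an example):
    #
    #     repo:
    #       namespace: builtin
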
    def __init__(self, root, namespace=repo_namespace):
        """Instantiate a package repository from a filesystem path.

        Arguments:
          root       The root directory of the repository.

          namespace  A super-namespace that will contain the repo-defined
                     namespace (this is generally just `spack.pkg`).  The
                     super-namespace is Spack's way of separating repositories
                     from other python namespaces.
        """
        # Root directory, containing repo.yaml and package dirs
        # Allow roots to be spack-relative by starting with '$spack'
        self.root = canonicalize_path(root)

        # super-namespace for all packages in the Repo
        self.super_namespace = namespace

        # check and raise BadRepoError on fail.
        def check(condition, msg):
            if not condition: raise BadRepoError(msg)

        # Validate repository layout.
        self.config_file = join_path(self.root, repo_config_name)
        check(os.path.isfile(self.config_file),
              "No %s found in '%s'" % (repo_config_name, root))
        self.packages_path = join_path(self.root, packages_dir_name)
        check(os.path.isdir(self.packages_path),
              "No directory '%s' found in '%s'" % (packages_dir_name, root))

        # Read configuration and validate namespace
        config = self._read_config()
        check('namespace' in config, '%s must define a namespace.'
              % join_path(root, repo_config_name))

        self.namespace = config['namespace']
        check(re.match(r'[a-zA-Z][a-zA-Z0-9_.]+', self.namespace),
              ("Invalid namespace '%s' in repo '%s'. " % (self.namespace, self.root)) +
              "Namespaces must be valid python identifiers separated by '.'")

        # Set up 'full_namespace' to include the super-namespace
        if self.super_namespace:
            self.full_namespace = "%s.%s" % (self.super_namespace, self.namespace)
        else:
            self.full_namespace = self.namespace

        # Keep name components around for checking prefixes.
        self._names = self.full_namespace.split('.')

        # These are internal cache variables.
        self._modules = {}
        self._classes = {}
        self._instances = {}
        self._provider_index = None
        self._all_package_names = None

        # make sure the namespace for packages in this repo exists.
        self._create_namespace()


    def _create_namespace(self):
        """Create this repo's namespace module and insert it into sys.modules.

        Ensures that modules loaded via the repo have a home, and that
        we don't get runtime warnings from Python's module system.
        """
        parent = None
        for l in range(1, len(self._names)+1):
            ns = '.'.join(self._names[:l])
            if ns not in sys.modules:
                module = _make_namespace_module(ns)
                module.__loader__ = self
                sys.modules[ns] = module

                # Ensure the namespace is an attribute of its parent,
                # if it has not been set by something else already.
                #
                # This ensures that we can do things like:
                #    import spack.pkg.builtin.mpich as mpich
                if parent:
                    modname = self._names[l-1]
                    if not hasattr(parent, modname):
                        setattr(parent, modname, module)
            else:
                # no need to set up a module, but keep track of the parent.
                module = sys.modules[ns]
            parent = module


    def real_name(self, import_name):
        """Allow users to import Spack packages using Python identifiers.

        A python identifier might map to many different Spack package
        names due to hyphen/underscore ambiguity.

        Easy example:
            num3proxy   -> 3proxy

        Ambiguous:
            foo_bar -> foo_bar, foo-bar

        More ambiguous:
            foo_bar_baz -> foo_bar_baz, foo-bar-baz, foo_bar-baz, foo-bar_baz
        """
        if import_name in self:
            return import_name

        options = possible_spack_module_names(import_name)
        options.remove(import_name)
        for name in options:
            if name in self:
                return name
        return None


    def is_prefix(self, fullname):
        """True if fullname is a prefix of this Repo's namespace."""
        parts = fullname.split('.')
        return self._names[:len(parts)] == parts


    def find_module(self, fullname, path=None):
        """Python find_module import hook.

        Returns this Repo if it can load the module; None if not.
        """
        if self.is_prefix(fullname):
            return self

        namespace, dot, module_name = fullname.rpartition('.')
        if namespace == self.full_namespace:
            if self.real_name(module_name):
                return self

        return None


    def load_module(self, fullname):
        """Python importer load hook.

        Tries to load the module; raises an ImportError if it can't.
        """
        if fullname in sys.modules:
            return sys.modules[fullname]

        namespace, dot, module_name = fullname.rpartition('.')

        if self.is_prefix(fullname):
            module = _make_namespace_module(fullname)

        elif namespace == self.full_namespace:
            real_name = self.real_name(module_name)
            if not real_name:
                raise ImportError("No module %s in %s" % (module_name, self))
            module = self._get_pkg_module(real_name)

        else:
            raise ImportError("No module %s in %s" % (fullname, self))

        module.__loader__ = self
        sys.modules[fullname] = module
        return module
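
    # A sketch of how these hooks are exercised (assuming Spack's startup code
    # has put the active repo object on sys.meta_path, and that a repo with
    # namespace 'builtin' provides an 'mpich' package -- both assumptions are
    # for illustration only):
    #
    #     import spack.pkg.builtin.mpich         # routed through load_module()
    #     cls = spack.pkg.builtin.mpich.Mpich    # class defined in package.py
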

    def _read_config(self):
        """Check for a YAML config file in this repo's root directory."""
        try:
            with open(self.config_file) as reponame_file:
                yaml_data = yaml.load(reponame_file)

                if (not yaml_data or 'repo' not in yaml_data or
                    not isinstance(yaml_data['repo'], dict)):
                    tty.die("Invalid %s in repository %s"
                            % (repo_config_name, self.root))

                return yaml_data['repo']

        except exceptions.IOError, e:
            tty.die("Error reading %s in repository %s"
                    % (self.config_file, self.root))


    @_autospec
    def get(self, spec, new=False):
        if spec.virtual:
            raise UnknownPackageError(spec.name)

        if spec.namespace and spec.namespace != self.namespace:
            raise UnknownPackageError("Repository %s does not contain package %s."
                                      % (self.namespace, spec.fullname))

        key = hash(spec)
        if new or key not in self._instances:
            package_class = self._get_pkg_class(spec.name)
            try:
                copy = spec.copy()     # defensive copy.  Package owns its spec.
                self._instances[key] = package_class(copy)
            except Exception, e:
                if spack.debug:
                    sys.excepthook(*sys.exc_info())
                raise FailedConstructorError(spec.fullname, *sys.exc_info())

        return self._instances[key]


    def purge(self):
        """Clear entire package instance cache."""
        self._instances.clear()


    @_autospec
    def providers_for(self, vpkg_spec):
        if self._provider_index is None:
            self._provider_index = ProviderIndex(self.all_package_names())

        providers = self._provider_index.providers_for(vpkg_spec)
        if not providers:
            raise UnknownPackageError(vpkg_spec.name)
        return providers


    @_autospec
    def extensions_for(self, extendee_spec):
        return [p for p in self.all_packages() if p.extends(extendee_spec)]


    def _check_namespace(self, spec):
        """Check that the spec's namespace is the same as this repository's."""
        if spec.namespace and spec.namespace != self.namespace:
            raise UnknownNamespaceError(spec.namespace)


    @_autospec
    def dirname_for_package_name(self, spec):
        """Get the directory name for a particular package.  This is the
           directory that contains its package.py file."""
        self._check_namespace(spec)
        return join_path(self.packages_path, spec.name)


    @_autospec
    def filename_for_package_name(self, spec):
        """Get the filename for the module we should load for a particular
           package.  Packages for a Repo live in
           ``$root/packages/<package_name>/package.py``

           This will return a proper package.py path even if the
           package doesn't exist yet, so callers will need to ensure
           the package exists before importing.
        """
        self._check_namespace(spec)
        pkg_dir = self.dirname_for_package_name(spec.name)
        return join_path(pkg_dir, package_file_name)


    def all_package_names(self):
        """Returns a sorted list of all package names in the Repo."""
        if self._all_package_names is None:
            self._all_package_names = []

            for pkg_name in os.listdir(self.packages_path):
                # Skip non-directories in the package root.
                pkg_dir = join_path(self.packages_path, pkg_name)
                if not os.path.isdir(pkg_dir):
                    continue

                # Skip directories without a package.py in them.
                pkg_file = join_path(self.packages_path, pkg_name, package_file_name)
                if not os.path.isfile(pkg_file):
                    continue

                # Warn about invalid names that look like packages.
                if not valid_module_name(pkg_name):
                    tty.warn("Skipping package at %s. '%s' is not a valid Spack module name."
                             % (pkg_dir, pkg_name))
                    continue

                # All checks passed.  Add it to the list.
                self._all_package_names.append(pkg_name)
            self._all_package_names.sort()

        return self._all_package_names


    def all_packages(self):
        for name in self.all_package_names():
            yield self.get(name)


    def exists(self, pkg_name):
        """Whether a package with the supplied name exists."""
        # This does a binary search in the sorted list.
        idx = bisect_left(self.all_package_names(), pkg_name)
        return (idx < len(self._all_package_names) and
                self._all_package_names[idx] == pkg_name)


    def _get_pkg_module(self, pkg_name):
        """Create a module for a particular package.

        This caches the module within this Repo *instance*.  It does
        *not* add it to ``sys.modules``.  So, you can construct
        multiple Repos for testing and ensure that the module will be
        loaded once per repo.
        """
        if pkg_name not in self._modules:
            file_path = self.filename_for_package_name(pkg_name)

            if not os.path.exists(file_path):
                raise UnknownPackageError(pkg_name, self)

            if not os.path.isfile(file_path):
                tty.die("Something's wrong. '%s' is not a file!" % file_path)

            if not os.access(file_path, os.R_OK):
                tty.die("Cannot read '%s'!" % file_path)

            # e.g., spack.pkg.builtin.mpich
            fullname = "%s.%s" % (self.full_namespace, pkg_name)

            module = imp.load_source(fullname, file_path)
            module.__package__ = self.full_namespace
            module.__loader__ = self
            self._modules[pkg_name] = module

        return self._modules[pkg_name]


    def _get_pkg_class(self, pkg_name):
        """Get the class for the package out of its module.

        First loads (or fetches from cache) a module for the
        package.  Then extracts the package class from the module
        according to Spack's naming convention.
        """
        class_name = mod_to_class(pkg_name)
        module = self._get_pkg_module(pkg_name)

        cls = getattr(module, class_name)
        if not inspect.isclass(cls):
            tty.die("%s.%s is not a class" % (pkg_name, class_name))

        return cls
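
    # The naming convention, sketched with illustrative names: mod_to_class
    # camel-cases the package name, so a directory 'foo-bar' holding a
    # package.py is expected to define
    #
    #     class FooBar(Package): ...
    #
    # which _get_pkg_class retrieves via getattr(module, 'FooBar').
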

    def __str__(self):
        return "[Repo '%s' at '%s']" % (self.namespace, self.root)


    def __repr__(self):
        return self.__str__()


    def __contains__(self, pkg_name):
        return self.exists(pkg_name)
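
# Putting it together -- a sketch only; the repository path and package name
# are examples and must exist on the machine for this to work:
#
#     repo = Repo('$spack/var/spack/repos/builtin')
#     if 'mpich' in repo:                        # __contains__ -> exists()
#         print repo.filename_for_package_name('mpich')
#         pkg = repo.get('mpich')                # instantiate the Package
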

class RepoError(spack.error.SpackError):
    """Superclass for repository-related errors."""


class NoRepoConfiguredError(RepoError):
    """Raised when there are no repositories configured."""


class BadRepoError(RepoError):
    """Raised when repo layout is invalid."""


class DuplicateRepoError(RepoError):
    """Raised when duplicate repos are added to a RepoPath."""


class PackageLoadError(spack.error.SpackError):
    """Superclass for errors related to loading packages."""


class UnknownPackageError(PackageLoadError):
    """Raised when we encounter a package spack doesn't have."""
    def __init__(self, name, repo=None):
        msg = None
        if repo:
            msg = "Package %s not found in repository %s." % (name, repo)
        else:
            msg = "Package %s not found." % name
        super(UnknownPackageError, self).__init__(msg)
        self.name = name


class UnknownNamespaceError(PackageLoadError):
    """Raised when we encounter an unknown namespace"""
    def __init__(self, namespace):
        super(UnknownNamespaceError, self).__init__(
            "Unknown namespace: %s" % namespace)


class FailedConstructorError(PackageLoadError):
    """Raised when a package's class constructor fails."""
    def __init__(self, name, exc_type, exc_obj, exc_tb):
        super(FailedConstructorError, self).__init__(
            "Class constructor failed for package '%s'." % name,
            '\nCaused by:\n' +
            ('%s: %s\n' % (exc_type.__name__, exc_obj)) +
            ''.join(traceback.format_tb(exc_tb)))
        self.name = name

@@ -6,7 +6,7 @@
 # Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
 # LLNL-CODE-647188
 #
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://llnl.github.io/spack
 # Please also see the LICENSE file for our notice and the LGPL.
 #
 # This program is free software; you can redistribute it and/or modify

@@ -22,9 +22,11 @@
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-"""
-Describes an optional resource needed for a build. Typically a bunch of sources that can be built in-tree within another
+"""Describes an optional resource needed for a build.
+
+Typically a bunch of sources that can be built in-tree within another
 package to enable optional features.
+
 """

@@ -412,6 +412,7 @@ def __init__(self, spec_like, *dep_like, **kwargs):
         self.dependencies = other.dependencies
         self.variants = other.variants
         self.variants.spec = self
+        self.namespace = other.namespace
 
         # Specs are by default not assumed to be normal, but in some
         # cases we've read them from a file want to assume normal.

@@ -464,6 +465,13 @@ def _add_dependency(self, spec):
         self.dependencies[spec.name] = spec
         spec.dependents[self.name] = self
 
+    #
+    # Public interface
+    #
+    @property
+    def fullname(self):
+        return '%s.%s' % (self.namespace, self.name) if self.namespace else self.name
+
     @property
     def root(self):

@@ -486,7 +494,7 @@ def root(self):
 
     @property
     def package(self):
-        return spack.db.get(self)
+        return spack.repo.get(self)
 
 
     @property

@@ -504,7 +512,7 @@ def virtual(self):
     @staticmethod
     def is_virtual(name):
         """Test if a name is virtual without requiring a Spec."""
-        return not spack.db.exists(name)
+        return not spack.repo.exists(name)
 
 
     @property

@@ -517,11 +525,13 @@ def concrete(self):
             return True
 
         self._concrete = bool(not self.virtual
+                              and self.namespace is not None
                               and self.versions.concrete
                               and self.variants.concrete
                               and self.architecture
                               and self.compiler and self.compiler.concrete
                               and self.dependencies.concrete)
+
         return self._concrete

@@ -657,6 +667,12 @@ def to_node_dict(self):
             'dependencies' : dict((d, self.dependencies[d].dag_hash())
                                   for d in sorted(self.dependencies))
         }
 
+        # Older concrete specs do not have a namespace.  Omit for
+        # consistent hashing.
+        if not self.concrete or self.namespace:
+            d['namespace'] = self.namespace
+
+
         if self.compiler:
             d.update(self.compiler.to_dict())
         else:

@@ -681,6 +697,7 @@ def from_node_dict(node):
         node = node[name]
 
         spec = Spec(name)
+        spec.namespace = node.get('namespace', None)
         spec.versions = VersionList.from_dict(node)
         spec.architecture = node['arch']
 

@@ -797,7 +814,7 @@ def _expand_virtual_packages(self):
             return changed
 
         for spec in virtuals:
-            providers = spack.db.providers_for(spec)
+            providers = spack.repo.providers_for(spec)
             concrete = spack.concretizer.choose_provider(spec, providers)
             concrete = concrete.copy()
             spec._replace_with(concrete)

@@ -833,6 +850,19 @@ def concretize(self):
             changed = any(changes)
             force=True
 
+        for s in self.traverse():
+            # After concretizing, assign namespaces to anything left.
+            # Note that this doesn't count as a "change".  The repository
+            # configuration is constant throughout a spack run, and
+            # normalize and concretize evaluate Packages using Repo.get(),
+            # which respects precedence.  So, a namespace assignment isn't
+            # changing how a package name would have been interpreted and
+            # we can do it as late as possible to allow as much
+            # compatibility across repositories as possible.
+            if s.namespace is None:
+                s.namespace = spack.repo.repo_for_pkg(s.name).namespace
+
         # Mark everything in the spec as concrete, as well.
         self._mark_concrete()

@@ -919,7 +949,7 @@ def _evaluate_dependency_conditions(self, name):
            the dependency. If no conditions are True (and we don't
            depend on it), return None.
         """
-        pkg = spack.db.get(self.name)
+        pkg = spack.repo.get(self.fullname)
         conditions = pkg.dependencies[name]
 
         # evaluate when specs to figure out constraints on the dependency.
|
||||||
any_change = False
|
any_change = False
|
||||||
changed = True
|
changed = True
|
||||||
|
|
||||||
pkg = spack.db.get(self.name)
|
pkg = spack.repo.get(self.fullname)
|
||||||
while changed:
|
while changed:
|
||||||
changed = False
|
changed = False
|
||||||
for dep_name in pkg.dependencies:
|
for dep_name in pkg.dependencies:
|
||||||
|

@@ -1068,18 +1098,17 @@ def normalize(self, force=False):
            the root, and ONLY the ones that were explicitly provided are there.
            Normalization turns a partial flat spec into a DAG, where:
 
-           1. ALL dependencies of the root package are in the DAG.
-           2. Each node's dependencies dict only contains its direct deps.
+           1. Known dependencies of the root package are in the DAG.
+           2. Each node's dependencies dict only contains its known direct deps.
            3. There is only ONE unique spec for each package in the DAG.
 
              * This includes virtual packages.  If there a non-virtual
               package that provides a virtual package that is in the spec,
               then we replace the virtual package with the non-virtual one.
 
-           4. The spec DAG matches package DAG, including default variant values.
-
           TODO: normalize should probably implement some form of cycle detection,
           to ensure that the spec is actually a DAG.
 
        """
        if self._normal and not force:
            return False
|
@ -1125,7 +1154,7 @@ def validate_names(self):
|
||||||
for spec in self.traverse():
|
for spec in self.traverse():
|
||||||
# Don't get a package for a virtual name.
|
# Don't get a package for a virtual name.
|
||||||
if not spec.virtual:
|
if not spec.virtual:
|
||||||
spack.db.get(spec.name)
|
spack.repo.get(spec.fullname)
|
||||||
|
|
||||||
# validate compiler in addition to the package name.
|
# validate compiler in addition to the package name.
|
||||||
if spec.compiler:
|
if spec.compiler:
|
||||||
|

@@ -1148,6 +1177,10 @@ def constrain(self, other, deps=True):
         if not self.name == other.name:
             raise UnsatisfiableSpecNameError(self.name, other.name)
 
+        if other.namespace is not None:
+            if self.namespace is not None and other.namespace != self.namespace:
+                raise UnsatisfiableSpecNameError(self.fullname, other.fullname)
+
         if not self.versions.overlaps(other.versions):
             raise UnsatisfiableVersionSpecError(self.versions, other.versions)
 

@@ -1191,7 +1224,7 @@ def _constrain_dependencies(self, other):
 
         # TODO: might want more detail than this, e.g. specific deps
         # in violation. if this becomes a priority get rid of this
-        # check and be more specici about what's wrong.
+        # check and be more specific about what's wrong.
         if not other.satisfies_dependencies(self):
             raise UnsatisfiableDependencySpecError(other, self)
 

@@ -1264,7 +1297,7 @@ def satisfies(self, other, deps=True, strict=False):
 
         # A concrete provider can satisfy a virtual dependency.
         if not self.virtual and other.virtual:
-            pkg = spack.db.get(self.name)
+            pkg = spack.repo.get(self.fullname)
             if pkg.provides(other.name):
                 for provided, when_spec in pkg.provided.items():
                     if self.satisfies(when_spec, deps=False, strict=strict):

@@ -1276,6 +1309,11 @@ def satisfies(self, other, deps=True, strict=False):
         if self.name != other.name:
             return False
 
+        # namespaces either match, or other doesn't require one.
+        if other.namespace is not None:
+            if self.namespace is not None and self.namespace != other.namespace:
+                return False
+
         if self.versions and other.versions:
             if not self.versions.satisfies(other.versions, strict=strict):
                 return False
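
A consequence of the namespace check above (a sketch; the namespaces and
package name are illustrative): a spec with a namespace still satisfies an
anonymous one, but two different namespaces never match:

    Spec('builtin.mpich').satisfies(Spec('mpich'))          # True
    Spec('builtin.mpich').satisfies(Spec('other.mpich'))    # False
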
|
@ -1375,6 +1413,7 @@ def _dup(self, other, **kwargs):
|
||||||
self.dependencies = DependencyMap()
|
self.dependencies = DependencyMap()
|
||||||
self.variants = other.variants.copy()
|
self.variants = other.variants.copy()
|
||||||
self.variants.spec = self
|
self.variants.spec = self
|
||||||
|
self.namespace = other.namespace
|
||||||
|
|
||||||
# If we copy dependencies, preserve DAG structure in the new spec
|
# If we copy dependencies, preserve DAG structure in the new spec
|
||||||
if kwargs.get('deps', True):
|
if kwargs.get('deps', True):
|
||||||
|

@@ -1493,6 +1532,7 @@ def ne_dag(self, other):
     def _cmp_node(self):
         """Comparison key for just *this node* and not its deps."""
         return (self.name,
+                self.namespace,
                 self.versions,
                 self.variants,
                 self.architecture,

@@ -1530,6 +1570,7 @@ def format(self, format_string='$_$@$%@$+$=', **kwargs):
        in the format string.  The format strings you can provide are::
 
            $_   Package name
+           $.   Full package name (with namespace)
            $@   Version
            $%   Compiler
            $%@  Compiler & compiler version

@@ -1577,6 +1618,8 @@ def write(s, c):
 
             if c == '_':
                 out.write(fmt % self.name)
+            elif c == '.':
+                out.write(fmt % self.fullname)
             elif c == '@':
                 if self.versions and self.versions != _any_version:
                     write(fmt % (c + str(self.versions)), c)
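
The new directive in action, as a sketch (the namespace and package name are
illustrative):

    Spec('builtin.mpich').format('$.')   # -> 'builtin.mpich'
    Spec('builtin.mpich').format('$_')   # -> 'mpich'
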

@@ -1725,17 +1768,23 @@ def parse_compiler(self, text):
     def spec(self):
         """Parse a spec out of the input. If a spec is supplied, then initialize
            and return it instead of creating a new one."""
-        self.check_identifier()
+        spec_namespace, dot, spec_name = self.token.value.rpartition('.')
+        if not spec_namespace:
+            spec_namespace = None
+
+        self.check_identifier(spec_name)
 
         # This will init the spec without calling __init__.
         spec = Spec.__new__(Spec)
-        spec.name = self.token.value
+        spec.name = spec_name
         spec.versions = VersionList()
         spec.variants = VariantMap(spec)
         spec.architecture = None
         spec.compiler = None
         spec.dependents = DependencyMap()
         spec.dependencies = DependencyMap()
+        spec.namespace = spec_namespace
 
         spec._normal = False
         spec._concrete = False
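
With this change a qualified name splits into namespace and name, as a sketch
(names illustrative):

    spec = Spec('builtin.mpich')
    spec.name         # 'mpich'
    spec.namespace    # 'builtin'
    spec.fullname     # 'builtin.mpich'
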

@@ -1829,12 +1878,14 @@ def compiler(self):
         return compiler
 
 
-    def check_identifier(self):
+    def check_identifier(self, id=None):
         """The only identifiers that can contain '.' are versions, but version
            ids are context-sensitive so we have to check on a case-by-case
            basis. Call this if we detect a version id where it shouldn't be.
         """
-        if '.' in self.token.value:
+        if not id:
+            id = self.token.value
+        if '.' in id:
             self.last_token_error("Identifier cannot contain '.'")
 
 

@@ -26,6 +26,7 @@
 import re
 import shutil
 import tempfile
+from urlparse import urljoin
 
 import llnl.util.tty as tty
 from llnl.util.filesystem import *

@@ -250,7 +251,8 @@ def fetch(self):
         # TODO: CompositeFetchStrategy here.
         self.skip_checksum_for_mirror = True
         if self.mirror_path:
-            urls = ["%s/%s" % (m, self.mirror_path) for m in _get_mirrors()]
+            mirrors = spack.config.get_config('mirrors')
+            urls = [urljoin(u, self.mirror_path) for name, u in mirrors.items()]
 
             # If this archive is normally fetched from a tarball URL,
             # then use the same digest.  `spack mirror` ensures that

@@ -370,7 +372,7 @@ def destroy(self):
 
 def _get_mirrors():
     """Get mirrors from spack configuration."""
-    config = spack.config.get_mirror_config()
+    config = spack.config.get_config('mirrors')
     return [val for name, val in config.iteritems()]
 
 

@@ -62,7 +62,9 @@
               'configure_guess',
               'unit_install',
               'lock',
-              'database']
+              'database',
+              'namespace_trie',
+              'yaml']
 
 
 def list_tests():

@@ -125,22 +125,22 @@ def test_concretize_with_provides_when(self):
            we ask for some advanced version.
         """
         self.assertTrue(not any(spec.satisfies('mpich2@:1.0')
-                                for spec in spack.db.providers_for('mpi@2.1')))
+                                for spec in spack.repo.providers_for('mpi@2.1')))
 
         self.assertTrue(not any(spec.satisfies('mpich2@:1.1')
-                                for spec in spack.db.providers_for('mpi@2.2')))
+                                for spec in spack.repo.providers_for('mpi@2.2')))
 
         self.assertTrue(not any(spec.satisfies('mpich2@:1.1')
-                                for spec in spack.db.providers_for('mpi@2.2')))
+                                for spec in spack.repo.providers_for('mpi@2.2')))
 
         self.assertTrue(not any(spec.satisfies('mpich@:1')
-                                for spec in spack.db.providers_for('mpi@2')))
+                                for spec in spack.repo.providers_for('mpi@2')))
 
         self.assertTrue(not any(spec.satisfies('mpich@:1')
-                                for spec in spack.db.providers_for('mpi@3')))
+                                for spec in spack.repo.providers_for('mpi@3')))
 
         self.assertTrue(not any(spec.satisfies('mpich2')
-                                for spec in spack.db.providers_for('mpi@3')))
+                                for spec in spack.repo.providers_for('mpi@3')))
 
 
     def test_virtual_is_fully_expanded_for_callpath(self):

@@ -26,49 +26,95 @@
 import shutil
 import os
 from tempfile import mkdtemp
+from ordereddict_backport import OrderedDict
 import spack
-from spack.packages import PackageDB
+import spack.config
 from spack.test.mock_packages_test import *
 
+# Some sample compiler config data
+a_comps = {
+    "all": {
+        "gcc@4.7.3" : {
+            "cc" : "/gcc473",
+            "cxx": "/g++473",
+            "f77": None,
+            "fc" : None },
+        "gcc@4.5.0" : {
+            "cc" : "/gcc450",
+            "cxx": "/g++450",
+            "f77": "/gfortran",
+            "fc" : "/gfortran" },
+        "clang@3.3" : {
+            "cc" : "<overwritten>",
+            "cxx": "<overwritten>",
+            "f77": "<overwritten>",
+            "fc" : "<overwritten>" }
+    }
+}
+
+b_comps = {
+    "all": {
+        "icc@10.0" : {
+            "cc" : "/icc100",
+            "cxx": "/icc100",
+            "f77": None,
+            "fc" : None },
+        "icc@11.1" : {
+            "cc" : "/icc111",
+            "cxx": "/icp111",
+            "f77": "/ifort",
+            "fc" : "/ifort" },
+        "clang@3.3" : {
+            "cc" : "/clang",
+            "cxx": "/clang++",
+            "f77": None,
+            "fc" : None}
+    }
+}
+
 class ConfigTest(MockPackagesTest):
 
     def setUp(self):
-        self.initmock()
+        super(ConfigTest, self).setUp()
         self.tmp_dir = mkdtemp('.tmp', 'spack-config-test-')
-        spack.config.config_scopes = [('test_low_priority', os.path.join(self.tmp_dir, 'low')),
-                                      ('test_high_priority', os.path.join(self.tmp_dir, 'high'))]
+        spack.config.config_scopes = OrderedDict()
+        spack.config.ConfigScope('test_low_priority', os.path.join(self.tmp_dir, 'low'))
+        spack.config.ConfigScope('test_high_priority', os.path.join(self.tmp_dir, 'high'))
 
     def tearDown(self):
-        self.cleanmock()
+        super(ConfigTest, self).tearDown()
         shutil.rmtree(self.tmp_dir, True)
 
-    def check_config(self, comps):
-        config = spack.config.get_compilers_config()
-        compiler_list = ['cc', 'cxx', 'f77', 'f90']
-        for key in comps:
+    def check_config(self, comps, *compiler_names):
+        """Check that named compilers in comps match Spack's config."""
+        config = spack.config.get_config('compilers')
+        compiler_list = ['cc', 'cxx', 'f77', 'fc']
+        for key in compiler_names:
             for c in compiler_list:
-                if comps[key][c] == '/bad':
-                    continue
-                self.assertEqual(comps[key][c], config[key][c])
+                expected = comps['all'][key][c]
+                actual = config['all'][key][c]
+                self.assertEqual(expected, actual)
 
 
-    def test_write_key(self):
-        a_comps = {"gcc@4.7.3" : { "cc" : "/gcc473", "cxx" : "/g++473", "f77" : None, "f90" : None },
-                   "gcc@4.5.0" : { "cc" : "/gcc450", "cxx" : "/g++450", "f77" : "/gfortran", "f90" : "/gfortran" },
-                   "clang@3.3" : { "cc" : "/bad", "cxx" : "/bad", "f77" : "/bad", "f90" : "/bad" }}
+    def test_write_key_in_memory(self):
+        # Write b_comps "on top of" a_comps.
+        spack.config.update_config('compilers', a_comps, 'test_low_priority')
+        spack.config.update_config('compilers', b_comps, 'test_high_priority')
 
-        b_comps = {"icc@10.0" : { "cc" : "/icc100", "cxx" : "/icc100", "f77" : None, "f90" : None },
-                   "icc@11.1" : { "cc" : "/icc111", "cxx" : "/icp111", "f77" : "/ifort", "f90" : "/ifort" },
-                   "clang@3.3" : { "cc" : "/clang", "cxx" : "/clang++", "f77" : None, "f90" : None}}
+        # Make sure the config looks how we expect.
+        self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0')
+        self.check_config(b_comps, 'icc@10.0', 'icc@11.1', 'clang@3.3')
 
-        spack.config.add_to_compiler_config(a_comps, 'test_low_priority')
-        spack.config.add_to_compiler_config(b_comps, 'test_high_priority')
 
-        self.check_config(a_comps)
-        self.check_config(b_comps)
+    def test_write_key_to_disk(self):
+        # Write b_comps "on top of" a_comps.
+        spack.config.update_config('compilers', a_comps, 'test_low_priority')
+        spack.config.update_config('compilers', b_comps, 'test_high_priority')
 
+        # Clear caches so we're forced to read from disk.
         spack.config.clear_config_caches()
 
-        self.check_config(a_comps)
-        self.check_config(b_comps)
+        # Same check again, to ensure consistency.
+        self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0')
+        self.check_config(b_comps, 'icc@10.0', 'icc@11.1', 'clang@3.3')

@@ -79,7 +79,8 @@ class DatabaseTest(MockPackagesTest):
 
     def _mock_install(self, spec):
         s = Spec(spec)
-        pkg = spack.db.get(s.concretized())
+        s.concretize()
+        pkg = spack.repo.get(s)
         pkg.do_install(fake=True)
 
 

@@ -34,23 +34,27 @@
 
 import spack
 from spack.spec import Spec
-from spack.packages import PackageDB
+from spack.repository import RepoPath
 from spack.directory_layout import YamlDirectoryLayout
+from spack.test.mock_packages_test import *
 
 # number of packages to test (to reduce test time)
 max_packages = 10
 
 
-class DirectoryLayoutTest(unittest.TestCase):
+class DirectoryLayoutTest(MockPackagesTest):
     """Tests that a directory layout works correctly and produces a
        consistent install path."""
 
     def setUp(self):
+        super(DirectoryLayoutTest, self).setUp()
         self.tmpdir = tempfile.mkdtemp()
         self.layout = YamlDirectoryLayout(self.tmpdir)
 
 
     def tearDown(self):
+        super(DirectoryLayoutTest, self).tearDown()
         shutil.rmtree(self.tmpdir, ignore_errors=True)
         self.layout = None
 

@@ -62,7 +66,7 @@ def test_read_and_write_spec(self):
        finally that the directory can be removed by the directory
        layout.
        """
-        packages = list(spack.db.all_packages())[:max_packages]
+        packages = list(spack.repo.all_packages())[:max_packages]
 
         for pkg in packages:
             spec = pkg.spec

@@ -123,17 +127,17 @@ def test_handle_unknown_package(self):
            information about installed packages' specs to uninstall
            or query them again if the package goes away.
         """
-        mock_db = PackageDB(spack.mock_packages_path)
+        mock_db = RepoPath(spack.mock_packages_path)
 
         not_in_mock = set.difference(
-            set(spack.db.all_package_names()),
+            set(spack.repo.all_package_names()),
             set(mock_db.all_package_names()))
         packages = list(not_in_mock)[:max_packages]
 
         # Create all the packages that are not in mock.
         installed_specs = {}
         for pkg_name in packages:
-            spec = spack.db.get(pkg_name).spec
+            spec = spack.repo.get(pkg_name).spec
 
             # If a spec fails to concretize, just skip it.  If it is a
             # real error, it will be caught by concretization tests.

@@ -145,8 +149,7 @@ def test_handle_unknown_package(self):
             self.layout.create_install_directory(spec)
             installed_specs[spec] = self.layout.path_for_spec(spec)
 
-        tmp = spack.db
-        spack.db = mock_db
+        spack.repo.swap(mock_db)
 
         # Now check that even without the package files, we know
         # enough to read a spec from the spec file.

@@ -161,12 +164,12 @@ def test_handle_unknown_package(self):
             self.assertTrue(spec.eq_dag(spec_from_file))
             self.assertEqual(spec.dag_hash(), spec_from_file.dag_hash())
 
-        spack.db = tmp
+        spack.repo.swap(mock_db)
 
 
     def test_find(self):
         """Test that finding specs within an install layout works."""
-        packages = list(spack.db.all_packages())[:max_packages]
+        packages = list(spack.repo.all_packages())[:max_packages]
 
         # Create install prefixes for all packages in the list
         installed_specs = {}

@@ -50,7 +50,7 @@ def setUp(self):
 
         spec = Spec('git-test')
         spec.concretize()
-        self.pkg = spack.db.get(spec, new=True)
+        self.pkg = spack.repo.get(spec, new=True)
 
 
     def tearDown(self):

@@ -47,7 +47,7 @@ def setUp(self):
 
         spec = Spec('hg-test')
         spec.concretize()
-        self.pkg = spack.db.get(spec, new=True)
+        self.pkg = spack.repo.get(spec, new=True)
 
 
     def tearDown(self):

@@ -78,7 +78,7 @@ def test_install_and_uninstall(self):
         self.assertTrue(spec.concrete)
 
         # Get the package
-        pkg = spack.db.get(spec)
+        pkg = spack.repo.get(spec)
 
         # Fake the URL for the package so it downloads from a file.
         pkg.fetcher = URLFetchStrategy(self.repo.url)

@@ -55,7 +55,7 @@ def set_up_package(self, name, MockRepoClass, url_attr):
         spec.concretize()
 
         # Get the package and fix its fetch args to point to a mock repo
-        pkg = spack.db.get(spec)
+        pkg = spack.repo.get(spec)
         repo = MockRepoClass()
         self.repos[name] = repo
 

@@ -22,43 +22,96 @@
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
+import sys
+import os
+import shutil
 import unittest
+import tempfile
+from ordereddict_backport import OrderedDict
+
+from llnl.util.filesystem import mkdirp
 
 import spack
 import spack.config
-from spack.packages import PackageDB
+from spack.repository import RepoPath
 from spack.spec import Spec
 
-
-def set_pkg_dep(pkg, spec):
-    """Alters dependence information for a package.
-       Use this to mock up constraints.
-    """
-    spec = Spec(spec)
-    spack.db.get(pkg).dependencies[spec.name] = { Spec(pkg) : spec }
-
+mock_compiler_config = """\
+compilers:
+  all:
+    clang@3.3:
+      cc: /path/to/clang
+      cxx: /path/to/clang++
+      f77: None
+      fc: None
+    gcc@4.5.0:
+      cc: /path/to/gcc
+      cxx: /path/to/g++
+      f77: /path/to/gfortran
+      fc: /path/to/gfortran
+"""
 
 class MockPackagesTest(unittest.TestCase):
     def initmock(self):
         # Use the mock packages database for these tests.  This allows
         # us to set up contrived packages that don't interfere with
         # real ones.
-        self.real_db = spack.db
-        spack.db = PackageDB(spack.mock_packages_path)
+        self.db = RepoPath(spack.mock_packages_path)
+        spack.repo.swap(self.db)
 
         spack.config.clear_config_caches()
         self.real_scopes = spack.config.config_scopes
-        spack.config.config_scopes = [
-            ('site', spack.mock_site_config),
-            ('user', spack.mock_user_config)]
+
+        # Mock up temporary configuration directories
+        self.temp_config = tempfile.mkdtemp()
+        self.mock_site_config = os.path.join(self.temp_config, 'site')
+        self.mock_user_config = os.path.join(self.temp_config, 'user')
+        mkdirp(self.mock_site_config)
+        mkdirp(self.mock_user_config)
+        comp_yaml = os.path.join(self.mock_site_config, 'compilers.yaml')
+        with open(comp_yaml, 'w') as f:
+            f.write(mock_compiler_config)
+
+        # TODO: Mocking this up is kind of brittle b/c ConfigScope
+        # TODO: constructor modifies config_scopes.  Make it cleaner.
+        spack.config.config_scopes = OrderedDict()
+        spack.config.ConfigScope('site', self.mock_site_config)
+        spack.config.ConfigScope('user', self.mock_user_config)
+
+        # Store changes to the package's dependencies so we can
+        # restore later.
+        self.saved_deps = {}
+
+
+    def set_pkg_dep(self, pkg_name, spec):
+        """Alters dependence information for a package.
+
+           Adds a dependency on <spec> to pkg.
+           Use this to mock up constraints.
+        """
+        spec = Spec(spec)
+
+        # Save original dependencies before making any changes.
+        pkg = spack.repo.get(pkg_name)
+        if pkg_name not in self.saved_deps:
+            self.saved_deps[pkg_name] = (pkg, pkg.dependencies.copy())
+
+        # Change dep spec
+        pkg.dependencies[spec.name] = { Spec(pkg_name) : spec }
 
 
     def cleanmock(self):
         """Restore the real packages path after any test."""
-        spack.db = self.real_db
+        spack.repo.swap(self.db)
         spack.config.config_scopes = self.real_scopes
+        shutil.rmtree(self.temp_config, ignore_errors=True)
         spack.config.clear_config_caches()
 
+        # Restore dependency changes that happened during the test
+        for pkg_name, (pkg, deps) in self.saved_deps.items():
+            pkg.dependencies.clear()
+            pkg.dependencies.update(deps)
+
 
     def setUp(self):
         self.initmock()

@@ -66,5 +119,3 @@ def setUp(self):
 
     def tearDown(self):
         self.cleanmock()
-
-
@ -38,92 +38,92 @@
 class MultiMethodTest(MockPackagesTest):

     def test_no_version_match(self):
-        pkg = spack.db.get('multimethod@2.0')
+        pkg = spack.repo.get('multimethod@2.0')
         self.assertRaises(NoSuchMethodError, pkg.no_version_2)


     def test_one_version_match(self):
-        pkg = spack.db.get('multimethod@1.0')
+        pkg = spack.repo.get('multimethod@1.0')
         self.assertEqual(pkg.no_version_2(), 1)

-        pkg = spack.db.get('multimethod@3.0')
+        pkg = spack.repo.get('multimethod@3.0')
         self.assertEqual(pkg.no_version_2(), 3)

-        pkg = spack.db.get('multimethod@4.0')
+        pkg = spack.repo.get('multimethod@4.0')
         self.assertEqual(pkg.no_version_2(), 4)


     def test_version_overlap(self):
-        pkg = spack.db.get('multimethod@2.0')
+        pkg = spack.repo.get('multimethod@2.0')
         self.assertEqual(pkg.version_overlap(), 1)

-        pkg = spack.db.get('multimethod@5.0')
+        pkg = spack.repo.get('multimethod@5.0')
         self.assertEqual(pkg.version_overlap(), 2)


     def test_mpi_version(self):
-        pkg = spack.db.get('multimethod^mpich@3.0.4')
+        pkg = spack.repo.get('multimethod^mpich@3.0.4')
         self.assertEqual(pkg.mpi_version(), 3)

-        pkg = spack.db.get('multimethod^mpich2@1.2')
+        pkg = spack.repo.get('multimethod^mpich2@1.2')
         self.assertEqual(pkg.mpi_version(), 2)

-        pkg = spack.db.get('multimethod^mpich@1.0')
+        pkg = spack.repo.get('multimethod^mpich@1.0')
         self.assertEqual(pkg.mpi_version(), 1)


     def test_undefined_mpi_version(self):
-        pkg = spack.db.get('multimethod^mpich@0.4')
+        pkg = spack.repo.get('multimethod^mpich@0.4')
         self.assertEqual(pkg.mpi_version(), 1)

-        pkg = spack.db.get('multimethod^mpich@1.4')
+        pkg = spack.repo.get('multimethod^mpich@1.4')
         self.assertEqual(pkg.mpi_version(), 1)


     def test_default_works(self):
-        pkg = spack.db.get('multimethod%gcc')
+        pkg = spack.repo.get('multimethod%gcc')
         self.assertEqual(pkg.has_a_default(), 'gcc')

-        pkg = spack.db.get('multimethod%intel')
+        pkg = spack.repo.get('multimethod%intel')
         self.assertEqual(pkg.has_a_default(), 'intel')

-        pkg = spack.db.get('multimethod%pgi')
+        pkg = spack.repo.get('multimethod%pgi')
         self.assertEqual(pkg.has_a_default(), 'default')


     def test_architecture_match(self):
-        pkg = spack.db.get('multimethod=x86_64')
+        pkg = spack.repo.get('multimethod=x86_64')
         self.assertEqual(pkg.different_by_architecture(), 'x86_64')

-        pkg = spack.db.get('multimethod=ppc64')
+        pkg = spack.repo.get('multimethod=ppc64')
         self.assertEqual(pkg.different_by_architecture(), 'ppc64')

-        pkg = spack.db.get('multimethod=ppc32')
+        pkg = spack.repo.get('multimethod=ppc32')
         self.assertEqual(pkg.different_by_architecture(), 'ppc32')

-        pkg = spack.db.get('multimethod=arm64')
+        pkg = spack.repo.get('multimethod=arm64')
         self.assertEqual(pkg.different_by_architecture(), 'arm64')

-        pkg = spack.db.get('multimethod=macos')
+        pkg = spack.repo.get('multimethod=macos')
         self.assertRaises(NoSuchMethodError, pkg.different_by_architecture)


     def test_dependency_match(self):
-        pkg = spack.db.get('multimethod^zmpi')
+        pkg = spack.repo.get('multimethod^zmpi')
         self.assertEqual(pkg.different_by_dep(), 'zmpi')

-        pkg = spack.db.get('multimethod^mpich')
+        pkg = spack.repo.get('multimethod^mpich')
         self.assertEqual(pkg.different_by_dep(), 'mpich')

         # If we try to switch on some entirely different dep, it's ambiguous,
         # but should take the first option
-        pkg = spack.db.get('multimethod^foobar')
+        pkg = spack.repo.get('multimethod^foobar')
         self.assertEqual(pkg.different_by_dep(), 'mpich')


     def test_virtual_dep_match(self):
-        pkg = spack.db.get('multimethod^mpich2')
+        pkg = spack.repo.get('multimethod^mpich2')
         self.assertEqual(pkg.different_by_virtual_dep(), 2)

-        pkg = spack.db.get('multimethod^mpich@1.0')
+        pkg = spack.repo.get('multimethod^mpich@1.0')
         self.assertEqual(pkg.different_by_virtual_dep(), 1)
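All of these tests pivot on Spack multimethods: one method name, several implementations, chosen by which constraint the package's spec satisfies. A rough stand-in for that selection logic in plain Python (illustrative only, not Spack's actual `@when` machinery; the constraint strings are borrowed from the tests above):

    # Sketch: check constraints in order, first match wins, with a
    # plain default when nothing matches.
    def mpi_version(pkg):
        if pkg.spec.satisfies('^mpich@3:'):
            return 3
        elif pkg.spec.satisfies('^mpich2'):
            return 2
        return 1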
114 lib/spack/spack/test/namespace_trie.py Normal file
@ -0,0 +1,114 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import unittest
+from spack.util.naming import NamespaceTrie
+
+
+class NamespaceTrieTest(unittest.TestCase):
+
+    def setUp(self):
+        self.trie = NamespaceTrie()
+
+
+    def test_add_single(self):
+        self.trie['foo'] = 'bar'
+
+        self.assertTrue(self.trie.is_prefix('foo'))
+        self.assertTrue(self.trie.has_value('foo'))
+        self.assertEqual(self.trie['foo'], 'bar')
+
+
+    def test_add_multiple(self):
+        self.trie['foo.bar'] = 'baz'
+
+        self.assertFalse(self.trie.has_value('foo'))
+        self.assertTrue(self.trie.is_prefix('foo'))
+
+        self.assertTrue(self.trie.is_prefix('foo.bar'))
+        self.assertTrue(self.trie.has_value('foo.bar'))
+        self.assertEqual(self.trie['foo.bar'], 'baz')
+
+        self.assertFalse(self.trie.is_prefix('foo.bar.baz'))
+        self.assertFalse(self.trie.has_value('foo.bar.baz'))
+
+
+    def test_add_three(self):
+        # add a three-level namespace
+        self.trie['foo.bar.baz'] = 'quux'
+
+        self.assertTrue(self.trie.is_prefix('foo'))
+        self.assertFalse(self.trie.has_value('foo'))
+
+        self.assertTrue(self.trie.is_prefix('foo.bar'))
+        self.assertFalse(self.trie.has_value('foo.bar'))
+
+        self.assertTrue(self.trie.is_prefix('foo.bar.baz'))
+        self.assertTrue(self.trie.has_value('foo.bar.baz'))
+        self.assertEqual(self.trie['foo.bar.baz'], 'quux')
+
+        self.assertFalse(self.trie.is_prefix('foo.bar.baz.quux'))
+        self.assertFalse(self.trie.has_value('foo.bar.baz.quux'))
+
+        # Try to add a second element in a prefix namespace
+        self.trie['foo.bar'] = 'blah'
+
+        self.assertTrue(self.trie.is_prefix('foo'))
+        self.assertFalse(self.trie.has_value('foo'))
+
+        self.assertTrue(self.trie.is_prefix('foo.bar'))
+        self.assertTrue(self.trie.has_value('foo.bar'))
+        self.assertEqual(self.trie['foo.bar'], 'blah')
+
+        self.assertTrue(self.trie.is_prefix('foo.bar.baz'))
+        self.assertTrue(self.trie.has_value('foo.bar.baz'))
+        self.assertEqual(self.trie['foo.bar.baz'], 'quux')
+
+        self.assertFalse(self.trie.is_prefix('foo.bar.baz.quux'))
+        self.assertFalse(self.trie.has_value('foo.bar.baz.quux'))
+
+
+    def test_add_none_single(self):
+        self.trie['foo'] = None
+        self.assertTrue(self.trie.is_prefix('foo'))
+        self.assertTrue(self.trie.has_value('foo'))
+        self.assertEqual(self.trie['foo'], None)
+
+        self.assertFalse(self.trie.is_prefix('foo.bar'))
+        self.assertFalse(self.trie.has_value('foo.bar'))
+
+
+    def test_add_none_multiple(self):
+        self.trie['foo.bar'] = None
+
+        self.assertTrue(self.trie.is_prefix('foo'))
+        self.assertFalse(self.trie.has_value('foo'))
+
+        self.assertTrue(self.trie.is_prefix('foo.bar'))
+        self.assertTrue(self.trie.has_value('foo.bar'))
+        self.assertEqual(self.trie['foo.bar'], None)
+
+        self.assertFalse(self.trie.is_prefix('foo.bar.baz'))
+        self.assertFalse(self.trie.has_value('foo.bar.baz'))
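The `NamespaceTrie` these tests target is added to `lib/spack/spack/util/naming.py` later in this diff. A short usage sketch distilled from the assertions above:

    from spack.util.naming import NamespaceTrie

    trie = NamespaceTrie()
    trie['builtin.mock'] = 'some repo object'

    assert trie.is_prefix('builtin')        # on the path to a value
    assert not trie.has_value('builtin')    # but no value of its own
    assert trie.has_value('builtin.mock')
    assert trie['builtin.mock'] == 'some repo object'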
@ -28,16 +28,15 @@
 import unittest

 import spack
-import spack.url as url
-from spack.packages import PackageDB
+from spack.repository import RepoPath


 class PackageSanityTest(unittest.TestCase):

     def check_db(self):
         """Get all packages in a DB to make sure they work."""
-        for name in spack.db.all_package_names():
-            spack.db.get(name)
+        for name in spack.repo.all_package_names():
+            spack.repo.get(name)


     def test_get_all_packages(self):

@ -47,15 +46,15 @@ def test_get_all_packages(self):

     def test_get_all_mock_packages(self):
         """Get the mock packages once each too."""
-        tmp = spack.db
-        spack.db = PackageDB(spack.mock_packages_path)
+        db = RepoPath(spack.mock_packages_path)
+        spack.repo.swap(db)
         self.check_db()
-        spack.db = tmp
+        spack.repo.swap(db)


     def test_url_versions(self):
         """Check URLs for regular packages, if they are explicitly defined."""
-        for pkg in spack.db.all_packages():
+        for pkg in spack.repo.all_packages():
             for v, vattrs in pkg.versions.items():
                 if 'url' in vattrs:
                     # If there is a url for the version check it.
@ -27,7 +27,7 @@
 from llnl.util.filesystem import join_path

 import spack
-import spack.packages as packages
+from spack.repository import Repo
 from spack.util.naming import mod_to_class
 from spack.test.mock_packages_test import *

@ -35,27 +35,32 @@
 class PackagesTest(MockPackagesTest):

     def test_load_package(self):
-        pkg = spack.db.get('mpich')
+        pkg = spack.repo.get('mpich')


     def test_package_name(self):
-        pkg = spack.db.get('mpich')
+        pkg = spack.repo.get('mpich')
         self.assertEqual(pkg.name, 'mpich')


     def test_package_filename(self):
-        filename = spack.db.filename_for_package_name('mpich')
-        self.assertEqual(filename, join_path(spack.mock_packages_path, 'mpich', 'package.py'))
+        repo = Repo(spack.mock_packages_path)
+        filename = repo.filename_for_package_name('mpich')
+        self.assertEqual(filename,
+                         join_path(spack.mock_packages_path, 'packages', 'mpich', 'package.py'))


     def test_package_name(self):
-        pkg = spack.db.get('mpich')
+        pkg = spack.repo.get('mpich')
         self.assertEqual(pkg.name, 'mpich')


     def test_nonexisting_package_filename(self):
-        filename = spack.db.filename_for_package_name('some-nonexisting-package')
-        self.assertEqual(filename, join_path(spack.mock_packages_path, 'some-nonexisting-package', 'package.py'))
+        repo = Repo(spack.mock_packages_path)
+        filename = repo.filename_for_package_name('some-nonexisting-package')
+        self.assertEqual(
+            filename,
+            join_path(spack.mock_packages_path, 'packages', 'some-nonexisting-package', 'package.py'))


     def test_package_class_names(self):

@ -64,3 +69,38 @@ def test_package_class_names(self):
         self.assertEqual('PmgrCollective', mod_to_class('pmgr-collective'))
         self.assertEqual('Pmgrcollective', mod_to_class('PmgrCollective'))
         self.assertEqual('_3db', mod_to_class('3db'))
+
+
+    #
+    # Below tests target direct imports of spack packages from the
+    # spack.pkg namespace
+    #
+    def test_import_package(self):
+        import spack.pkg.builtin.mock.mpich
+
+
+    def test_import_package_as(self):
+        import spack.pkg.builtin.mock.mpich as mp
+
+
+    def test_import_class_from_package(self):
+        from spack.pkg.builtin.mock.mpich import Mpich
+
+
+    def test_import_module_from_package(self):
+        from spack.pkg.builtin.mock import mpich
+
+
+    def test_import_namespace_container_modules(self):
+        import spack.pkg
+        import spack.pkg as p
+        from spack import pkg
+
+        import spack.pkg.builtin
+        import spack.pkg.builtin as b
+        from spack.pkg import builtin
+
+        import spack.pkg.builtin.mock
+        import spack.pkg.builtin.mock as m
+        from spack.pkg.builtin import mock
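These `spack.pkg.*` imports work because the repository machinery installs an import hook. The sketch below shows the general Python 2 mechanism (a `sys.meta_path` finder that synthesizes container modules under a prefix); it is illustrative only, not Spack's actual `RepoPath` importer, and `MockFinder` is a made-up name:

    import imp
    import sys

    class MockFinder(object):
        """Hypothetical finder: serves empty container modules under a prefix."""
        prefix = 'spack.pkg'

        def find_module(self, fullname, path=None):
            if fullname == self.prefix or fullname.startswith(self.prefix + '.'):
                return self
            return None

        def load_module(self, fullname):
            if fullname in sys.modules:
                return sys.modules[fullname]
            module = imp.new_module(fullname)
            module.__loader__ = self
            module.__path__ = []   # mark it as a package so submodules import
            sys.modules[fullname] = module
            return module

    sys.meta_path.append(MockFinder())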
@ -54,8 +54,8 @@ def pyfiles(self, *search_paths):


     def package_py_files(self):
-        for name in spack.db.all_package_names():
-            yield spack.db.filename_for_package_name(name)
+        for name in spack.repo.all_package_names():
+            yield spack.repo.filename_for_package_name(name)


     def check_python_versions(self, *files):
@ -40,8 +40,8 @@
 class SpecDagTest(MockPackagesTest):

     def test_conflicting_package_constraints(self):
-        set_pkg_dep('mpileaks', 'mpich@1.0')
-        set_pkg_dep('callpath', 'mpich@2.0')
+        self.set_pkg_dep('mpileaks', 'mpich@1.0')
+        self.set_pkg_dep('callpath', 'mpich@2.0')

         spec = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf ^libdwarf')

@ -223,25 +223,25 @@ def test_dependents_and_dependencies_are_correct(self):


     def test_unsatisfiable_version(self):
-        set_pkg_dep('mpileaks', 'mpich@1.0')
+        self.set_pkg_dep('mpileaks', 'mpich@1.0')
         spec = Spec('mpileaks ^mpich@2.0 ^callpath ^dyninst ^libelf ^libdwarf')
         self.assertRaises(spack.spec.UnsatisfiableVersionSpecError, spec.normalize)


     def test_unsatisfiable_compiler(self):
-        set_pkg_dep('mpileaks', 'mpich%gcc')
+        self.set_pkg_dep('mpileaks', 'mpich%gcc')
         spec = Spec('mpileaks ^mpich%intel ^callpath ^dyninst ^libelf ^libdwarf')
         self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError, spec.normalize)


     def test_unsatisfiable_compiler_version(self):
-        set_pkg_dep('mpileaks', 'mpich%gcc@4.6')
+        self.set_pkg_dep('mpileaks', 'mpich%gcc@4.6')
         spec = Spec('mpileaks ^mpich%gcc@4.5 ^callpath ^dyninst ^libelf ^libdwarf')
         self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError, spec.normalize)


     def test_unsatisfiable_architecture(self):
-        set_pkg_dep('mpileaks', 'mpich=bgqos_0')
+        self.set_pkg_dep('mpileaks', 'mpich=bgqos_0')
         spec = Spec('mpileaks ^mpich=sles_10_ppc64 ^callpath ^dyninst ^libelf ^libdwarf')
         self.assertRaises(spack.spec.UnsatisfiableArchitectureSpecError, spec.normalize)
@ -35,7 +35,10 @@ class SpecSematicsTest(MockPackagesTest):
     # ================================================================================
     def check_satisfies(self, spec, anon_spec, concrete=False):
         left = Spec(spec, concrete=concrete)
-        right = parse_anonymous_spec(anon_spec, left.name)
+        try:
+            right = Spec(anon_spec)  # if it's not anonymous, allow it.
+        except:
+            right = parse_anonymous_spec(anon_spec, left.name)

         # Satisfies is one-directional.
         self.assertTrue(left.satisfies(right))

@ -48,7 +51,10 @@ def check_satisfies(self, spec, anon_spec, concrete=False):

     def check_unsatisfiable(self, spec, anon_spec, concrete=False):
         left = Spec(spec, concrete=concrete)
-        right = parse_anonymous_spec(anon_spec, left.name)
+        try:
+            right = Spec(anon_spec)  # if it's not anonymous, allow it.
+        except:
+            right = parse_anonymous_spec(anon_spec, left.name)

         self.assertFalse(left.satisfies(right))
         self.assertFalse(left.satisfies(anon_spec))

@ -88,6 +94,28 @@ def test_satisfies(self):
         self.check_satisfies('libdwarf^libelf@0.8.13', '^libelf@0:1')


+    def test_satisfies_namespace(self):
+        self.check_satisfies('builtin.mpich', 'mpich')
+        self.check_satisfies('builtin.mock.mpich', 'mpich')
+
+        # TODO: only works for deps now, but shouldn't we allow this for root spec?
+        # self.check_satisfies('builtin.mock.mpich', 'mpi')
+
+        self.check_satisfies('builtin.mock.mpich', 'builtin.mock.mpich')
+
+        self.check_unsatisfiable('builtin.mock.mpich', 'builtin.mpich')
+
+
+    def test_satisfies_namespaced_dep(self):
+        """Ensure spec from same or unspecified namespace satisfies namespace constraint."""
+        self.check_satisfies('mpileaks ^builtin.mock.mpich', '^mpich')
+
+        self.check_satisfies('mpileaks ^builtin.mock.mpich', '^mpi')
+        self.check_satisfies('mpileaks ^builtin.mock.mpich', '^builtin.mock.mpich')
+
+        self.check_unsatisfiable('mpileaks ^builtin.mock.mpich', '^builtin.mpich')
+
+
     def test_satisfies_compiler(self):
         self.check_satisfies('foo%gcc', '%gcc')
         self.check_satisfies('foo%intel', '%intel')
@ -49,7 +49,7 @@ def setUp(self):

         spec = Spec('svn-test')
         spec.concretize()
-        self.pkg = spack.db.get(spec, new=True)
+        self.pkg = spack.repo.get(spec, new=True)


     def tearDown(self):
@ -79,12 +79,16 @@ def setUp(self):
         pkgX.installed = False
         pkgY.installed = False

+        self.saved_db = spack.repo
         pkgDb = MockPackageDb({specX:pkgX, specY:pkgY})
-        spack.db = pkgDb
+        spack.repo = pkgDb


     def tearDown(self):
         super(UnitInstallTest, self).tearDown()

+        spack.repo = self.saved_db
+
     def test_installing_both(self):
         mo = MockOutput()

@ -94,7 +98,8 @@ def test_installing_both(self):

         self.assertEqual(mo.results,
                          {bIdX:test_install.TestResult.PASSED,
                           bIdY:test_install.TestResult.PASSED})


     def test_dependency_already_installed(self):
         mo = MockOutput()

@ -102,11 +107,11 @@ def test_dependency_already_installed(self):
         pkgX.installed = True
         pkgY.installed = True
         test_install.create_test_output(specX, [specX], mo, getLogFunc=mock_fetch_log)

         self.assertEqual(mo.results, {bIdX:test_install.TestResult.PASSED})

         #TODO: add test(s) where Y fails to install


 class MockPackageDb(object):
     def __init__(self, init=None):
         self.specToPkg = {}

@ -118,4 +123,3 @@ def get(self, spec):


 def mock_fetch_log(path):
     return []
@ -29,7 +29,6 @@

 import spack
 import spack.url as url
-from spack.packages import PackageDB


 class PackageSanityTest(unittest.TestCase):
93 lib/spack/spack/test/yaml.py Normal file
@ -0,0 +1,93 @@
+##############################################################################
+# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""
+Test Spack's custom YAML format.
+"""
+import unittest
+import spack.util.spack_yaml as syaml
+
+test_file = """\
+config_file:
+  x86_64:
+    foo: /path/to/foo
+    bar: /path/to/bar
+    baz: /path/to/baz
+  some_list:
+    - item 1
+    - item 2
+    - item 3
+  another_list:
+    [ 1, 2, 3 ]
+  some_key: some_string
+"""
+
+test_data = {
+    'config_file' : syaml.syaml_dict([
+        ('x86_64', syaml.syaml_dict([
+            ('foo', '/path/to/foo'),
+            ('bar', '/path/to/bar'),
+            ('baz', '/path/to/baz' )])),
+        ('some_list', [ 'item 1', 'item 2', 'item 3' ]),
+        ('another_list', [ 1, 2, 3 ]),
+        ('some_key', 'some_string')
+    ])}
+
+class YamlTest(unittest.TestCase):
+
+    def setUp(self):
+        self.data = syaml.load(test_file)
+
+
+    def test_parse(self):
+        self.assertEqual(test_data, self.data)
+
+
+    def test_dict_order(self):
+        self.assertEqual(
+            ['x86_64', 'some_list', 'another_list', 'some_key'],
+            self.data['config_file'].keys())
+
+        self.assertEqual(
+            ['foo', 'bar', 'baz'],
+            self.data['config_file']['x86_64'].keys())
+
+
+    def test_line_numbers(self):
+        def check(obj, start_line, end_line):
+            self.assertEqual(obj._start_mark.line, start_line)
+            self.assertEqual(obj._end_mark.line,   end_line)
+
+        check(self.data,                                  0, 12)
+        check(self.data['config_file'],                   1, 12)
+        check(self.data['config_file']['x86_64'],         2, 5)
+        check(self.data['config_file']['x86_64']['foo'],  2, 2)
+        check(self.data['config_file']['x86_64']['bar'],  3, 3)
+        check(self.data['config_file']['x86_64']['baz'],  4, 4)
+        check(self.data['config_file']['some_list'],      6, 9)
+        check(self.data['config_file']['some_list'][0],   6, 6)
+        check(self.data['config_file']['some_list'][1],   7, 7)
+        check(self.data['config_file']['some_list'][2],   8, 8)
+        check(self.data['config_file']['another_list'],  10, 10)
+        check(self.data['config_file']['some_key'],      11, 11)
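A quick sketch of what this loader provides over stock `yaml.load`, using made-up input (the `spack.util.spack_yaml` implementation appears later in this diff):

    import spack.util.spack_yaml as syaml

    data = syaml.load("paths:\n  - /opt/foo\n  - /opt/bar\n")
    print(data['paths'])                   # ['/opt/foo', '/opt/bar'], order kept
    print(data['paths']._start_mark.line)  # 1 -- line info kept for error messages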
@ -1,13 +1,22 @@
 # Need this because of spack.util.string
 from __future__ import absolute_import
 import string
+import itertools
 import re
+from StringIO import StringIO

 import spack

+__all__ = ['mod_to_class', 'spack_module_to_python_module', 'valid_module_name',
+           'valid_fully_qualified_module_name', 'validate_fully_qualified_module_name',
+           'validate_module_name', 'possible_spack_module_names', 'NamespaceTrie']

 # Valid module names can contain '-' but can't start with it.
 _valid_module_re = r'^\w[\w-]*$'

+# Valid module names can contain '-' but can't start with it.
+_valid_fully_qualified_module_re = r'^(\w[\w-]*)(\.\w[\w-]*)*$'


 def mod_to_class(mod_name):
     """Convert a name from module style to class name style.  Spack mostly

@ -42,20 +51,160 @@ def mod_to_class(mod_name):
     return class_name


+def spack_module_to_python_module(mod_name):
+    """Given a Spack module name, returns the name by which it can be
+       imported in Python.
+    """
+    if re.match(r'[0-9]', mod_name):
+        mod_name = 'num' + mod_name
+
+    return mod_name.replace('-', '_')
+
+
+def possible_spack_module_names(python_mod_name):
+    """Given a Python module name, return a list of all possible spack module
+       names that could correspond to it."""
+    mod_name = re.sub(r'^num(\d)', r'\1', python_mod_name)
+
+    parts = re.split(r'(_)', mod_name)
+    options = [['_', '-']] * mod_name.count('_')
+
+    results = []
+    for subs in itertools.product(*options):
+        s = list(parts)
+        s[1::2] = subs
+        results.append(''.join(s))
+
+    return results
+
+
 def valid_module_name(mod_name):
-    """Return whether the mod_name is valid for use in Spack."""
+    """Return whether mod_name is valid for use in Spack."""
     return bool(re.match(_valid_module_re, mod_name))


+def valid_fully_qualified_module_name(mod_name):
+    """Return whether mod_name is a valid namespaced module name."""
+    return bool(re.match(_valid_fully_qualified_module_re, mod_name))
+
+
 def validate_module_name(mod_name):
     """Raise an exception if mod_name is not valid."""
     if not valid_module_name(mod_name):
         raise InvalidModuleNameError(mod_name)


+def validate_fully_qualified_module_name(mod_name):
+    """Raise an exception if mod_name is not a valid namespaced module name."""
+    if not valid_fully_qualified_module_name(mod_name):
+        raise InvalidFullyQualifiedModuleNameError(mod_name)
+
+
 class InvalidModuleNameError(spack.error.SpackError):
     """Raised when we encounter a bad module name."""
     def __init__(self, name):
         super(InvalidModuleNameError, self).__init__(
             "Invalid module name: " + name)
         self.name = name


+class InvalidFullyQualifiedModuleNameError(spack.error.SpackError):
+    """Raised when we encounter a bad full package name."""
+    def __init__(self, name):
+        super(InvalidFullyQualifiedModuleNameError, self).__init__(
+            "Invalid fully qualified package name: " + name)
+        self.name = name
+
+
+class NamespaceTrie(object):
+    class Element(object):
+        def __init__(self, value):
+            self.value = value
+
+
+    def __init__(self, separator='.'):
+        self._subspaces = {}
+        self._value = None
+        self._sep = separator
+
+
+    def __setitem__(self, namespace, value):
+        first, sep, rest = namespace.partition(self._sep)
+
+        if not first:
+            self._value = NamespaceTrie.Element(value)
+            return
+
+        if first not in self._subspaces:
+            self._subspaces[first] = NamespaceTrie()
+
+        self._subspaces[first][rest] = value
+
+
+    def _get_helper(self, namespace, full_name):
+        first, sep, rest = namespace.partition(self._sep)
+        if not first:
+            if not self._value:
+                raise KeyError("Can't find namespace '%s' in trie" % full_name)
+            return self._value.value
+        elif first not in self._subspaces:
+            raise KeyError("Can't find namespace '%s' in trie" % full_name)
+        else:
+            return self._subspaces[first]._get_helper(rest, full_name)
+
+
+    def __getitem__(self, namespace):
+        return self._get_helper(namespace, namespace)
+
+
+    def is_prefix(self, namespace):
+        """True if the namespace has a value, or if it's the prefix of one that does."""
+        first, sep, rest = namespace.partition(self._sep)
+        if not first:
+            return True
+        elif first not in self._subspaces:
+            return False
+        else:
+            return self._subspaces[first].is_prefix(rest)
+
+
+    def is_leaf(self, namespace):
+        """True if this namespace has no children in the trie."""
+        first, sep, rest = namespace.partition(self._sep)
+        if not first:
+            return bool(self._subspaces)
+        elif first not in self._subspaces:
+            return False
+        else:
+            return self._subspaces[first].is_leaf(rest)
+
+
+    def has_value(self, namespace):
+        """True if there is a value set for the given namespace."""
+        first, sep, rest = namespace.partition(self._sep)
+        if not first:
+            return self._value is not None
+        elif first not in self._subspaces:
+            return False
+        else:
+            return self._subspaces[first].has_value(rest)
+
+
+    def __contains__(self, namespace):
+        """Returns whether a value has been set for the namespace."""
+        return self.has_value(namespace)
+
+
+    def _str_helper(self, stream, level=0):
+        indent = (level * '    ')
+        for name in sorted(self._subspaces):
+            stream.write(indent + name + '\n')
+            if self._value:
+                stream.write(indent + '  ' + repr(self._value.value))
+            stream.write(self._subspaces[name]._str_helper(stream, level+1))
+
+
+    def __str__(self):
+        stream = StringIO()
+        self._str_helper(stream)
+        return stream.getvalue()
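The round-trip between Spack names and Python names in `spack_module_to_python_module` and `possible_spack_module_names` above is easy to see with a couple of inputs (behavior read off the code, shown as an illustrative session):

    >>> spack_module_to_python_module('pmgr-collective')
    'pmgr_collective'
    >>> spack_module_to_python_module('3db')      # leading digit gets a prefix
    'num3db'
    >>> possible_spack_module_names('pmgr_collective')
    ['pmgr_collective', 'pmgr-collective']
    >>> possible_spack_module_names('num3db')     # prefix stripped again
    ['3db']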
201 lib/spack/spack/util/spack_yaml.py Normal file
@ -0,0 +1,201 @@
+##############################################################################
+# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""Enhanced YAML parsing for Spack.
+
+- ``load()`` preserves YAML Marks on returned objects -- this allows
+  us to access file and line information later.
+
+- Our load methods use an ``OrderedDict`` class instead of YAML's
+  default unordered dict.
+
+"""
+import yaml
+from yaml.nodes import *
+from yaml.constructor import ConstructorError
+from yaml.representer import SafeRepresenter
+from ordereddict_backport import OrderedDict
+
+# Only export load and dump
+__all__ = ['load', 'dump']
+
+# Make new classes so we can add custom attributes.
+# Also, use OrderedDict instead of just dict.
+class syaml_dict(OrderedDict):
+    def __repr__(self):
+        mappings = ('%r: %r' % (k,v) for k,v in self.items())
+        return '{%s}' % ', '.join(mappings)
+
+class syaml_list(list):
+    __repr__ = list.__repr__
+
+class syaml_str(str):
+    __repr__ = str.__repr__
+
+
+def mark(obj, node):
+    """Add start and end markers to an object."""
+    obj._start_mark = node.start_mark
+    obj._end_mark = node.end_mark
+
+
+class OrderedLineLoader(yaml.Loader):
+    """YAML loader that preserves order and line numbers.
+
+       Mappings read in by this loader behave like an ordered dict.
+       Sequences, mappings, and strings also have new attributes,
+       ``_start_mark`` and ``_end_mark``, that preserve YAML line
+       information in the output data.
+
+    """
+    #
+    # Override construct_yaml_* so that they build our derived types,
+    # which allows us to add new attributes to them.
+    #
+    # The standard YAML constructors return empty instances and fill
+    # in with mappings later.  We preserve this behavior.
+    #
+    def construct_yaml_str(self, node):
+        value = self.construct_scalar(node)
+        try:
+            value = value.encode('ascii')
+        except UnicodeEncodeError:
+            pass
+        value = syaml_str(value)
+        mark(value, node)
+        return value
+
+
+    def construct_yaml_seq(self, node):
+        data = syaml_list()
+        mark(data, node)
+        yield data
+        data.extend(self.construct_sequence(node))
+
+
+    def construct_yaml_map(self, node):
+        data = syaml_dict()
+        mark(data, node)
+        yield data
+        value = self.construct_mapping(node)
+        data.update(value)
+
+    #
+    # Override the ``construct_*`` routines.  These fill in the empty
+    # objects yielded by the above ``construct_yaml_*`` methods.
+    #
+    def construct_sequence(self, node, deep=False):
+        if not isinstance(node, SequenceNode):
+            raise ConstructorError(None, None,
+                    "expected a sequence node, but found %s" % node.id,
+                    node.start_mark)
+        value = syaml_list(self.construct_object(child, deep=deep)
+                           for child in node.value)
+        mark(value, node)
+        return value
+
+
+    def construct_mapping(self, node, deep=False):
+        """Store mappings as OrderedDicts instead of as regular python
+           dictionaries to preserve file ordering."""
+        if not isinstance(node, MappingNode):
+            raise ConstructorError(None, None,
+                    "expected a mapping node, but found %s" % node.id,
+                    node.start_mark)
+
+        mapping = syaml_dict()
+        for key_node, value_node in node.value:
+            key = self.construct_object(key_node, deep=deep)
+            try:
+                hash(key)
+            except TypeError, exc:
+                raise ConstructorError("while constructing a mapping", node.start_mark,
+                        "found unacceptable key (%s)" % exc, key_node.start_mark)
+            value = self.construct_object(value_node, deep=deep)
+            if key in mapping:
+                raise ConstructorError("while constructing a mapping", node.start_mark,
+                        "found already in-use key (%s)" % key, key_node.start_mark)
+            mapping[key] = value
+
+        mark(mapping, node)
+        return mapping
+
+# register above new constructors
+OrderedLineLoader.add_constructor(u'tag:yaml.org,2002:map', OrderedLineLoader.construct_yaml_map)
+OrderedLineLoader.add_constructor(u'tag:yaml.org,2002:seq', OrderedLineLoader.construct_yaml_seq)
+OrderedLineLoader.add_constructor(u'tag:yaml.org,2002:str', OrderedLineLoader.construct_yaml_str)
+
+
+class OrderedLineDumper(yaml.Dumper):
+    """Dumper that preserves ordering and formats ``syaml_*`` objects.
+
+       This dumper preserves insertion ordering of ``syaml_dict`` objects
+       when they're written out.  It also has some custom formatters
+       for ``syaml_*`` objects so that they are formatted like their
+       regular Python equivalents, instead of ugly YAML pyobjects.
+
+    """
+    def represent_mapping(self, tag, mapping, flow_style=None):
+        value = []
+        node = MappingNode(tag, value, flow_style=flow_style)
+        if self.alias_key is not None:
+            self.represented_objects[self.alias_key] = node
+        best_style = True
+        if hasattr(mapping, 'items'):
+            # if it's a syaml_dict, preserve OrderedDict order.
+            # Otherwise do the default thing.
+            sort = not isinstance(mapping, syaml_dict)
+            mapping = mapping.items()
+            if sort:
+                mapping.sort()
+
+        for item_key, item_value in mapping:
+            node_key = self.represent_data(item_key)
+            node_value = self.represent_data(item_value)
+            if not (isinstance(node_key, ScalarNode) and not node_key.style):
+                best_style = False
+            if not (isinstance(node_value, ScalarNode) and not node_value.style):
+                best_style = False
+            value.append((node_key, node_value))
+        if flow_style is None:
+            if self.default_flow_style is not None:
+                node.flow_style = self.default_flow_style
+            else:
+                node.flow_style = best_style
+        return node
+
+# Make our special objects look like normal YAML ones.
+OrderedLineDumper.add_representer(syaml_dict, OrderedLineDumper.represent_dict)
+OrderedLineDumper.add_representer(syaml_list, OrderedLineDumper.represent_list)
+OrderedLineDumper.add_representer(syaml_str, OrderedLineDumper.represent_str)
+
+
+def load(*args, **kwargs):
+    """Load, but modify the loader instance so that it will add line
+       attributes to the returned object."""
+    kwargs['Loader'] = OrderedLineLoader
+    return yaml.load(*args, **kwargs)
+
+
+def dump(*args, **kwargs):
+    kwargs['Dumper'] = OrderedLineDumper
+    return yaml.dump(*args, **kwargs)
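And the ordering guarantee on the dump side, as a small sketch (the keys and paths are invented): a `syaml_dict` keeps insertion order in the output, while a plain dict is sorted as usual:

    import spack.util.spack_yaml as syaml

    d = syaml.syaml_dict([('zlib', '/opt/zlib'), ('mpich', '/opt/mpich')])
    print(syaml.dump({'paths': d}, default_flow_style=False))
    # paths:
    #   zlib: /opt/zlib      <- insertion order preserved, not alphabetized
    #   mpich: /opt/mpich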
@ -1,12 +0,0 @@
-compilers:
-  all:
-    clang@3.3:
-      cc: /path/to/clang
-      cxx: /path/to/clang++
-      f77: None
-      fc: None
-    gcc@4.5.0:
-      cc: /path/to/gcc
-      cxx: /path/to/g++
-      f77: /path/to/gfortran
-      fc: /path/to/gfortran
Some files were not shown because too many files have changed in this diff.