Update externals to work with Python 3

- Update YAML version to support Python 3.
- Python 3 support for ordereddict backport.
- Exclude Python 3 YAML from version tests.
- Vendor six into Spack.
- Make Python version-check tests work with Python 3.
- Add ability to add version-check exceptions with '# nopyqver' line comments.

parent bc404532ea
commit 0331b08c64

44 changed files with 7067 additions and 130 deletions
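The '# nopyqver' hook itself lives in Spack's version-check test, which is outside this diff; below is a minimal sketch of how such a line-comment exclusion could be applied to pyqver output (all names are illustrative, not Spack's actual test code):

def filter_nopyqver(source, versions):
    # versions maps a version tuple to a list of (lineno, message) pairs,
    # as returned by pyqver's get_versions(); drop any trigger whose
    # source line carries a '# nopyqver' marker.
    lines = source.split('\n')
    filtered = {}
    for ver, reasons in versions.items():
        kept = [(lineno, msg) for lineno, msg in reasons
                if 'nopyqver' not in lines[lineno - 1]]
        if kept:
            filtered[ver] = kept
    return filtered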
@@ -46,7 +46,6 @@ addons:
     packages:
       - gfortran
       - graphviz
-      - libyaml-dev
 
 # Work around Travis's lack of support for Python on OSX
 before_install:
@@ -46,6 +46,13 @@ sys.path.insert(0, SPACK_LIB_PATH)
 SPACK_EXTERNAL_LIBS = os.path.join(SPACK_LIB_PATH, "external")
 sys.path.insert(0, SPACK_EXTERNAL_LIBS)
 
+# Handle vendoring of YAML specially, as it has two versions.
+if sys.version_info[0] == 2:
+    SPACK_YAML_LIBS = os.path.join(SPACK_EXTERNAL_LIBS, "yaml/lib")
+else:
+    SPACK_YAML_LIBS = os.path.join(SPACK_EXTERNAL_LIBS, "yaml/lib3")
+sys.path.insert(0, SPACK_YAML_LIBS)
+
 # Quick and dirty check to clean orphaned .pyc files left over from
 # previous revisions. These files were present in earlier versions of
 # Spack, were removed, but shadow system modules that Spack still
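With both trees vendored, the path selection above means a plain "import yaml" resolves to the interpreter-appropriate copy. A quick sanity check (illustrative, assuming the sys.path setup above has already run):

import sys
import yaml

subdir = "lib" if sys.version_info[0] == 2 else "lib3"
# yaml.__file__ should point into external/yaml/<subdir>/yaml/
print(subdir, yaml.__file__)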
@@ -51,6 +51,10 @@
 # documentation root, use os.path.abspath to make it absolute, like shown here.
 sys.path.insert(0, os.path.abspath('exts'))
 sys.path.insert(0, os.path.abspath('../external'))
+if sys.version_info[0] < 3:
+    sys.path.insert(0, os.path.abspath('../external/yaml/lib'))
+else:
+    sys.path.insert(0, os.path.abspath('../external/yaml/lib3'))
 sys.path.append(os.path.abspath('..'))
 
 # Add the Spack bin directory to the path so that we can use its output in docs.
@@ -110,13 +114,13 @@
 for line in fileinput.input('spack.rst', inplace=1):
     if handling_spack:
         if not line.startswith('   :noindex:'):
-            print '   :noindex: %s' % ' '.join(spack.__all__)
+            print('   :noindex: %s' % ' '.join(spack.__all__))
         handling_spack = False
 
     if line.startswith('.. automodule::'):
         handling_spack = (line == '.. automodule:: spack\n')
 
-    print line,
+    sys.stdout.write(line)
 
 # Enable todo items
 todo_include_todos = True
lib/spack/external/_pytest/pytester.py (vendored, 2 changes)
@@ -551,7 +551,7 @@ def syspathinsert(self, path=None):
     def _possibly_invalidate_import_caches(self):
         # invalidate caches if we can (py33 and above)
         try:
-            import importlib
+            import importlib  # nopyqver
         except ImportError:
             pass
         else:
lib/spack/external/ordereddict_backport.py (vendored, 8 changes)
@@ -8,7 +8,13 @@
 try:
     from thread import get_ident as _get_ident
 except ImportError:
-    from dummy_thread import get_ident as _get_ident
+    try:
+        from dummy_thread import get_ident as _get_ident
+    except ImportError:
+        try:
+            from _dummy_thread import get_ident as _get_ident
+        except ImportError:
+            from threading import get_ident as _get_ident  # nopyqver
 
 try:
     from _abcoll import KeysView, ValuesView, ItemsView
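The deeper fallback chain tracks where get_ident lives across interpreters: Python 2 provides thread/dummy_thread, while Python 3 renamed them to _thread/_dummy_thread and exposes threading.get_ident, the spelling that would trip the Python 2 version check, hence its '# nopyqver' marker. With the patch, the backport imports cleanly on both majors; an illustrative check:

from ordereddict_backport import OrderedDict

d = OrderedDict()
d['b'] = 1
d['a'] = 2
print(list(d.keys()))  # insertion order preserved on 2 and 3: ['b', 'a']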
lib/spack/external/pyqver2.py (vendored, 70 changes)
@@ -57,11 +57,7 @@
     "hmac": (2, 2),
     "hotshot": (2, 2),
     "HTMLParser": (2, 2),
-    # skip importlib until we can conditionally skip for pytest.
-    # pytest tries to import this and catches the exception, but
-    # the test will still fail.
-    # TODO: can we excelude with a comment like '# flake: noqa?'
-    # "importlib": (2, 7),
+    "importlib": (2, 7),
     "inspect": (2, 1),
     "io": (2, 6),
     "itertools": (2, 3),
@@ -262,7 +258,7 @@ def visitYield(self, node):
         self.add(node, (2,2), "yield expression")
         self.default(node)
 
-def get_versions(source):
+def get_versions(source, filename=None):
     """Return information about the Python versions required for specific features.
 
     The return value is a dictionary with keys as a version number as a tuple
@@ -346,65 +342,3 @@ def qver(source):
     #(2, 6)
     """
     return max(get_versions(source).keys())
-
-
-if __name__ == '__main__':
-
-    Verbose = False
-    MinVersion = (2, 3)
-    Lint = False
-
-    files = []
-    i = 1
-    while i < len(sys.argv):
-        a = sys.argv[i]
-        if a == "--test":
-            import doctest
-            doctest.testmod()
-            sys.exit(0)
-        if a == "-v" or a == "--verbose":
-            Verbose = True
-        elif a == "-l" or a == "--lint":
-            Lint = True
-        elif a == "-m" or a == "--min-version":
-            i += 1
-            MinVersion = tuple(map(int, sys.argv[i].split(".")))
-        else:
-            files.append(a)
-        i += 1
-
-    if not files:
-        print >>sys.stderr, """Usage: %s [options] source ...
-
-        Report minimum Python version required to run given source files.
-
-        -m x.y or --min-version x.y (default 2.3)
-                report version triggers at or above version x.y in verbose mode
-
-        -v or --verbose
-                print more detailed report of version triggers for each version
-        """ % sys.argv[0]
-        sys.exit(1)
-
-    for fn in files:
-        try:
-            f = open(fn)
-            source = f.read()
-            f.close()
-            ver = get_versions(source)
-            if Verbose:
-                print fn
-                for v in sorted([k for k in ver.keys() if k >= MinVersion], reverse=True):
-                    reasons = [x for x in uniq(ver[v]) if x]
-                    if reasons:
-                        # each reason is (lineno, message)
-                        print "\t%s\t%s" % (".".join(map(str, v)), ", ".join([x[1] for x in reasons]))
-            elif Lint:
-                for v in sorted([k for k in ver.keys() if k >= MinVersion], reverse=True):
-                    reasons = [x for x in uniq(ver[v]) if x]
-                    for r in reasons:
-                        # each reason is (lineno, message)
-                        print "%s:%s: %s %s" % (fn, r[0], ".".join(map(str, v)), r[1])
-            else:
-                print "%s\t%s" % (".".join(map(str, max(ver.keys()))), fn)
-        except SyntaxError, x:
-            print "%s: syntax error compiling with Python %s: %s" % (fn, platform.python_version(), x)
lib/spack/external/pyqver3.py (vendored, new executable file, 248 lines)
@@ -0,0 +1,248 @@
+#!/usr/bin/env python3
+#
+# pyqver3.py
+# by Greg Hewgill
+# https://github.com/ghewgill/pyqver
+#
+# This software is provided 'as-is', without any express or implied
+# warranty. In no event will the author be held liable for any damages
+# arising from the use of this software.
+#
+# Permission is granted to anyone to use this software for any purpose,
+# including commercial applications, and to alter it and redistribute it
+# freely, subject to the following restrictions:
+#
+# 1. The origin of this software must not be misrepresented; you must not
+#    claim that you wrote the original software. If you use this software
+#    in a product, an acknowledgment in the product documentation would be
+#    appreciated but is not required.
+# 2. Altered source versions must be plainly marked as such, and must not be
+#    misrepresented as being the original software.
+# 3. This notice may not be removed or altered from any source distribution.
+#
+# Copyright (c) 2009-2013 Greg Hewgill http://hewgill.com
+#
+import ast
+import platform
+import sys
+
+StandardModules = {
+    # skip argparse now that it's in lib/spack/external
+    # "argparse": (3, 2),
+    "faulthandler": (3, 3),
+    "importlib": (3, 1),
+    "ipaddress": (3, 3),
+    "lzma": (3, 3),
+    "tkinter.ttk": (3, 1),
+    "unittest.mock": (3, 3),
+    "venv": (3, 3),
+}
+
+Functions = {
+    "bytearray.maketrans": (3, 1),
+    "bytes.maketrans": (3, 1),
+    "bz2.open": (3, 3),
+    "collections.Counter": (3, 1),
+    "collections.OrderedDict": (3, 1),
+    "crypt.mksalt": (3, 3),
+    "email.generator.BytesGenerator": (3, 2),
+    "email.message_from_binary_file": (3, 2),
+    "email.message_from_bytes": (3, 2),
+    "functools.lru_cache": (3, 2),
+    "gzip.compress": (3, 2),
+    "gzip.decompress": (3, 2),
+    "inspect.getclosurevars": (3, 3),
+    "inspect.getgeneratorlocals": (3, 3),
+    "inspect.getgeneratorstate": (3, 2),
+    "itertools.combinations_with_replacement": (3, 1),
+    "itertools.compress": (3, 1),
+    "logging.config.dictConfig": (3, 2),
+    "logging.NullHandler": (3, 1),
+    "math.erf": (3, 2),
+    "math.erfc": (3, 2),
+    "math.expm1": (3, 2),
+    "math.gamma": (3, 2),
+    "math.isfinite": (3, 2),
+    "math.lgamma": (3, 2),
+    "math.log2": (3, 3),
+    "os.environb": (3, 2),
+    "os.fsdecode": (3, 2),
+    "os.fsencode": (3, 2),
+    "os.fwalk": (3, 3),
+    "os.getenvb": (3, 2),
+    "os.get_exec_path": (3, 2),
+    "os.getgrouplist": (3, 3),
+    "os.getpriority": (3, 3),
+    "os.getresgid": (3, 2),
+    "os.getresuid": (3, 2),
+    "os.get_terminal_size": (3, 3),
+    "os.getxattr": (3, 3),
+    "os.initgroups": (3, 2),
+    "os.listxattr": (3, 3),
+    "os.lockf": (3, 3),
+    "os.pipe2": (3, 3),
+    "os.posix_fadvise": (3, 3),
+    "os.posix_fallocate": (3, 3),
+    "os.pread": (3, 3),
+    "os.pwrite": (3, 3),
+    "os.readv": (3, 3),
+    "os.removexattr": (3, 3),
+    "os.replace": (3, 3),
+    "os.sched_get_priority_max": (3, 3),
+    "os.sched_get_priority_min": (3, 3),
+    "os.sched_getaffinity": (3, 3),
+    "os.sched_getparam": (3, 3),
+    "os.sched_getscheduler": (3, 3),
+    "os.sched_rr_get_interval": (3, 3),
+    "os.sched_setaffinity": (3, 3),
+    "os.sched_setparam": (3, 3),
+    "os.sched_setscheduler": (3, 3),
+    "os.sched_yield": (3, 3),
+    "os.sendfile": (3, 3),
+    "os.setpriority": (3, 3),
+    "os.setresgid": (3, 2),
+    "os.setresuid": (3, 2),
+    "os.setxattr": (3, 3),
+    "os.sync": (3, 3),
+    "os.truncate": (3, 3),
+    "os.waitid": (3, 3),
+    "os.writev": (3, 3),
+    "shutil.chown": (3, 3),
+    "shutil.disk_usage": (3, 3),
+    "shutil.get_archive_formats": (3, 3),
+    "shutil.get_terminal_size": (3, 3),
+    "shutil.get_unpack_formats": (3, 3),
+    "shutil.make_archive": (3, 3),
+    "shutil.register_archive_format": (3, 3),
+    "shutil.register_unpack_format": (3, 3),
+    "shutil.unpack_archive": (3, 3),
+    "shutil.unregister_archive_format": (3, 3),
+    "shutil.unregister_unpack_format": (3, 3),
+    "shutil.which": (3, 3),
+    "signal.pthread_kill": (3, 3),
+    "signal.pthread_sigmask": (3, 3),
+    "signal.sigpending": (3, 3),
+    "signal.sigtimedwait": (3, 3),
+    "signal.sigwait": (3, 3),
+    "signal.sigwaitinfo": (3, 3),
+    "socket.CMSG_LEN": (3, 3),
+    "socket.CMSG_SPACE": (3, 3),
+    "socket.fromshare": (3, 3),
+    "socket.if_indextoname": (3, 3),
+    "socket.if_nameindex": (3, 3),
+    "socket.if_nametoindex": (3, 3),
+    "socket.sethostname": (3, 3),
+    "ssl.match_hostname": (3, 2),
+    "ssl.RAND_bytes": (3, 3),
+    "ssl.RAND_pseudo_bytes": (3, 3),
+    "ssl.SSLContext": (3, 2),
+    "ssl.SSLEOFError": (3, 3),
+    "ssl.SSLSyscallError": (3, 3),
+    "ssl.SSLWantReadError": (3, 3),
+    "ssl.SSLWantWriteError": (3, 3),
+    "ssl.SSLZeroReturnError": (3, 3),
+    "stat.filemode": (3, 3),
+    "textwrap.indent": (3, 3),
+    "threading.get_ident": (3, 3),
+    "time.clock_getres": (3, 3),
+    "time.clock_gettime": (3, 3),
+    "time.clock_settime": (3, 3),
+    "time.get_clock_info": (3, 3),
+    "time.monotonic": (3, 3),
+    "time.perf_counter": (3, 3),
+    "time.process_time": (3, 3),
+    "types.new_class": (3, 3),
+    "types.prepare_class": (3, 3),
+}
+
+def uniq(a):
+    if len(a) == 0:
+        return []
+    else:
+        return [a[0]] + uniq([x for x in a if x != a[0]])
+
+class NodeChecker(ast.NodeVisitor):
+    def __init__(self):
+        self.vers = dict()
+        self.vers[(3,0)] = []
+    def add(self, node, ver, msg):
+        if ver not in self.vers:
+            self.vers[ver] = []
+        self.vers[ver].append((node.lineno, msg))
+    def visit_Call(self, node):
+        def rollup(n):
+            if isinstance(n, ast.Name):
+                return n.id
+            elif isinstance(n, ast.Attribute):
+                r = rollup(n.value)
+                if r:
+                    return r + "." + n.attr
+        name = rollup(node.func)
+        if name:
+            v = Functions.get(name)
+            if v is not None:
+                self.add(node, v, name)
+        self.generic_visit(node)
+    def visit_Import(self, node):
+        for n in node.names:
+            v = StandardModules.get(n.name)
+            if v is not None:
+                self.add(node, v, n.name)
+        self.generic_visit(node)
+    def visit_ImportFrom(self, node):
+        v = StandardModules.get(node.module)
+        if v is not None:
+            self.add(node, v, node.module)
+        for n in node.names:
+            name = node.module + "." + n.name
+            v = Functions.get(name)
+            if v is not None:
+                self.add(node, v, name)
+    def visit_Raise(self, node):
+        if isinstance(node.cause, ast.Name) and node.cause.id == "None":
+            self.add(node, (3,3), "raise ... from None")
+    def visit_YieldFrom(self, node):
+        self.add(node, (3,3), "yield from")
+
+def get_versions(source, filename=None):
+    """Return information about the Python versions required for specific features.
+
+    The return value is a dictionary with keys as a version number as a tuple
+    (for example Python 3.1 is (3,1)) and the value are a list of features that
+    require the indicated Python version.
+    """
+    tree = ast.parse(source, filename=filename)
+    checker = NodeChecker()
+    checker.visit(tree)
+    return checker.vers
+
+def v33(source):
+    if sys.version_info >= (3, 3):
+        return qver(source)
+    else:
+        print("Not all features tested, run --test with Python 3.3", file=sys.stderr)
+        return (3, 3)
+
+def qver(source):
+    """Return the minimum Python version required to run a particular bit of code.
+
+    >>> qver('print("hello world")')
+    (3, 0)
+    >>> qver("import importlib")
+    (3, 1)
+    >>> qver("from importlib import x")
+    (3, 1)
+    >>> qver("import tkinter.ttk")
+    (3, 1)
+    >>> qver("from collections import Counter")
+    (3, 1)
+    >>> qver("collections.OrderedDict()")
+    (3, 1)
+    >>> qver("import functools\\n@functools.lru_cache()\\ndef f(x): x*x")
+    (3, 2)
+    >>> v33("yield from x")
+    (3, 3)
+    >>> v33("raise x from None")
+    (3, 3)
+    """
+    return max(get_versions(source).keys())
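For reference, the module is driven the same way as pyqver2; an illustrative driver using only the API shown above (the input file name is hypothetical):

import pyqver3

path = "some_module.py"  # hypothetical input file
source = open(path).read()
for ver, reasons in sorted(pyqver3.get_versions(source, filename=path).items()):
    for lineno, msg in reasons:
        print("%s:%d: requires %d.%d (%s)" % (path, lineno, ver[0], ver[1], msg))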
lib/spack/external/six.py (vendored, new file, 886 lines)
@@ -0,0 +1,886 @@
+# Copyright (c) 2010-2017 Benjamin Peterson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""Utilities for writing code that runs on Python 2 and 3"""
+
+from __future__ import absolute_import
+
+import functools
+import itertools
+import operator
+import sys
+import types
+
+__author__ = "Benjamin Peterson <benjamin@python.org>"
+__version__ = "1.10.0"
+
+
+# Useful for very coarse version differentiation.
+PY2 = sys.version_info[0] == 2
+PY3 = sys.version_info[0] == 3
+PY34 = sys.version_info[0:2] >= (3, 4)
+
+if PY3:
+    string_types = str,
+    integer_types = int,
+    class_types = type,
+    text_type = str
+    binary_type = bytes
+
+    MAXSIZE = sys.maxsize
+else:
+    string_types = basestring,
+    integer_types = (int, long)
+    class_types = (type, types.ClassType)
+    text_type = unicode
+    binary_type = str
+
+    if sys.platform.startswith("java"):
+        # Jython always uses 32 bits.
+        MAXSIZE = int((1 << 31) - 1)
+    else:
+        # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
+        class X(object):
+
+            def __len__(self):
+                return 1 << 31
+        try:
+            len(X())
+        except OverflowError:
+            # 32-bit
+            MAXSIZE = int((1 << 31) - 1)
+        else:
+            # 64-bit
+            MAXSIZE = int((1 << 63) - 1)
+        del X
+
+
+def _add_doc(func, doc):
+    """Add documentation to a function."""
+    func.__doc__ = doc
+
+
+def _import_module(name):
+    """Import module, returning the module after the last dot."""
+    __import__(name)
+    return sys.modules[name]
+
+
+class _LazyDescr(object):
+
+    def __init__(self, name):
+        self.name = name
+
+    def __get__(self, obj, tp):
+        result = self._resolve()
+        setattr(obj, self.name, result)  # Invokes __set__.
+        try:
+            # This is a bit ugly, but it avoids running this again by
+            # removing this descriptor.
+            delattr(obj.__class__, self.name)
+        except AttributeError:
+            pass
+        return result
+
+
+class MovedModule(_LazyDescr):
+
+    def __init__(self, name, old, new=None):
+        super(MovedModule, self).__init__(name)
+        if PY3:
+            if new is None:
+                new = name
+            self.mod = new
+        else:
+            self.mod = old
+
+    def _resolve(self):
+        return _import_module(self.mod)
+
+    def __getattr__(self, attr):
+        _module = self._resolve()
+        value = getattr(_module, attr)
+        setattr(self, attr, value)
+        return value
+
+
+class _LazyModule(types.ModuleType):
+
+    def __init__(self, name):
+        super(_LazyModule, self).__init__(name)
+        self.__doc__ = self.__class__.__doc__
+
+    def __dir__(self):
+        attrs = ["__doc__", "__name__"]
+        attrs += [attr.name for attr in self._moved_attributes]
+        return attrs
+
+    # Subclasses should override this
+    _moved_attributes = []
+
+
+class MovedAttribute(_LazyDescr):
+
+    def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
+        super(MovedAttribute, self).__init__(name)
+        if PY3:
+            if new_mod is None:
+                new_mod = name
+            self.mod = new_mod
+            if new_attr is None:
+                if old_attr is None:
+                    new_attr = name
+                else:
+                    new_attr = old_attr
+            self.attr = new_attr
+        else:
+            self.mod = old_mod
+            if old_attr is None:
+                old_attr = name
+            self.attr = old_attr
+
+    def _resolve(self):
+        module = _import_module(self.mod)
+        return getattr(module, self.attr)
+
+
+class _SixMetaPathImporter(object):
+
+    """
+    A meta path importer to import six.moves and its submodules.
+
+    This class implements a PEP302 finder and loader. It should be compatible
+    with Python 2.5 and all existing versions of Python3
+    """
+
+    def __init__(self, six_module_name):
+        self.name = six_module_name
+        self.known_modules = {}
+
+    def _add_module(self, mod, *fullnames):
+        for fullname in fullnames:
+            self.known_modules[self.name + "." + fullname] = mod
+
+    def _get_module(self, fullname):
+        return self.known_modules[self.name + "." + fullname]
+
+    def find_module(self, fullname, path=None):
+        if fullname in self.known_modules:
+            return self
+        return None
+
+    def __get_module(self, fullname):
+        try:
+            return self.known_modules[fullname]
+        except KeyError:
+            raise ImportError("This loader does not know module " + fullname)
+
+    def load_module(self, fullname):
+        try:
+            # in case of a reload
+            return sys.modules[fullname]
+        except KeyError:
+            pass
+        mod = self.__get_module(fullname)
+        if isinstance(mod, MovedModule):
+            mod = mod._resolve()
+        else:
+            mod.__loader__ = self
+        sys.modules[fullname] = mod
+        return mod
+
+    def is_package(self, fullname):
+        """
+        Return true, if the named module is a package.
+
+        We need this method to get correct spec objects with
+        Python 3.4 (see PEP451)
+        """
+        return hasattr(self.__get_module(fullname), "__path__")
+
+    def get_code(self, fullname):
+        """Return None
+
+        Required, if is_package is implemented"""
+        self.__get_module(fullname)  # eventually raises ImportError
+        return None
+    get_source = get_code  # same as get_code
+
+_importer = _SixMetaPathImporter(__name__)
+
+
+class _MovedItems(_LazyModule):
+
+    """Lazy loading of moved objects"""
+    __path__ = []  # mark as package
+
+
+_moved_attributes = [
+    MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
+    MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
+    MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
+    MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
+    MovedAttribute("intern", "__builtin__", "sys"),
+    MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+    MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
+    MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
+    MovedAttribute("getstatusoutput", "commands", "subprocess"),
+    MovedAttribute("getoutput", "commands", "subprocess"),
+    MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
+    MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
+    MovedAttribute("reduce", "__builtin__", "functools"),
+    MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
+    MovedAttribute("StringIO", "StringIO", "io"),
+    MovedAttribute("UserDict", "UserDict", "collections"),
+    MovedAttribute("UserList", "UserList", "collections"),
+    MovedAttribute("UserString", "UserString", "collections"),
+    MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
+    MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
+    MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
+    MovedModule("builtins", "__builtin__"),
+    MovedModule("configparser", "ConfigParser"),
+    MovedModule("copyreg", "copy_reg"),
+    MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
+    MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
+    MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
+    MovedModule("http_cookies", "Cookie", "http.cookies"),
+    MovedModule("html_entities", "htmlentitydefs", "html.entities"),
+    MovedModule("html_parser", "HTMLParser", "html.parser"),
+    MovedModule("http_client", "httplib", "http.client"),
+    MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
+    MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
+    MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
+    MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
+    MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
+    MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
+    MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
+    MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
+    MovedModule("cPickle", "cPickle", "pickle"),
+    MovedModule("queue", "Queue"),
+    MovedModule("reprlib", "repr"),
+    MovedModule("socketserver", "SocketServer"),
+    MovedModule("_thread", "thread", "_thread"),
+    MovedModule("tkinter", "Tkinter"),
+    MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
+    MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
+    MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
+    MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
+    MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
+    MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
+    MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
+    MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
+    MovedModule("tkinter_colorchooser", "tkColorChooser",
+                "tkinter.colorchooser"),
+    MovedModule("tkinter_commondialog", "tkCommonDialog",
+                "tkinter.commondialog"),
+    MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
+    MovedModule("tkinter_font", "tkFont", "tkinter.font"),
+    MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
+    MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
+                "tkinter.simpledialog"),
+    MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
+    MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
+    MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
+    MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
+    MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
+    MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
+]
+# Add windows specific modules.
+if sys.platform == "win32":
+    _moved_attributes += [
+        MovedModule("winreg", "_winreg"),
+    ]
+
+for attr in _moved_attributes:
+    setattr(_MovedItems, attr.name, attr)
+    if isinstance(attr, MovedModule):
+        _importer._add_module(attr, "moves." + attr.name)
+del attr
+
+_MovedItems._moved_attributes = _moved_attributes
+
+moves = _MovedItems(__name__ + ".moves")
+_importer._add_module(moves, "moves")
+
+
+class Module_six_moves_urllib_parse(_LazyModule):
+
+    """Lazy loading of moved objects in six.moves.urllib_parse"""
+
+
+_urllib_parse_moved_attributes = [
+    MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
+    MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
+    MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
+    MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
+    MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
+    MovedAttribute("urljoin", "urlparse", "urllib.parse"),
+    MovedAttribute("urlparse", "urlparse", "urllib.parse"),
+    MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
+    MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
+    MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
+    MovedAttribute("quote", "urllib", "urllib.parse"),
+    MovedAttribute("quote_plus", "urllib", "urllib.parse"),
+    MovedAttribute("unquote", "urllib", "urllib.parse"),
+    MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
+    MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"),
+    MovedAttribute("urlencode", "urllib", "urllib.parse"),
+    MovedAttribute("splitquery", "urllib", "urllib.parse"),
+    MovedAttribute("splittag", "urllib", "urllib.parse"),
+    MovedAttribute("splituser", "urllib", "urllib.parse"),
+    MovedAttribute("splitvalue", "urllib", "urllib.parse"),
+    MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
+    MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
+    MovedAttribute("uses_params", "urlparse", "urllib.parse"),
+    MovedAttribute("uses_query", "urlparse", "urllib.parse"),
+    MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
+]
+for attr in _urllib_parse_moved_attributes:
+    setattr(Module_six_moves_urllib_parse, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
+                      "moves.urllib_parse", "moves.urllib.parse")
+
+
+class Module_six_moves_urllib_error(_LazyModule):
+
+    """Lazy loading of moved objects in six.moves.urllib_error"""
+
+
+_urllib_error_moved_attributes = [
+    MovedAttribute("URLError", "urllib2", "urllib.error"),
+    MovedAttribute("HTTPError", "urllib2", "urllib.error"),
+    MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
+]
+for attr in _urllib_error_moved_attributes:
+    setattr(Module_six_moves_urllib_error, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
+                      "moves.urllib_error", "moves.urllib.error")
+
+
+class Module_six_moves_urllib_request(_LazyModule):
+
+    """Lazy loading of moved objects in six.moves.urllib_request"""
+
+
+_urllib_request_moved_attributes = [
+    MovedAttribute("urlopen", "urllib2", "urllib.request"),
+    MovedAttribute("install_opener", "urllib2", "urllib.request"),
+    MovedAttribute("build_opener", "urllib2", "urllib.request"),
+    MovedAttribute("pathname2url", "urllib", "urllib.request"),
+    MovedAttribute("url2pathname", "urllib", "urllib.request"),
+    MovedAttribute("getproxies", "urllib", "urllib.request"),
+    MovedAttribute("Request", "urllib2", "urllib.request"),
+    MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
+    MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
+    MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
+    MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
+    MovedAttribute("FileHandler", "urllib2", "urllib.request"),
+    MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
+    MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
+    MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
+    MovedAttribute("urlretrieve", "urllib", "urllib.request"),
+    MovedAttribute("urlcleanup", "urllib", "urllib.request"),
+    MovedAttribute("URLopener", "urllib", "urllib.request"),
+    MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
+    MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
+]
+for attr in _urllib_request_moved_attributes:
+    setattr(Module_six_moves_urllib_request, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
+                      "moves.urllib_request", "moves.urllib.request")
+
+
+class Module_six_moves_urllib_response(_LazyModule):
+
+    """Lazy loading of moved objects in six.moves.urllib_response"""
+
+
+_urllib_response_moved_attributes = [
+    MovedAttribute("addbase", "urllib", "urllib.response"),
+    MovedAttribute("addclosehook", "urllib", "urllib.response"),
+    MovedAttribute("addinfo", "urllib", "urllib.response"),
+    MovedAttribute("addinfourl", "urllib", "urllib.response"),
+]
+for attr in _urllib_response_moved_attributes:
+    setattr(Module_six_moves_urllib_response, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
+                      "moves.urllib_response", "moves.urllib.response")
+
+
+class Module_six_moves_urllib_robotparser(_LazyModule):
+
+    """Lazy loading of moved objects in six.moves.urllib_robotparser"""
+
+
+_urllib_robotparser_moved_attributes = [
+    MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
+]
+for attr in _urllib_robotparser_moved_attributes:
+    setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
+                      "moves.urllib_robotparser", "moves.urllib.robotparser")
+
+
+class Module_six_moves_urllib(types.ModuleType):
+
+    """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
+    __path__ = []  # mark as package
+    parse = _importer._get_module("moves.urllib_parse")
+    error = _importer._get_module("moves.urllib_error")
+    request = _importer._get_module("moves.urllib_request")
+    response = _importer._get_module("moves.urllib_response")
+    robotparser = _importer._get_module("moves.urllib_robotparser")
+
+    def __dir__(self):
+        return ['parse', 'error', 'request', 'response', 'robotparser']
+
+_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
+                      "moves.urllib")
+
+
+def add_move(move):
+    """Add an item to six.moves."""
+    setattr(_MovedItems, move.name, move)
+
+
+def remove_move(name):
+    """Remove item from six.moves."""
+    try:
+        delattr(_MovedItems, name)
+    except AttributeError:
+        try:
+            del moves.__dict__[name]
+        except KeyError:
+            raise AttributeError("no such move, %r" % (name,))
+
+
+if PY3:
+    _meth_func = "__func__"
+    _meth_self = "__self__"
+
+    _func_closure = "__closure__"
+    _func_code = "__code__"
+    _func_defaults = "__defaults__"
+    _func_globals = "__globals__"
+else:
+    _meth_func = "im_func"
+    _meth_self = "im_self"
+
+    _func_closure = "func_closure"
+    _func_code = "func_code"
+    _func_defaults = "func_defaults"
+    _func_globals = "func_globals"
+
+
+try:
+    advance_iterator = next
+except NameError:
+    def advance_iterator(it):
+        return it.next()
+next = advance_iterator
+
+
+try:
+    callable = callable
+except NameError:
+    def callable(obj):
+        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
+
+
+if PY3:
+    def get_unbound_function(unbound):
+        return unbound
+
+    create_bound_method = types.MethodType
+
+    def create_unbound_method(func, cls):
+        return func
+
+    Iterator = object
+else:
+    def get_unbound_function(unbound):
+        return unbound.im_func
+
+    def create_bound_method(func, obj):
+        return types.MethodType(func, obj, obj.__class__)
+
+    def create_unbound_method(func, cls):
+        return types.MethodType(func, None, cls)
+
+    class Iterator(object):
+
+        def next(self):
+            return type(self).__next__(self)
+
+    callable = callable
+_add_doc(get_unbound_function,
+         """Get the function out of a possibly unbound function""")
+
+
+get_method_function = operator.attrgetter(_meth_func)
+get_method_self = operator.attrgetter(_meth_self)
+get_function_closure = operator.attrgetter(_func_closure)
+get_function_code = operator.attrgetter(_func_code)
+get_function_defaults = operator.attrgetter(_func_defaults)
+get_function_globals = operator.attrgetter(_func_globals)
+
+
+if PY3:
+    def iterkeys(d, **kw):
+        return iter(d.keys(**kw))
+
+    def itervalues(d, **kw):
+        return iter(d.values(**kw))
+
+    def iteritems(d, **kw):
+        return iter(d.items(**kw))
+
+    def iterlists(d, **kw):
+        return iter(d.lists(**kw))
+
+    viewkeys = operator.methodcaller("keys")
+
+    viewvalues = operator.methodcaller("values")
+
+    viewitems = operator.methodcaller("items")
+else:
+    def iterkeys(d, **kw):
+        return d.iterkeys(**kw)
+
+    def itervalues(d, **kw):
+        return d.itervalues(**kw)
+
+    def iteritems(d, **kw):
+        return d.iteritems(**kw)
+
+    def iterlists(d, **kw):
+        return d.iterlists(**kw)
+
+    viewkeys = operator.methodcaller("viewkeys")
+
+    viewvalues = operator.methodcaller("viewvalues")
+
+    viewitems = operator.methodcaller("viewitems")
+
+_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
+_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
+_add_doc(iteritems,
+         "Return an iterator over the (key, value) pairs of a dictionary.")
+_add_doc(iterlists,
+         "Return an iterator over the (key, [values]) pairs of a dictionary.")
+
+
+if PY3:
+    def b(s):
+        return s.encode("latin-1")
+
+    def u(s):
+        return s
+    unichr = chr
+    import struct
+    int2byte = struct.Struct(">B").pack
+    del struct
+    byte2int = operator.itemgetter(0)
+    indexbytes = operator.getitem
+    iterbytes = iter
+    import io
+    StringIO = io.StringIO
+    BytesIO = io.BytesIO
+    _assertCountEqual = "assertCountEqual"
+    if sys.version_info[1] <= 1:
+        _assertRaisesRegex = "assertRaisesRegexp"
+        _assertRegex = "assertRegexpMatches"
+    else:
+        _assertRaisesRegex = "assertRaisesRegex"
+        _assertRegex = "assertRegex"
+else:
+    def b(s):
+        return s
+    # Workaround for standalone backslash
+
+    def u(s):
+        return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
+    unichr = unichr
+    int2byte = chr
+
+    def byte2int(bs):
+        return ord(bs[0])
+
+    def indexbytes(buf, i):
+        return ord(buf[i])
+    iterbytes = functools.partial(itertools.imap, ord)
+    import StringIO
+    StringIO = BytesIO = StringIO.StringIO
+    _assertCountEqual = "assertItemsEqual"
+    _assertRaisesRegex = "assertRaisesRegexp"
+    _assertRegex = "assertRegexpMatches"
+_add_doc(b, """Byte literal""")
+_add_doc(u, """Text literal""")
+
+
+def assertCountEqual(self, *args, **kwargs):
+    return getattr(self, _assertCountEqual)(*args, **kwargs)
+
+
+def assertRaisesRegex(self, *args, **kwargs):
+    return getattr(self, _assertRaisesRegex)(*args, **kwargs)
+
+
+def assertRegex(self, *args, **kwargs):
+    return getattr(self, _assertRegex)(*args, **kwargs)
+
+
+if PY3:
+    exec_ = getattr(moves.builtins, "exec")
+
+    def reraise(tp, value, tb=None):
+        try:
+            if value is None:
+                value = tp()
+            if value.__traceback__ is not tb:
+                raise value.with_traceback(tb)
+            raise value
+        finally:
+            value = None
+            tb = None
+
+else:
+    def exec_(_code_, _globs_=None, _locs_=None):
+        """Execute code in a namespace."""
+        if _globs_ is None:
+            frame = sys._getframe(1)
+            _globs_ = frame.f_globals
+            if _locs_ is None:
+                _locs_ = frame.f_locals
+            del frame
+        elif _locs_ is None:
+            _locs_ = _globs_
+        exec("""exec _code_ in _globs_, _locs_""")
+
+    exec_("""def reraise(tp, value, tb=None):
+    try:
+        raise tp, value, tb
+    finally:
+        tb = None
+""")
+
+
+if sys.version_info[:2] == (3, 2):
+    exec_("""def raise_from(value, from_value):
+    try:
+        if from_value is None:
+            raise value
+        raise value from from_value
+    finally:
+        value = None
+""")
+elif sys.version_info[:2] > (3, 2):
+    exec_("""def raise_from(value, from_value):
+    try:
+        raise value from from_value
+    finally:
+        value = None
+""")
+else:
+    def raise_from(value, from_value):
+        raise value
+
+
+print_ = getattr(moves.builtins, "print", None)
+if print_ is None:
+    def print_(*args, **kwargs):
+        """The new-style print function for Python 2.4 and 2.5."""
+        fp = kwargs.pop("file", sys.stdout)
+        if fp is None:
+            return
+
+        def write(data):
+            if not isinstance(data, basestring):
+                data = str(data)
+            # If the file has an encoding, encode unicode with it.
+            if (isinstance(fp, file) and
+                    isinstance(data, unicode) and
+                    fp.encoding is not None):
+                errors = getattr(fp, "errors", None)
+                if errors is None:
+                    errors = "strict"
+                data = data.encode(fp.encoding, errors)
+            fp.write(data)
+        want_unicode = False
+        sep = kwargs.pop("sep", None)
+        if sep is not None:
+            if isinstance(sep, unicode):
+                want_unicode = True
+            elif not isinstance(sep, str):
+                raise TypeError("sep must be None or a string")
+        end = kwargs.pop("end", None)
+        if end is not None:
+            if isinstance(end, unicode):
+                want_unicode = True
+            elif not isinstance(end, str):
+                raise TypeError("end must be None or a string")
+        if kwargs:
+            raise TypeError("invalid keyword arguments to print()")
+        if not want_unicode:
+            for arg in args:
+                if isinstance(arg, unicode):
+                    want_unicode = True
+                    break
+        if want_unicode:
+            newline = unicode("\n")
+            space = unicode(" ")
+        else:
+            newline = "\n"
+            space = " "
+        if sep is None:
+            sep = space
+        if end is None:
+            end = newline
+        for i, arg in enumerate(args):
+            if i:
+                write(sep)
+            write(arg)
+        write(end)
+if sys.version_info[:2] < (3, 3):
+    _print = print_
+
+    def print_(*args, **kwargs):
+        fp = kwargs.get("file", sys.stdout)
+        flush = kwargs.pop("flush", False)
+        _print(*args, **kwargs)
+        if flush and fp is not None:
+            fp.flush()
+
+_add_doc(reraise, """Reraise an exception.""")
+
+if sys.version_info[0:2] < (3, 4):
+    def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
+              updated=functools.WRAPPER_UPDATES):
+        def wrapper(f):
+            f = functools.wraps(wrapped, assigned, updated)(f)
+            f.__wrapped__ = wrapped
+            return f
+        return wrapper
+else:
+    wraps = functools.wraps
+
+
+def with_metaclass(meta, *bases):
+    """Create a base class with a metaclass."""
+    # This requires a bit of explanation: the basic idea is to make a dummy
+    # metaclass for one level of class instantiation that replaces itself with
+    # the actual metaclass.
+    class metaclass(meta):
+
+        def __new__(cls, name, this_bases, d):
+            return meta(name, bases, d)
+    return type.__new__(metaclass, 'temporary_class', (), {})
+
+
+def add_metaclass(metaclass):
+    """Class decorator for creating a class with a metaclass."""
+    def wrapper(cls):
+        orig_vars = cls.__dict__.copy()
+        slots = orig_vars.get('__slots__')
+        if slots is not None:
+            if isinstance(slots, str):
+                slots = [slots]
+            for slots_var in slots:
+                orig_vars.pop(slots_var)
+        orig_vars.pop('__dict__', None)
+        orig_vars.pop('__weakref__', None)
+        return metaclass(cls.__name__, cls.__bases__, orig_vars)
+    return wrapper
+
+
+def python_2_unicode_compatible(klass):
+    """
+    A decorator that defines __unicode__ and __str__ methods under Python 2.
+    Under Python 3 it does nothing.
+
+    To support Python 2 and 3 with a single code base, define a __str__ method
+    returning text and apply this decorator to the class.
+    """
+    if PY2:
+        if '__str__' not in klass.__dict__:
+            raise ValueError("@python_2_unicode_compatible cannot be applied "
+                             "to %s because it doesn't define __str__()." %
+                             klass.__name__)
+        klass.__unicode__ = klass.__str__
+        klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
+    return klass
+
+
+# Complete the moves implementation.
+# This code is at the end of this module to speed up module loading.
+# Turn this module into a package.
+__path__ = []  # required for PEP 302 and PEP 451
+__package__ = __name__  # see PEP 366 @ReservedAssignment
+if globals().get("__spec__") is not None:
+    __spec__.submodule_search_locations = []  # PEP 451 @UndefinedVariable
+# Remove other six meta path importers, since they cause problems. This can
+# happen if six is removed from sys.modules and then reloaded. (Setuptools does
+# this for some reason.)
+if sys.meta_path:
+    for i, importer in enumerate(sys.meta_path):
+        # Here's some real nastiness: Another "instance" of the six module might
+        # be floating around. Therefore, we can't use isinstance() to check for
+        # the six meta path importer, since the other six instance will have
+        # inserted an importer with different class.
+        if (type(importer).__name__ == "_SixMetaPathImporter" and
+                importer.name == __name__):
+            del sys.meta_path[i]
+            break
+    del i, importer
+# Finally, add the importer to the meta path import hook.
+sys.meta_path.append(_importer)
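This is the stock six 1.10.0 module; the kind of dual-version code it enables elsewhere in Spack looks like the following (illustrative snippet, not part of this commit):

from six import iteritems, string_types

def count_string_values(d):
    # iteritems() picks dict.iteritems on Python 2 and dict.items on 3;
    # string_types matches basestring on 2 and str on 3.
    return sum(1 for _, v in iteritems(d) if isinstance(v, string_types))

print(count_string_values({'a': 'x', 'b': 2}))  # 1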
lib/spack/external/yaml/README (vendored, 2 changes)
@@ -28,7 +28,7 @@ Post your questions and opinions to the YAML-Core mailing list:
 'http://lists.sourceforge.net/lists/listinfo/yaml-core'.
 
 Submit bug reports and feature requests to the PyYAML bug tracker:
-'http://pyyaml.org/newticket?component=pyyaml'.
+'https://bitbucket.org/xi/pyyaml/issues/new'.
 
 PyYAML is written by Kirill Simonov <xi@resolvent.net>. It is released
 under the MIT license. See the file LICENSE for more details.
@@ -8,7 +8,7 @@
 from loader import *
 from dumper import *
 
-__version__ = '3.10'
+__version__ = '3.12'
 
 try:
     from cyaml import *
@@ -131,9 +131,6 @@ def construct_mapping(self, node, deep=False):
                 raise ConstructorError("while constructing a mapping", node.start_mark,
                         "found unacceptable key (%s)" % exc, key_node.start_mark)
             value = self.construct_object(value_node, deep=deep)
-            if key in mapping:
-                raise ConstructorError("while constructing a mapping", node.start_mark,
-                        "found already in-use key (%s)" % key, key_node.start_mark)
             mapping[key] = value
         return mapping
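Note the behavioral consequence of dropping the in-use-key check: a mapping with duplicate keys now loads without a ConstructorError, and the last occurrence silently wins. A minimal illustration (assuming this vendored yaml is importable):

import yaml

print(yaml.load("key: first\nkey: second"))  # {'key': 'second'}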
85  lib/spack/external/yaml/lib/yaml/cyaml.py (vendored, new file)

@@ -0,0 +1,85 @@

__all__ = ['CBaseLoader', 'CSafeLoader', 'CLoader',
        'CBaseDumper', 'CSafeDumper', 'CDumper']

from _yaml import CParser, CEmitter

from constructor import *

from serializer import *
from representer import *

from resolver import *

class CBaseLoader(CParser, BaseConstructor, BaseResolver):

    def __init__(self, stream):
        CParser.__init__(self, stream)
        BaseConstructor.__init__(self)
        BaseResolver.__init__(self)

class CSafeLoader(CParser, SafeConstructor, Resolver):

    def __init__(self, stream):
        CParser.__init__(self, stream)
        SafeConstructor.__init__(self)
        Resolver.__init__(self)

class CLoader(CParser, Constructor, Resolver):

    def __init__(self, stream):
        CParser.__init__(self, stream)
        Constructor.__init__(self)
        Resolver.__init__(self)

class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver):

    def __init__(self, stream,
            default_style=None, default_flow_style=None,
            canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None,
            encoding=None, explicit_start=None, explicit_end=None,
            version=None, tags=None):
        CEmitter.__init__(self, stream, canonical=canonical,
                indent=indent, width=width, encoding=encoding,
                allow_unicode=allow_unicode, line_break=line_break,
                explicit_start=explicit_start, explicit_end=explicit_end,
                version=version, tags=tags)
        Representer.__init__(self, default_style=default_style,
                default_flow_style=default_flow_style)
        Resolver.__init__(self)

class CSafeDumper(CEmitter, SafeRepresenter, Resolver):

    def __init__(self, stream,
            default_style=None, default_flow_style=None,
            canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None,
            encoding=None, explicit_start=None, explicit_end=None,
            version=None, tags=None):
        CEmitter.__init__(self, stream, canonical=canonical,
                indent=indent, width=width, encoding=encoding,
                allow_unicode=allow_unicode, line_break=line_break,
                explicit_start=explicit_start, explicit_end=explicit_end,
                version=version, tags=tags)
        SafeRepresenter.__init__(self, default_style=default_style,
                default_flow_style=default_flow_style)
        Resolver.__init__(self)

class CDumper(CEmitter, Serializer, Representer, Resolver):

    def __init__(self, stream,
            default_style=None, default_flow_style=None,
            canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None,
            encoding=None, explicit_start=None, explicit_end=None,
            version=None, tags=None):
        CEmitter.__init__(self, stream, canonical=canonical,
                indent=indent, width=width, encoding=encoding,
                allow_unicode=allow_unicode, line_break=line_break,
                explicit_start=explicit_start, explicit_end=explicit_end,
                version=version, tags=tags)
        Representer.__init__(self, default_style=default_style,
                default_flow_style=default_flow_style)
        Resolver.__init__(self)
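A hedged usage sketch (not from the diff): these C-accelerated classes only import when the libyaml `_yaml` extension module is built, so callers typically feature-test via `yaml.__with_libyaml__` and fall back to the pure-Python loaders.

import yaml

Loader = yaml.CSafeLoader if yaml.__with_libyaml__ else yaml.SafeLoader
data = yaml.load("packages: [zlib, mpich]", Loader=Loader)
print(data)  # {'packages': ['zlib', 'mpich']}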
lib/spack/external/yaml/lib/yaml/reader.py (vendored)

@@ -56,7 +56,8 @@ class Reader(object):

     # Yeah, it's ugly and slow.

-    def __init__(self, stream, name=None):
+    def __init__(self, stream):
+        self.name = None
         self.stream = None
         self.stream_pointer = 0
         self.eof = True

@@ -69,16 +70,16 @@ def __init__(self, stream, name=None):
         self.line = 0
         self.column = 0
         if isinstance(stream, unicode):
-            self.name = "<unicode string>" if name is None else name
+            self.name = "<unicode string>"
             self.check_printable(stream)
             self.buffer = stream+u'\0'
         elif isinstance(stream, str):
-            self.name = "<string>" if name is None else name
+            self.name = "<string>"
             self.raw_buffer = stream
             self.determine_encoding()
         else:
             self.stream = stream
-            self.name = getattr(stream, 'name', "<file>") if name is None else name
+            self.name = getattr(stream, 'name', "<file>")
             self.eof = False
             self.raw_buffer = ''
             self.determine_encoding()
lib/spack/external/yaml/lib/yaml/representer.py (vendored)

@@ -139,7 +139,9 @@ def ignore_aliases(self, data):
 class SafeRepresenter(BaseRepresenter):

     def ignore_aliases(self, data):
-        if data in [None, ()]:
+        if data is None:
+            return True
+        if isinstance(data, tuple) and data == ():
             return True
         if isinstance(data, (str, unicode, bool, int, float)):
             return True
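A quick illustration (not from the diff) of what ignore_aliases controls: repeated references to the same mutable object are emitted as an anchor plus aliases, while values the method returns True for (None, empty tuples, plain scalars) are always written inline.

import yaml

shared = {'compiler': 'gcc'}
print(yaml.safe_dump([shared, shared]))
# - &id001
#   compiler: gcc
# - *id001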
lib/spack/external/yaml/lib/yaml/resolver.py (vendored)

@@ -24,7 +24,10 @@ def __init__(self):

     def add_implicit_resolver(cls, tag, regexp, first):
         if not 'yaml_implicit_resolvers' in cls.__dict__:
-            cls.yaml_implicit_resolvers = cls.yaml_implicit_resolvers.copy()
+            implicit_resolvers = {}
+            for key in cls.yaml_implicit_resolvers:
+                implicit_resolvers[key] = cls.yaml_implicit_resolvers[key][:]
+            cls.yaml_implicit_resolvers = implicit_resolvers
         if first is None:
             first = [None]
         for ch in first:
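A hypothetical example of the API this copy-on-write change guards (MyLoader and the !version tag are made up): registering an implicit resolver on one Loader subclass must not leak into its siblings, which is why both the class-level dict and each list inside it are copied before mutation.

import re
import yaml

class MyLoader(yaml.SafeLoader):
    pass

# Treat bare "1.2.3"-style scalars as version strings, for MyLoader only.
yaml.add_implicit_resolver(
    '!version', re.compile(r'^\d+\.\d+\.\d+$'), first=list('0123456789'),
    Loader=MyLoader, Dumper=yaml.SafeDumper)

To actually load such scalars, a matching constructor for '!version' would also have to be registered on MyLoader; yaml.SafeLoader itself stays untouched.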
lib/spack/external/yaml/lib/yaml/scanner.py (vendored)

@@ -286,7 +286,7 @@ def stale_possible_simple_keys(self):
                     or self.index-key.index > 1024:
                 if key.required:
                     raise ScannerError("while scanning a simple key", key.mark,
-                            "could not found expected ':'", self.get_mark())
+                            "could not find expected ':'", self.get_mark())
                 del self.possible_simple_keys[level]

     def save_possible_simple_key(self):

@@ -297,10 +297,6 @@ def save_possible_simple_key(self):
         # Check if a simple key is required at the current position.
         required = not self.flow_level and self.indent == self.column

-        # A simple key is required only if it is the first token in the current
-        # line. Therefore it is always allowed.
-        assert self.allow_simple_key or not required
-
         # The next token might be a simple key. Let's save it's number and
         # position.
         if self.allow_simple_key:

@@ -317,7 +313,7 @@ def remove_possible_simple_key(self):

             if key.required:
                 raise ScannerError("while scanning a simple key", key.mark,
-                        "could not found expected ':'", self.get_mark())
+                        "could not find expected ':'", self.get_mark())

             del self.possible_simple_keys[self.flow_level]
312  lib/spack/external/yaml/lib3/yaml/__init__.py (vendored, new file)

@@ -0,0 +1,312 @@

from .error import *

from .tokens import *
from .events import *
from .nodes import *

from .loader import *
from .dumper import *

__version__ = '3.12'
try:
    from .cyaml import *
    __with_libyaml__ = True
except ImportError:
    __with_libyaml__ = False

import io

def scan(stream, Loader=Loader):
    """
    Scan a YAML stream and produce scanning tokens.
    """
    loader = Loader(stream)
    try:
        while loader.check_token():
            yield loader.get_token()
    finally:
        loader.dispose()

def parse(stream, Loader=Loader):
    """
    Parse a YAML stream and produce parsing events.
    """
    loader = Loader(stream)
    try:
        while loader.check_event():
            yield loader.get_event()
    finally:
        loader.dispose()

def compose(stream, Loader=Loader):
    """
    Parse the first YAML document in a stream
    and produce the corresponding representation tree.
    """
    loader = Loader(stream)
    try:
        return loader.get_single_node()
    finally:
        loader.dispose()

def compose_all(stream, Loader=Loader):
    """
    Parse all YAML documents in a stream
    and produce corresponding representation trees.
    """
    loader = Loader(stream)
    try:
        while loader.check_node():
            yield loader.get_node()
    finally:
        loader.dispose()

def load(stream, Loader=Loader):
    """
    Parse the first YAML document in a stream
    and produce the corresponding Python object.
    """
    loader = Loader(stream)
    try:
        return loader.get_single_data()
    finally:
        loader.dispose()

def load_all(stream, Loader=Loader):
    """
    Parse all YAML documents in a stream
    and produce corresponding Python objects.
    """
    loader = Loader(stream)
    try:
        while loader.check_data():
            yield loader.get_data()
    finally:
        loader.dispose()

def safe_load(stream):
    """
    Parse the first YAML document in a stream
    and produce the corresponding Python object.
    Resolve only basic YAML tags.
    """
    return load(stream, SafeLoader)

def safe_load_all(stream):
    """
    Parse all YAML documents in a stream
    and produce corresponding Python objects.
    Resolve only basic YAML tags.
    """
    return load_all(stream, SafeLoader)

def emit(events, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None):
    """
    Emit YAML parsing events into a stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        stream = io.StringIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break)
    try:
        for event in events:
            dumper.emit(event)
    finally:
        dumper.dispose()
    if getvalue:
        return getvalue()

def serialize_all(nodes, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None,
        encoding=None, explicit_start=None, explicit_end=None,
        version=None, tags=None):
    """
    Serialize a sequence of representation trees into a YAML stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        if encoding is None:
            stream = io.StringIO()
        else:
            stream = io.BytesIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break,
            encoding=encoding, version=version, tags=tags,
            explicit_start=explicit_start, explicit_end=explicit_end)
    try:
        dumper.open()
        for node in nodes:
            dumper.serialize(node)
        dumper.close()
    finally:
        dumper.dispose()
    if getvalue:
        return getvalue()

def serialize(node, stream=None, Dumper=Dumper, **kwds):
    """
    Serialize a representation tree into a YAML stream.
    If stream is None, return the produced string instead.
    """
    return serialize_all([node], stream, Dumper=Dumper, **kwds)

def dump_all(documents, stream=None, Dumper=Dumper,
        default_style=None, default_flow_style=None,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None,
        encoding=None, explicit_start=None, explicit_end=None,
        version=None, tags=None):
    """
    Serialize a sequence of Python objects into a YAML stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        if encoding is None:
            stream = io.StringIO()
        else:
            stream = io.BytesIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, default_style=default_style,
            default_flow_style=default_flow_style,
            canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break,
            encoding=encoding, version=version, tags=tags,
            explicit_start=explicit_start, explicit_end=explicit_end)
    try:
        dumper.open()
        for data in documents:
            dumper.represent(data)
        dumper.close()
    finally:
        dumper.dispose()
    if getvalue:
        return getvalue()

def dump(data, stream=None, Dumper=Dumper, **kwds):
    """
    Serialize a Python object into a YAML stream.
    If stream is None, return the produced string instead.
    """
    return dump_all([data], stream, Dumper=Dumper, **kwds)

def safe_dump_all(documents, stream=None, **kwds):
    """
    Serialize a sequence of Python objects into a YAML stream.
    Produce only basic YAML tags.
    If stream is None, return the produced string instead.
    """
    return dump_all(documents, stream, Dumper=SafeDumper, **kwds)

def safe_dump(data, stream=None, **kwds):
    """
    Serialize a Python object into a YAML stream.
    Produce only basic YAML tags.
    If stream is None, return the produced string instead.
    """
    return dump_all([data], stream, Dumper=SafeDumper, **kwds)

def add_implicit_resolver(tag, regexp, first=None,
        Loader=Loader, Dumper=Dumper):
    """
    Add an implicit scalar detector.
    If an implicit scalar value matches the given regexp,
    the corresponding tag is assigned to the scalar.
    first is a sequence of possible initial characters or None.
    """
    Loader.add_implicit_resolver(tag, regexp, first)
    Dumper.add_implicit_resolver(tag, regexp, first)

def add_path_resolver(tag, path, kind=None, Loader=Loader, Dumper=Dumper):
    """
    Add a path based resolver for the given tag.
    A path is a list of keys that forms a path
    to a node in the representation tree.
    Keys can be string values, integers, or None.
    """
    Loader.add_path_resolver(tag, path, kind)
    Dumper.add_path_resolver(tag, path, kind)

def add_constructor(tag, constructor, Loader=Loader):
    """
    Add a constructor for the given tag.
    Constructor is a function that accepts a Loader instance
    and a node object and produces the corresponding Python object.
    """
    Loader.add_constructor(tag, constructor)

def add_multi_constructor(tag_prefix, multi_constructor, Loader=Loader):
    """
    Add a multi-constructor for the given tag prefix.
    Multi-constructor is called for a node if its tag starts with tag_prefix.
    Multi-constructor accepts a Loader instance, a tag suffix,
    and a node object and produces the corresponding Python object.
    """
    Loader.add_multi_constructor(tag_prefix, multi_constructor)

def add_representer(data_type, representer, Dumper=Dumper):
    """
    Add a representer for the given type.
    Representer is a function accepting a Dumper instance
    and an instance of the given data type
    and producing the corresponding representation node.
    """
    Dumper.add_representer(data_type, representer)

def add_multi_representer(data_type, multi_representer, Dumper=Dumper):
    """
    Add a representer for the given type.
    Multi-representer is a function accepting a Dumper instance
    and an instance of the given data type or subtype
    and producing the corresponding representation node.
    """
    Dumper.add_multi_representer(data_type, multi_representer)

class YAMLObjectMetaclass(type):
    """
    The metaclass for YAMLObject.
    """
    def __init__(cls, name, bases, kwds):
        super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
        if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None:
            cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml)
            cls.yaml_dumper.add_representer(cls, cls.to_yaml)

class YAMLObject(metaclass=YAMLObjectMetaclass):
    """
    An object that can dump itself to a YAML stream
    and load itself from a YAML stream.
    """

    __slots__ = ()  # no direct instantiation, so allow immutable subclasses

    yaml_loader = Loader
    yaml_dumper = Dumper

    yaml_tag = None
    yaml_flow_style = None

    @classmethod
    def from_yaml(cls, loader, node):
        """
        Convert a representation node to a Python object.
        """
        return loader.construct_yaml_object(node, cls)

    @classmethod
    def to_yaml(cls, dumper, data):
        """
        Convert a Python object to a representation node.
        """
        return dumper.represent_yaml_object(cls.yaml_tag, data, cls,
                flow_style=cls.yaml_flow_style)
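A short round-trip through the module-level helpers defined above (illustrative; the config data is made up):

import yaml

text = """\
mirrors:
  local: /tmp/mirror
compilers: [gcc, clang]
"""
cfg = yaml.safe_load(text)
cfg['compilers'].append('intel')
print(yaml.safe_dump(cfg, default_flow_style=False))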
139  lib/spack/external/yaml/lib3/yaml/composer.py (vendored, new file)

@@ -0,0 +1,139 @@

__all__ = ['Composer', 'ComposerError']

from .error import MarkedYAMLError
from .events import *
from .nodes import *

class ComposerError(MarkedYAMLError):
    pass

class Composer:

    def __init__(self):
        self.anchors = {}

    def check_node(self):
        # Drop the STREAM-START event.
        if self.check_event(StreamStartEvent):
            self.get_event()

        # If there are more documents available?
        return not self.check_event(StreamEndEvent)

    def get_node(self):
        # Get the root node of the next document.
        if not self.check_event(StreamEndEvent):
            return self.compose_document()

    def get_single_node(self):
        # Drop the STREAM-START event.
        self.get_event()

        # Compose a document if the stream is not empty.
        document = None
        if not self.check_event(StreamEndEvent):
            document = self.compose_document()

        # Ensure that the stream contains no more documents.
        if not self.check_event(StreamEndEvent):
            event = self.get_event()
            raise ComposerError("expected a single document in the stream",
                    document.start_mark, "but found another document",
                    event.start_mark)

        # Drop the STREAM-END event.
        self.get_event()

        return document

    def compose_document(self):
        # Drop the DOCUMENT-START event.
        self.get_event()

        # Compose the root node.
        node = self.compose_node(None, None)

        # Drop the DOCUMENT-END event.
        self.get_event()

        self.anchors = {}
        return node

    def compose_node(self, parent, index):
        if self.check_event(AliasEvent):
            event = self.get_event()
            anchor = event.anchor
            if anchor not in self.anchors:
                raise ComposerError(None, None, "found undefined alias %r"
                        % anchor, event.start_mark)
            return self.anchors[anchor]
        event = self.peek_event()
        anchor = event.anchor
        if anchor is not None:
            if anchor in self.anchors:
                raise ComposerError("found duplicate anchor %r; first occurence"
                        % anchor, self.anchors[anchor].start_mark,
                        "second occurence", event.start_mark)
        self.descend_resolver(parent, index)
        if self.check_event(ScalarEvent):
            node = self.compose_scalar_node(anchor)
        elif self.check_event(SequenceStartEvent):
            node = self.compose_sequence_node(anchor)
        elif self.check_event(MappingStartEvent):
            node = self.compose_mapping_node(anchor)
        self.ascend_resolver()
        return node

    def compose_scalar_node(self, anchor):
        event = self.get_event()
        tag = event.tag
        if tag is None or tag == '!':
            tag = self.resolve(ScalarNode, event.value, event.implicit)
        node = ScalarNode(tag, event.value,
                event.start_mark, event.end_mark, style=event.style)
        if anchor is not None:
            self.anchors[anchor] = node
        return node

    def compose_sequence_node(self, anchor):
        start_event = self.get_event()
        tag = start_event.tag
        if tag is None or tag == '!':
            tag = self.resolve(SequenceNode, None, start_event.implicit)
        node = SequenceNode(tag, [],
                start_event.start_mark, None,
                flow_style=start_event.flow_style)
        if anchor is not None:
            self.anchors[anchor] = node
        index = 0
        while not self.check_event(SequenceEndEvent):
            node.value.append(self.compose_node(node, index))
            index += 1
        end_event = self.get_event()
        node.end_mark = end_event.end_mark
        return node

    def compose_mapping_node(self, anchor):
        start_event = self.get_event()
        tag = start_event.tag
        if tag is None or tag == '!':
            tag = self.resolve(MappingNode, None, start_event.implicit)
        node = MappingNode(tag, [],
                start_event.start_mark, None,
                flow_style=start_event.flow_style)
        if anchor is not None:
            self.anchors[anchor] = node
        while not self.check_event(MappingEndEvent):
            #key_event = self.peek_event()
            item_key = self.compose_node(node, None)
            #if item_key in node.value:
            #    raise ComposerError("while composing a mapping", start_event.start_mark,
            #            "found duplicate key", key_event.start_mark)
            item_value = self.compose_node(node, item_key)
            #node.value[item_key] = item_value
            node.value.append((item_key, item_value))
        end_event = self.get_event()
        node.end_mark = end_event.end_mark
        return node
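An illustrative sketch (not from the diff): yaml.compose() exposes the node tree this Composer builds, and an alias resolves to the very node object recorded for its anchor, as compose_node shows above.

import yaml

root = yaml.compose("defaults: &d {cflags: -O2}\ndebug: *d\n")
(_, defaults), (_, debug) = root.value
print(defaults is debug)  # True: the alias reuses the anchored node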
686  lib/spack/external/yaml/lib3/yaml/constructor.py (vendored, new file)

@@ -0,0 +1,686 @@

__all__ = ['BaseConstructor', 'SafeConstructor', 'Constructor',
    'ConstructorError']

from .error import *
from .nodes import *

import collections, datetime, base64, binascii, re, sys, types

class ConstructorError(MarkedYAMLError):
    pass

class BaseConstructor:

    yaml_constructors = {}
    yaml_multi_constructors = {}

    def __init__(self):
        self.constructed_objects = {}
        self.recursive_objects = {}
        self.state_generators = []
        self.deep_construct = False

    def check_data(self):
        # If there are more documents available?
        return self.check_node()

    def get_data(self):
        # Construct and return the next document.
        if self.check_node():
            return self.construct_document(self.get_node())

    def get_single_data(self):
        # Ensure that the stream contains a single document and construct it.
        node = self.get_single_node()
        if node is not None:
            return self.construct_document(node)
        return None

    def construct_document(self, node):
        data = self.construct_object(node)
        while self.state_generators:
            state_generators = self.state_generators
            self.state_generators = []
            for generator in state_generators:
                for dummy in generator:
                    pass
        self.constructed_objects = {}
        self.recursive_objects = {}
        self.deep_construct = False
        return data

    def construct_object(self, node, deep=False):
        if node in self.constructed_objects:
            return self.constructed_objects[node]
        if deep:
            old_deep = self.deep_construct
            self.deep_construct = True
        if node in self.recursive_objects:
            raise ConstructorError(None, None,
                    "found unconstructable recursive node", node.start_mark)
        self.recursive_objects[node] = None
        constructor = None
        tag_suffix = None
        if node.tag in self.yaml_constructors:
            constructor = self.yaml_constructors[node.tag]
        else:
            for tag_prefix in self.yaml_multi_constructors:
                if node.tag.startswith(tag_prefix):
                    tag_suffix = node.tag[len(tag_prefix):]
                    constructor = self.yaml_multi_constructors[tag_prefix]
                    break
            else:
                if None in self.yaml_multi_constructors:
                    tag_suffix = node.tag
                    constructor = self.yaml_multi_constructors[None]
                elif None in self.yaml_constructors:
                    constructor = self.yaml_constructors[None]
                elif isinstance(node, ScalarNode):
                    constructor = self.__class__.construct_scalar
                elif isinstance(node, SequenceNode):
                    constructor = self.__class__.construct_sequence
                elif isinstance(node, MappingNode):
                    constructor = self.__class__.construct_mapping
        if tag_suffix is None:
            data = constructor(self, node)
        else:
            data = constructor(self, tag_suffix, node)
        if isinstance(data, types.GeneratorType):
            generator = data
            data = next(generator)
            if self.deep_construct:
                for dummy in generator:
                    pass
            else:
                self.state_generators.append(generator)
        self.constructed_objects[node] = data
        del self.recursive_objects[node]
        if deep:
            self.deep_construct = old_deep
        return data

    def construct_scalar(self, node):
        if not isinstance(node, ScalarNode):
            raise ConstructorError(None, None,
                    "expected a scalar node, but found %s" % node.id,
                    node.start_mark)
        return node.value

    def construct_sequence(self, node, deep=False):
        if not isinstance(node, SequenceNode):
            raise ConstructorError(None, None,
                    "expected a sequence node, but found %s" % node.id,
                    node.start_mark)
        return [self.construct_object(child, deep=deep)
                for child in node.value]

    def construct_mapping(self, node, deep=False):
        if not isinstance(node, MappingNode):
            raise ConstructorError(None, None,
                    "expected a mapping node, but found %s" % node.id,
                    node.start_mark)
        mapping = {}
        for key_node, value_node in node.value:
            key = self.construct_object(key_node, deep=deep)
            if not isinstance(key, collections.Hashable):
                raise ConstructorError("while constructing a mapping", node.start_mark,
                        "found unhashable key", key_node.start_mark)
            value = self.construct_object(value_node, deep=deep)
            mapping[key] = value
        return mapping

    def construct_pairs(self, node, deep=False):
        if not isinstance(node, MappingNode):
            raise ConstructorError(None, None,
                    "expected a mapping node, but found %s" % node.id,
                    node.start_mark)
        pairs = []
        for key_node, value_node in node.value:
            key = self.construct_object(key_node, deep=deep)
            value = self.construct_object(value_node, deep=deep)
            pairs.append((key, value))
        return pairs

    @classmethod
    def add_constructor(cls, tag, constructor):
        if not 'yaml_constructors' in cls.__dict__:
            cls.yaml_constructors = cls.yaml_constructors.copy()
        cls.yaml_constructors[tag] = constructor

    @classmethod
    def add_multi_constructor(cls, tag_prefix, multi_constructor):
        if not 'yaml_multi_constructors' in cls.__dict__:
            cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy()
        cls.yaml_multi_constructors[tag_prefix] = multi_constructor

class SafeConstructor(BaseConstructor):

    def construct_scalar(self, node):
        if isinstance(node, MappingNode):
            for key_node, value_node in node.value:
                if key_node.tag == 'tag:yaml.org,2002:value':
                    return self.construct_scalar(value_node)
        return super().construct_scalar(node)

    def flatten_mapping(self, node):
        merge = []
        index = 0
        while index < len(node.value):
            key_node, value_node = node.value[index]
            if key_node.tag == 'tag:yaml.org,2002:merge':
                del node.value[index]
                if isinstance(value_node, MappingNode):
                    self.flatten_mapping(value_node)
                    merge.extend(value_node.value)
                elif isinstance(value_node, SequenceNode):
                    submerge = []
                    for subnode in value_node.value:
                        if not isinstance(subnode, MappingNode):
                            raise ConstructorError("while constructing a mapping",
                                    node.start_mark,
                                    "expected a mapping for merging, but found %s"
                                    % subnode.id, subnode.start_mark)
                        self.flatten_mapping(subnode)
                        submerge.append(subnode.value)
                    submerge.reverse()
                    for value in submerge:
                        merge.extend(value)
                else:
                    raise ConstructorError("while constructing a mapping", node.start_mark,
                            "expected a mapping or list of mappings for merging, but found %s"
                            % value_node.id, value_node.start_mark)
            elif key_node.tag == 'tag:yaml.org,2002:value':
                key_node.tag = 'tag:yaml.org,2002:str'
                index += 1
            else:
                index += 1
        if merge:
            node.value = merge + node.value

    def construct_mapping(self, node, deep=False):
        if isinstance(node, MappingNode):
            self.flatten_mapping(node)
        return super().construct_mapping(node, deep=deep)

    def construct_yaml_null(self, node):
        self.construct_scalar(node)
        return None

    bool_values = {
        'yes':      True,
        'no':       False,
        'true':     True,
        'false':    False,
        'on':       True,
        'off':      False,
    }

    def construct_yaml_bool(self, node):
        value = self.construct_scalar(node)
        return self.bool_values[value.lower()]

    def construct_yaml_int(self, node):
        value = self.construct_scalar(node)
        value = value.replace('_', '')
        sign = +1
        if value[0] == '-':
            sign = -1
        if value[0] in '+-':
            value = value[1:]
        if value == '0':
            return 0
        elif value.startswith('0b'):
            return sign*int(value[2:], 2)
        elif value.startswith('0x'):
            return sign*int(value[2:], 16)
        elif value[0] == '0':
            return sign*int(value, 8)
        elif ':' in value:
            digits = [int(part) for part in value.split(':')]
            digits.reverse()
            base = 1
            value = 0
            for digit in digits:
                value += digit*base
                base *= 60
            return sign*value
        else:
            return sign*int(value)

    inf_value = 1e300
    while inf_value != inf_value*inf_value:
        inf_value *= inf_value
    nan_value = -inf_value/inf_value   # Trying to make a quiet NaN (like C99).

    def construct_yaml_float(self, node):
        value = self.construct_scalar(node)
        value = value.replace('_', '').lower()
        sign = +1
        if value[0] == '-':
            sign = -1
        if value[0] in '+-':
            value = value[1:]
        if value == '.inf':
            return sign*self.inf_value
        elif value == '.nan':
            return self.nan_value
        elif ':' in value:
            digits = [float(part) for part in value.split(':')]
            digits.reverse()
            base = 1
            value = 0.0
            for digit in digits:
                value += digit*base
                base *= 60
            return sign*value
        else:
            return sign*float(value)

    def construct_yaml_binary(self, node):
        try:
            value = self.construct_scalar(node).encode('ascii')
        except UnicodeEncodeError as exc:
            raise ConstructorError(None, None,
                    "failed to convert base64 data into ascii: %s" % exc,
                    node.start_mark)
        try:
            if hasattr(base64, 'decodebytes'):
                return base64.decodebytes(value)
            else:
                return base64.decodestring(value)
        except binascii.Error as exc:
            raise ConstructorError(None, None,
                    "failed to decode base64 data: %s" % exc, node.start_mark)

    timestamp_regexp = re.compile(
            r'''^(?P<year>[0-9][0-9][0-9][0-9])
                -(?P<month>[0-9][0-9]?)
                -(?P<day>[0-9][0-9]?)
                (?:(?:[Tt]|[ \t]+)
                (?P<hour>[0-9][0-9]?)
                :(?P<minute>[0-9][0-9])
                :(?P<second>[0-9][0-9])
                (?:\.(?P<fraction>[0-9]*))?
                (?:[ \t]*(?P<tz>Z|(?P<tz_sign>[-+])(?P<tz_hour>[0-9][0-9]?)
                (?::(?P<tz_minute>[0-9][0-9]))?))?)?$''', re.X)

    def construct_yaml_timestamp(self, node):
        value = self.construct_scalar(node)
        match = self.timestamp_regexp.match(node.value)
        values = match.groupdict()
        year = int(values['year'])
        month = int(values['month'])
        day = int(values['day'])
        if not values['hour']:
            return datetime.date(year, month, day)
        hour = int(values['hour'])
        minute = int(values['minute'])
        second = int(values['second'])
        fraction = 0
        if values['fraction']:
            fraction = values['fraction'][:6]
            while len(fraction) < 6:
                fraction += '0'
            fraction = int(fraction)
        delta = None
        if values['tz_sign']:
            tz_hour = int(values['tz_hour'])
            tz_minute = int(values['tz_minute'] or 0)
            delta = datetime.timedelta(hours=tz_hour, minutes=tz_minute)
            if values['tz_sign'] == '-':
                delta = -delta
        data = datetime.datetime(year, month, day, hour, minute, second, fraction)
        if delta:
            data -= delta
        return data

    def construct_yaml_omap(self, node):
        # Note: we do not check for duplicate keys, because it's too
        # CPU-expensive.
        omap = []
        yield omap
        if not isinstance(node, SequenceNode):
            raise ConstructorError("while constructing an ordered map", node.start_mark,
                    "expected a sequence, but found %s" % node.id, node.start_mark)
        for subnode in node.value:
            if not isinstance(subnode, MappingNode):
                raise ConstructorError("while constructing an ordered map", node.start_mark,
                        "expected a mapping of length 1, but found %s" % subnode.id,
                        subnode.start_mark)
            if len(subnode.value) != 1:
                raise ConstructorError("while constructing an ordered map", node.start_mark,
                        "expected a single mapping item, but found %d items" % len(subnode.value),
                        subnode.start_mark)
            key_node, value_node = subnode.value[0]
            key = self.construct_object(key_node)
            value = self.construct_object(value_node)
            omap.append((key, value))

    def construct_yaml_pairs(self, node):
        # Note: the same code as `construct_yaml_omap`.
        pairs = []
        yield pairs
        if not isinstance(node, SequenceNode):
            raise ConstructorError("while constructing pairs", node.start_mark,
                    "expected a sequence, but found %s" % node.id, node.start_mark)
        for subnode in node.value:
            if not isinstance(subnode, MappingNode):
                raise ConstructorError("while constructing pairs", node.start_mark,
                        "expected a mapping of length 1, but found %s" % subnode.id,
                        subnode.start_mark)
            if len(subnode.value) != 1:
                raise ConstructorError("while constructing pairs", node.start_mark,
                        "expected a single mapping item, but found %d items" % len(subnode.value),
                        subnode.start_mark)
            key_node, value_node = subnode.value[0]
            key = self.construct_object(key_node)
            value = self.construct_object(value_node)
            pairs.append((key, value))

    def construct_yaml_set(self, node):
        data = set()
        yield data
        value = self.construct_mapping(node)
        data.update(value)

    def construct_yaml_str(self, node):
        return self.construct_scalar(node)

    def construct_yaml_seq(self, node):
        data = []
        yield data
        data.extend(self.construct_sequence(node))

    def construct_yaml_map(self, node):
        data = {}
        yield data
        value = self.construct_mapping(node)
        data.update(value)

    def construct_yaml_object(self, node, cls):
        data = cls.__new__(cls)
        yield data
        if hasattr(data, '__setstate__'):
            state = self.construct_mapping(node, deep=True)
            data.__setstate__(state)
        else:
            state = self.construct_mapping(node)
            data.__dict__.update(state)

    def construct_undefined(self, node):
        raise ConstructorError(None, None,
                "could not determine a constructor for the tag %r" % node.tag,
                node.start_mark)

SafeConstructor.add_constructor(
        'tag:yaml.org,2002:null',
        SafeConstructor.construct_yaml_null)

SafeConstructor.add_constructor(
        'tag:yaml.org,2002:bool',
        SafeConstructor.construct_yaml_bool)

SafeConstructor.add_constructor(
        'tag:yaml.org,2002:int',
        SafeConstructor.construct_yaml_int)

SafeConstructor.add_constructor(
        'tag:yaml.org,2002:float',
        SafeConstructor.construct_yaml_float)

SafeConstructor.add_constructor(
        'tag:yaml.org,2002:binary',
        SafeConstructor.construct_yaml_binary)

SafeConstructor.add_constructor(
        'tag:yaml.org,2002:timestamp',
        SafeConstructor.construct_yaml_timestamp)

SafeConstructor.add_constructor(
        'tag:yaml.org,2002:omap',
        SafeConstructor.construct_yaml_omap)

SafeConstructor.add_constructor(
        'tag:yaml.org,2002:pairs',
        SafeConstructor.construct_yaml_pairs)

SafeConstructor.add_constructor(
        'tag:yaml.org,2002:set',
        SafeConstructor.construct_yaml_set)

SafeConstructor.add_constructor(
        'tag:yaml.org,2002:str',
        SafeConstructor.construct_yaml_str)

SafeConstructor.add_constructor(
        'tag:yaml.org,2002:seq',
        SafeConstructor.construct_yaml_seq)

SafeConstructor.add_constructor(
        'tag:yaml.org,2002:map',
        SafeConstructor.construct_yaml_map)

SafeConstructor.add_constructor(None,
        SafeConstructor.construct_undefined)

class Constructor(SafeConstructor):

    def construct_python_str(self, node):
        return self.construct_scalar(node)

    def construct_python_unicode(self, node):
        return self.construct_scalar(node)

    def construct_python_bytes(self, node):
        try:
            value = self.construct_scalar(node).encode('ascii')
        except UnicodeEncodeError as exc:
            raise ConstructorError(None, None,
                    "failed to convert base64 data into ascii: %s" % exc,
                    node.start_mark)
        try:
            if hasattr(base64, 'decodebytes'):
                return base64.decodebytes(value)
            else:
                return base64.decodestring(value)
        except binascii.Error as exc:
            raise ConstructorError(None, None,
                    "failed to decode base64 data: %s" % exc, node.start_mark)

    def construct_python_long(self, node):
        return self.construct_yaml_int(node)

    def construct_python_complex(self, node):
        return complex(self.construct_scalar(node))

    def construct_python_tuple(self, node):
        return tuple(self.construct_sequence(node))

    def find_python_module(self, name, mark):
        if not name:
            raise ConstructorError("while constructing a Python module", mark,
                    "expected non-empty name appended to the tag", mark)
        try:
            __import__(name)
        except ImportError as exc:
            raise ConstructorError("while constructing a Python module", mark,
                    "cannot find module %r (%s)" % (name, exc), mark)
        return sys.modules[name]

    def find_python_name(self, name, mark):
        if not name:
            raise ConstructorError("while constructing a Python object", mark,
                    "expected non-empty name appended to the tag", mark)
        if '.' in name:
            module_name, object_name = name.rsplit('.', 1)
        else:
            module_name = 'builtins'
            object_name = name
        try:
            __import__(module_name)
        except ImportError as exc:
            raise ConstructorError("while constructing a Python object", mark,
                    "cannot find module %r (%s)" % (module_name, exc), mark)
        module = sys.modules[module_name]
        if not hasattr(module, object_name):
            raise ConstructorError("while constructing a Python object", mark,
                    "cannot find %r in the module %r"
                    % (object_name, module.__name__), mark)
        return getattr(module, object_name)

    def construct_python_name(self, suffix, node):
        value = self.construct_scalar(node)
        if value:
            raise ConstructorError("while constructing a Python name", node.start_mark,
                    "expected the empty value, but found %r" % value, node.start_mark)
        return self.find_python_name(suffix, node.start_mark)

    def construct_python_module(self, suffix, node):
        value = self.construct_scalar(node)
        if value:
            raise ConstructorError("while constructing a Python module", node.start_mark,
                    "expected the empty value, but found %r" % value, node.start_mark)
        return self.find_python_module(suffix, node.start_mark)

    def make_python_instance(self, suffix, node,
            args=None, kwds=None, newobj=False):
        if not args:
            args = []
        if not kwds:
            kwds = {}
        cls = self.find_python_name(suffix, node.start_mark)
        if newobj and isinstance(cls, type):
            return cls.__new__(cls, *args, **kwds)
        else:
            return cls(*args, **kwds)

    def set_python_instance_state(self, instance, state):
        if hasattr(instance, '__setstate__'):
            instance.__setstate__(state)
        else:
            slotstate = {}
            if isinstance(state, tuple) and len(state) == 2:
                state, slotstate = state
            if hasattr(instance, '__dict__'):
                instance.__dict__.update(state)
            elif state:
                slotstate.update(state)
            for key, value in slotstate.items():
                setattr(object, key, value)

    def construct_python_object(self, suffix, node):
        # Format:
        #   !!python/object:module.name { ... state ... }
        instance = self.make_python_instance(suffix, node, newobj=True)
        yield instance
        deep = hasattr(instance, '__setstate__')
        state = self.construct_mapping(node, deep=deep)
        self.set_python_instance_state(instance, state)

    def construct_python_object_apply(self, suffix, node, newobj=False):
        # Format:
        #   !!python/object/apply       # (or !!python/object/new)
        #   args: [ ... arguments ... ]
        #   kwds: { ... keywords ... }
        #   state: ... state ...
        #   listitems: [ ... listitems ... ]
        #   dictitems: { ... dictitems ... }
        # or short format:
        #   !!python/object/apply [ ... arguments ... ]
        # The difference between !!python/object/apply and !!python/object/new
        # is how an object is created, check make_python_instance for details.
        if isinstance(node, SequenceNode):
            args = self.construct_sequence(node, deep=True)
            kwds = {}
            state = {}
            listitems = []
            dictitems = {}
        else:
            value = self.construct_mapping(node, deep=True)
            args = value.get('args', [])
            kwds = value.get('kwds', {})
            state = value.get('state', {})
            listitems = value.get('listitems', [])
            dictitems = value.get('dictitems', {})
        instance = self.make_python_instance(suffix, node, args, kwds, newobj)
        if state:
            self.set_python_instance_state(instance, state)
        if listitems:
            instance.extend(listitems)
        if dictitems:
            for key in dictitems:
                instance[key] = dictitems[key]
        return instance

    def construct_python_object_new(self, suffix, node):
        return self.construct_python_object_apply(suffix, node, newobj=True)

Constructor.add_constructor(
    'tag:yaml.org,2002:python/none',
    Constructor.construct_yaml_null)

Constructor.add_constructor(
    'tag:yaml.org,2002:python/bool',
    Constructor.construct_yaml_bool)

Constructor.add_constructor(
    'tag:yaml.org,2002:python/str',
    Constructor.construct_python_str)

Constructor.add_constructor(
    'tag:yaml.org,2002:python/unicode',
    Constructor.construct_python_unicode)

Constructor.add_constructor(
    'tag:yaml.org,2002:python/bytes',
    Constructor.construct_python_bytes)

Constructor.add_constructor(
    'tag:yaml.org,2002:python/int',
    Constructor.construct_yaml_int)

Constructor.add_constructor(
    'tag:yaml.org,2002:python/long',
    Constructor.construct_python_long)

Constructor.add_constructor(
    'tag:yaml.org,2002:python/float',
    Constructor.construct_yaml_float)

Constructor.add_constructor(
    'tag:yaml.org,2002:python/complex',
    Constructor.construct_python_complex)

Constructor.add_constructor(
    'tag:yaml.org,2002:python/list',
    Constructor.construct_yaml_seq)

Constructor.add_constructor(
    'tag:yaml.org,2002:python/tuple',
    Constructor.construct_python_tuple)

Constructor.add_constructor(
    'tag:yaml.org,2002:python/dict',
    Constructor.construct_yaml_map)

Constructor.add_multi_constructor(
    'tag:yaml.org,2002:python/name:',
    Constructor.construct_python_name)

Constructor.add_multi_constructor(
    'tag:yaml.org,2002:python/module:',
    Constructor.construct_python_module)

Constructor.add_multi_constructor(
    'tag:yaml.org,2002:python/object:',
    Constructor.construct_python_object)

Constructor.add_multi_constructor(
    'tag:yaml.org,2002:python/object/apply:',
    Constructor.construct_python_object_apply)

Constructor.add_multi_constructor(
    'tag:yaml.org,2002:python/object/new:',
    Constructor.construct_python_object_new)
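An illustrative sketch (the document below is made up): the SafeConstructor registrations above are what turn tagged nodes into Python values during safe_load.

import yaml

doc = """\
when: 2016-06-27
flags: !!set {-O2: null, -g: null}
order: !!omap [{first: 1}, {second: 2}]
"""
data = yaml.safe_load(doc)
print(type(data['when']).__name__)  # date
print(data['flags'])                # a set: {'-O2', '-g'} (order may vary)
print(data['order'])                # [('first', 1), ('second', 2)]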
85  lib/spack/external/yaml/lib3/yaml/cyaml.py (vendored, new file)

@@ -0,0 +1,85 @@

__all__ = ['CBaseLoader', 'CSafeLoader', 'CLoader',
        'CBaseDumper', 'CSafeDumper', 'CDumper']

from _yaml import CParser, CEmitter

from .constructor import *

from .serializer import *
from .representer import *

from .resolver import *

class CBaseLoader(CParser, BaseConstructor, BaseResolver):

    def __init__(self, stream):
        CParser.__init__(self, stream)
        BaseConstructor.__init__(self)
        BaseResolver.__init__(self)

class CSafeLoader(CParser, SafeConstructor, Resolver):

    def __init__(self, stream):
        CParser.__init__(self, stream)
        SafeConstructor.__init__(self)
        Resolver.__init__(self)

class CLoader(CParser, Constructor, Resolver):

    def __init__(self, stream):
        CParser.__init__(self, stream)
        Constructor.__init__(self)
        Resolver.__init__(self)

class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver):

    def __init__(self, stream,
            default_style=None, default_flow_style=None,
            canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None,
            encoding=None, explicit_start=None, explicit_end=None,
            version=None, tags=None):
        CEmitter.__init__(self, stream, canonical=canonical,
                indent=indent, width=width, encoding=encoding,
                allow_unicode=allow_unicode, line_break=line_break,
                explicit_start=explicit_start, explicit_end=explicit_end,
                version=version, tags=tags)
        Representer.__init__(self, default_style=default_style,
                default_flow_style=default_flow_style)
        Resolver.__init__(self)

class CSafeDumper(CEmitter, SafeRepresenter, Resolver):

    def __init__(self, stream,
            default_style=None, default_flow_style=None,
            canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None,
            encoding=None, explicit_start=None, explicit_end=None,
            version=None, tags=None):
        CEmitter.__init__(self, stream, canonical=canonical,
                indent=indent, width=width, encoding=encoding,
                allow_unicode=allow_unicode, line_break=line_break,
                explicit_start=explicit_start, explicit_end=explicit_end,
                version=version, tags=tags)
        SafeRepresenter.__init__(self, default_style=default_style,
                default_flow_style=default_flow_style)
        Resolver.__init__(self)

class CDumper(CEmitter, Serializer, Representer, Resolver):

    def __init__(self, stream,
            default_style=None, default_flow_style=None,
            canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None,
            encoding=None, explicit_start=None, explicit_end=None,
            version=None, tags=None):
        CEmitter.__init__(self, stream, canonical=canonical,
                indent=indent, width=width, encoding=encoding,
                allow_unicode=allow_unicode, line_break=line_break,
                explicit_start=explicit_start, explicit_end=explicit_end,
                version=version, tags=tags)
        Representer.__init__(self, default_style=default_style,
                default_flow_style=default_flow_style)
        Resolver.__init__(self)
62
lib/spack/external/yaml/lib3/yaml/dumper.py
vendored
Normal file
62
lib/spack/external/yaml/lib3/yaml/dumper.py
vendored
Normal file
|
@ -0,0 +1,62 @@
__all__ = ['BaseDumper', 'SafeDumper', 'Dumper']

from .emitter import *
from .serializer import *
from .representer import *
from .resolver import *

class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver):

    def __init__(self, stream,
            default_style=None, default_flow_style=None,
            canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None,
            encoding=None, explicit_start=None, explicit_end=None,
            version=None, tags=None):
        Emitter.__init__(self, stream, canonical=canonical,
                indent=indent, width=width,
                allow_unicode=allow_unicode, line_break=line_break)
        Serializer.__init__(self, encoding=encoding,
                explicit_start=explicit_start, explicit_end=explicit_end,
                version=version, tags=tags)
        Representer.__init__(self, default_style=default_style,
                default_flow_style=default_flow_style)
        Resolver.__init__(self)

class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver):

    def __init__(self, stream,
            default_style=None, default_flow_style=None,
            canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None,
            encoding=None, explicit_start=None, explicit_end=None,
            version=None, tags=None):
        Emitter.__init__(self, stream, canonical=canonical,
                indent=indent, width=width,
                allow_unicode=allow_unicode, line_break=line_break)
        Serializer.__init__(self, encoding=encoding,
                explicit_start=explicit_start, explicit_end=explicit_end,
                version=version, tags=tags)
        SafeRepresenter.__init__(self, default_style=default_style,
                default_flow_style=default_flow_style)
        Resolver.__init__(self)

class Dumper(Emitter, Serializer, Representer, Resolver):

    def __init__(self, stream,
            default_style=None, default_flow_style=None,
            canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None,
            encoding=None, explicit_start=None, explicit_end=None,
            version=None, tags=None):
        Emitter.__init__(self, stream, canonical=canonical,
                indent=indent, width=width,
                allow_unicode=allow_unicode, line_break=line_break)
        Serializer.__init__(self, encoding=encoding,
                explicit_start=explicit_start, explicit_end=explicit_end,
                version=version, tags=tags)
        Representer.__init__(self, default_style=default_style,
                default_flow_style=default_flow_style)
        Resolver.__init__(self)
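These dumper classes are what the top-level dump functions thread their keyword arguments through. A minimal usage sketch, assuming the vendored package is importable as yaml:

    import yaml

    # SafeDumper only knows how to represent plain Python types, which is
    # what config-file output wants; Dumper additionally handles
    # python/object tags for arbitrary objects.
    text = yaml.dump({'compilers': ['gcc', 'clang']},
                     Dumper=yaml.SafeDumper,
                     default_flow_style=False)
    print(text)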
1137  lib/spack/external/yaml/lib3/yaml/emitter.py  vendored  Normal file
File diff suppressed because it is too large
75  lib/spack/external/yaml/lib3/yaml/error.py  vendored  Normal file
@@ -0,0 +1,75 @@
__all__ = ['Mark', 'YAMLError', 'MarkedYAMLError']

class Mark:

    def __init__(self, name, index, line, column, buffer, pointer):
        self.name = name
        self.index = index
        self.line = line
        self.column = column
        self.buffer = buffer
        self.pointer = pointer

    def get_snippet(self, indent=4, max_length=75):
        if self.buffer is None:
            return None
        head = ''
        start = self.pointer
        while start > 0 and self.buffer[start-1] not in '\0\r\n\x85\u2028\u2029':
            start -= 1
            if self.pointer-start > max_length/2-1:
                head = ' ... '
                start += 5
                break
        tail = ''
        end = self.pointer
        while end < len(self.buffer) and self.buffer[end] not in '\0\r\n\x85\u2028\u2029':
            end += 1
            if end-self.pointer > max_length/2-1:
                tail = ' ... '
                end -= 5
                break
        snippet = self.buffer[start:end]
        return ' '*indent + head + snippet + tail + '\n' \
                + ' '*(indent+self.pointer-start+len(head)) + '^'

    def __str__(self):
        snippet = self.get_snippet()
        where = "  in \"%s\", line %d, column %d" \
                % (self.name, self.line+1, self.column+1)
        if snippet is not None:
            where += ":\n"+snippet
        return where

class YAMLError(Exception):
    pass

class MarkedYAMLError(YAMLError):

    def __init__(self, context=None, context_mark=None,
            problem=None, problem_mark=None, note=None):
        self.context = context
        self.context_mark = context_mark
        self.problem = problem
        self.problem_mark = problem_mark
        self.note = note

    def __str__(self):
        lines = []
        if self.context is not None:
            lines.append(self.context)
        if self.context_mark is not None \
            and (self.problem is None or self.problem_mark is None
                    or self.context_mark.name != self.problem_mark.name
                    or self.context_mark.line != self.problem_mark.line
                    or self.context_mark.column != self.problem_mark.column):
            lines.append(str(self.context_mark))
        if self.problem is not None:
            lines.append(self.problem)
        if self.problem_mark is not None:
            lines.append(str(self.problem_mark))
        if self.note is not None:
            lines.append(self.note)
        return '\n'.join(lines)
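The Mark/get_snippet machinery is what produces the caret-annotated context in YAML error messages. A small illustration with a hypothetical buffer, assuming the vendored package is importable as yaml:

    from yaml.error import Mark

    buf = "foo: [1, 2\nbar: 3\0"
    # Point at the newline where, say, an unclosed flow sequence is noticed.
    mark = Mark("<unicode string>", 10, 0, 10, buf, 10)
    print(str(mark))
    # Expected output, roughly:
    #   in "<unicode string>", line 1, column 11:
    #     foo: [1, 2
    #               ^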
86  lib/spack/external/yaml/lib3/yaml/events.py  vendored  Normal file
@@ -0,0 +1,86 @@
# Abstract classes.

class Event(object):
    def __init__(self, start_mark=None, end_mark=None):
        self.start_mark = start_mark
        self.end_mark = end_mark
    def __repr__(self):
        attributes = [key for key in ['anchor', 'tag', 'implicit', 'value']
                if hasattr(self, key)]
        arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
                for key in attributes])
        return '%s(%s)' % (self.__class__.__name__, arguments)

class NodeEvent(Event):
    def __init__(self, anchor, start_mark=None, end_mark=None):
        self.anchor = anchor
        self.start_mark = start_mark
        self.end_mark = end_mark

class CollectionStartEvent(NodeEvent):
    def __init__(self, anchor, tag, implicit, start_mark=None, end_mark=None,
            flow_style=None):
        self.anchor = anchor
        self.tag = tag
        self.implicit = implicit
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.flow_style = flow_style

class CollectionEndEvent(Event):
    pass

# Implementations.

class StreamStartEvent(Event):
    def __init__(self, start_mark=None, end_mark=None, encoding=None):
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.encoding = encoding

class StreamEndEvent(Event):
    pass

class DocumentStartEvent(Event):
    def __init__(self, start_mark=None, end_mark=None,
            explicit=None, version=None, tags=None):
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.explicit = explicit
        self.version = version
        self.tags = tags

class DocumentEndEvent(Event):
    def __init__(self, start_mark=None, end_mark=None,
            explicit=None):
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.explicit = explicit

class AliasEvent(NodeEvent):
    pass

class ScalarEvent(NodeEvent):
    def __init__(self, anchor, tag, implicit, value,
            start_mark=None, end_mark=None, style=None):
        self.anchor = anchor
        self.tag = tag
        self.implicit = implicit
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.style = style

class SequenceStartEvent(CollectionStartEvent):
    pass

class SequenceEndEvent(CollectionEndEvent):
    pass

class MappingStartEvent(CollectionStartEvent):
    pass

class MappingEndEvent(CollectionEndEvent):
    pass
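These event classes are what the parser further down emits. A minimal sketch of walking the event stream, assuming the vendored package is importable as yaml:

    import yaml

    for event in yaml.parse("- a\n- b\n"):
        # StreamStartEvent, DocumentStartEvent, SequenceStartEvent,
        # ScalarEvent(value='a'), ScalarEvent(value='b'),
        # SequenceEndEvent, DocumentEndEvent, StreamEndEvent
        print(type(event).__name__)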
40  lib/spack/external/yaml/lib3/yaml/loader.py  vendored  Normal file
@@ -0,0 +1,40 @@
__all__ = ['BaseLoader', 'SafeLoader', 'Loader']

from .reader import *
from .scanner import *
from .parser import *
from .composer import *
from .constructor import *
from .resolver import *

class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):

    def __init__(self, stream):
        Reader.__init__(self, stream)
        Scanner.__init__(self)
        Parser.__init__(self)
        Composer.__init__(self)
        BaseConstructor.__init__(self)
        BaseResolver.__init__(self)

class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, Resolver):

    def __init__(self, stream):
        Reader.__init__(self, stream)
        Scanner.__init__(self)
        Parser.__init__(self)
        Composer.__init__(self)
        SafeConstructor.__init__(self)
        Resolver.__init__(self)

class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver):

    def __init__(self, stream):
        Reader.__init__(self, stream)
        Scanner.__init__(self)
        Parser.__init__(self)
        Composer.__init__(self)
        Constructor.__init__(self)
        Resolver.__init__(self)
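Each loader is just a mixin stack: Reader feeds Scanner, Scanner feeds Parser, and so on up to the Constructor that builds native Python objects. A minimal usage sketch, assuming the vendored package is importable as yaml:

    import yaml

    # SafeLoader constructs only plain Python types (no arbitrary objects),
    # which is the right default for untrusted input such as config files.
    config = yaml.load("mirrors:\n  local: /tmp/mirror\n",
                       Loader=yaml.SafeLoader)
    assert config == {'mirrors': {'local': '/tmp/mirror'}}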
49  lib/spack/external/yaml/lib3/yaml/nodes.py  vendored  Normal file
@@ -0,0 +1,49 @@
class Node(object):
    def __init__(self, tag, value, start_mark, end_mark):
        self.tag = tag
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark
    def __repr__(self):
        value = self.value
        #if isinstance(value, list):
        #    if len(value) == 0:
        #        value = '<empty>'
        #    elif len(value) == 1:
        #        value = '<1 item>'
        #    else:
        #        value = '<%d items>' % len(value)
        #else:
        #    if len(value) > 75:
        #        value = repr(value[:70]+u' ... ')
        #    else:
        #        value = repr(value)
        value = repr(value)
        return '%s(tag=%r, value=%s)' % (self.__class__.__name__, self.tag, value)

class ScalarNode(Node):
    id = 'scalar'
    def __init__(self, tag, value,
            start_mark=None, end_mark=None, style=None):
        self.tag = tag
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.style = style

class CollectionNode(Node):
    def __init__(self, tag, value,
            start_mark=None, end_mark=None, flow_style=None):
        self.tag = tag
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.flow_style = flow_style

class SequenceNode(CollectionNode):
    id = 'sequence'

class MappingNode(CollectionNode):
    id = 'mapping'
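Nodes are the representation-graph layer between events and native objects; yaml.compose exposes it directly. A short sketch, assuming the vendored package is importable as yaml:

    import yaml

    node = yaml.compose("a: [1, 2]")
    print(node.tag)               # tag:yaml.org,2002:map
    key, value = node.value[0]    # mapping values are (key, value) node pairs
    print(key.tag, value.id)      # tag:yaml.org,2002:str sequence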
589  lib/spack/external/yaml/lib3/yaml/parser.py  vendored  Normal file
@@ -0,0 +1,589 @@
# The following YAML grammar is LL(1) and is parsed by a recursive descent
# parser.
#
# stream            ::= STREAM-START implicit_document? explicit_document* STREAM-END
# implicit_document ::= block_node DOCUMENT-END*
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
# block_node_or_indentless_sequence ::=
#                       ALIAS
#                       | properties (block_content | indentless_block_sequence)?
#                       | block_content
#                       | indentless_block_sequence
# block_node        ::= ALIAS
#                       | properties block_content?
#                       | block_content
# flow_node         ::= ALIAS
#                       | properties flow_content?
#                       | flow_content
# properties        ::= TAG ANCHOR? | ANCHOR TAG?
# block_content     ::= block_collection | flow_collection | SCALAR
# flow_content      ::= flow_collection | SCALAR
# block_collection  ::= block_sequence | block_mapping
# flow_collection   ::= flow_sequence | flow_mapping
# block_sequence    ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
# indentless_sequence   ::= (BLOCK-ENTRY block_node?)+
# block_mapping     ::= BLOCK-MAPPING_START
#                       ((KEY block_node_or_indentless_sequence?)?
#                       (VALUE block_node_or_indentless_sequence?)?)*
#                       BLOCK-END
# flow_sequence     ::= FLOW-SEQUENCE-START
#                       (flow_sequence_entry FLOW-ENTRY)*
#                       flow_sequence_entry?
#                       FLOW-SEQUENCE-END
# flow_sequence_entry   ::= flow_node | KEY flow_node? (VALUE flow_node?)?
# flow_mapping      ::= FLOW-MAPPING-START
#                       (flow_mapping_entry FLOW-ENTRY)*
#                       flow_mapping_entry?
#                       FLOW-MAPPING-END
# flow_mapping_entry    ::= flow_node | KEY flow_node? (VALUE flow_node?)?
#
# FIRST sets:
#
# stream: { STREAM-START }
# explicit_document: { DIRECTIVE DOCUMENT-START }
# implicit_document: FIRST(block_node)
# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_sequence: { BLOCK-SEQUENCE-START }
# block_mapping: { BLOCK-MAPPING-START }
# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY }
# indentless_sequence: { ENTRY }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_sequence: { FLOW-SEQUENCE-START }
# flow_mapping: { FLOW-MAPPING-START }
# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }

__all__ = ['Parser', 'ParserError']

from .error import MarkedYAMLError
from .tokens import *
from .events import *
from .scanner import *

class ParserError(MarkedYAMLError):
    pass

class Parser:
    # Since writing a recursive descent parser is a straightforward task, we
    # do not give many comments here.

    DEFAULT_TAGS = {
        '!':  '!',
        '!!': 'tag:yaml.org,2002:',
    }

    def __init__(self):
        self.current_event = None
        self.yaml_version = None
        self.tag_handles = {}
        self.states = []
        self.marks = []
        self.state = self.parse_stream_start

    def dispose(self):
        # Reset the state attributes (to clear self-references)
        self.states = []
        self.state = None

    def check_event(self, *choices):
        # Check the type of the next event.
        if self.current_event is None:
            if self.state:
                self.current_event = self.state()
        if self.current_event is not None:
            if not choices:
                return True
            for choice in choices:
                if isinstance(self.current_event, choice):
                    return True
        return False

    def peek_event(self):
        # Get the next event.
        if self.current_event is None:
            if self.state:
                self.current_event = self.state()
        return self.current_event

    def get_event(self):
        # Get the next event and proceed further.
        if self.current_event is None:
            if self.state:
                self.current_event = self.state()
        value = self.current_event
        self.current_event = None
        return value

    # stream    ::= STREAM-START implicit_document? explicit_document* STREAM-END
    # implicit_document ::= block_node DOCUMENT-END*
    # explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*

    def parse_stream_start(self):

        # Parse the stream start.
        token = self.get_token()
        event = StreamStartEvent(token.start_mark, token.end_mark,
                encoding=token.encoding)

        # Prepare the next state.
        self.state = self.parse_implicit_document_start

        return event

    def parse_implicit_document_start(self):

        # Parse an implicit document.
        if not self.check_token(DirectiveToken, DocumentStartToken,
                StreamEndToken):
            self.tag_handles = self.DEFAULT_TAGS
            token = self.peek_token()
            start_mark = end_mark = token.start_mark
            event = DocumentStartEvent(start_mark, end_mark,
                    explicit=False)

            # Prepare the next state.
            self.states.append(self.parse_document_end)
            self.state = self.parse_block_node

            return event

        else:
            return self.parse_document_start()

    def parse_document_start(self):

        # Parse any extra document end indicators.
        while self.check_token(DocumentEndToken):
            self.get_token()

        # Parse an explicit document.
        if not self.check_token(StreamEndToken):
            token = self.peek_token()
            start_mark = token.start_mark
            version, tags = self.process_directives()
            if not self.check_token(DocumentStartToken):
                raise ParserError(None, None,
                        "expected '<document start>', but found %r"
                        % self.peek_token().id,
                        self.peek_token().start_mark)
            token = self.get_token()
            end_mark = token.end_mark
            event = DocumentStartEvent(start_mark, end_mark,
                    explicit=True, version=version, tags=tags)
            self.states.append(self.parse_document_end)
            self.state = self.parse_document_content
        else:
            # Parse the end of the stream.
            token = self.get_token()
            event = StreamEndEvent(token.start_mark, token.end_mark)
            assert not self.states
            assert not self.marks
            self.state = None
        return event

    def parse_document_end(self):

        # Parse the document end.
        token = self.peek_token()
        start_mark = end_mark = token.start_mark
        explicit = False
        if self.check_token(DocumentEndToken):
            token = self.get_token()
            end_mark = token.end_mark
            explicit = True
        event = DocumentEndEvent(start_mark, end_mark,
                explicit=explicit)

        # Prepare the next state.
        self.state = self.parse_document_start

        return event

    def parse_document_content(self):
        if self.check_token(DirectiveToken,
                DocumentStartToken, DocumentEndToken, StreamEndToken):
            event = self.process_empty_scalar(self.peek_token().start_mark)
            self.state = self.states.pop()
            return event
        else:
            return self.parse_block_node()

    def process_directives(self):
        self.yaml_version = None
        self.tag_handles = {}
        while self.check_token(DirectiveToken):
            token = self.get_token()
            if token.name == 'YAML':
                if self.yaml_version is not None:
                    raise ParserError(None, None,
                            "found duplicate YAML directive", token.start_mark)
                major, minor = token.value
                if major != 1:
                    raise ParserError(None, None,
                            "found incompatible YAML document (version 1.* is required)",
                            token.start_mark)
                self.yaml_version = token.value
            elif token.name == 'TAG':
                handle, prefix = token.value
                if handle in self.tag_handles:
                    raise ParserError(None, None,
                            "duplicate tag handle %r" % handle,
                            token.start_mark)
                self.tag_handles[handle] = prefix
        if self.tag_handles:
            value = self.yaml_version, self.tag_handles.copy()
        else:
            value = self.yaml_version, None
        for key in self.DEFAULT_TAGS:
            if key not in self.tag_handles:
                self.tag_handles[key] = self.DEFAULT_TAGS[key]
        return value

    # block_node_or_indentless_sequence ::= ALIAS
    #               | properties (block_content | indentless_block_sequence)?
    #               | block_content
    #               | indentless_block_sequence
    # block_node    ::= ALIAS
    #                   | properties block_content?
    #                   | block_content
    # flow_node     ::= ALIAS
    #                   | properties flow_content?
    #                   | flow_content
    # properties    ::= TAG ANCHOR? | ANCHOR TAG?
    # block_content     ::= block_collection | flow_collection | SCALAR
    # flow_content      ::= flow_collection | SCALAR
    # block_collection  ::= block_sequence | block_mapping
    # flow_collection   ::= flow_sequence | flow_mapping

    def parse_block_node(self):
        return self.parse_node(block=True)

    def parse_flow_node(self):
        return self.parse_node()

    def parse_block_node_or_indentless_sequence(self):
        return self.parse_node(block=True, indentless_sequence=True)

    def parse_node(self, block=False, indentless_sequence=False):
        if self.check_token(AliasToken):
            token = self.get_token()
            event = AliasEvent(token.value, token.start_mark, token.end_mark)
            self.state = self.states.pop()
        else:
            anchor = None
            tag = None
            start_mark = end_mark = tag_mark = None
            if self.check_token(AnchorToken):
                token = self.get_token()
                start_mark = token.start_mark
                end_mark = token.end_mark
                anchor = token.value
                if self.check_token(TagToken):
                    token = self.get_token()
                    tag_mark = token.start_mark
                    end_mark = token.end_mark
                    tag = token.value
            elif self.check_token(TagToken):
                token = self.get_token()
                start_mark = tag_mark = token.start_mark
                end_mark = token.end_mark
                tag = token.value
                if self.check_token(AnchorToken):
                    token = self.get_token()
                    end_mark = token.end_mark
                    anchor = token.value
            if tag is not None:
                handle, suffix = tag
                if handle is not None:
                    if handle not in self.tag_handles:
                        raise ParserError("while parsing a node", start_mark,
                                "found undefined tag handle %r" % handle,
                                tag_mark)
                    tag = self.tag_handles[handle]+suffix
                else:
                    tag = suffix
            #if tag == '!':
            #    raise ParserError("while parsing a node", start_mark,
            #            "found non-specific tag '!'", tag_mark,
            #            "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.")
            if start_mark is None:
                start_mark = end_mark = self.peek_token().start_mark
            event = None
            implicit = (tag is None or tag == '!')
            if indentless_sequence and self.check_token(BlockEntryToken):
                end_mark = self.peek_token().end_mark
                event = SequenceStartEvent(anchor, tag, implicit,
                        start_mark, end_mark)
                self.state = self.parse_indentless_sequence_entry
            else:
                if self.check_token(ScalarToken):
                    token = self.get_token()
                    end_mark = token.end_mark
                    if (token.plain and tag is None) or tag == '!':
                        implicit = (True, False)
                    elif tag is None:
                        implicit = (False, True)
                    else:
                        implicit = (False, False)
                    event = ScalarEvent(anchor, tag, implicit, token.value,
                            start_mark, end_mark, style=token.style)
                    self.state = self.states.pop()
                elif self.check_token(FlowSequenceStartToken):
                    end_mark = self.peek_token().end_mark
                    event = SequenceStartEvent(anchor, tag, implicit,
                            start_mark, end_mark, flow_style=True)
                    self.state = self.parse_flow_sequence_first_entry
                elif self.check_token(FlowMappingStartToken):
                    end_mark = self.peek_token().end_mark
                    event = MappingStartEvent(anchor, tag, implicit,
                            start_mark, end_mark, flow_style=True)
                    self.state = self.parse_flow_mapping_first_key
                elif block and self.check_token(BlockSequenceStartToken):
                    end_mark = self.peek_token().start_mark
                    event = SequenceStartEvent(anchor, tag, implicit,
                            start_mark, end_mark, flow_style=False)
                    self.state = self.parse_block_sequence_first_entry
                elif block and self.check_token(BlockMappingStartToken):
                    end_mark = self.peek_token().start_mark
                    event = MappingStartEvent(anchor, tag, implicit,
                            start_mark, end_mark, flow_style=False)
                    self.state = self.parse_block_mapping_first_key
                elif anchor is not None or tag is not None:
                    # Empty scalars are allowed even if a tag or an anchor is
                    # specified.
                    event = ScalarEvent(anchor, tag, (implicit, False), '',
                            start_mark, end_mark)
                    self.state = self.states.pop()
                else:
                    if block:
                        node = 'block'
                    else:
                        node = 'flow'
                    token = self.peek_token()
                    raise ParserError("while parsing a %s node" % node, start_mark,
                            "expected the node content, but found %r" % token.id,
                            token.start_mark)
        return event

    # block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END

    def parse_block_sequence_first_entry(self):
        token = self.get_token()
        self.marks.append(token.start_mark)
        return self.parse_block_sequence_entry()

    def parse_block_sequence_entry(self):
        if self.check_token(BlockEntryToken):
            token = self.get_token()
            if not self.check_token(BlockEntryToken, BlockEndToken):
                self.states.append(self.parse_block_sequence_entry)
                return self.parse_block_node()
            else:
                self.state = self.parse_block_sequence_entry
                return self.process_empty_scalar(token.end_mark)
        if not self.check_token(BlockEndToken):
            token = self.peek_token()
            raise ParserError("while parsing a block collection", self.marks[-1],
                    "expected <block end>, but found %r" % token.id, token.start_mark)
        token = self.get_token()
        event = SequenceEndEvent(token.start_mark, token.end_mark)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    # indentless_sequence ::= (BLOCK-ENTRY block_node?)+

    def parse_indentless_sequence_entry(self):
        if self.check_token(BlockEntryToken):
            token = self.get_token()
            if not self.check_token(BlockEntryToken,
                    KeyToken, ValueToken, BlockEndToken):
                self.states.append(self.parse_indentless_sequence_entry)
                return self.parse_block_node()
            else:
                self.state = self.parse_indentless_sequence_entry
                return self.process_empty_scalar(token.end_mark)
        token = self.peek_token()
        event = SequenceEndEvent(token.start_mark, token.start_mark)
        self.state = self.states.pop()
        return event

    # block_mapping     ::= BLOCK-MAPPING_START
    #                       ((KEY block_node_or_indentless_sequence?)?
    #                       (VALUE block_node_or_indentless_sequence?)?)*
    #                       BLOCK-END

    def parse_block_mapping_first_key(self):
        token = self.get_token()
        self.marks.append(token.start_mark)
        return self.parse_block_mapping_key()

    def parse_block_mapping_key(self):
        if self.check_token(KeyToken):
            token = self.get_token()
            if not self.check_token(KeyToken, ValueToken, BlockEndToken):
                self.states.append(self.parse_block_mapping_value)
                return self.parse_block_node_or_indentless_sequence()
            else:
                self.state = self.parse_block_mapping_value
                return self.process_empty_scalar(token.end_mark)
        if not self.check_token(BlockEndToken):
            token = self.peek_token()
            raise ParserError("while parsing a block mapping", self.marks[-1],
                    "expected <block end>, but found %r" % token.id, token.start_mark)
        token = self.get_token()
        event = MappingEndEvent(token.start_mark, token.end_mark)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    def parse_block_mapping_value(self):
        if self.check_token(ValueToken):
            token = self.get_token()
            if not self.check_token(KeyToken, ValueToken, BlockEndToken):
                self.states.append(self.parse_block_mapping_key)
                return self.parse_block_node_or_indentless_sequence()
            else:
                self.state = self.parse_block_mapping_key
                return self.process_empty_scalar(token.end_mark)
        else:
            self.state = self.parse_block_mapping_key
            token = self.peek_token()
            return self.process_empty_scalar(token.start_mark)

    # flow_sequence     ::= FLOW-SEQUENCE-START
    #                       (flow_sequence_entry FLOW-ENTRY)*
    #                       flow_sequence_entry?
    #                       FLOW-SEQUENCE-END
    # flow_sequence_entry   ::= flow_node | KEY flow_node? (VALUE flow_node?)?
    #
    # Note that while production rules for both flow_sequence_entry and
    # flow_mapping_entry are equal, their interpretations are different.
    # For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
    # generates an inline mapping (set syntax).

    def parse_flow_sequence_first_entry(self):
        token = self.get_token()
        self.marks.append(token.start_mark)
        return self.parse_flow_sequence_entry(first=True)

    def parse_flow_sequence_entry(self, first=False):
        if not self.check_token(FlowSequenceEndToken):
            if not first:
                if self.check_token(FlowEntryToken):
                    self.get_token()
                else:
                    token = self.peek_token()
                    raise ParserError("while parsing a flow sequence", self.marks[-1],
                            "expected ',' or ']', but got %r" % token.id, token.start_mark)

            if self.check_token(KeyToken):
                token = self.peek_token()
                event = MappingStartEvent(None, None, True,
                        token.start_mark, token.end_mark,
                        flow_style=True)
                self.state = self.parse_flow_sequence_entry_mapping_key
                return event
            elif not self.check_token(FlowSequenceEndToken):
                self.states.append(self.parse_flow_sequence_entry)
                return self.parse_flow_node()
        token = self.get_token()
        event = SequenceEndEvent(token.start_mark, token.end_mark)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    def parse_flow_sequence_entry_mapping_key(self):
        token = self.get_token()
        if not self.check_token(ValueToken,
                FlowEntryToken, FlowSequenceEndToken):
            self.states.append(self.parse_flow_sequence_entry_mapping_value)
            return self.parse_flow_node()
        else:
            self.state = self.parse_flow_sequence_entry_mapping_value
            return self.process_empty_scalar(token.end_mark)

    def parse_flow_sequence_entry_mapping_value(self):
        if self.check_token(ValueToken):
            token = self.get_token()
            if not self.check_token(FlowEntryToken, FlowSequenceEndToken):
                self.states.append(self.parse_flow_sequence_entry_mapping_end)
                return self.parse_flow_node()
            else:
                self.state = self.parse_flow_sequence_entry_mapping_end
                return self.process_empty_scalar(token.end_mark)
        else:
            self.state = self.parse_flow_sequence_entry_mapping_end
            token = self.peek_token()
            return self.process_empty_scalar(token.start_mark)

    def parse_flow_sequence_entry_mapping_end(self):
        self.state = self.parse_flow_sequence_entry
        token = self.peek_token()
        return MappingEndEvent(token.start_mark, token.start_mark)

    # flow_mapping  ::= FLOW-MAPPING-START
    #                   (flow_mapping_entry FLOW-ENTRY)*
    #                   flow_mapping_entry?
    #                   FLOW-MAPPING-END
    # flow_mapping_entry    ::= flow_node | KEY flow_node? (VALUE flow_node?)?

    def parse_flow_mapping_first_key(self):
        token = self.get_token()
        self.marks.append(token.start_mark)
        return self.parse_flow_mapping_key(first=True)

    def parse_flow_mapping_key(self, first=False):
        if not self.check_token(FlowMappingEndToken):
            if not first:
                if self.check_token(FlowEntryToken):
                    self.get_token()
                else:
                    token = self.peek_token()
                    raise ParserError("while parsing a flow mapping", self.marks[-1],
                            "expected ',' or '}', but got %r" % token.id, token.start_mark)
            if self.check_token(KeyToken):
                token = self.get_token()
                if not self.check_token(ValueToken,
                        FlowEntryToken, FlowMappingEndToken):
                    self.states.append(self.parse_flow_mapping_value)
                    return self.parse_flow_node()
                else:
                    self.state = self.parse_flow_mapping_value
                    return self.process_empty_scalar(token.end_mark)
            elif not self.check_token(FlowMappingEndToken):
                self.states.append(self.parse_flow_mapping_empty_value)
                return self.parse_flow_node()
        token = self.get_token()
        event = MappingEndEvent(token.start_mark, token.end_mark)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    def parse_flow_mapping_value(self):
        if self.check_token(ValueToken):
            token = self.get_token()
            if not self.check_token(FlowEntryToken, FlowMappingEndToken):
                self.states.append(self.parse_flow_mapping_key)
                return self.parse_flow_node()
            else:
                self.state = self.parse_flow_mapping_key
                return self.process_empty_scalar(token.end_mark)
        else:
            self.state = self.parse_flow_mapping_key
            token = self.peek_token()
            return self.process_empty_scalar(token.start_mark)

    def parse_flow_mapping_empty_value(self):
        self.state = self.parse_flow_mapping_key
        return self.process_empty_scalar(self.peek_token().start_mark)

    def process_empty_scalar(self, mark):
        return ScalarEvent(None, None, (True, False), '', mark, mark)
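The parser's check_event/peek_event/get_event trio is its whole public surface; each get_event call advances the state machine by one production. A sketch of driving it directly with the same mixin composition the loaders use, minus the construction layers, assuming the vendored modules are importable under yaml:

    from yaml.reader import Reader
    from yaml.scanner import Scanner
    from yaml.parser import Parser

    class MiniParser(Reader, Scanner, Parser):
        # Reader feeds characters to Scanner, Scanner feeds tokens to Parser.
        def __init__(self, stream):
            Reader.__init__(self, stream)
            Scanner.__init__(self)
            Parser.__init__(self)

    p = MiniParser("key: value\n")
    while p.check_event():
        print(p.get_event())   # StreamStartEvent, ..., StreamEndEvent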
192  lib/spack/external/yaml/lib3/yaml/reader.py  vendored  Normal file
@@ -0,0 +1,192 @@
# This module contains abstractions for the input stream. You don't have to
# look further; there is no pretty code here.
#
# We define two classes here.
#
#   Mark(source, line, column)
# It's just a record and its only use is producing nice error messages.
# Parser does not use it for any other purposes.
#
#   Reader(source, data)
# Reader determines the encoding of `data` and converts it to unicode.
# Reader provides the following methods and attributes:
#   reader.peek(length=1) - return the next `length` characters
#   reader.forward(length=1) - move the current position by `length` characters.
#   reader.index - the number of the current character.
#   reader.line, stream.column - the line and the column of the current character.

__all__ = ['Reader', 'ReaderError']

from .error import YAMLError, Mark

import codecs, re

class ReaderError(YAMLError):

    def __init__(self, name, position, character, encoding, reason):
        self.name = name
        self.character = character
        self.position = position
        self.encoding = encoding
        self.reason = reason

    def __str__(self):
        if isinstance(self.character, bytes):
            return "'%s' codec can't decode byte #x%02x: %s\n" \
                    "  in \"%s\", position %d" \
                    % (self.encoding, ord(self.character), self.reason,
                            self.name, self.position)
        else:
            return "unacceptable character #x%04x: %s\n" \
                    "  in \"%s\", position %d" \
                    % (self.character, self.reason,
                            self.name, self.position)

class Reader(object):
    # Reader:
    # - determines the data encoding and converts it to a unicode string,
    # - checks if characters are in allowed range,
    # - adds '\0' to the end.

    # Reader accepts
    #  - a `bytes` object,
    #  - a `str` object,
    #  - a file-like object with its `read` method returning `str`,
    #  - a file-like object with its `read` method returning `unicode`.

    # Yeah, it's ugly and slow.

    def __init__(self, stream):
        self.name = None
        self.stream = None
        self.stream_pointer = 0
        self.eof = True
        self.buffer = ''
        self.pointer = 0
        self.raw_buffer = None
        self.raw_decode = None
        self.encoding = None
        self.index = 0
        self.line = 0
        self.column = 0
        if isinstance(stream, str):
            self.name = "<unicode string>"
            self.check_printable(stream)
            self.buffer = stream+'\0'
        elif isinstance(stream, bytes):
            self.name = "<byte string>"
            self.raw_buffer = stream
            self.determine_encoding()
        else:
            self.stream = stream
            self.name = getattr(stream, 'name', "<file>")
            self.eof = False
            self.raw_buffer = None
            self.determine_encoding()

    def peek(self, index=0):
        try:
            return self.buffer[self.pointer+index]
        except IndexError:
            self.update(index+1)
            return self.buffer[self.pointer+index]

    def prefix(self, length=1):
        if self.pointer+length >= len(self.buffer):
            self.update(length)
        return self.buffer[self.pointer:self.pointer+length]

    def forward(self, length=1):
        if self.pointer+length+1 >= len(self.buffer):
            self.update(length+1)
        while length:
            ch = self.buffer[self.pointer]
            self.pointer += 1
            self.index += 1
            if ch in '\n\x85\u2028\u2029' \
                    or (ch == '\r' and self.buffer[self.pointer] != '\n'):
                self.line += 1
                self.column = 0
            elif ch != '\uFEFF':
                self.column += 1
            length -= 1

    def get_mark(self):
        if self.stream is None:
            return Mark(self.name, self.index, self.line, self.column,
                    self.buffer, self.pointer)
        else:
            return Mark(self.name, self.index, self.line, self.column,
                    None, None)

    def determine_encoding(self):
        while not self.eof and (self.raw_buffer is None or len(self.raw_buffer) < 2):
            self.update_raw()
        if isinstance(self.raw_buffer, bytes):
            if self.raw_buffer.startswith(codecs.BOM_UTF16_LE):
                self.raw_decode = codecs.utf_16_le_decode
                self.encoding = 'utf-16-le'
            elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE):
                self.raw_decode = codecs.utf_16_be_decode
                self.encoding = 'utf-16-be'
            else:
                self.raw_decode = codecs.utf_8_decode
                self.encoding = 'utf-8'
        self.update(1)

    NON_PRINTABLE = re.compile('[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD]')
    def check_printable(self, data):
        match = self.NON_PRINTABLE.search(data)
        if match:
            character = match.group()
            position = self.index+(len(self.buffer)-self.pointer)+match.start()
            raise ReaderError(self.name, position, ord(character),
                    'unicode', "special characters are not allowed")

    def update(self, length):
        if self.raw_buffer is None:
            return
        self.buffer = self.buffer[self.pointer:]
        self.pointer = 0
        while len(self.buffer) < length:
            if not self.eof:
                self.update_raw()
            if self.raw_decode is not None:
                try:
                    data, converted = self.raw_decode(self.raw_buffer,
                            'strict', self.eof)
                except UnicodeDecodeError as exc:
                    character = self.raw_buffer[exc.start]
                    if self.stream is not None:
                        position = self.stream_pointer-len(self.raw_buffer)+exc.start
                    else:
                        position = exc.start
                    raise ReaderError(self.name, position, character,
                            exc.encoding, exc.reason)
            else:
                data = self.raw_buffer
                converted = len(data)
            self.check_printable(data)
            self.buffer += data
            self.raw_buffer = self.raw_buffer[converted:]
            if self.eof:
                self.buffer += '\0'
                self.raw_buffer = None
                break

    def update_raw(self, size=4096):
        data = self.stream.read(size)
        if self.raw_buffer is None:
            self.raw_buffer = data
        else:
            self.raw_buffer += data
        self.stream_pointer += len(data)
        if not data:
            self.eof = True

#try:
#    import psyco
#    psyco.bind(Reader)
#except ImportError:
#    pass
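Reader's job is just to normalize input to str and track index/line/column for error marks. A small sketch, assuming the vendored module is importable as yaml.reader:

    from yaml.reader import Reader

    r = Reader("key: value\n")
    print(r.peek())                       # 'k'
    print(r.prefix(3))                    # 'key'
    r.forward(3)                          # consume 'key'
    print(r.peek(), r.index, r.column)    # ':' 3 3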
387  lib/spack/external/yaml/lib3/yaml/representer.py  vendored  Normal file
@@ -0,0 +1,387 @@
|
||||||
|
|
||||||
|
__all__ = ['BaseRepresenter', 'SafeRepresenter', 'Representer',
|
||||||
|
'RepresenterError']
|
||||||
|
|
||||||
|
from .error import *
|
||||||
|
from .nodes import *
|
||||||
|
|
||||||
|
import datetime, sys, copyreg, types, base64, collections
|
||||||
|
|
||||||
|
class RepresenterError(YAMLError):
|
||||||
|
pass
|
||||||
|
|
||||||
|
class BaseRepresenter:
|
||||||
|
|
||||||
|
yaml_representers = {}
|
||||||
|
yaml_multi_representers = {}
|
||||||
|
|
||||||
|
def __init__(self, default_style=None, default_flow_style=None):
|
||||||
|
self.default_style = default_style
|
||||||
|
self.default_flow_style = default_flow_style
|
||||||
|
self.represented_objects = {}
|
||||||
|
self.object_keeper = []
|
||||||
|
self.alias_key = None
|
||||||
|
|
||||||
|
def represent(self, data):
|
||||||
|
node = self.represent_data(data)
|
||||||
|
self.serialize(node)
|
||||||
|
self.represented_objects = {}
|
||||||
|
self.object_keeper = []
|
||||||
|
self.alias_key = None
|
||||||
|
|
||||||
|
def represent_data(self, data):
|
||||||
|
if self.ignore_aliases(data):
|
||||||
|
self.alias_key = None
|
||||||
|
else:
|
||||||
|
self.alias_key = id(data)
|
||||||
|
if self.alias_key is not None:
|
||||||
|
if self.alias_key in self.represented_objects:
|
||||||
|
node = self.represented_objects[self.alias_key]
|
||||||
|
#if node is None:
|
||||||
|
# raise RepresenterError("recursive objects are not allowed: %r" % data)
|
||||||
|
return node
|
||||||
|
#self.represented_objects[alias_key] = None
|
||||||
|
self.object_keeper.append(data)
|
||||||
|
data_types = type(data).__mro__
|
||||||
|
if data_types[0] in self.yaml_representers:
|
||||||
|
node = self.yaml_representers[data_types[0]](self, data)
|
||||||
|
else:
|
||||||
|
for data_type in data_types:
|
||||||
|
if data_type in self.yaml_multi_representers:
|
||||||
|
node = self.yaml_multi_representers[data_type](self, data)
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
if None in self.yaml_multi_representers:
|
||||||
|
node = self.yaml_multi_representers[None](self, data)
|
||||||
|
elif None in self.yaml_representers:
|
||||||
|
node = self.yaml_representers[None](self, data)
|
||||||
|
else:
|
||||||
|
node = ScalarNode(None, str(data))
|
||||||
|
#if alias_key is not None:
|
||||||
|
# self.represented_objects[alias_key] = node
|
||||||
|
return node
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def add_representer(cls, data_type, representer):
|
||||||
|
if not 'yaml_representers' in cls.__dict__:
|
||||||
|
cls.yaml_representers = cls.yaml_representers.copy()
|
||||||
|
cls.yaml_representers[data_type] = representer
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def add_multi_representer(cls, data_type, representer):
|
||||||
|
if not 'yaml_multi_representers' in cls.__dict__:
|
||||||
|
cls.yaml_multi_representers = cls.yaml_multi_representers.copy()
|
||||||
|
cls.yaml_multi_representers[data_type] = representer
|
||||||
|
|
||||||
|
def represent_scalar(self, tag, value, style=None):
|
||||||
|
if style is None:
|
||||||
|
style = self.default_style
|
||||||
|
node = ScalarNode(tag, value, style=style)
|
||||||
|
if self.alias_key is not None:
|
||||||
|
self.represented_objects[self.alias_key] = node
|
||||||
|
return node
|
||||||
|
|
||||||
|
def represent_sequence(self, tag, sequence, flow_style=None):
|
||||||
|
value = []
|
||||||
|
node = SequenceNode(tag, value, flow_style=flow_style)
|
||||||
|
if self.alias_key is not None:
|
||||||
|
self.represented_objects[self.alias_key] = node
|
||||||
|
best_style = True
|
||||||
|
for item in sequence:
|
||||||
|
node_item = self.represent_data(item)
|
||||||
|
if not (isinstance(node_item, ScalarNode) and not node_item.style):
|
||||||
|
best_style = False
|
||||||
|
value.append(node_item)
|
||||||
|
if flow_style is None:
|
||||||
|
if self.default_flow_style is not None:
|
||||||
|
node.flow_style = self.default_flow_style
|
||||||
|
else:
|
||||||
|
node.flow_style = best_style
|
||||||
|
return node
|
||||||
|
|
||||||
|
def represent_mapping(self, tag, mapping, flow_style=None):
|
||||||
|
value = []
|
||||||
|
node = MappingNode(tag, value, flow_style=flow_style)
|
||||||
|
if self.alias_key is not None:
|
||||||
|
self.represented_objects[self.alias_key] = node
|
||||||
|
best_style = True
|
||||||
|
if hasattr(mapping, 'items'):
|
||||||
|
mapping = list(mapping.items())
|
||||||
|
try:
|
||||||
|
mapping = sorted(mapping)
|
||||||
|
except TypeError:
|
||||||
|
pass
|
||||||
|
for item_key, item_value in mapping:
|
||||||
|
node_key = self.represent_data(item_key)
|
||||||
|
node_value = self.represent_data(item_value)
|
||||||
|
if not (isinstance(node_key, ScalarNode) and not node_key.style):
|
||||||
|
best_style = False
|
||||||
|
if not (isinstance(node_value, ScalarNode) and not node_value.style):
|
||||||
|
best_style = False
|
||||||
|
value.append((node_key, node_value))
|
||||||
|
if flow_style is None:
|
||||||
|
if self.default_flow_style is not None:
|
||||||
|
node.flow_style = self.default_flow_style
|
||||||
|
else:
|
||||||
|
node.flow_style = best_style
|
||||||
|
return node
|
||||||
|
|
||||||
|
def ignore_aliases(self, data):
|
||||||
|
return False
|
||||||
|
|
||||||
|
class SafeRepresenter(BaseRepresenter):
|
||||||
|
|
||||||
|
def ignore_aliases(self, data):
|
||||||
|
if data is None:
|
||||||
|
return True
|
||||||
|
if isinstance(data, tuple) and data == ():
|
||||||
|
return True
|
||||||
|
if isinstance(data, (str, bytes, bool, int, float)):
|
||||||
|
return True
|
||||||
|
|
||||||
|
def represent_none(self, data):
|
||||||
|
return self.represent_scalar('tag:yaml.org,2002:null', 'null')
|
||||||
|
|
||||||
|
def represent_str(self, data):
|
||||||
|
return self.represent_scalar('tag:yaml.org,2002:str', data)
|
||||||
|
|
||||||
|
def represent_binary(self, data):
|
||||||
|
if hasattr(base64, 'encodebytes'):
|
||||||
|
data = base64.encodebytes(data).decode('ascii')
|
||||||
|
else:
|
||||||
|
data = base64.encodestring(data).decode('ascii')
|
||||||
|
return self.represent_scalar('tag:yaml.org,2002:binary', data, style='|')
|
||||||
|
|
||||||
|
def represent_bool(self, data):
|
||||||
|
if data:
|
||||||
|
value = 'true'
|
||||||
|
else:
|
||||||
|
value = 'false'
|
||||||
|
return self.represent_scalar('tag:yaml.org,2002:bool', value)
|
||||||
|
|
||||||
|
def represent_int(self, data):
|
||||||
|
return self.represent_scalar('tag:yaml.org,2002:int', str(data))
|
||||||
|
|
||||||
|
inf_value = 1e300
|
||||||
|
while repr(inf_value) != repr(inf_value*inf_value):
|
||||||
|
inf_value *= inf_value
|
||||||
|
|
||||||
|
def represent_float(self, data):
|
||||||
|
if data != data or (data == 0.0 and data == 1.0):
|
||||||
|
value = '.nan'
|
||||||
|
elif data == self.inf_value:
|
||||||
|
value = '.inf'
|
||||||
|
elif data == -self.inf_value:
|
||||||
|
value = '-.inf'
|
||||||
|
else:
|
||||||
|
value = repr(data).lower()
|
||||||
|
# Note that in some cases `repr(data)` represents a float number
|
||||||
|
# without the decimal parts. For instance:
|
||||||
|
# >>> repr(1e17)
|
||||||
|
# '1e17'
|
||||||
|
# Unfortunately, this is not a valid float representation according
|
||||||
|
# to the definition of the `!!float` tag. We fix this by adding
|
||||||
|
# '.0' before the 'e' symbol.
|
||||||
|
if '.' not in value and 'e' in value:
|
||||||
|
value = value.replace('e', '.0e', 1)
|
||||||
|
return self.represent_scalar('tag:yaml.org,2002:float', value)
|
||||||
|
|
||||||
|
def represent_list(self, data):
|
||||||
|
#pairs = (len(data) > 0 and isinstance(data, list))
|
||||||
|
#if pairs:
|
||||||
|
# for item in data:
|
||||||
|
# if not isinstance(item, tuple) or len(item) != 2:
|
||||||
|
# pairs = False
|
||||||
|
# break
|
||||||
|
#if not pairs:
|
||||||
|
return self.represent_sequence('tag:yaml.org,2002:seq', data)
|
||||||
|
#value = []
|
||||||
|
#for item_key, item_value in data:
|
||||||
|
# value.append(self.represent_mapping(u'tag:yaml.org,2002:map',
|
||||||
|
# [(item_key, item_value)]))
|
||||||
|
#return SequenceNode(u'tag:yaml.org,2002:pairs', value)
|
||||||
|
|
||||||
|
def represent_dict(self, data):
|
||||||
|
return self.represent_mapping('tag:yaml.org,2002:map', data)
|
||||||
|
|
||||||
|
def represent_set(self, data):
|
||||||
|
value = {}
|
||||||
|
for key in data:
|
||||||
|
value[key] = None
|
||||||
|
return self.represent_mapping('tag:yaml.org,2002:set', value)
|
||||||
|
|
||||||
|
def represent_date(self, data):
|
||||||
|
value = data.isoformat()
|
||||||
|
return self.represent_scalar('tag:yaml.org,2002:timestamp', value)
|
||||||
|
|
||||||
|
def represent_datetime(self, data):
|
||||||
|
value = data.isoformat(' ')
|
||||||
|
return self.represent_scalar('tag:yaml.org,2002:timestamp', value)
|
||||||
|
|
||||||
|
def represent_yaml_object(self, tag, data, cls, flow_style=None):
|
||||||
|
if hasattr(data, '__getstate__'):
|
||||||
|
state = data.__getstate__()
|
||||||
|
else:
|
||||||
|
state = data.__dict__.copy()
|
||||||
|
return self.represent_mapping(tag, state, flow_style=flow_style)
|
||||||
|
|
||||||
|
def represent_undefined(self, data):
|
||||||
|
raise RepresenterError("cannot represent an object: %s" % data)
|
||||||
|
|
||||||
|
SafeRepresenter.add_representer(type(None),
|
||||||
|
SafeRepresenter.represent_none)
|
||||||
|
|
||||||
|
SafeRepresenter.add_representer(str,
|
||||||
|
SafeRepresenter.represent_str)
|
||||||
|
|
||||||
|
SafeRepresenter.add_representer(bytes,
|
||||||
|
SafeRepresenter.represent_binary)
|
||||||
|
|
||||||
|
SafeRepresenter.add_representer(bool,
|
||||||
|
SafeRepresenter.represent_bool)
|
||||||
|
|
||||||
|
SafeRepresenter.add_representer(int,
|
||||||
|
SafeRepresenter.represent_int)
|
||||||
|
|
||||||
|
SafeRepresenter.add_representer(float,
|
||||||
|
SafeRepresenter.represent_float)
|
||||||
|
|
||||||
|
SafeRepresenter.add_representer(list,
|
||||||
|
SafeRepresenter.represent_list)
|
||||||
|
|
||||||
|
SafeRepresenter.add_representer(tuple,
|
||||||
|
SafeRepresenter.represent_list)
|
||||||
|
|
||||||
|
SafeRepresenter.add_representer(dict,
|
||||||
|
SafeRepresenter.represent_dict)
|
||||||
|
|
||||||
|
SafeRepresenter.add_representer(set,
|
||||||
|
SafeRepresenter.represent_set)
|
||||||
|
|
||||||
|
SafeRepresenter.add_representer(datetime.date,
|
||||||
|
SafeRepresenter.represent_date)
|
||||||
|
|
||||||
|
SafeRepresenter.add_representer(datetime.datetime,
|
||||||
|
SafeRepresenter.represent_datetime)
|
||||||
|
|
||||||
|
SafeRepresenter.add_representer(None,
|
||||||
|
SafeRepresenter.represent_undefined)
|
||||||
|
|
||||||
|
class Representer(SafeRepresenter):

    def represent_complex(self, data):
        if data.imag == 0.0:
            data = '%r' % data.real
        elif data.real == 0.0:
            data = '%rj' % data.imag
        elif data.imag > 0:
            data = '%r+%rj' % (data.real, data.imag)
        else:
            data = '%r%rj' % (data.real, data.imag)
        return self.represent_scalar('tag:yaml.org,2002:python/complex', data)

    def represent_tuple(self, data):
        return self.represent_sequence('tag:yaml.org,2002:python/tuple', data)

    def represent_name(self, data):
        name = '%s.%s' % (data.__module__, data.__name__)
        return self.represent_scalar('tag:yaml.org,2002:python/name:'+name, '')

    def represent_module(self, data):
        return self.represent_scalar(
                'tag:yaml.org,2002:python/module:'+data.__name__, '')

    def represent_object(self, data):
        # We use the __reduce__ API to save the data. data.__reduce__ returns
        # a tuple of length 2-5:
        #   (function, args, state, listitems, dictitems)

        # For reconstructing, we call function(*args), then set its state,
        # listitems, and dictitems if they are not None.

        # A special case is when function.__name__ == '__newobj__'. In this
        # case we create the object with args[0].__new__(*args).

        # Another special case is when __reduce__ returns a string - we don't
        # support it.

        # We produce a !!python/object, !!python/object/new or
        # !!python/object/apply node.

        cls = type(data)
        if cls in copyreg.dispatch_table:
            reduce = copyreg.dispatch_table[cls](data)
        elif hasattr(data, '__reduce_ex__'):
            reduce = data.__reduce_ex__(2)
        elif hasattr(data, '__reduce__'):
            reduce = data.__reduce__()
        else:
            raise RepresenterError("cannot represent object: %r" % data)
        reduce = (list(reduce)+[None]*5)[:5]
        function, args, state, listitems, dictitems = reduce
        args = list(args)
        if state is None:
            state = {}
        if listitems is not None:
            listitems = list(listitems)
        if dictitems is not None:
            dictitems = dict(dictitems)
        if function.__name__ == '__newobj__':
            function = args[0]
            args = args[1:]
            tag = 'tag:yaml.org,2002:python/object/new:'
            newobj = True
        else:
            tag = 'tag:yaml.org,2002:python/object/apply:'
            newobj = False
        function_name = '%s.%s' % (function.__module__, function.__name__)
        if not args and not listitems and not dictitems \
                and isinstance(state, dict) and newobj:
            return self.represent_mapping(
                    'tag:yaml.org,2002:python/object:'+function_name, state)
        if not listitems and not dictitems \
                and isinstance(state, dict) and not state:
            return self.represent_sequence(tag+function_name, args)
        value = {}
        if args:
            value['args'] = args
        if state or not isinstance(state, dict):
            value['state'] = state
        if listitems:
            value['listitems'] = listitems
        if dictitems:
            value['dictitems'] = dictitems
        return self.represent_mapping(tag+function_name, value)

    def represent_ordered_dict(self, data):
        # Provide uniform representation across different Python versions.
        data_type = type(data)
        tag = 'tag:yaml.org,2002:python/object/apply:%s.%s' \
            % (data_type.__module__, data_type.__name__)
        items = [[key, value] for key, value in data.items()]
        return self.represent_sequence(tag, [items])

Representer.add_representer(complex,
        Representer.represent_complex)

Representer.add_representer(tuple,
        Representer.represent_tuple)

Representer.add_representer(type,
        Representer.represent_name)

Representer.add_representer(collections.OrderedDict,
        Representer.represent_ordered_dict)

Representer.add_representer(types.FunctionType,
        Representer.represent_name)

Representer.add_representer(types.BuiltinFunctionType,
        Representer.represent_name)

Representer.add_representer(types.ModuleType,
        Representer.represent_module)

Representer.add_multi_representer(object,
        Representer.represent_object)
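For types with no dedicated representer, the multi-representer registered last falls through to represent_object() and the __reduce__ protocol described in the comments above. A rough sketch (not part of this diff; Point is an invented class):

import yaml

class Point(object):
    def __init__(self, x, y):
        self.x, self.y = x, y

# __reduce_ex__(2) yields __newobj__ with no extra args and a state dict,
# so this takes the compact !!python/object branch of represent_object():
print(yaml.dump(Point(1, 2)))
# -> !!python/object:__main__.Point {x: 1, y: 2}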
227  lib/spack/external/yaml/lib3/yaml/resolver.py  vendored  Normal file
@@ -0,0 +1,227 @@
__all__ = ['BaseResolver', 'Resolver']

from .error import *
from .nodes import *

import re

class ResolverError(YAMLError):
    pass

class BaseResolver:

    DEFAULT_SCALAR_TAG = 'tag:yaml.org,2002:str'
    DEFAULT_SEQUENCE_TAG = 'tag:yaml.org,2002:seq'
    DEFAULT_MAPPING_TAG = 'tag:yaml.org,2002:map'

    yaml_implicit_resolvers = {}
    yaml_path_resolvers = {}

    def __init__(self):
        self.resolver_exact_paths = []
        self.resolver_prefix_paths = []

    @classmethod
    def add_implicit_resolver(cls, tag, regexp, first):
        if not 'yaml_implicit_resolvers' in cls.__dict__:
            implicit_resolvers = {}
            for key in cls.yaml_implicit_resolvers:
                implicit_resolvers[key] = cls.yaml_implicit_resolvers[key][:]
            cls.yaml_implicit_resolvers = implicit_resolvers
        if first is None:
            first = [None]
        for ch in first:
            cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp))

    @classmethod
    def add_path_resolver(cls, tag, path, kind=None):
        # Note: `add_path_resolver` is experimental. The API could be changed.
        # `new_path` is a pattern that is matched against the path from the
        # root to the node that is being considered. `node_path` elements are
        # tuples `(node_check, index_check)`. `node_check` is a node class:
        # `ScalarNode`, `SequenceNode`, `MappingNode` or `None`. `None`
        # matches any kind of node. `index_check` could be `None`, a boolean
        # value, a string value, or a number. `None` and `False` match against
        # any _value_ of sequence and mapping nodes. `True` matches against
        # any _key_ of a mapping node. A string `index_check` matches against
        # a mapping value that corresponds to a scalar key whose content is
        # equal to the `index_check` value. An integer `index_check` matches
        # against a sequence value with the index equal to `index_check`.
        if not 'yaml_path_resolvers' in cls.__dict__:
            cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy()
        new_path = []
        for element in path:
            if isinstance(element, (list, tuple)):
                if len(element) == 2:
                    node_check, index_check = element
                elif len(element) == 1:
                    node_check = element[0]
                    index_check = True
                else:
                    raise ResolverError("Invalid path element: %s" % element)
            else:
                node_check = None
                index_check = element
            if node_check is str:
                node_check = ScalarNode
            elif node_check is list:
                node_check = SequenceNode
            elif node_check is dict:
                node_check = MappingNode
            elif node_check not in [ScalarNode, SequenceNode, MappingNode] \
                    and not isinstance(node_check, str) \
                    and node_check is not None:
                raise ResolverError("Invalid node checker: %s" % node_check)
            if not isinstance(index_check, (str, int)) \
                    and index_check is not None:
                raise ResolverError("Invalid index checker: %s" % index_check)
            new_path.append((node_check, index_check))
        if kind is str:
            kind = ScalarNode
        elif kind is list:
            kind = SequenceNode
        elif kind is dict:
            kind = MappingNode
        elif kind not in [ScalarNode, SequenceNode, MappingNode] \
                and kind is not None:
            raise ResolverError("Invalid node kind: %s" % kind)
        cls.yaml_path_resolvers[tuple(new_path), kind] = tag

    def descend_resolver(self, current_node, current_index):
        if not self.yaml_path_resolvers:
            return
        exact_paths = {}
        prefix_paths = []
        if current_node:
            depth = len(self.resolver_prefix_paths)
            for path, kind in self.resolver_prefix_paths[-1]:
                if self.check_resolver_prefix(depth, path, kind,
                        current_node, current_index):
                    if len(path) > depth:
                        prefix_paths.append((path, kind))
                    else:
                        exact_paths[kind] = self.yaml_path_resolvers[path, kind]
        else:
            for path, kind in self.yaml_path_resolvers:
                if not path:
                    exact_paths[kind] = self.yaml_path_resolvers[path, kind]
                else:
                    prefix_paths.append((path, kind))
        self.resolver_exact_paths.append(exact_paths)
        self.resolver_prefix_paths.append(prefix_paths)

    def ascend_resolver(self):
        if not self.yaml_path_resolvers:
            return
        self.resolver_exact_paths.pop()
        self.resolver_prefix_paths.pop()

    def check_resolver_prefix(self, depth, path, kind,
            current_node, current_index):
        node_check, index_check = path[depth-1]
        if isinstance(node_check, str):
            if current_node.tag != node_check:
                return
        elif node_check is not None:
            if not isinstance(current_node, node_check):
                return
        if index_check is True and current_index is not None:
            return
        if (index_check is False or index_check is None) \
                and current_index is None:
            return
        if isinstance(index_check, str):
            if not (isinstance(current_index, ScalarNode)
                    and index_check == current_index.value):
                return
        elif isinstance(index_check, int) and not isinstance(index_check, bool):
            if index_check != current_index:
                return
        return True

    def resolve(self, kind, value, implicit):
        if kind is ScalarNode and implicit[0]:
            if value == '':
                resolvers = self.yaml_implicit_resolvers.get('', [])
            else:
                resolvers = self.yaml_implicit_resolvers.get(value[0], [])
            resolvers += self.yaml_implicit_resolvers.get(None, [])
            for tag, regexp in resolvers:
                if regexp.match(value):
                    return tag
            implicit = implicit[1]
        if self.yaml_path_resolvers:
            exact_paths = self.resolver_exact_paths[-1]
            if kind in exact_paths:
                return exact_paths[kind]
            if None in exact_paths:
                return exact_paths[None]
        if kind is ScalarNode:
            return self.DEFAULT_SCALAR_TAG
        elif kind is SequenceNode:
            return self.DEFAULT_SEQUENCE_TAG
        elif kind is MappingNode:
            return self.DEFAULT_MAPPING_TAG
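A hedged sketch of the experimental path-resolver API described in the comments above (not part of this diff; the 'env' key is an invented example):

import yaml

# Force every value under a top-level 'env' mapping key to resolve as a
# plain string, regardless of what the implicit resolvers would pick.
yaml.add_path_resolver('tag:yaml.org,2002:str', ['env', None],
                       kind=yaml.ScalarNode)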
class Resolver(BaseResolver):
    pass

Resolver.add_implicit_resolver(
        'tag:yaml.org,2002:bool',
        re.compile(r'''^(?:yes|Yes|YES|no|No|NO
                    |true|True|TRUE|false|False|FALSE
                    |on|On|ON|off|Off|OFF)$''', re.X),
        list('yYnNtTfFoO'))

Resolver.add_implicit_resolver(
        'tag:yaml.org,2002:float',
        re.compile(r'''^(?:[-+]?(?:[0-9][0-9_]*)\.[0-9_]*(?:[eE][-+][0-9]+)?
                    |\.[0-9_]+(?:[eE][-+][0-9]+)?
                    |[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*
                    |[-+]?\.(?:inf|Inf|INF)
                    |\.(?:nan|NaN|NAN))$''', re.X),
        list('-+0123456789.'))

Resolver.add_implicit_resolver(
        'tag:yaml.org,2002:int',
        re.compile(r'''^(?:[-+]?0b[0-1_]+
                    |[-+]?0[0-7_]+
                    |[-+]?(?:0|[1-9][0-9_]*)
                    |[-+]?0x[0-9a-fA-F_]+
                    |[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X),
        list('-+0123456789'))

Resolver.add_implicit_resolver(
        'tag:yaml.org,2002:merge',
        re.compile(r'^(?:<<)$'),
        ['<'])

Resolver.add_implicit_resolver(
        'tag:yaml.org,2002:null',
        re.compile(r'''^(?: ~
                    |null|Null|NULL
                    | )$''', re.X),
        ['~', 'n', 'N', ''])

Resolver.add_implicit_resolver(
        'tag:yaml.org,2002:timestamp',
        re.compile(r'''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]
                    |[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]?
                     (?:[Tt]|[ \t]+)[0-9][0-9]?
                     :[0-9][0-9] :[0-9][0-9] (?:\.[0-9]*)?
                     (?:[ \t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X),
        list('0123456789'))

Resolver.add_implicit_resolver(
        'tag:yaml.org,2002:value',
        re.compile(r'^(?:=)$'),
        ['='])

# The following resolver is only for documentation purposes. It cannot work
# because plain scalars cannot start with '!', '&', or '*'.
Resolver.add_implicit_resolver(
        'tag:yaml.org,2002:yaml',
        re.compile(r'^(?:!|&|\*)$'),
        list('!&*'))
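The implicit resolvers registered above are what type untagged plain scalars on load; a small sketch (not part of this diff):

import yaml

data = yaml.safe_load("a: yes\nb: 0x1f\nc: 1.5e+3\nd: ~")
# -> {'a': True, 'b': 31, 'c': 1500.0, 'd': None}
# Note the float pattern requires a signed exponent, so '1.5e3' (no sign)
# would resolve to a plain string instead.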
1444  lib/spack/external/yaml/lib3/yaml/scanner.py  vendored  Normal file
(File diff suppressed because it is too large.)

111  lib/spack/external/yaml/lib3/yaml/serializer.py  vendored  Normal file
@@ -0,0 +1,111 @@
__all__ = ['Serializer', 'SerializerError']

from .error import YAMLError
from .events import *
from .nodes import *

class SerializerError(YAMLError):
    pass

class Serializer:

    ANCHOR_TEMPLATE = 'id%03d'

    def __init__(self, encoding=None,
            explicit_start=None, explicit_end=None, version=None, tags=None):
        self.use_encoding = encoding
        self.use_explicit_start = explicit_start
        self.use_explicit_end = explicit_end
        self.use_version = version
        self.use_tags = tags
        self.serialized_nodes = {}
        self.anchors = {}
        self.last_anchor_id = 0
        self.closed = None

    def open(self):
        if self.closed is None:
            self.emit(StreamStartEvent(encoding=self.use_encoding))
            self.closed = False
        elif self.closed:
            raise SerializerError("serializer is closed")
        else:
            raise SerializerError("serializer is already opened")

    def close(self):
        if self.closed is None:
            raise SerializerError("serializer is not opened")
        elif not self.closed:
            self.emit(StreamEndEvent())
            self.closed = True

    #def __del__(self):
    #    self.close()

    def serialize(self, node):
        if self.closed is None:
            raise SerializerError("serializer is not opened")
        elif self.closed:
            raise SerializerError("serializer is closed")
        self.emit(DocumentStartEvent(explicit=self.use_explicit_start,
            version=self.use_version, tags=self.use_tags))
        self.anchor_node(node)
        self.serialize_node(node, None, None)
        self.emit(DocumentEndEvent(explicit=self.use_explicit_end))
        self.serialized_nodes = {}
        self.anchors = {}
        self.last_anchor_id = 0

    def anchor_node(self, node):
        if node in self.anchors:
            if self.anchors[node] is None:
                self.anchors[node] = self.generate_anchor(node)
        else:
            self.anchors[node] = None
            if isinstance(node, SequenceNode):
                for item in node.value:
                    self.anchor_node(item)
            elif isinstance(node, MappingNode):
                for key, value in node.value:
                    self.anchor_node(key)
                    self.anchor_node(value)

    def generate_anchor(self, node):
        self.last_anchor_id += 1
        return self.ANCHOR_TEMPLATE % self.last_anchor_id

    def serialize_node(self, node, parent, index):
        alias = self.anchors[node]
        if node in self.serialized_nodes:
            self.emit(AliasEvent(alias))
        else:
            self.serialized_nodes[node] = True
            self.descend_resolver(parent, index)
            if isinstance(node, ScalarNode):
                detected_tag = self.resolve(ScalarNode, node.value, (True, False))
                default_tag = self.resolve(ScalarNode, node.value, (False, True))
                implicit = (node.tag == detected_tag), (node.tag == default_tag)
                self.emit(ScalarEvent(alias, node.tag, implicit, node.value,
                    style=node.style))
            elif isinstance(node, SequenceNode):
                implicit = (node.tag
                            == self.resolve(SequenceNode, node.value, True))
                self.emit(SequenceStartEvent(alias, node.tag, implicit,
                    flow_style=node.flow_style))
                index = 0
                for item in node.value:
                    self.serialize_node(item, node, index)
                    index += 1
                self.emit(SequenceEndEvent())
            elif isinstance(node, MappingNode):
                implicit = (node.tag
                            == self.resolve(MappingNode, node.value, True))
                self.emit(MappingStartEvent(alias, node.tag, implicit,
                    flow_style=node.flow_style))
                for key, value in node.value:
                    self.serialize_node(key, node, None)
                    self.serialize_node(value, node, key)
                self.emit(MappingEndEvent())
            self.ascend_resolver()
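A small sketch of the open()/serialize()/close() lifecycle above, driven through the module-level helpers rather than directly (not part of this diff):

import yaml

node = yaml.compose("a: [1, 2]")   # parse text into a node graph
text = yaml.serialize(node)        # open(), serialize(node), close()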
104  lib/spack/external/yaml/lib3/yaml/tokens.py  vendored  Normal file
@@ -0,0 +1,104 @@
class Token(object):
    def __init__(self, start_mark, end_mark):
        self.start_mark = start_mark
        self.end_mark = end_mark
    def __repr__(self):
        attributes = [key for key in self.__dict__
                if not key.endswith('_mark')]
        attributes.sort()
        arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
                for key in attributes])
        return '%s(%s)' % (self.__class__.__name__, arguments)

#class BOMToken(Token):
#    id = '<byte order mark>'

class DirectiveToken(Token):
    id = '<directive>'
    def __init__(self, name, value, start_mark, end_mark):
        self.name = name
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark

class DocumentStartToken(Token):
    id = '<document start>'

class DocumentEndToken(Token):
    id = '<document end>'

class StreamStartToken(Token):
    id = '<stream start>'
    def __init__(self, start_mark=None, end_mark=None,
            encoding=None):
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.encoding = encoding

class StreamEndToken(Token):
    id = '<stream end>'

class BlockSequenceStartToken(Token):
    id = '<block sequence start>'

class BlockMappingStartToken(Token):
    id = '<block mapping start>'

class BlockEndToken(Token):
    id = '<block end>'

class FlowSequenceStartToken(Token):
    id = '['

class FlowMappingStartToken(Token):
    id = '{'

class FlowSequenceEndToken(Token):
    id = ']'

class FlowMappingEndToken(Token):
    id = '}'

class KeyToken(Token):
    id = '?'

class ValueToken(Token):
    id = ':'

class BlockEntryToken(Token):
    id = '-'

class FlowEntryToken(Token):
    id = ','

class AliasToken(Token):
    id = '<alias>'
    def __init__(self, value, start_mark, end_mark):
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark

class AnchorToken(Token):
    id = '<anchor>'
    def __init__(self, value, start_mark, end_mark):
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark

class TagToken(Token):
    id = '<tag>'
    def __init__(self, value, start_mark, end_mark):
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark

class ScalarToken(Token):
    id = '<scalar>'
    def __init__(self, value, plain, start_mark, end_mark, style=None):
        self.value = value
        self.plain = plain
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.style = style
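These token classes are what the scanner yields; a quick sketch (not part of this diff):

import yaml

for token in yaml.scan("a: 1"):
    print(token)
# Roughly: StreamStartToken, BlockMappingStartToken, KeyToken,
# ScalarToken(value='a'), ValueToken, ScalarToken(value='1'),
# BlockEndToken, StreamEndToken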
@@ -1,5 +1,5 @@
 ##############################################################################
-# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
 # Produced at the Lawrence Livermore National Laboratory.
 #
 # This file is part of Spack.
@@ -22,26 +22,52 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-"""
-This test ensures that all Spack files are Python version 2.6 or less.
-
-Spack was originally 2.7, but enough systems in 2014 are still using
-2.6 on their frontend nodes that we need 2.6 to get adopted.
+"""Check that Spack complies with minimum supported python versions.
+
+We ensure that all Spack files work with Python2 >= 2.6 and Python3 >= 3.0.
+
+We'd like to drop 2.6 support at some point, but there are still many HPC
+systems that ship with RHEL6/CentOS 6, which have Python 2.6 as the
+default version. Once those go away, we can likely drop 2.6 and increase
+the minimum supported Python 3 version, as well.
 """
 import os
+import sys
 import re
 import unittest
 
 import llnl.util.tty as tty
-import pyqver2
 import spack
 
-spack_max_version = (2, 6)
+#
+# This test uses pyqver, by Greg Hewgill, which is a dual-source module.
+# That means we need to do different checks depending on whether we're
+# running Python 2 or Python 3.
+#
+if sys.version_info[0] < 3:
+    import pyqver2 as pyqver
+    spack_min_supported = (2, 6)
+
+    # Exclude Python 3 versions of dual-source modules when using Python 2.
+    exclude_paths = [
+        os.path.join(spack.lib_path, 'external', 'yaml', 'lib3'),
+        os.path.join(spack.lib_path, 'external', 'pyqver3.py')]
+
+else:
+    import pyqver3 as pyqver
+    spack_min_supported = (3, 0)
+
+    # Exclude Python 2 versions of dual-source modules when using Python 3.
+    exclude_paths = [
+        os.path.join(spack.lib_path, 'external', 'yaml', 'lib'),
+        os.path.join(spack.lib_path, 'external', 'pyqver2.py')]
 
 
 class PythonVersionTest(unittest.TestCase):
 
-    def pyfiles(self, *search_paths):
+    def pyfiles(self, search_paths, exclude=()):
+        """List python files in a set of search paths, excluding
+           any paths in the exclude list."""
         # first file is the spack script.
         yield spack.spack_file
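A sketch of what the dual-source pyqver module selected above reports, called with the same (source, path) arguments the test uses (not part of this diff; the exact reason string may vary by pyqver version):

import pyqver2 as pyqver

# Dict comprehensions need Python >= 2.7, so this source is flagged:
versions = pyqver.get_versions("squares = {n: n * n for n in range(5)}\n",
                               '<example>')
# -> roughly {(2, 7): [(1, 'dictionary comprehension')]}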
@@ -49,53 +75,71 @@ def pyfiles(self, *search_paths):
         for path in search_paths:
             for root, dirnames, filenames in os.walk(path):
                 for filename in filenames:
+                    realpath = os.path.realpath(os.path.join(root, filename))
+                    if any(realpath.startswith(p) for p in exclude):
+                        continue
+
                     if re.match(r'^[^.#].*\.py$', filename):
                         yield os.path.join(root, filename)
 
-    def package_py_files(self):
-        for name in spack.repo.all_package_names():
-            yield spack.repo.filename_for_package_name(name)
-
-    def check_python_versions(self, *files):
-        # dict version -> filename -> reasons
+    def check_python_versions(self, files):
+        # This is a dict mapping:
+        #   version -> filename -> reasons
+        #
+        # Reasons are tuples of (lineno, string), where the string is the
+        # cause for a version incompatibility.
         all_issues = {}
 
-        for fn in files:
-            with open(fn) as pyfile:
-                versions = pyqver2.get_versions(pyfile.read())
-                for ver, reasons in versions.items():
-                    if ver > spack_max_version:
-                        if ver not in all_issues:
-                            all_issues[ver] = {}
-                        all_issues[ver][fn] = reasons
+        # Parse files and run pyqver on each file.
+        for path in files:
+            with open(path) as pyfile:
+                full_text = pyfile.read()
+            versions = pyqver.get_versions(full_text, path)
+
+            for ver, reasons in versions.items():
+                if ver <= spack_min_supported:
+                    continue
+
+                # Record issues. Mark exceptions with '# nopyqver' comment.
+                for lineno, cause in reasons:
+                    lines = full_text.split('\n')
+                    if not re.search(r'#\s*nopyqver\s*$', lines[lineno - 1]):
+                        all_issues.setdefault(ver, {})[path] = reasons
+
+        # Print a message if there are issues.
         if all_issues:
-            tty.error("Spack must run on Python version %d.%d"
-                      % spack_max_version)
+            tty.msg("Spack must remain compatible with Python version %d.%d"
+                    % spack_min_supported)
 
+        # Print out a table showing which files/linenos require which
+        # python version, and a string describing why.
         for v in sorted(all_issues.keys(), reverse=True):
-            msgs = []
-            for fn in sorted(all_issues[v].keys()):
-                short_fn = fn
-                if fn.startswith(spack.prefix):
-                    short_fn = fn[len(spack.prefix):]
+            messages = []
+            for path in sorted(all_issues[v].keys()):
+                short_path = path
+                if path.startswith(spack.prefix):
+                    short_path = path[len(spack.prefix):]
 
-                reasons = [r for r in set(all_issues[v][fn]) if r]
-                for r in reasons:
-                    msgs.append(("%s:%s" % ('spack' + short_fn, r[0]), r[1]))
+                reasons = [r for r in set(all_issues[v][path]) if r]
+                for lineno, cause in reasons:
+                    file_line = "%s:%s" % (short_path.lstrip('/'), lineno)
+                    messages.append((file_line, cause))
 
-            tty.error("These files require version %d.%d:" % v)
-            maxlen = max(len(f) for f, prob in msgs)
+            print()
+            tty.msg("These files require version %d.%d:" % v)
+            maxlen = max(len(f) for f, prob in messages)
             fmt = "%%-%ds%%s" % (maxlen + 3)
-            print fmt % ('File', 'Reason')
-            print fmt % ('-' * (maxlen), '-' * 20)
-            for msg in msgs:
-                print fmt % msg
+            print(fmt % ('File', 'Reason'))
+            print(fmt % ('-' * (maxlen), '-' * 20))
+            for msg in messages:
+                print(fmt % msg)
 
+        # Fail this test if there were issues.
         self.assertTrue(len(all_issues) == 0)
 
     def test_core_module_compatibility(self):
-        self.check_python_versions(*self.pyfiles(spack.lib_path))
+        self.check_python_versions(
+            self.pyfiles([spack.lib_path], exclude=exclude_paths))
 
     def test_package_module_compatibility(self):
-        self.check_python_versions(*self.pyfiles(spack.packages_path))
+        self.check_python_versions(self.pyfiles([spack.packages_path]))
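Per the '# nopyqver' escape hatch tested above, a known version-specific line can be exempted with a trailing comment; a sketch (not part of this diff):

# This dict comprehension needs Python >= 2.7; the trailing marker tells
# the version check above to skip it.
squares = {n: n * n for n in range(5)}  # nopyqver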