More robust spec parsing and error messages
parent c3f52f0200
commit ad4411bc9e
4 changed files with 121 additions and 42 deletions
@@ -2,23 +2,33 @@
 import spack.error as err
 import itertools

-class UnlexableInputError(err.SpackError):
-    """Raised when we don't know how to lex something."""
-    def __init__(self, message):
-        super(UnlexableInputError, self).__init__(message)
-
-
 class ParseError(err.SpackError):
     """Raised when we don't hit an error while parsing."""
-    def __init__(self, message):
+    def __init__(self, message, string, pos):
         super(ParseError, self).__init__(message)
+        self.string = string
+        self.pos = pos
+
+    def print_error(self, out):
+        out.write(self.message + ":\n\n")
+        out.write("    " + self.string + "\n")
+        out.write("    " + self.pos * " " + "^\n\n")
+
+
+class LexError(ParseError):
+    """Raised when we don't know how to lex something."""
+    def __init__(self, message, string, pos):
+        super(LexError, self).__init__(message, string, pos)


 class Token:
     """Represents tokens; generated from input by lexer and fed to parse()."""
-    def __init__(self, type, value=''):
+    def __init__(self, type, value='', start=0, end=0):
         self.type = type
         self.value = value
+        self.start = start
+        self.end = end

     def __repr__(self):
         return str(self)
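For context, ParseError now carries the offending input string and a character position, and print_error() underlines that position with a caret. A minimal usage sketch, assuming this module is importable as spack.parse and that callers pass a stream such as sys.stderr (both assumptions, not stated in the diff):

    import sys
    from spack.parse import ParseError  # assumed import path

    try:
        # Position 6 points at the second colon of the input below.
        raise ParseError("Unexpected token", "x@1.2::", 6)
    except ParseError as e:
        e.print_error(sys.stderr)
        # Writes roughly:
        # Unexpected token:
        #
        #     x@1.2::
        #           ^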
@@ -33,18 +43,19 @@ def __cmp__(self, other):
         return cmp((self.type, self.value),
                    (other.type, other.value))


 class Lexer(object):
     """Base class for Lexers that keep track of line numbers."""
     def __init__(self, lexicon):
         self.scanner = re.Scanner(lexicon)

     def token(self, type, value=''):
-        return Token(type, value)
+        return Token(type, value, self.scanner.match.start(0), self.scanner.match.end(0))

     def lex(self, text):
         tokens, remainder = self.scanner.scan(text)
         if remainder:
-            raise UnlexableInputError("Unlexable input:\n%s\n" % remainder)
+            raise LexError("Invalid character", text, text.index(remainder))
         return tokens
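The token() override above relies on re.Scanner exposing the current match object as scanner.match, which is how the start/end offsets reach Token. A stand-alone illustration with plain re (not Spack code):

    import re

    # Each lexicon callback can read scanner.match; Lexer.token() uses its
    # start()/end() to record where the token sits in the input.
    scanner = re.Scanner([
        (r'\w+', lambda scanner, val: (val, scanner.match.start(0), scanner.match.end(0))),
        (r'\s+', lambda scanner, val: None),  # whitespace tokens are dropped
    ])
    tokens, remainder = scanner.scan("libelf mpich")
    # tokens == [('libelf', 0, 6), ('mpich', 7, 12)], remainder == ''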
@@ -55,6 +66,7 @@ def __init__(self, lexer):
         self.token = None  # last accepted token
         self.next = None   # next token
         self.lexer = lexer
+        self.text = None

     def gettok(self):
         """Puts the next token in the input stream into self.next."""
@@ -76,8 +88,16 @@ def accept(self, id):
             return True
         return False

+    def next_token_error(self, message):
+        """Raise an error about the next token in the stream."""
+        raise ParseError(message, self.text, self.token.end)
+
+    def last_token_error(self, message):
+        """Raise an error about the previous token in the stream."""
+        raise ParseError(message, self.text, self.token.start)
+
     def unexpected_token(self):
-        raise ParseError("Unexpected token: %s." % self.next)
+        self.next_token_error("Unexpected token")

     def expect(self, id):
         """Like accept(), but fails if we don't like the next token."""
@@ -87,9 +107,10 @@ def expect(self, id):
             if self.next:
                 self.unexpected_token()
             else:
-                raise ParseError("Unexpected end of file.")
+                self.next_token_error("Unexpected end of file")
             sys.exit(1)

     def parse(self, text):
+        self.text = text
         self.push_tokens(self.lexer.lex(text))
         return self.do_parse()
@@ -44,17 +44,22 @@
 import spack.error


-class DuplicateDependenceError(spack.error.SpackError):
-    """Raised when the same dependence occurs in a spec twice."""
+class SpecError(spack.error.SpackError):
+    """Superclass for all errors that occur while constructing specs."""
     def __init__(self, message):
-        super(DuplicateDependenceError, self).__init__(message)
+        super(SpecError, self).__init__(message)

-class DuplicateVariantError(spack.error.SpackError):
+class DuplicateDependencyError(SpecError):
+    """Raised when the same dependency occurs in a spec twice."""
+    def __init__(self, message):
+        super(DuplicateDependencyError, self).__init__(message)
+
+class DuplicateVariantError(SpecError):
     """Raised when the same variant occurs in a spec twice."""
     def __init__(self, message):
-        super(VariantVariantError, self).__init__(message)
+        super(DuplicateVariantError, self).__init__(message)

-class DuplicateCompilerError(spack.error.SpackError):
+class DuplicateCompilerError(SpecError):
     """Raised when the same compiler occurs in a spec twice."""
     def __init__(self, message):
         super(DuplicateCompilerError, self).__init__(message)
@@ -88,14 +93,18 @@ def add_version(self, version):
         self.versions.append(version)

     def add_variant(self, name, enabled):
+        if name in self.variants: raise DuplicateVariantError(
+            "Cannot specify variant '%s' twice" % name)
         self.variants[name] = enabled

     def add_compiler(self, compiler):
+        if self.compiler: raise DuplicateCompilerError(
+            "Spec for '%s' cannot have two compilers." % self.name)
         self.compiler = compiler

     def add_dependency(self, dep):
         if dep.name in self.dependencies:
-            raise ValueError("Cannot depend on %s twice" % dep)
+            raise DuplicateDependencyError("Cannot depend on '%s' twice" % dep)
         self.dependencies[dep.name] = dep

     def __str__(self):
@@ -140,21 +149,36 @@ def __init__(self):
             (r'\w[\w.-]*', lambda scanner, val: self.token(ID, val)),
             (r'\s+', lambda scanner, val: None)])


 class SpecParser(spack.parse.Parser):
     def __init__(self):
         super(SpecParser, self).__init__(SpecLexer())

-    def spec(self):
-        self.expect(ID)
-        self.check_identifier()
-
-        spec = Spec(self.token.value)
+    def do_parse(self):
+        specs = []
         while self.next:
-            if self.accept(DEP):
-                dep = self.spec()
-                spec.add_dependency(dep)
+            if self.accept(ID):
+                specs.append(self.spec())

-            elif self.accept(AT):
+            elif self.accept(DEP):
+                if not specs:
+                    self.last_token_error("Dependency has no package")
+                self.expect(ID)
+                specs[-1].add_dependency(self.spec())
+
+            else:
+                self.unexpected_token()
+
+        return specs
+
+
+    def spec(self):
+        self.check_identifier()
+        spec = Spec(self.token.value)
+
+        while self.next:
+            if self.accept(AT):
                 vlist = self.version_list()
                 for version in vlist:
                     spec.add_version(version)
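In short, do_parse() now owns the top-level loop: each leading ID starts a new spec, and a DEP token ('^') attaches a dependency to the most recent one, while spec() is left to parse a single package clause. A rough sketch of the resulting behavior, assuming the parser is importable as spack.spec.SpecParser and using arbitrary package names:

    from spack.spec import SpecParser  # assumed import path

    specs = SpecParser().parse("libelf@0.8.13 ^libdwarf")
    # do_parse(): ID 'libelf' opens a Spec and spec() consumes '@0.8.13';
    # DEP '^' then expects an ID and attaches Spec('libdwarf') to specs[-1].
    assert len(specs) == 1
    assert 'libdwarf' in specs[0].dependencies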
@@ -173,7 +197,7 @@ def spec(self):
                 spec.add_compiler(self.compiler())

             else:
-                self.unexpected_token()
+                break

         return spec
@@ -187,11 +211,15 @@ def version(self):
         if self.accept(COLON):
             if self.accept(ID):
                 end = self.token.value
-        else:
+        elif start:
+            # No colon, but there was a version.
             return Version(start)
+        else:
+            # No colon and no id: invalid version.
+            self.next_token_error("Invalid version specifier")

         if not start and not end:
-            raise ParseError("Lone colon: version range needs at least one version.")
+            self.next_token_error("Lone colon: version range needs a version")
         else:
             if start: start = Version(start)
             if end: end = Version(end)
@@ -200,10 +228,9 @@ def version(self):

     def version_list(self):
         vlist = []
-        while True:
-            vlist.append(self.version())
-            if not self.accept(COMMA):
-                break
+        vlist.append(self.version())
+        while self.accept(COMMA):
+            vlist.append(self.version())
         return vlist
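A comment-only walkthrough of the reworked version_list() on one of the version strings exercised in the tests below; token names follow the lexer above:

    # "@1.2:1.4,1.6" lexes to AT, ID(1.2), COLON, ID(1.4), COMMA, ID(1.6).
    # version_list() now parses one version unconditionally, then keeps
    # appending for as long as it can accept a COMMA:
    #   version() -> 1.2:1.4   (start and end given: a range)
    #   COMMA     -> accepted
    #   version() -> 1.6       (no colon, start only)
    # The two results are added to the spec via add_version() in spec().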
@@ -224,12 +251,9 @@ def check_identifier(self):
         basis.  Call this if we detect a version id where it shouldn't be.
         """
         if '.' in self.token.value:
-            raise spack.parse.ParseError(
-                "Non-version identifier cannot contain '.'")
+            self.last_token_error("Identifier cannot contain '.'")


-    def do_parse(self):
-        specs = []
-        while self.next:
-            specs.append(self.spec())
-        return specs
+def parse(string):
+    """Returns a list of specs from an input string."""
+    return SpecParser().parse(string)
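With do_parse() on SpecParser, the module now also exposes a convenience parse() function. A minimal sketch of calling it, again assuming the module is importable as spack.spec:

    import spack.parse
    import spack.spec

    specs = spack.spec.parse("x@1.2:1.4+debug ^y%intel")
    print(" ".join(str(s) for s in specs))

    try:
        spack.spec.parse("x@1.2::")  # one of the test_parse_errors inputs
    except spack.parse.ParseError as e:
        pass  # e.string and e.pos point at the offending input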
@@ -48,8 +48,8 @@ def check_parse(self, expected, spec=None):
         if spec == None:
             spec = expected
         output = self.parser.parse(spec)
-        self.assertEqual(len(output), 1)
-        self.assertEqual(str(output[0]), spec)
+        parsed = (" ".join(str(spec) for spec in output))
+        self.assertEqual(expected, parsed)

     def check_lex(self, tokens, spec):
@@ -85,7 +85,38 @@ def test_full_specs(self):

     def test_canonicalize(self):
         self.check_parse(
-            "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4 ^stackwalker@8.1_1e")
+            "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4 ^stackwalker@8.1_1e",
+            "mvapich_foo ^_openmpi@1.6,1.2:1.4%intel@12.1:12.6+debug~qt_4 ^stackwalker@8.1_1e")
+
+        self.check_parse(
+            "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4 ^stackwalker@8.1_1e",
+            "mvapich_foo ^stackwalker@8.1_1e ^_openmpi@1.6,1.2:1.4%intel@12.1:12.6~qt_4+debug")
+
+        self.check_parse(
+            "x ^y@1,2:3,4%intel@1,2,3,4+a~b+c~d+e~f",
+            "x ^y~f+e~d+c~b+a@4,2:3,1%intel@4,3,2,1")
+
+    def test_parse_errors(self):
+        self.assertRaises(ParseError, self.check_parse, "x@@1.2")
+        self.assertRaises(ParseError, self.check_parse, "x ^y@@1.2")
+        self.assertRaises(ParseError, self.check_parse, "x@1.2::")
+        self.assertRaises(ParseError, self.check_parse, "x::")
+
+    def test_duplicate_variant(self):
+        self.assertRaises(DuplicateVariantError, self.check_parse, "x@1.2+debug+debug")
+        self.assertRaises(DuplicateVariantError, self.check_parse, "x ^y@1.2+debug+debug")
+
+    def test_duplicate_depdendence(self):
+        self.assertRaises(DuplicateDependencyError, self.check_parse, "x ^y ^y")
+
+    def test_duplicate_compiler(self):
+        self.assertRaises(DuplicateCompilerError, self.check_parse, "x%intel%intel")
+        self.assertRaises(DuplicateCompilerError, self.check_parse, "x%intel%gnu")
+        self.assertRaises(DuplicateCompilerError, self.check_parse, "x%gnu%intel")
+        self.assertRaises(DuplicateCompilerError, self.check_parse, "x ^y%intel%intel")
+        self.assertRaises(DuplicateCompilerError, self.check_parse, "x ^y%intel%gnu")
+        self.assertRaises(DuplicateCompilerError, self.check_parse, "x ^y%gnu%intel")


 # ================================================================================
 # Lex checks
@@ -134,6 +134,9 @@ def __ne__(self, other):
         return not (self == other)

+    def __repr__(self):
+        return self.__str__()
+
     def __str__(self):
         out = ""
         if self.start: