From ad4411bc9e8de6d13edcf3be78d78a989475b1ec Mon Sep 17 00:00:00 2001
From: Todd Gamblin
Date: Sun, 12 May 2013 11:10:10 -0700
Subject: [PATCH] More robust spec parsing and error messages

---
 lib/spack/spack/parse.py      | 43 ++++++++++++++-----
 lib/spack/spack/spec.py       | 80 +++++++++++++++++++++++------------
 lib/spack/spack/test/specs.py | 37 ++++++++++++++--
 lib/spack/spack/version.py    |  3 ++
 4 files changed, 121 insertions(+), 42 deletions(-)

diff --git a/lib/spack/spack/parse.py b/lib/spack/spack/parse.py
index f39def022f..1d10bda6af 100644
--- a/lib/spack/spack/parse.py
+++ b/lib/spack/spack/parse.py
@@ -2,23 +2,33 @@
 import spack.error as err
 import itertools
 
-class UnlexableInputError(err.SpackError):
-    """Raised when we don't know how to lex something."""
-    def __init__(self, message):
-        super(UnlexableInputError, self).__init__(message)
-
 class ParseError(err.SpackError):
     """Raised when we don't hit an error while parsing."""
-    def __init__(self, message):
+    def __init__(self, message, string, pos):
         super(ParseError, self).__init__(message)
+        self.string = string
+        self.pos = pos
+
+    def print_error(self, out):
+        out.write(self.message + ":\n\n")
+        out.write(" " + self.string + "\n")
+        out.write(" " + self.pos * " " + "^\n\n")
+
+
+class LexError(ParseError):
+    """Raised when we don't know how to lex something."""
+    def __init__(self, message, string, pos):
+        super(LexError, self).__init__(message, string, pos)
 
 
 class Token:
     """Represents tokens; generated from input by lexer and fed to parse()."""
-    def __init__(self, type, value=''):
+    def __init__(self, type, value='', start=0, end=0):
         self.type = type
         self.value = value
+        self.start = start
+        self.end = end
 
     def __repr__(self):
         return str(self)
@@ -33,18 +43,19 @@ def __cmp__(self, other):
         return cmp((self.type, self.value), (other.type, other.value))
 
+
 class Lexer(object):
     """Base class for Lexers that keep track of line numbers."""
     def __init__(self, lexicon):
         self.scanner = re.Scanner(lexicon)
 
     def token(self, type, value=''):
-        return Token(type, value)
+        return Token(type, value, self.scanner.match.start(0), self.scanner.match.end(0))
 
     def lex(self, text):
         tokens, remainder = self.scanner.scan(text)
         if remainder:
-            raise UnlexableInputError("Unlexable input:\n%s\n" % remainder)
+            raise LexError("Invalid character", text, text.index(remainder))
         return tokens
 
 
@@ -55,6 +66,7 @@ def __init__(self, lexer):
         self.token = None    # last accepted token
         self.next = None     # next token
         self.lexer = lexer
+        self.text = None
 
     def gettok(self):
         """Puts the next token in the input stream into self.next."""
@@ -76,8 +88,16 @@ def accept(self, id):
             return True
         return False
 
+    def next_token_error(self, message):
+        """Raise an error about the next token in the stream."""
+        raise ParseError(message, self.text, self.token.end)
+
+    def last_token_error(self, message):
+        """Raise an error about the previous token in the stream."""
+        raise ParseError(message, self.text, self.token.start)
+
     def unexpected_token(self):
-        raise ParseError("Unexpected token: %s." % self.next)
+        self.next_token_error("Unexpected token")
 
     def expect(self, id):
         """Like accept(), but fails if we don't like the next token."""
@@ -87,9 +107,10 @@ def expect(self, id):
             if self.next:
                 self.unexpected_token()
             else:
-                raise ParseError("Unexpected end of file.")
+                self.next_token_error("Unexpected end of file")
             sys.exit(1)
 
     def parse(self, text):
+        self.text = text
         self.push_tokens(self.lexer.lex(text))
         return self.do_parse()
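The parse.py changes above all serve one idea: every Token now carries the start/end offsets it was scanned from, and ParseError keeps the full input string plus a position, so print_error() can draw a caret under the exact character that failed. A minimal standalone sketch of that technique (the regex, the token names, and the report() helper below are illustrative only, not Spack's actual classes):

    import re
    import sys

    TOKEN_RE = re.compile(r'\s*(?:(?P<ID>\w[\w.-]*)|(?P<AT>@)|(?P<BAD>.))')

    def report(message, string, pos):
        """Point a caret at the offending position, like ParseError.print_error()."""
        sys.stderr.write(message + ":\n\n")
        sys.stderr.write("    " + string + "\n")
        sys.stderr.write("    " + pos * " " + "^\n\n")
        raise SystemExit(1)

    def tokenize(text):
        """Yield (type, value, start, end) tuples, like the new Token fields."""
        for m in TOKEN_RE.finditer(text):
            kind = m.lastgroup
            if kind == 'BAD':
                report("Invalid character", text, m.start(kind))
            yield kind, m.group(kind), m.start(kind), m.end(kind)

    print(list(tokenize("libelf@0.8.13")))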
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index a04d397e50..853e7238d8 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -44,17 +44,22 @@
 import spack.error
 
-class DuplicateDependenceError(spack.error.SpackError):
-    """Raised when the same dependence occurs in a spec twice."""
+class SpecError(spack.error.SpackError):
+    """Superclass for all errors that occur while constructing specs."""
     def __init__(self, message):
-        super(DuplicateDependenceError, self).__init__(message)
+        super(SpecError, self).__init__(message)
 
-class DuplicateVariantError(spack.error.SpackError):
+class DuplicateDependencyError(SpecError):
+    """Raised when the same dependency occurs in a spec twice."""
+    def __init__(self, message):
+        super(DuplicateDependencyError, self).__init__(message)
+
+class DuplicateVariantError(SpecError):
     """Raised when the same variant occurs in a spec twice."""
     def __init__(self, message):
-        super(VariantVariantError, self).__init__(message)
+        super(DuplicateVariantError, self).__init__(message)
 
-class DuplicateCompilerError(spack.error.SpackError):
+class DuplicateCompilerError(SpecError):
     """Raised when the same compiler occurs in a spec twice."""
     def __init__(self, message):
         super(DuplicateCompilerError, self).__init__(message)
 
@@ -88,14 +93,18 @@ def add_version(self, version):
         self.versions.append(version)
 
     def add_variant(self, name, enabled):
+        if name in self.variants: raise DuplicateVariantError(
+            "Cannot specify variant '%s' twice" % name)
         self.variants[name] = enabled
 
     def add_compiler(self, compiler):
+        if self.compiler: raise DuplicateCompilerError(
+            "Spec for '%s' cannot have two compilers." % self.name)
         self.compiler = compiler
 
     def add_dependency(self, dep):
         if dep.name in self.dependencies:
-            raise ValueError("Cannot depend on %s twice" % dep)
+            raise DuplicateDependencyError("Cannot depend on '%s' twice" % dep)
         self.dependencies[dep.name] = dep
 
     def __str__(self):
@@ -140,21 +149,36 @@ def __init__(self):
             (r'\w[\w.-]*', lambda scanner, val: self.token(ID, val)),
             (r'\s+', lambda scanner, val: None)])
 
+
 class SpecParser(spack.parse.Parser):
     def __init__(self):
         super(SpecParser, self).__init__(SpecLexer())
 
-    def spec(self):
-        self.expect(ID)
-        self.check_identifier()
-        spec = Spec(self.token.value)
+    def do_parse(self):
+        specs = []
         while self.next:
-            if self.accept(DEP):
-                dep = self.spec()
-                spec.add_dependency(dep)
+            if self.accept(ID):
+                specs.append(self.spec())
 
-            elif self.accept(AT):
+            elif self.accept(DEP):
+                if not specs:
+                    self.last_token_error("Dependency has no package")
+                self.expect(ID)
+                specs[-1].add_dependency(self.spec())
+
+            else:
+                self.unexpected_token()
+
+        return specs
+
+
+    def spec(self):
+        self.check_identifier()
+        spec = Spec(self.token.value)
+
+        while self.next:
+            if self.accept(AT):
                 vlist = self.version_list()
                 for version in vlist:
                     spec.add_version(version)
@@ -173,7 +197,7 @@ def spec(self):
                 spec.add_compiler(self.compiler())
 
             else:
-                self.unexpected_token()
+                break
 
         return spec
@@ -187,11 +211,15 @@ def version(self):
         if self.accept(COLON):
             if self.accept(ID):
                 end = self.token.value
-        else:
+        elif start:
+            # No colon, but there was a version.
             return Version(start)
+        else:
+            # No colon and no id: invalid version.
+            self.next_token_error("Invalid version specifier")
 
         if not start and not end:
-            raise ParseError("Lone colon: version range needs at least one version.")
+            self.next_token_error("Lone colon: version range needs a version")
         else:
             if start: start = Version(start)
             if end: end = Version(end)
@@ -200,10 +228,9 @@ def version(self):
 
     def version_list(self):
         vlist = []
-        while True:
+        vlist.append(self.version())
+        while self.accept(COMMA):
             vlist.append(self.version())
-            if not self.accept(COMMA):
-                break
         return vlist
@@ -224,12 +251,9 @@ def check_identifier(self):
        basis.  Call this if we detect a version id where it shouldn't be.
        """
         if '.' in self.token.value:
-            raise spack.parse.ParseError(
-                "Non-version identifier cannot contain '.'")
+            self.last_token_error("Identifier cannot contain '.'")
 
-    def do_parse(self):
-        specs = []
-        while self.next:
-            specs.append(self.spec())
-        return specs
+
+def parse(string):
+    """Returns a list of specs from an input string."""
+    return SpecParser().parse(string)
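Taken together, the spec.py changes split parsing into do_parse() (a list of specs at the top level) and spec() (one package with its versions, compiler, variants, and dependencies), and they expose a module-level parse() entry point. A rough usage sketch, assuming a Spack checkout of this vintage with lib/spack on sys.path; the exact printed form is whatever Spec.__str__ produces:

    import sys
    import spack.parse
    import spack.spec

    # One top-level package with a version range, a compiler constraint,
    # variants, and a dependency introduced by '^'.
    specs = spack.spec.parse(
        "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4")
    for s in specs:
        print(s)

    # Malformed input now raises ParseError, and the exception can point
    # at the offending character in the original string.
    try:
        spack.spec.parse("x@@1.2")
    except spack.parse.ParseError as e:
        e.print_error(sys.stderr)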
diff --git a/lib/spack/spack/test/specs.py b/lib/spack/spack/test/specs.py
index 89c7844a71..cbc88fa315 100644
--- a/lib/spack/spack/test/specs.py
+++ b/lib/spack/spack/test/specs.py
@@ -48,8 +48,8 @@ def check_parse(self, expected, spec=None):
         if spec == None:
             spec = expected
         output = self.parser.parse(spec)
-        self.assertEqual(len(output), 1)
-        self.assertEqual(str(output[0]), spec)
+        parsed = (" ".join(str(spec) for spec in output))
+        self.assertEqual(expected, parsed)
 
     def check_lex(self, tokens, spec):
@@ -85,7 +85,38 @@ def test_full_specs(self):
     def test_canonicalize(self):
         self.check_parse(
-            "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4 ^stackwalker@8.1_1e")
+            "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4 ^stackwalker@8.1_1e",
+            "mvapich_foo ^_openmpi@1.6,1.2:1.4%intel@12.1:12.6+debug~qt_4 ^stackwalker@8.1_1e")
+
+        self.check_parse(
+            "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4 ^stackwalker@8.1_1e",
+            "mvapich_foo ^stackwalker@8.1_1e ^_openmpi@1.6,1.2:1.4%intel@12.1:12.6~qt_4+debug")
+
+        self.check_parse(
+            "x ^y@1,2:3,4%intel@1,2,3,4+a~b+c~d+e~f",
+            "x ^y~f+e~d+c~b+a@4,2:3,1%intel@4,3,2,1")
+
+    def test_parse_errors(self):
+        self.assertRaises(ParseError, self.check_parse, "x@@1.2")
+        self.assertRaises(ParseError, self.check_parse, "x ^y@@1.2")
+        self.assertRaises(ParseError, self.check_parse, "x@1.2::")
+        self.assertRaises(ParseError, self.check_parse, "x::")
+
+    def test_duplicate_variant(self):
+        self.assertRaises(DuplicateVariantError, self.check_parse, "x@1.2+debug+debug")
+        self.assertRaises(DuplicateVariantError, self.check_parse, "x ^y@1.2+debug+debug")
+
+    def test_duplicate_depdendence(self):
+        self.assertRaises(DuplicateDependencyError, self.check_parse, "x ^y ^y")
+
+    def test_duplicate_compiler(self):
+        self.assertRaises(DuplicateCompilerError, self.check_parse, "x%intel%intel")
+        self.assertRaises(DuplicateCompilerError, self.check_parse, "x%intel%gnu")
+        self.assertRaises(DuplicateCompilerError, self.check_parse, "x%gnu%intel")
+        self.assertRaises(DuplicateCompilerError, self.check_parse, "x ^y%intel%intel")
+        self.assertRaises(DuplicateCompilerError, self.check_parse, "x ^y%intel%gnu")
+        self.assertRaises(DuplicateCompilerError, self.check_parse, "x ^y%gnu%intel")
+
     # ================================================================================
     # Lex checks
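The reworked check_parse() above takes an explicit expected string, so the canonicalization tests can feed a deliberately scrambled spec and assert that parse-then-str() normalizes version lists and variant order. The same round trip can be written as a tiny standalone test; the class name and placement here are hypothetical, and only the spec strings and the join-of-str pattern come from the diff:

    import unittest
    import spack.spec

    class CanonicalOrderTest(unittest.TestCase):
        """Parsing and printing a spec should normalize its ordering."""

        def test_round_trip(self):
            scrambled = "x ^y~f+e~d+c~b+a@4,2:3,1%intel@4,3,2,1"
            expected  = "x ^y@1,2:3,4%intel@1,2,3,4+a~b+c~d+e~f"
            parsed = " ".join(str(s) for s in spack.spec.parse(scrambled))
            self.assertEqual(expected, parsed)

    if __name__ == '__main__':
        unittest.main()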
diff --git a/lib/spack/spack/version.py b/lib/spack/spack/version.py
index 14e1083722..9a0390451c 100644
--- a/lib/spack/spack/version.py
+++ b/lib/spack/spack/version.py
@@ -134,6 +134,9 @@ def __ne__(self, other):
         return not (self == other)
 
+    def __repr__(self):
+        return self.__str__()
+
     def __str__(self):
         out = ""
         if self.start:
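The version.py hunk simply forwards __repr__ to __str__. The practical payoff is that containers of versions and version ranges print readably, which matters now that parse() returns lists and the tests display them on failure. A toy illustration of the difference (not Spack's Version class):

    class Interval(object):
        """Toy stand-in for a version range."""
        def __init__(self, start, end):
            self.start, self.end = start, end

        def __str__(self):
            return "%s:%s" % (self.start, self.end)

        def __repr__(self):
            # Without this, a list prints as [<__main__.Interval object at 0x...>, ...]
            return self.__str__()

    print([Interval("1.2", "1.4"), Interval("1.6", "1.8")])   # [1.2:1.4, 1.6:1.8]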