Allow version to accept the '=' token without activating lexer switch (#32257)
commit 045a5e80cb, parent 01c9780577
2 changed files with 128 additions and 56 deletions
The first changed file updates the spec lexer and parser:

@@ -4981,7 +4981,7 @@ def __missing__(self, key):
 
 
 #: These are possible token types in the spec grammar.
-HASH, DEP, AT, COLON, COMMA, ON, OFF, PCT, EQ, ID, VAL, FILE = range(12)
+HASH, DEP, VER, COLON, COMMA, ON, OFF, PCT, EQ, ID, VAL, FILE = range(12)
 
 #: Regex for fully qualified spec names. (e.g., builtin.hdf5)
 spec_id_re = r"\w[\w.-]*"
@@ -5001,10 +5001,13 @@ def __init__(self):
         )
         super(SpecLexer, self).__init__(
             [
-                (r"\^", lambda scanner, val: self.token(DEP, val)),
-                (r"\@", lambda scanner, val: self.token(AT, val)),
+                (
+                    r"\@([\w.\-]*\s*)*(\s*\=\s*\w[\w.\-]*)?",
+                    lambda scanner, val: self.token(VER, val),
+                ),
                 (r"\:", lambda scanner, val: self.token(COLON, val)),
                 (r"\,", lambda scanner, val: self.token(COMMA, val)),
+                (r"\^", lambda scanner, val: self.token(DEP, val)),
                 (r"\+", lambda scanner, val: self.token(ON, val)),
                 (r"\-", lambda scanner, val: self.token(OFF, val)),
                 (r"\~", lambda scanner, val: self.token(OFF, val)),
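A rough illustration of what the new VER pattern accepts (a standalone sketch using Python's re module directly, not the Spack lexer, which also applies the other rules above):

    import re

    # The VER pattern introduced above, copied verbatim from the diff.
    ver = re.compile(r"\@([\w.\-]*\s*)*(\s*\=\s*\w[\w.\-]*)?")

    print(ver.match("@1.2").group(0))      # '@1.2'
    print(ver.match("@1.2:1.4").group(0))  # '@1.2' -- ':' is left for the COLON rule
    print(ver.match("@git." + "a" * 40 + "=develop").group(0))
    # matches the whole git-ref version, so '=' no longer needs a lexer switch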
@@ -5142,7 +5145,7 @@ def do_parse(self):
                else:
                    # If the next token can be part of a valid anonymous spec,
                    # create the anonymous spec
-                    if self.next.type in (AT, ON, OFF, PCT):
+                    if self.next.type in (VER, ON, OFF, PCT):
                        # Raise an error if the previous spec is already concrete
                        if specs and specs[-1].concrete:
                            raise RedundantSpecError(specs[-1], "compiler, version, " "or variant")
@@ -5250,7 +5253,7 @@ def spec(self, name):
         spec.name = spec_name
 
         while self.next:
-            if self.accept(AT):
+            if self.accept(VER):
                 vlist = self.version_list()
                 spec._add_versions(vlist)
 
@@ -5268,7 +5271,6 @@ def spec(self, name):
             elif self.accept(ID):
                 self.previous = self.token
                 if self.accept(EQ):
-                    # We're adding a key-value pair to the spec
                     self.expect(VAL)
                     spec._add_flag(self.previous.value, self.token.value)
                     self.previous = None
@@ -5304,16 +5306,24 @@ def variant(self, name=None):
         return self.token.value
 
     def version(self):
 
         start = None
         end = None
-        if self.accept(ID):
-            start = self.token.value
-            if self.accept(EQ):
-                # This is for versions that are associated with a hash
-                # i.e. @[40 char hash]=version
-                start += self.token.value
-                self.expect(VAL)
-                start += self.token.value
+
+        def str_translate(value):
+            # return None for empty strings since we can end up with `'@'.strip('@')`
+            if not (value and value.strip()):
+                return None
+            else:
+                return value
+
+        if self.token.type is COMMA:
+            # need to increment commas, could be ID or COLON
+            self.accept(ID)
+
+        if self.token.type in (VER, ID):
+            version_spec = self.token.value.lstrip("@")
+            start = str_translate(version_spec)
+
         if self.accept(COLON):
             if self.accept(ID):
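The str_translate helper added above exists because a version may now arrive either inside the VER token (e.g. "@1.2") or as a bare "@" followed by a separate ID token when whitespace intervenes; stripping the "@" from a bare token leaves an empty string, which must not be recorded as a version. A minimal standalone sketch of that behavior (mirroring the helper in the hunk, outside the parser class):

    def str_translate(value):
        # Empty or whitespace-only strings (e.g. "@".lstrip("@")) mean "no version text yet".
        if not (value and value.strip()):
            return None
        else:
            return value

    print(str_translate("@1.2".lstrip("@")))  # '1.2'
    print(str_translate("@".lstrip("@")))     # None -- the version comes from a later ID token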
@@ -5323,10 +5333,10 @@ def version(self):
                 else:
                     end = self.token.value
         elif start:
-            # No colon, but there was a version.
+            # No colon, but there was a version
             return vn.Version(start)
         else:
-            # No colon and no id: invalid version.
+            # No colon and no id: invalid version
             self.next_token_error("Invalid version specifier")
 
         if start:
@@ -5349,7 +5359,7 @@ def compiler(self):
         compiler = CompilerSpec.__new__(CompilerSpec)
         compiler.name = self.token.value
         compiler.versions = vn.VersionList()
-        if self.accept(AT):
+        if self.accept(VER):
             vlist = self.version_list()
             compiler._add_versions(vlist)
         else:
The second changed file updates the spec-syntax tests:

@@ -31,63 +31,97 @@
 )
 from spack.variant import DuplicateVariantError
 
-# Sample output for a complex lexing.
-complex_lex = [
+# Building blocks for complex lexing.
+complex_root = [
     Token(sp.ID, "mvapich_foo"),
-    Token(sp.DEP),
-    Token(sp.ID, "_openmpi"),
-    Token(sp.AT),
-    Token(sp.ID, "1.2"),
-    Token(sp.COLON),
-    Token(sp.ID, "1.4"),
-    Token(sp.COMMA),
-    Token(sp.ID, "1.6"),
-    Token(sp.PCT),
-    Token(sp.ID, "intel"),
-    Token(sp.AT),
-    Token(sp.ID, "12.1"),
-    Token(sp.COLON),
-    Token(sp.ID, "12.6"),
-    Token(sp.ON),
-    Token(sp.ID, "debug"),
-    Token(sp.OFF),
-    Token(sp.ID, "qt_4"),
-    Token(sp.DEP),
-    Token(sp.ID, "stackwalker"),
-    Token(sp.AT),
-    Token(sp.ID, "8.1_1e"),
 ]
 
-# Another sample lexer output with a kv pair.
-kv_lex = [
+kv_root = [
     Token(sp.ID, "mvapich_foo"),
     Token(sp.ID, "debug"),
     Token(sp.EQ),
     Token(sp.VAL, "4"),
+]
+
+complex_compiler = [
+    Token(sp.PCT),
+    Token(sp.ID, "intel"),
+]
+
+complex_compiler_v = [
+    Token(sp.VER, "@12.1"),
+    Token(sp.COLON),
+    Token(sp.ID, "12.6"),
+]
+
+complex_compiler_v_space = [
+    Token(sp.VER, "@"),
+    Token(sp.ID, "12.1"),
+    Token(sp.COLON),
+    Token(sp.ID, "12.6"),
+]
+
+complex_dep1 = [
     Token(sp.DEP),
     Token(sp.ID, "_openmpi"),
-    Token(sp.AT),
+    Token(sp.VER, "@1.2"),
+    Token(sp.COLON),
+    Token(sp.ID, "1.4"),
+    Token(sp.COMMA),
+    Token(sp.ID, "1.6"),
+]
+
+complex_dep1_space = [
+    Token(sp.DEP),
+    Token(sp.ID, "_openmpi"),
+    Token(sp.VER, "@"),
     Token(sp.ID, "1.2"),
     Token(sp.COLON),
     Token(sp.ID, "1.4"),
     Token(sp.COMMA),
     Token(sp.ID, "1.6"),
-    Token(sp.PCT),
-    Token(sp.ID, "intel"),
-    Token(sp.AT),
-    Token(sp.ID, "12.1"),
-    Token(sp.COLON),
-    Token(sp.ID, "12.6"),
+]
+
+complex_dep1_var = [
     Token(sp.ON),
     Token(sp.ID, "debug"),
     Token(sp.OFF),
     Token(sp.ID, "qt_4"),
+]
+
+complex_dep2 = [
     Token(sp.DEP),
     Token(sp.ID, "stackwalker"),
-    Token(sp.AT),
+    Token(sp.VER, "@8.1_1e"),
+]
+
+complex_dep2_space = [
+    Token(sp.DEP),
+    Token(sp.ID, "stackwalker"),
+    Token(sp.VER, "@"),
     Token(sp.ID, "8.1_1e"),
 ]
 
+# Sample output from complex lexing
+complex_lex = (
+    complex_root
+    + complex_dep1
+    + complex_compiler
+    + complex_compiler_v
+    + complex_dep1_var
+    + complex_dep2
+)
+
+# Another sample lexer output with a kv pair.
+kv_lex = (
+    kv_root
+    + complex_dep1
+    + complex_compiler
+    + complex_compiler_v_space
+    + complex_dep1_var
+    + complex_dep2_space
+)
+
 
 class TestSpecSyntax(object):
     # ========================================================================
@@ -120,7 +154,7 @@ def check_lex(self, tokens, spec):
         lex_output = sp.SpecLexer().lex(spec)
         assert len(tokens) == len(lex_output), "unexpected number of tokens"
         for tok, spec_tok in zip(tokens, lex_output):
-            if tok.type == sp.ID or tok.type == sp.VAL:
+            if tok.type in (sp.ID, sp.VAL, sp.VER):
                 assert tok == spec_tok
             else:
                 # Only check the type for non-identifiers.
@@ -716,14 +750,22 @@ def test_minimal_spaces(self):
         )
 
     def test_spaces_between_dependences(self):
+        lex_key = (
+            complex_root
+            + complex_dep1
+            + complex_compiler
+            + complex_compiler_v
+            + complex_dep1_var
+            + complex_dep2_space
+        )
         self.check_lex(
-            complex_lex,
+            lex_key,
             "mvapich_foo "
             "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug -qt_4 "
             "^stackwalker @ 8.1_1e",
         )
         self.check_lex(
-            complex_lex,
+            lex_key,
             "mvapich_foo "
             "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4 "
             "^stackwalker @ 8.1_1e",
@@ -738,14 +780,30 @@ def test_spaces_between_options(self):
         )
 
     def test_way_too_many_spaces(self):
+        lex_key = (
+            complex_root
+            + complex_dep1
+            + complex_compiler
+            + complex_compiler_v_space
+            + complex_dep1_var
+            + complex_dep2_space
+        )
         self.check_lex(
-            complex_lex,
+            lex_key,
             "mvapich_foo "
             "^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 "
             "^ stackwalker @ 8.1_1e",
         )
+        lex_key = (
+            complex_root
+            + complex_dep1
+            + complex_compiler
+            + complex_compiler_v_space
+            + complex_dep1_var
+            + complex_dep2_space
+        )
         self.check_lex(
-            complex_lex,
+            lex_key,
             "mvapich_foo "
             "^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug ~ qt_4 "
             "^ stackwalker @ 8.1_1e",
@@ -838,6 +896,10 @@ def test_compare_abstract_specs(self):
         # Check that we can compare without raising an error
         assert a <= b or b < a
 
+    def test_git_ref_specs_with_variants(self):
+        spec_str = "develop-branch-version@git.{h}=develop+var1+var2".format(h="a" * 40)
+        self.check_parse(spec_str)
+
     def test_git_ref_spec_equivalences(self, mock_packages, mock_stage):
         s1 = sp.Spec("develop-branch-version@git.{hash}=develop".format(hash="a" * 40))
         s2 = sp.Spec("develop-branch-version@git.{hash}=develop".format(hash="b" * 40))
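The new test exercises the case that motivated this change: a git-ref version containing '=' followed by variants. A usage sketch (assuming a Spack checkout containing this commit; "develop-branch-version" is a mock package used by the test suite):

    import spack.spec as sp

    h = "a" * 40
    s = sp.Spec("develop-branch-version@git.{0}=develop+var1+var2".format(h))
    print(s.versions)  # the git.<hash>=develop version
    print(s.variants)  # +var1 +var2 parsed as variants, not swallowed by the version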