if match(item.expect, token):
    new_item = item.advance()
    label = (new_item.s, new_item.start, i)
    new_item.node = node_cache[label] if label in node_cache else node_cache.setdefault(label, SymbolNode(*label))
    new_item.node.add_family(new_item.s, item.rule, new_item.start, item.node, token)

    if new_item.expect in self.TERMINALS:
        # add (B ::= Aai+1.B, h, y) to Q'
        next_to_scan.add(new_item)
    else:
        # add (B ::= Aai+1.B, h, y) to Ei+1
        next_set.add(new_item)

if not next_set and not next_to_scan:
    expect = {i.expect.name for i in to_scan}
    raise UnexpectedToken(token, expect, considered_rules=set(to_scan))

return next_to_scan
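When the Earley scanner above finds that no pending item can consume the current token, the parse fails with UnexpectedToken (or, depending on the lexer in use, UnexpectedCharacters). A minimal sketch of catching either from the caller's side; the grammar and input are made up for illustration, and both exception types share the UnexpectedInput base class:

from lark import Lark
from lark.exceptions import UnexpectedInput  # base class of UnexpectedToken and UnexpectedCharacters

# Illustrative grammar; "earley" is Lark's default parser.
parser = Lark(r"""
    start: "a"+ "b"
""", parser="earley")

try:
    parser.parse("aac")
except UnexpectedInput as e:
    # Both subclasses carry line/column information.
    print(type(e).__name__, "at line", e.line, "column", e.column)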
def get_action(token):
    state = state_stack[-1]
    try:
        return states[state][token.type]
    except KeyError:
        expected = [s for s in states[state].keys() if s.isupper()]
        raise UnexpectedToken(token, expected, state=state)
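For reference, this table lookup is where a LALR parse gives up on a token it cannot shift or reduce. A self-contained sketch of what that looks like from user code, using a toy grammar and input chosen only to trigger this path (the attribute names follow the __init__ shown below):

from lark import Lark
from lark.exceptions import UnexpectedToken

parser = Lark(r"""
    start: NAME "=" NUMBER
    NAME: /[a-z]+/
    NUMBER: /\d+/
    %ignore " "
""", parser="lalr")

try:
    parser.parse("x = y")  # a NUMBER is required after "=", but a NAME follows
except UnexpectedToken as e:
    print("got", e.token.type, "at line", e.line, "column", e.column)
    print("expected one of:", e.expected)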
def __init__(self, token, expected, considered_rules=None, state=None):
    self.token = token
    self.expected = expected    # XXX str shouldn't be necessary
    self.line = getattr(token, 'line', '?')
    self.column = getattr(token, 'column', '?')
    self.considered_rules = considered_rules
    self.state = state
    self.pos_in_stream = getattr(token, 'pos_in_stream', None)

    message = ("Unexpected token %r at line %s, column %s.\n"
               "Expected one of: \n\t* %s\n"
               % (token, self.line, self.column, '\n\t* '.join(self.expected)))

    super(UnexpectedToken, self).__init__(message)
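The constructor only records context and formats a human-readable message, so its output is easy to preview in isolation. A small sketch using a hand-built Token; the positional Token arguments mirror the lexer snippet further down (type, value, start position, line, column), and the printed message looks roughly like the trailing comments:

from lark import Token
from lark.exceptions import UnexpectedToken

tok = Token("NUMBER", "42", 10, 2, 5)
err = UnexpectedToken(tok, expected={"NAME", "STRING"})
print(err)
# Unexpected token Token('NUMBER', '42') at line 2, column 5.
# Expected one of:
#     * NAME
#     * STRING    (order of the expected set is not guaranteed)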
try:
    for x in l.lex(stream, self.root_lexer.newline_types, self.root_lexer.ignore_types):
        yield x
        parser_state = get_parser_state()
        l.lexer = self.lexers[parser_state]
        l.state = parser_state  # For debug only, no need to worry about multithreading
except UnexpectedCharacters as e:
    # In the contextual lexer, UnexpectedCharacters can mean that the terminal is defined,
    # but not in the current context.
    # This tests the input against the global context, to provide a nicer error.
    root_match = self.root_lexer.match(stream, e.pos_in_stream)
    if not root_match:
        raise

    value, type_ = root_match
    t = Token(type_, value, e.pos_in_stream, e.line, e.column)
    raise UnexpectedToken(t, e.allowed, state=e.state)
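The re-match against the root lexer is what turns "terminal exists, but not in this context" into an UnexpectedToken instead of an UnexpectedCharacters. A sketch of that behaviour; the grammar is illustrative, and lexer="contextual" is the default for parser="lalr":

from lark import Lark
from lark.exceptions import UnexpectedCharacters, UnexpectedToken

parser = Lark(r"""
    start: "if" NAME "then" NAME
    NAME: /[a-z_]+/
    %ignore " "
""", parser="lalr", lexer="contextual")

try:
    parser.parse("if x if y")   # "if" is a known terminal, just not legal here
except UnexpectedToken as e:
    print("misplaced token:", e.token, "expected:", e.expected)
except UnexpectedCharacters:
    print("text that matches no terminal anywhere in the grammar")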
logging.basicConfig(level=logging.DEBUG)
parser = Lark(PROPERTY_GRAMMAR, parser="lalr", start="property", debug=True)
type_checker = HplTypeChecker(TEST_TOPICS, TEST_FIELDS, TEST_CONSTANTS)
transformer = PropertyTransformer(type_checker=type_checker)
for test_str in FAILING_TESTS:
    try:
        tree = parser.parse(test_str)
        tree = transformer.transform(tree)
        print()
        print(test_str)
        assert False, "expected failure"
    except UnexpectedToken:
        pass
    except UnexpectedCharacters:
        pass
    except TypeError:
        pass
    except SyntaxError:
        pass

for test_str in PASSING_TESTS:
    print()
    print(test_str)
    tree = parser.parse(test_str)
    tree = transformer.transform(tree)
    print(tree)

print("All", len(FAILING_TESTS) + len(PASSING_TESTS), "tests passed.")