def test_properties():
    parser = Lark(PROPERTY_GRAMMAR, parser="lalr",
                  start="hpl_property", debug=True)
    return test_routine(parser, GOOD_PROPERTIES, BAD_PROPERTIES)
def assert_reconstruct(self, grammar, code):
    parser = Lark(grammar, parser='lalr', maybe_placeholders=False)
    tree = parser.parse(code)
    new = Reconstructor(parser).reconstruct(tree)
    self.assertEqual(_remove_ws(code), _remove_ws(new))
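# A minimal, self-contained round trip of the Reconstructor pattern used above;
# the grammar and input here are illustrative, not taken from the original test suite.
_demo_grammar = """
    start: WORD ("," WORD)*
    %import common.WORD
    %import common.WS
    %ignore WS
"""
_demo_parser = Lark(_demo_grammar, parser='lalr', maybe_placeholders=False)
_demo_tree = _demo_parser.parse("foo, bar, baz")
assert Reconstructor(_demo_parser).reconstruct(_demo_tree).replace(" ", "") == "foo,bar,baz"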
def main():
    expr_parser = Lark(search_grammar)  # , parser='lalr')
    sample_inputs = """platform = "LANDSAT_8"
platform = "LAND SAT_8"
platform = 4
lat in [4, 6]
time in [2014, 2014]
time in [2014-03-01, 2014-04-01]
time in 2014-03-02
time in 2014-3-2
time in 2014-3
time in 2014
platform = LANDSAT_8
lat in [4, 6] time in 2014-03-02
platform=LS8 lat in [-14, -23.5] instrument="OTHER"
""".strip().split('\n')
"exactly 3 publish(m, topic)",
"publish(m, topic) {float_list[all] > float_array[all]}",
"publish(m, topic) {int_array[all] = m.int_array[none]}",
"publish(m, topic) {string_array[all] = string_list[some]}",
"publish(m, topic) {twist_array[all].linear.x >= 0.0}",
"publish(m, topic) {nested_array[1].nested_array[all].int_array[some] > 1}"
]
logging.basicConfig(level=logging.DEBUG)
parser = Lark(PROPERTY_GRAMMAR, parser="lalr", start="property", debug=True)
type_checker = HplTypeChecker(TEST_TOPICS, TEST_FIELDS, TEST_CONSTANTS)
transformer = PropertyTransformer(type_checker=type_checker)
for test_str in FAILING_TESTS:
    try:
        tree = parser.parse(test_str)
        tree = transformer.transform(tree)
        print("")
        print(test_str)
        assert False, "expected failure"
    except UnexpectedToken as e:
        pass
    except UnexpectedCharacters as e:
        pass
    except TypeError as e:
        pass
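# A self-contained illustration of the "expected failure" pattern above, using a
# tiny throwaway grammar in place of PROPERTY_GRAMMAR:
_num_parser = Lark("""
    start: NUMBER
    %import common.NUMBER
""", parser="lalr")
try:
    _num_parser.parse("not a number")
    assert False, "expected failure"
except (UnexpectedToken, UnexpectedCharacters):
    pass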
class PythonIndenter(Indenter):
    NL_type = '_NEWLINE'
    OPEN_PAREN_types = ['__LPAR', '__LSQB', '__LBRACE']
    CLOSE_PAREN_types = ['__RPAR', '__RSQB', '__RBRACE']
    INDENT_type = '_INDENT'
    DEDENT_type = '_DEDENT'
    tab_len = 8

grammar2_filename = os.path.join(__path__, 'python2.g')
grammar3_filename = os.path.join(__path__, 'python3.g')

with open(grammar2_filename) as f:
    python_parser2 = Lark(f, parser='lalr', postlex=PythonIndenter(), start='file_input')
with open(grammar3_filename) as f:
    python_parser3 = Lark(f, parser='lalr', postlex=PythonIndenter(), start='file_input')
with open(grammar2_filename) as f:
    python_parser2_earley = Lark(f, parser='lalr', lexer='standard', postlex=PythonIndenter(), start='file_input')
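# Quick smoke test of the parsers built above; the source string is illustrative
# and assumes the grammar expects a trailing newline at the end of the input.
_sample_source = "def add(a, b):\n    return a + b\n"
print(python_parser3.parse(_sample_source).pretty())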
def _read(fn, *args):
    kwargs = {'encoding': 'iso-8859-1'}
    with open(fn, *args, **kwargs) as f:
        return f.read()
def _get_lib_path():
    if os.name == 'nt':
        if 'PyPy' in sys.version:
            return os.path.join(sys.prefix, 'lib-python', sys.winver)
        else:
            return os.path.join(sys.prefix, 'Lib')
# =============================================================================
# Produce new sequence
# =============================================================================
# First we read in the sequence from the input file.
with open(args_dict["infile"], 'r') as in_file:
    seq = in_file.readlines()
seq = seq[0].rstrip()

# Definition of the grammar
fold_parser = Lark(r"""
    topology: element topology
            | element
    element: ORIENTATION LAYER POSITION
    ORIENTATION: /\+/
               | /-/
    LAYER: /A/
         | /B/
         | /C/
         | /E/
    POSITION: /\+[0-6]/
            | /-[1-6]/
def parse_bril(txt):
    parser = lark.Lark(GRAMMAR)
    tree = parser.parse(txt)
    data = JSONTransformer().transform(tree)
    return json.dumps(data, indent=2, sort_keys=True)
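# Illustrative use of parse_bril; the program text below assumes GRAMMAR follows
# the standard Bril textual syntax.
_bril_src = """
@main {
  v: int = const 5;
  print v;
}
"""
print(parse_bril(_bril_src))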
def get_ast_from_idl_string(idl_string):
    global _parser
    if _parser is None:
        _parser = Lark(grammar, start='specification')
    return _parser.parse(idl_string)
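# Illustrative call; the IDL text assumes the grammar accepts standard OMG IDL
# module and struct declarations.
_idl_src = """
module demo_msgs {
  module msg {
    struct Num {
      int64 num;
    };
  };
};
"""
print(get_ast_from_idl_string(_idl_src))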
def hpl_assumption_parser(debug=False):
    return Lark(ASSUMPTION_GRAMMAR, parser="lalr", start="hpl_assumption",
                transformer=PropertyTransformer(), debug=debug)
def main(fobj, start):
    lark_inst = Lark(fobj, parser="lalr", lexer="contextual", start=start)

    print('# The file was automatically generated by Lark v%s' % lark.__version__)
    for pyfile in EXTRACT_STANDALONE_FILES:
        with open(os.path.join(_larkdir, pyfile)) as f:
            print(extract_sections(f)['standalone'])

    data, m = lark_inst.memo_serialize([TerminalDef, Rule])
    print('DATA = (')
    # pprint(data, width=160)
    print(data)
    print(')')
    print('MEMO = (')
    print(m)
    print(')')
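# Typical use of a generator like this (assuming it is Lark's standalone tool):
# redirect stdout into a module and import the generated parser, e.g.
#   python -m lark.tools.standalone my_grammar.lark > my_parser.py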