# from tea_eval import evaluate
from lark import Lark
from lark.tree import pydot__tree_to_png
from lark.visitors import Interpreter

tea_parser = Lark.open('./tea/tea_grammar.lark', parser='lalr')
tea_p = tea_parser.parse


def run_tea_program(program):
    global tea_parser
    parse_tree = tea_parser.parse(program)
    print(parse_tree.pretty())
    # Draw parse tree out as a PNG
    pydot__tree_to_png(parse_tree, 'lark_test.png')
    evaluate(parse_tree)


def evaluate(parse_tree):
    print('Here')
    intpr = Interpreter()
    tree = intpr.visit(tree=parse_tree)
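
# Hedged usage sketch: this assumes ./tea/tea_grammar.lark exists on disk and
# that the string below is a valid tea program; both are illustrative, not
# taken from the project.
if __name__ == '__main__':
    sample_program = 'print("hello")'  # hypothetical tea source
    run_tea_program(sample_program)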
from lark import Lark, Transformer

from ..grammar.elements_basic import Literal, Optional, Sequence, Alternative, Empty

import os

dir_path = os.path.dirname(os.path.realpath(__file__))
spec_parser = Lark.open(os.path.join(dir_path, "grammar.lark"), parser="lalr")


class ParseError(Exception):
    pass


class CompoundTransformer(Transformer):
    """
    Visits each node of the parse tree starting with the leaves
    and working up, replacing lark Tree objects with the
    appropriate dragonfly classes.
    """

    def __init__(self, extras=None, *args, **kwargs):
        self.extras = extras or {}
        Transformer.__init__(self, *args, **kwargs)
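
    # Hedged sketch of a rule handler (the rule name "word" is hypothetical;
    # the project's real handlers match the rules in its grammar.lark): lark
    # calls a Transformer method named after each grammar rule with the rule's
    # already-transformed children, so a handler can wrap those children in
    # the dragonfly element classes imported above.
    def word(self, children):
        return Literal(" ".join(str(child) for child in children))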
def _parser_with_metadata(self) -> Lark:
    return Lark.open(
        os.path.join(self._directory, "gdscript.lark"),
        postlex=Indenter(),
        parser="lalr",
        start="start",
        propagate_positions=True,
        maybe_placeholders=False,
    )


def _parser(self) -> Lark:
    return Lark.open(
        os.path.join(self._directory, "gdscript.lark"),
        postlex=Indenter(),
        parser="lalr",
        start="start",
        maybe_placeholders=False,
    )
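
# What propagate_positions=True buys (a hedged sketch; "parser" stands for a
# Lark instance built by _parser_with_metadata above and "source" for GDScript
# text): every non-empty subtree carries its source location on tree.meta,
# which the metadata-aware variant exposes and the plain _parser does not.
def _print_node_positions(parser, source):
    tree = parser.parse(source)
    for node in tree.iter_subtrees():
        if not node.meta.empty:
            print(node.data, node.meta.line, node.meta.column)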
def parse_graphql_sdl_to_ast(sdl: str) -> Tree:
    """
    Parses a GraphQL SDL schema into an Abstract Syntax Tree (created by the
    lark library).

    We use the LALR(1) parser for fast parsing of huge trees. The
    grammar is thus a bit less legible but much (much) faster.

    :param sdl: Any GraphQL SDL schema string
    :return: a Lark parser `Tree`
    """
    gqlsdl_parser = Lark.open(
        _GRAMMAR_FILE_PATH,
        start="document",
        parser="lalr",
        lexer="contextual",
        propagate_positions=True,
    )
    gqlsdl = gqlsdl_parser.parse
    return gqlsdl(sdl)
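
# Minimal usage sketch (assumes the grammar at _GRAMMAR_FILE_PATH is present;
# the one-type schema below is illustrative only):
if __name__ == "__main__":
    ast = parse_graphql_sdl_to_ast("type Query { hello: String }")
    print(ast.pretty())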
def _get_parser(self, language_spec):
    """Get a parser, lazily creating it if necessary."""
    try:
        return self._parsers[language_spec.code]
    except KeyError:
        parser = Lark.open(
            str(self.grammerfile),
            parser="lalr",
            transformer=self._transformer,
            edit_terminals=language_spec,
        )
        self._parsers[language_spec.code] = parser
        return parser
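
# The same lazy-cache idea in isolation (a hedged, self-contained sketch, not
# the project's code): Lark.open is only called the first time a grammar path
# is requested; later lookups return the cached Lark instance from the dict.
_parser_cache = {}


def get_cached_parser(grammar_path):
    try:
        return _parser_cache[grammar_path]
    except KeyError:
        parser = Lark.open(grammar_path, parser="lalr")
        _parser_cache[grammar_path] = parser
        return parser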
import os
import sys
from pathlib import Path

from lark import Lark
import lark.indenter


class Indenter(lark.indenter.Indenter):
    NL_type = '_NL'
    OPEN_PAREN_types = ['LPAR', 'LSQB', 'LBRACE']
    CLOSE_PAREN_types = ['RPAR', 'RSQB', 'RBRACE']
    INDENT_type = '_INDENT'
    DEDENT_type = '_DEDENT'
    tab_len = 8


self_dir = os.path.dirname(os.path.abspath(Path(__file__).resolve()))
parser = Lark.open(os.path.join(self_dir, 'gdscript.lark'), postlex=Indenter(), parser='lalr')

if __name__ == '__main__':
    for arg in sys.argv[1:]:
        with open(arg, 'r') as fh:
            content = fh.read()
            parser.parse(content)
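
# For the Indenter subclass above to work, gdscript.lark has to follow lark's
# indentation contract: a newline terminal whose name matches NL_type and a
# %declare of the INDENT/DEDENT token names. A minimal illustration of that
# grammar contract (not the actual gdscript grammar):
#
#     _NL: /(\r?\n[\t ]*)+/
#     %declare _INDENT _DEDENT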
def _comment_parser(self) -> Lark:
    return Lark.open(
        os.path.join(self._directory, "comments.lark"),
        postlex=Indenter(),
        parser="lalr",
        start="start",
        propagate_positions=True,
        maybe_placeholders=False,
    )