Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
token = lex.LexToken()
token.type = 'EXPECT_NO_MORE_ARGS'
token.value = None
token.lineno = t.lineno
token.lexpos = t.lexpos
self.client._push_extra_token(token)
optional = False
for predicate in signature:
if predicate == 'optional?':
optional = True
continue
token = lex.LexToken()
token.value = predicate
token.lineno = t.lineno
token.lexpos = t.lexpos
if predicate == 'ly:music?':
token.type = 'EXPECT_SCM' # ?!?!
elif predicate == 'ly:pitch?':
token.type = 'EXPECT_PITCH'
elif predicate == 'ly:duration?':
token.type = 'EXPECT_DURATION'
elif predicate in ['markup?', 'cheap-markup?']:
token.type = 'EXPECT_MARKUP'
elif predicate == 'markup-list?':
token.type = 'EXPECT_MARKUP_LIST'
else:
token.type = 'EXPECT_SCM'
token.type = "EXPECT_SCM" # ?!?!
elif predicate == "ly:pitch?":
token.type = "EXPECT_PITCH"
elif predicate == "ly:duration?":
token.type = "EXPECT_DURATION"
elif predicate in ["markup?", "cheap-markup?"]:
token.type = "EXPECT_MARKUP"
elif predicate == "markup-list?":
token.type = "EXPECT_MARKUP_LIST"
else:
token.type = "EXPECT_SCM"
self.client._push_extra_token(token)
if optional:
optional_token = lex.LexToken()
optional_token.value = "optional?"
optional_token.lineno = t.lineno
optional_token.lexpos = t.lexpos
optional_token.type = "EXPECT_OPTIONAL"
self.client._push_extra_token(optional_token)
optional = False
def newline(self, lineno):
    """Build a synthetic NEWLINE token for the given line number.

    The token carries a literal '\\n' value, a lexpos of -1 (it has no
    real position in the source text), and a reference to this object's
    underlying lexer.
    """
    nl_tok = lex.LexToken()
    nl_tok.lexer = self.lexer
    nl_tok.lineno = lineno
    nl_tok.lexpos = -1
    nl_tok.type = 'NEWLINE'
    nl_tok.value = '\n'
    return nl_tok
def LexToken(t_type, value, line, pos):
    """Construct a Token populated with the standard lexing attributes.

    Sets type, value, lineno, and lexpos from the arguments and returns
    the new Token.
    """
    tok = Token()
    tok.type, tok.value = t_type, value
    tok.lineno, tok.lexpos = line, pos
    return tok
return newtok
else:
# No match, see if in literals
if lexdata[lexpos] in self.lexliterals:
tok = LexToken()
tok.value = lexdata[lexpos]
tok.lineno = self.lineno
tok.type = tok.value
tok.lexpos = lexpos
self.lexpos = lexpos + 1
return tok
# No match. Call t_error() if defined.
if self.lexerrorf:
tok = LexToken()
tok.value = self.lexdata[lexpos:]
tok.lineno = self.lineno
tok.type = 'error'
tok.lexer = self
tok.lexpos = lexpos
self.lexpos = lexpos
newtok = self.lexerrorf(tok)
if lexpos == self.lexpos:
# Error method didn't change text position at all. This is an error.
raise LexError("Scanning error. Illegal character '%s'" % (lexdata[lexpos]), lexdata[lexpos:])
lexpos = self.lexpos
if not newtok:
continue
return newtok
self.lexpos = lexpos
token.type = 'EXPECT_SCM' # ?!?!
elif predicate == 'ly:pitch?':
token.type = 'EXPECT_PITCH'
elif predicate == 'ly:duration?':
token.type = 'EXPECT_DURATION'
elif predicate in ['markup?', 'cheap-markup?']:
token.type = 'EXPECT_MARKUP'
elif predicate == 'markup-list?':
token.type = 'EXPECT_MARKUP_LIST'
else:
token.type = 'EXPECT_SCM'
self.client._push_extra_token(token)
if optional:
optional_token = lex.LexToken()
optional_token.value = 'optional?'
optional_token.lineno = t.lineno
optional_token.lexpos = t.lexpos
optional_token.type = 'EXPECT_OPTIONAL'
self.client._push_extra_token(optional_token)
optional = False
source = production.lexer.source
nodes = [
node
for node in production.slice[1:]
if node is not None and (
isinstance(node, LexToken) or
node.value is not None)]
if len(nodes) < 1:
return Span(source,
SpanPosition(0, 0, 0),
SpanPosition(0, 0, 0))
node_start, node_end = nodes[0], nodes[-1]
if isinstance(node_start, LexToken):
position_start = SpanPosition.from_token_start(node_start)
else:
position_start = node_start.value.span.start
if isinstance(node_end, LexToken):
position_end = SpanPosition.from_token_end(node_end)
else:
position_end = node_end.value.span.end
return Span(source, position_start, position_end)
return newtok
else:
# No match, see if in literals
if lexdata[lexpos] in self.lexliterals:
tok = LexToken()
tok.value = lexdata[lexpos]
tok.lineno = self.lineno
tok.type = tok.value
tok.lexpos = lexpos
self.lexpos = lexpos + 1
return tok
# No match. Call t_error() if defined.
if self.lexerrorf:
tok = LexToken()
tok.value = self.lexdata[lexpos:]
tok.lineno = self.lineno
tok.type = 'error'
tok.lexer = self
tok.lexpos = lexpos
self.lexpos = lexpos
newtok = self.lexerrorf(tok)
if lexpos == self.lexpos:
# Error method didn't change text position at all. This is an error.
raise LexError("Scanning error. Illegal character '%s'" % (lexdata[lexpos]), lexdata[lexpos:])
lexpos = self.lexpos
if not newtok:
continue
return newtok
self.lexpos = lexpos
def _create_semi_token(self, orig_token):
    """Return a synthetic SEMI (';') token.

    Position information (lineno/lexpos) is copied from *orig_token*
    when one is supplied; otherwise both default to 0.
    """
    semi = ply.lex.LexToken()
    semi.type = 'SEMI'
    semi.value = ';'
    semi.lineno = orig_token.lineno if orig_token is not None else 0
    semi.lexpos = orig_token.lexpos if orig_token is not None else 0
    return semi
def indent_generator(toks):
"""Post process the given stream of tokens to generate INDENT/DEDENT
tokens.
Note
----
Each generated token's value is the total amount of spaces from the
beginning of the line.
The way indentation tokens are generated is similar to how it works in
python."""
stack = [0]
# Dummy token to track the token just before the current one
former = LexToken()
former.type = "NEWLINE"
former.value = "dummy"
former.lineno = 0
former.lexpos = -1
def generate_dedent(stck, tok):
    # Remove the front indentation amount from the stack and wrap it
    # in a DEDENT token anchored at *tok*.
    return new_dedent(stck.pop(0), tok)
for token in toks:
if former.type == "NEWLINE":
if token.type == "WS":
indent = len(token.value)
else:
indent = 0