# Tests for the lkml lexer (lkml.Lexer) and parser (lkml.parser.Parser).
def test_scan_quoted_literal():
    """A plain quoted string should scan into a QuotedLiteralToken without its quotes."""
    text = '"This is quoted text."'
    lexer = lkml.Lexer(text)
    lexer.index = 1  # Position past the opening double quote, as the caller would
    token = lexer.scan_quoted_literal()
    assert token == tokens.QuotedLiteralToken("This is quoted text.", 1)
def test_scan_quoted_literal_with_otherwise_illegal_chars():
    """Characters that are special elsewhere (colon, braces, newline) are legal inside quotes."""
    text = '"This: is {quoted} \n text."'
    lexer = lkml.Lexer(text)
    lexer.index = 1  # Position past the opening double quote
    token = lexer.scan_quoted_literal()
    assert token == tokens.QuotedLiteralToken("This: is {quoted} \n text.", 1)
def test_scan_quoted_literal_with_escaped_quotes():
    """Backslash-escaped quotes inside a quoted literal must not terminate the scan."""
    text = r'"#.### \"M\""'  # Raw string: the \" sequences reach the lexer literally
    lexer = lkml.Lexer(text)
    lexer.index = 1  # Position past the opening double quote
    token = lexer.scan_quoted_literal()
    assert token == tokens.QuotedLiteralToken(r"#.### \"M\"", 1)
def test_parse_pair_with_bad_key():
    """A pair whose key is a quoted literal (not a bare literal) is invalid.

    parse_pair signals a failed parse by returning None rather than raising.
    """
    stream = (
        tokens.QuotedLiteralToken("hidden", 1),  # Invalid: keys must be bare literals
        tokens.ValueToken(1),
        tokens.LiteralToken("yes", 1),
        tokens.StreamEndToken(1),
    )
    parser = lkml.parser.Parser(stream)
    result = parser.parse_pair()
    assert result is None
def test_parse_value_quoted_literal():
    """parse_value should unwrap a QuotedLiteralToken to its string payload."""
    quoted_literal = "This is a quoted literal."
    stream = (tokens.QuotedLiteralToken(quoted_literal, 1), tokens.StreamEndToken(1))
    parser = lkml.parser.Parser(stream)
    result = parser.parse_value()
    assert result == quoted_literal
Examples:
Input (before tokenizing into a stream):
------
1) "Foo"
2) "${TABLE}.foo ;;"
Output (string):
-------
1) "Foo"
2) "${TABLE}.foo"
"""
if self.log_debug:
grammar = "[value] = literal / quoted_literal / expression_block"
self.logger.debug("%sTry to parse %s", self.depth * DELIMITER, grammar)
if self.check(tokens.QuotedLiteralToken, tokens.LiteralToken):
value = self.consume_token_value()
if self.log_debug:
self.logger.debug(
"%sSuccessfully parsed value.", self.depth * DELIMITER
)
return value
elif self.check(tokens.ExpressionBlockToken):
value = self.consume_token_value()
if self.check(tokens.ExpressionBlockEndToken):
self.advance()
else:
return None
if self.log_debug:
self.logger.debug(
"%sSuccessfully parsed value.", self.depth * DELIMITER
)
------
1) "[date, week]"
2) "['foo', 'bar']"
Output (list):
-------
1) ["date", "week"]
2) ["foo", "bar"]
"""
if self.log_debug:
grammar = '[csv] = (literal / quoted_literal) ("," (literal / quoted_literal))* ","?'
self.logger.debug("%sTry to parse %s", self.depth * DELIMITER, grammar)
values = []
if self.check(tokens.LiteralToken, tokens.QuotedLiteralToken):
values.append(self.consume_token_value())
else:
return None
while not self.check(tokens.ListEndToken):
if self.check(tokens.CommaToken):
self.advance()
else:
return None
if self.check(tokens.LiteralToken, tokens.QuotedLiteralToken):
values.append(self.consume_token_value())
elif self.check(tokens.ListEndToken):
break
else:
return None
>>> lexer.scan_quoted_literal()
QuotedLiteralToken(Label)
"""
chars = ""
while True:
ch = self.peek()
if ch == '"':
break
elif ch == "\\":
chars += self.consume() # Extra consume to skip the escaped character
elif ch == "\n":
self.line_number += 1
chars += self.consume()
self.advance()
return tokens.QuotedLiteralToken(chars, self.line_number)