# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_parse_list_with_no_opening_bracket():
    """A list value whose opening bracket is missing should fail to parse (None)."""
    token_sequence = (
        tokens.LiteralToken("drill_fields", 1),
        tokens.ValueToken(1),
        tokens.LiteralToken("view_name.field_one", 1),
        tokens.CommaToken(1),
        tokens.LiteralToken("view_name.field_two", 1),
        tokens.StreamEndToken(1),
    )
    outcome = lkml.parser.Parser(token_sequence).parse_list()
    assert outcome is None
def test_scan_until_token_skips_comments():
    """scan_until_token should advance the lexer past a leading comment line."""
    lexer = lkml.Lexer("# This is a comment\nStart here")
    lexer.scan_until_token()
    # The next visible character should be the start of the real content.
    assert lexer.peek() == "S"
# (input text, expected token) pairs consumed by the parametrized
# single-token lexing test below; each string should scan to exactly
# the paired token on line 1.
params = [
("\0", tokens.StreamEndToken(1)),
("{", tokens.BlockStartToken(1)),
("}", tokens.BlockEndToken(1)),
("[", tokens.ListStartToken(1)),
("]", tokens.ListEndToken(1)),
(",", tokens.CommaToken(1)),
(":", tokens.ValueToken(1)),
(";;", tokens.ExpressionBlockEndToken(1)),
]
@pytest.mark.parametrize("text,expected", params)
def test_scan_all_simple_tokens(text, expected):
    """Each simple character sequence should lex to its corresponding token."""
    scanned = lkml.Lexer(text).scan()
    # scanned[0] is the stream-start token the lexer prepends; check the next one.
    assert scanned[1] == expected
def test_scan_quoted_literal():
    """A double-quoted string should lex to a QuotedLiteralToken (quotes stripped).

    NOTE(review): the original body constructed the lexer but never scanned or
    asserted anything, so the test could not fail. Completed it to follow the
    same shape as the other scan tests in this file -- confirm the expected
    token against the lexer implementation.
    """
    text = '"This is quoted text."'
    lexer = lkml.Lexer(text)
    result = lexer.scan()
    # result[0] is the stream-start token prepended by the lexer.
    assert result[1] == tokens.QuotedLiteralToken("This is quoted text.", 1)
def test_parse_list_with_literals():
    """A well-formed bracketed list of literals parses to a {key: [values]} dict.

    Fix: the expected-dict literal in the final assert was missing its closing
    brace, which is a syntax error; restored it.
    """
    stream = (
        tokens.LiteralToken("drill_fields", 1),
        tokens.ValueToken(1),
        tokens.ListStartToken(1),
        tokens.LiteralToken("view_name.field_one", 1),
        tokens.CommaToken(1),
        tokens.LiteralToken("view_name.field_two", 1),
        tokens.CommaToken(1),
        tokens.LiteralToken("view_name.field_three", 1),
        tokens.ListEndToken(1),
        tokens.StreamEndToken(1),
    )
    parser = lkml.parser.Parser(stream)
    result = parser.parse_list()
    assert result == {
        "drill_fields": [
            "view_name.field_one",
            "view_name.field_two",
            "view_name.field_three",
        ]
    }
def test_parse_list_with_trailing_comma():
    """A trailing comma before the closing bracket is accepted by the parser."""
    token_sequence = (
        tokens.LiteralToken("drill_fields", 1),
        tokens.ValueToken(1),
        tokens.ListStartToken(1),
        tokens.LiteralToken("view_name.field_one", 1),
        tokens.CommaToken(1),
        tokens.ListEndToken(1),
        tokens.StreamEndToken(1),
    )
    parsed = lkml.parser.Parser(token_sequence).parse_list()
    assert parsed == {"drill_fields": ["view_name.field_one"]}
def parser():
    """Return a Parser over the token stream for a small two-parameter view.

    NOTE(review): the name and usage pattern suggest this was meant to be a
    pytest fixture; the @pytest.fixture decorator may have been lost -- confirm
    before relying on it being injected into tests.
    """
    token_sequence = (
        tokens.StreamStartToken(1),
        tokens.LiteralToken("view", 1),
        tokens.ValueToken(1),
        tokens.LiteralToken("view_name", 1),
        tokens.BlockStartToken(1),
        tokens.LiteralToken("sql_table_name", 2),
        tokens.ValueToken(2),
        tokens.ExpressionBlockToken("schema.table_name", 2),
        tokens.ExpressionBlockEndToken(2),
        tokens.LiteralToken("drill_fields", 3),
        tokens.ValueToken(3),
        tokens.ListStartToken(3),
        tokens.LiteralToken("view_name.field_one", 3),
        tokens.CommaToken(3),
        tokens.LiteralToken("view_name.field_two", 3),
        tokens.ListEndToken(3),
        tokens.BlockEndToken(4),
        tokens.StreamEndToken(4),
    )
    return lkml.parser.Parser(token_sequence)
def test_parse_list_with_bad_token():
    """A non-literal token (here a colon) inside the list aborts the parse."""
    token_sequence = (
        tokens.LiteralToken("drill_fields", 1),
        tokens.ValueToken(1),
        tokens.ListStartToken(1),
        tokens.LiteralToken("view_name.field_one", 1),
        tokens.CommaToken(1),
        tokens.LiteralToken("view_name.field_two", 1),
        tokens.CommaToken(1),
        tokens.ValueToken(1),
        tokens.ListEndToken(1),
        tokens.StreamEndToken(1),
    )
    outcome = lkml.parser.Parser(token_sequence).parse_list()
    assert outcome is None
def test_parse_list_with_missing_comma():
    """Two adjacent literals with no separating comma should fail to parse."""
    token_sequence = (
        tokens.LiteralToken("drill_fields", 1),
        tokens.ValueToken(1),
        tokens.ListStartToken(1),
        tokens.LiteralToken("view_name.field_one", 1),
        tokens.CommaToken(1),
        tokens.LiteralToken("view_name.field_two", 1),
        tokens.LiteralToken("view_name.field_three", 1),
        tokens.ListEndToken(1),
        tokens.StreamEndToken(1),
    )
    outcome = lkml.parser.Parser(token_sequence).parse_list()
    assert outcome is None
1) ["date", "week"]
2) ["foo", "bar"]
"""
if self.log_debug:
grammar = '[csv] = (literal / quoted_literal) ("," (literal / quoted_literal))* ","?'
self.logger.debug("%sTry to parse %s", self.depth * DELIMITER, grammar)
values = []
if self.check(tokens.LiteralToken, tokens.QuotedLiteralToken):
values.append(self.consume_token_value())
else:
return None
while not self.check(tokens.ListEndToken):
if self.check(tokens.CommaToken):
self.advance()
else:
return None
if self.check(tokens.LiteralToken, tokens.QuotedLiteralToken):
values.append(self.consume_token_value())
elif self.check(tokens.ListEndToken):
break
else:
return None
if self.log_debug:
self.logger.debug(
"%sSuccessfully parsed comma-separated values.", self.depth * DELIMITER
)
return values