# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_parse_list_with_literals():
    """parse_list should collect comma-separated literals into a Python list.

    The original text of this test was truncated: the expected-value literal in
    the final assert was never closed. The closing brackets are restored here.
    """
    stream = (
        tokens.LiteralToken("drill_fields", 1),
        tokens.ValueToken(1),
        tokens.ListStartToken(1),
        tokens.LiteralToken("view_name.field_one", 1),
        tokens.CommaToken(1),
        tokens.LiteralToken("view_name.field_two", 1),
        tokens.CommaToken(1),
        tokens.LiteralToken("view_name.field_three", 1),
        tokens.ListEndToken(1),
        tokens.StreamEndToken(1),
    )
    parser = lkml.parser.Parser(stream)
    result = parser.parse_list()
    # parse_list returns a dict mapping the key to the list of literal values.
    assert result == {
        "drill_fields": [
            "view_name.field_one",
            "view_name.field_two",
            "view_name.field_three",
        ]
    }
def test_parse_key_without_literal_token():
    """A key must start with a literal token; a bare value token yields None."""
    token_stream = (tokens.ValueToken(1), tokens.StreamEndToken(1))
    parsed = lkml.parser.Parser(token_stream).parse_key()
    assert parsed is None
def test_parse_list_with_only_commas():
    """A bracketed list containing only commas is not a valid list."""
    token_stream = (
        tokens.LiteralToken("drill_fields", 1),
        tokens.ValueToken(1),
        tokens.ListStartToken(1),
        tokens.CommaToken(1),
        tokens.CommaToken(1),
        tokens.CommaToken(1),
        tokens.ListEndToken(1),
        tokens.StreamEndToken(1),
    )
    parsed = lkml.parser.Parser(token_stream).parse_list()
    assert parsed is None
def parser():
    """Build a Parser over a token stream describing a small `view` block.

    NOTE(review): this reads like a pytest fixture; confirm whether an
    `@pytest.fixture` decorator was lost when this file was mangled.
    """
    token_stream = (
        tokens.StreamStartToken(1),
        tokens.LiteralToken("view", 1),
        tokens.ValueToken(1),
        tokens.LiteralToken("view_name", 1),
        tokens.BlockStartToken(1),
        tokens.LiteralToken("sql_table_name", 2),
        tokens.ValueToken(2),
        tokens.ExpressionBlockToken("schema.table_name", 2),
        tokens.ExpressionBlockEndToken(2),
        tokens.LiteralToken("drill_fields", 3),
        tokens.ValueToken(3),
        tokens.ListStartToken(3),
        tokens.LiteralToken("view_name.field_one", 3),
        tokens.CommaToken(3),
        tokens.LiteralToken("view_name.field_two", 3),
        tokens.ListEndToken(3),
        tokens.BlockEndToken(4),
        tokens.StreamEndToken(4),
    )
    return lkml.parser.Parser(token_stream)
def test_parse_list_with_bad_token():
    """A non-literal token inside a list makes the whole list unparseable."""
    token_stream = (
        tokens.LiteralToken("drill_fields", 1),
        tokens.ValueToken(1),
        tokens.ListStartToken(1),
        tokens.LiteralToken("view_name.field_one", 1),
        tokens.CommaToken(1),
        tokens.LiteralToken("view_name.field_two", 1),
        tokens.CommaToken(1),
        tokens.ValueToken(1),
        tokens.ListEndToken(1),
        tokens.StreamEndToken(1),
    )
    parsed = lkml.parser.Parser(token_stream).parse_list()
    assert parsed is None
def parser():
    """Build a Parser over a token stream describing a small `view` block.

    Bug fixed: the original body constructed `stream` but fell off the end,
    implicitly returning None, so any test using this helper would receive
    None instead of a Parser.

    NOTE(review): this reads like a pytest fixture; confirm whether an
    `@pytest.fixture` decorator was lost when this file was mangled.
    """
    stream = (
        tokens.StreamStartToken(1),
        tokens.LiteralToken("view", 1),
        tokens.ValueToken(1),
        tokens.LiteralToken("view_name", 1),
        tokens.BlockStartToken(1),
        tokens.LiteralToken("sql_table_name", 2),
        tokens.ValueToken(2),
        tokens.ExpressionBlockToken("schema.table_name", 2),
        tokens.ExpressionBlockEndToken(2),
        tokens.LiteralToken("drill_fields", 3),
        tokens.ValueToken(3),
        tokens.ListStartToken(3),
        tokens.LiteralToken("view_name.field_one", 3),
        tokens.CommaToken(3),
        tokens.LiteralToken("view_name.field_two", 3),
        tokens.ListEndToken(3),
        tokens.BlockEndToken(4),
        tokens.StreamEndToken(4),
    )
    # The missing return: hand the constructed stream to the Parser.
    return lkml.parser.Parser(stream)
def test_parse_pair_with_literal():
    """A literal key, value token, and literal value parse to a one-entry dict."""
    token_stream = (
        tokens.LiteralToken("hidden", 1),
        tokens.ValueToken(1),
        tokens.LiteralToken("yes", 1),
        tokens.StreamEndToken(1),
    )
    parsed = lkml.parser.Parser(token_stream).parse_pair()
    assert parsed == {"hidden": "yes"}
def test_scan_with_complex_sql_block():
    """The lexer should capture a whole SQL expression block up to the ';;'."""
    text = (
        "sql_distinct_key: concat(${orders.order_id}, '|', "
        "${orders__items.primary_key}) ;;"
    )
    scanned = lkml.Lexer(text).scan()
    expected = (
        tokens.StreamStartToken(1),
        tokens.LiteralToken("sql_distinct_key", 1),
        tokens.ValueToken(1),
        tokens.ExpressionBlockToken(
            "concat(${orders.order_id}, '|', ${orders__items.primary_key})", 1
        ),
        tokens.ExpressionBlockEndToken(1),
        tokens.StreamEndToken(1),
    )
    assert scanned == expected
# NOTE(review): the lines below are a fragment of a lexer scan loop whose
# enclosing `def` and `while` are not visible in this chunk, and whose
# indentation appears to have been lost — left byte-identical; re-indent
# against the original source before use.
break
# ';' ends an expression block when doubled (';;'); consume both characters
# and emit the matching token from CHARACTER_TO_TOKEN.
elif ch == ";":
if self.peek_multiple(2) == ";;":
self.advance(2)
self.tokens.append(CHARACTER_TO_TOKEN[ch](self.line_number))
# A double quote starts a quoted literal.
elif ch == '"':
self.advance()
self.tokens.append(self.scan_quoted_literal())
# Any other single character with a direct token mapping.
elif ch in CHARACTER_TO_TOKEN.keys():
self.advance()
self.tokens.append(CHARACTER_TO_TOKEN[ch](self.line_number))
# Lookahead (25 chars — presumably enough to match the longest expression-block
# key; TODO confirm) to detect `key: ... ;;` expression blocks: emit the key
# literal, the value token, then the expression block itself.
elif self.check_for_expression_block(self.peek_multiple(25)):
self.tokens.append(self.scan_literal())
self.scan_until_token()
self.advance()
self.tokens.append(tokens.ValueToken(self.line_number))
self.scan_until_token()
self.tokens.append(self.scan_expression_block())
else:
# TODO: This should actually check for valid literals first
# and throw an error if it doesn't match
self.tokens.append(self.scan_literal())
# Token streams are exposed as immutable tuples.
return tuple(self.tokens)
# Keys treated specially with respect to a "name" field.
# NOTE(review): exact semantics are not demonstrated in this chunk — inferred
# from the identifier only; confirm against the parser code that consumes it.
KEYS_WITH_NAME_FIELDS: Tuple[str, ...] = (
    "user_attribute_param",
    "param",
    "form_param",
    "option",
)
# Single characters that the lexer maps directly to a token type while
# scanning (looked up as CHARACTER_TO_TOKEN[ch] in the scan loop).
CHARACTER_TO_TOKEN: Dict[str, Type[tokens.Token]] = {
    "\0": tokens.StreamEndToken,  # NUL sentinel marks the end of input
    "{": tokens.BlockStartToken,
    "}": tokens.BlockEndToken,
    "[": tokens.ListStartToken,
    "]": tokens.ListEndToken,
    ",": tokens.CommaToken,
    ":": tokens.ValueToken,
    ";": tokens.ExpressionBlockEndToken,  # only emitted when doubled as ';;'
}