import lkml
import lkml.parser
import pytest
from lkml import tokens


def test_parse_value_without_closing_double_semicolons():
    stream = (
        tokens.ExpressionBlockToken("SELECT * FROM TABLE", 1),
        tokens.StreamEndToken(1),
    )
    parser = lkml.parser.Parser(stream)
    result = parser.parse_value()
    assert result is None
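

# A minimal sketch (not from the test suite above): with the closing
# ExpressionBlockEndToken present, parse_value returns the expression string
# rather than None.
stream = (
    tokens.ExpressionBlockToken("SELECT * FROM TABLE", 1),
    tokens.ExpressionBlockEndToken(1),
    tokens.StreamEndToken(1),
)
assert lkml.parser.Parser(stream).parse_value() == "SELECT * FROM TABLE"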


def test_scan_with_complex_sql_block():
    text = (
        "sql_distinct_key: concat(${orders.order_id}, '|', "
        "${orders__items.primary_key}) ;;"
    )
    output = lkml.Lexer(text).scan()
    assert output == (
        tokens.StreamStartToken(1),
        tokens.LiteralToken("sql_distinct_key", 1),
        tokens.ValueToken(1),
        tokens.ExpressionBlockToken(
            "concat(${orders.order_id}, '|', ${orders__items.primary_key})", 1
        ),
        tokens.ExpressionBlockEndToken(1),
        tokens.StreamEndToken(1),
    )


def test_tokens__repr__():
    token = tokens.ExpressionBlockToken("schema.table_name", 2)
    assert repr(token) == "ExpressionBlockToken(schema.table_name)"


def test_scan_expression_block_with_complex_sql_block():
    text = "concat(${orders.order_id}, '|',\n${orders__items.primary_key}) ;;"
    token = lkml.Lexer(text).scan_expression_block()
    assert token == tokens.ExpressionBlockToken(
        "concat(${orders.order_id}, '|',\n${orders__items.primary_key})", 2
    )


@pytest.fixture
def parser():
    stream = (
        tokens.StreamStartToken(1),
        tokens.LiteralToken("view", 1),
        tokens.ValueToken(1),
        tokens.LiteralToken("view_name", 1),
        tokens.BlockStartToken(1),
        tokens.LiteralToken("sql_table_name", 2),
        tokens.ValueToken(2),
        tokens.ExpressionBlockToken("schema.table_name", 2),
        tokens.ExpressionBlockEndToken(2),
        tokens.LiteralToken("drill_fields", 3),
        tokens.ValueToken(3),
        tokens.ListStartToken(3),
        tokens.LiteralToken("view_name.field_one", 3),
        tokens.CommaToken(3),
        tokens.LiteralToken("view_name.field_two", 3),
        tokens.ListEndToken(3),
        tokens.BlockEndToken(4),
        tokens.StreamEndToken(4),
    )
    return lkml.parser.Parser(stream)
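

# For reference, the fixture's token stream above is what lkml.Lexer(...).scan()
# would produce (modulo exact whitespace, which is assumed here) for LookML like:
#
# view: view_name {
#   sql_table_name: schema.table_name ;;
#   drill_fields: [view_name.field_one, view_name.field_two]
# }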

def parse_value(self) -> Optional[str]:
    """Returns the value of a value node, or None if parsing fails.
    Examples:
    -------
    1) "Foo"
    2) "${TABLE}.foo"
    """
    if self.log_debug:
        grammar = "[value] = literal / quoted_literal / expression_block"
        self.logger.debug("%sTry to parse %s", self.depth * DELIMITER, grammar)
    if self.check(tokens.QuotedLiteralToken, tokens.LiteralToken):
        value = self.consume_token_value()
        if self.log_debug:
            self.logger.debug(
                "%sSuccessfully parsed value.", self.depth * DELIMITER
            )
        return value
    elif self.check(tokens.ExpressionBlockToken):
        value = self.consume_token_value()
        if self.check(tokens.ExpressionBlockEndToken):
            self.advance()
        else:
            return None
        if self.log_debug:
            self.logger.debug(
                "%sSuccessfully parsed value.", self.depth * DELIMITER
            )
        return value
    else:
        return None


def scan_expression_block(self) -> tokens.ExpressionBlockToken:
    """Returns an ExpressionBlockToken with the text of the scanned expression.

    This method strips any trailing whitespace from the expression string, since
    Looker usually adds an extra space before the `;;` terminal.

    Example:
        >>> lexer = Lexer("SELECT * FROM ${TABLE} ;;")
        >>> lexer.scan_expression_block()
        ExpressionBlockToken(SELECT * FROM ${TABLE})
    """
chars = ""
while self.peek_multiple(2) != ";;":
if self.peek() == "\n":
self.line_number += 1
chars += self.consume()
chars = chars.rstrip()
return tokens.ExpressionBlockToken(chars, self.line_number)
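

# A minimal usage sketch (not from the library's tests) of the stripping
# behavior described in the docstring above: the trailing space before ";;"
# is removed, and the line counter advances past the embedded newline.
import lkml
from lkml import tokens

lexer = lkml.Lexer("SELECT *\n  FROM ${TABLE} ;;")
token = lexer.scan_expression_block()
assert token == tokens.ExpressionBlockToken("SELECT *\n  FROM ${TABLE}", 2)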