# Render the tokens of an expression as an SVG drawing of fixed-size boxes.
from typing import List

import svgwrite
from mathy import Token, Tokenizer

BORDER_WIDTH = 2  # assumed value; the original constant is defined elsewhere
tokenizer = Tokenizer()


def render_tokens_from_text(input_text: str) -> svgwrite.Drawing:
    tokens: List[Token] = tokenizer.tokenize(input_text)
    length = len(tokens)
    values = [t.value for t in tokens]
    types = [t.type for t in tokens]
    assert len(types) == len(values)
    box_size = 64
    view_x = 0
    view_y = 0
    view_w = box_size * length
    view_h = box_size * 2 + BORDER_WIDTH * 2
    tree = svgwrite.Drawing(size=(view_w, view_h))
    tree.viewbox(minx=view_x, miny=view_y, width=view_w, height=view_h)
    curr_x = BORDER_WIDTH
    for t, v in zip(types, values):
        # The loop body is truncated in the source; a minimal sketch that
        # draws one box per token with its value and advances the cursor.
        tree.add(tree.rect(insert=(curr_x, view_y), size=(box_size, box_size), fill="white", stroke="black"))
        tree.add(tree.text(str(v), insert=(curr_x + box_size / 2, box_size / 2)))
        curr_x += box_size
    return tree
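A quick usage sketch, assuming the function above; the filename is an arbitrary example, and saveas is svgwrite's standard way to write a Drawing to disk:

drawing = render_tokens_from_text("4x + 2")
drawing.saveas("tokens.svg")

The next example checks the tokenizer's output against a hand-built token list for the expression "4x + 2".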
# Compare a hand-built token list against the tokenizer's output.
from typing import List
from mathy import Token, TokenConstant, TokenEOF, Tokenizer, TokenPlus, TokenVariable

manual_tokens: List[Token] = [
    Token("4", TokenConstant),
    Token("x", TokenVariable),
    Token("+", TokenPlus),
    Token("2", TokenConstant),
    Token("", TokenEOF),
]
auto_tokens: List[Token] = Tokenizer().tokenize("4x + 2")

for i, token in enumerate(manual_tokens):
    assert auto_tokens[i].value == token.value
    assert auto_tokens[i].type == token.type
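Each Token pairs the matched text with a type constant, and the stream ends with an empty-valued TokenEOF, so the index-wise comparison above lines up exactly with the tokenizer's output. The last example tokenizes a longer expression and prints each token.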
# Tokenize a longer expression and print each token's type and value.
from typing import List
from mathy import Tokenizer, Token

text = "4x + 2x^3 * 7x"
tokenizer = Tokenizer()
tokens: List[Token] = tokenizer.tokenize(text)

for token in tokens:
    print(f"type: {token.type}, value: {token.value}")