# Excerpts from the patsy library (patsy/constraint.py, patsy/infix_parser.py,
# patsy/parse_formula.py). In the library, Token and Origin come from
# patsy.infix_parser and patsy.origin respectively.

# Operator table for the linear-constraint mini-language.
# Operator(token_type, arity, precedence): lower precedence binds more
# loosely, so "," (which separates whole constraints) binds loosest of all.
_ops = [
    Operator(",", 2, -100),
    Operator("=", 2, 0),
    Operator("+", 1, 100),
    Operator("-", 1, 100),
    Operator("+", 2, 100),
    Operator("-", 2, 100),
    Operator("*", 2, 200),
    Operator("/", 2, 200),
]

_atomic = ["NUMBER", "VARIABLE"]

def _token_maker(type, string):
    # Builds a callback for re.Scanner. The special type "__OP__" means the
    # matched text is itself the token type (e.g. "+", "="); otherwise the
    # fixed type (e.g. "NUMBER") is used.
    def make_token(scanner, token_string):
        if type == "__OP__":
            actual_type = token_string
        else:
            actual_type = type
        return Token(actual_type,
                     Origin(string, *scanner.match.span()),
                     token_string)
    return make_token
def _tokenize_constraint(string, variable_names):
    ...  # body not included in this excerpt
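
# Hedged usage sketch (not from the excerpted source): the operator table
# above ultimately powers patsy's public constraint parsing. Assuming patsy
# is installed, DesignInfo.linear_constraint() accepts strings in this
# mini-language; the column names "a" and "b" are illustrative.
from patsy import DesignInfo

di = DesignInfo(["a", "b"])
lc = di.linear_constraint("a + 2 * b = 3, a = 1")
print(lc.coefs)      # one coefficient row per "="-separated constraint
print(lc.constants)  # the right-hand sides: 3 and 1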
("ONE", Origin(code, 28, 29), "1"),
("+", Origin(code, 30, 31), None),
("ZERO", Origin(code, 32, 33), "0"),
("+", Origin(code, 34, 35), None),
("NUMBER", Origin(code, 36, 38), "10"),
]
for got, expected in zip(tokens, expecteds):
assert isinstance(got, Token)
assert got.type == expected[0]
assert got.origin == expected[1]
assert got.extra == expected[2]
_unary_tilde = Operator("~", 1, -100)
_default_ops = [
_unary_tilde,
Operator("~", 2, -100),
Operator("+", 2, 100),
Operator("-", 2, 100),
Operator("*", 2, 200),
Operator("/", 2, 200),
Operator(":", 2, 300),
Operator("**", 2, 500),
Operator("+", 1, 100),
Operator("-", 1, 100),
]
def parse_formula(code, extra_operators=[]):
if not code.strip():
code = "~ 1"
# Test for the generic infix parser: hand-built tokens for the expression
# "a + -b * (c + d)", parsed with an ad-hoc operator table.
def test_infix_parse():
    ops = [Operator("+", 2, 10),
           Operator("*", 2, 20),
           Operator("-", 1, 30)]
    atomic = ["ATOM1", "ATOM2"]
    # a + -b * (c + d)
    mock_origin = Origin("asdf", 2, 3)
    tokens = [Token("ATOM1", mock_origin, "a"),
              Token("+", mock_origin, "+"),
              Token("-", mock_origin, "-"),
              Token("ATOM2", mock_origin, "b"),
              Token("*", mock_origin, "*"),
              Token(Token.LPAREN, mock_origin, "("),
              Token("ATOM1", mock_origin, "c"),
              Token("+", mock_origin, "+"),
              Token("ATOM2", mock_origin, "d"),
              Token(Token.RPAREN, mock_origin, ")")]
    # Unary "-" binds tightest, then "*", then "+", so the expected shape
    # is a + ((-b) * (c + d)).
    tree = infix_parse(tokens, ops, atomic)
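
# Minimal sketch of reading the resulting tree, assuming ParseNode's
# .type/.args/.token attributes as defined in patsy/infix_parser.py
# (_prefix itself is a hypothetical helper, not part of patsy):
def _prefix(node):
    if not node.args:          # leaf node wrapping an atomic token
        return node.token.extra
    return "(%s %s)" % (node.type,
                        " ".join(_prefix(arg) for arg in node.args))

# For the tokens above, _prefix(tree) gives "(+ a (* (- b) (+ c d)))",
# confirming that unary minus and "*" bind before "+".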
# Test for the formula tokenizer. The head of this function (the code
# string, the _tokenize_formula call, and the first expected tokens) is
# restored from the patsy source so the fragment runs end to end.
def test__tokenize_formula():
    code = "y ~ a + (foo(b,c +   2)) + -1 + 0 + 10"
    tokens = list(_tokenize_formula(code, ["+", "-", "~"]))
    expecteds = [("PYTHON_EXPR", Origin(code, 0, 1), "y"),
                 ("~", Origin(code, 2, 3), None),
                 ("PYTHON_EXPR", Origin(code, 4, 5), "a"),
                 ("+", Origin(code, 6, 7), None),
                 (Token.LPAREN, Origin(code, 8, 9), None),
                 ("PYTHON_EXPR", Origin(code, 9, 23), "foo(b,c +   2)"),
                 (Token.RPAREN, Origin(code, 23, 24), None),
                 ("+", Origin(code, 25, 26), None),
                 ("-", Origin(code, 27, 28), None),
                 ("ONE", Origin(code, 28, 29), "1"),
                 ("+", Origin(code, 30, 31), None),
                 ("ZERO", Origin(code, 32, 33), "0"),
                 ("+", Origin(code, 34, 35), None),
                 ("NUMBER", Origin(code, 36, 38), "10"),
                 ]
    for got, expected in zip(tokens, expecteds):
        assert isinstance(got, Token)
        assert got.type == expected[0]
        assert got.origin == expected[1]
        assert got.extra == expected[2]
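
# Small illustrative sketch (not from the test suite): Origin records the
# source string plus a [start, end) span and compares by value, which is
# what the origin assertions above rely on.
_code = "y ~ a"
_o = Origin(_code, 2, 3)
assert _o == Origin(_code, 2, 3)
assert _code[_o.start:_o.end] == "~"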
_unary_tilde = Operator("~", 1, -100)
_default_ops = [
_unary_tilde,
Operator("~", 2, -100),
Operator("+", 2, 100),
Operator("-", 2, 100),
Operator("*", 2, 200),
Operator("/", 2, 200),
Operator(":", 2, 300),
Operator("**", 2, 500),
Operator("+", 1, 100),
Operator("-", 1, 100),
]
def parse_formula(code, extra_operators=[]):
assert_raises(ValueError, LinearConstraint.combine, [])
assert_raises(ValueError, LinearConstraint.combine,
[LinearConstraint(["a"], [1]), LinearConstraint(["b"], [1])])
_ops = [
Operator(",", 2, -100),
Operator("=", 2, 0),
Operator("+", 1, 100),
Operator("-", 1, 100),
Operator("+", 2, 100),
Operator("-", 2, 100),
Operator("*", 2, 200),
Operator("/", 2, 200),
]
_atomic = ["NUMBER", "VARIABLE"]
def _token_maker(type, string):
def make_token(scanner, token_string):
if type == "__OP__":
actual_type = token_string
else:
actual_type = type
return Token(actual_type,
Origin(string, *scanner.match.span()),
token_string)
return make_token
for got, expected in zip(tokens, expecteds):
assert isinstance(got, Token)
assert got.type == expected[0]
assert got.origin == expected[1]
assert got.extra == expected[2]
_unary_tilde = Operator("~", 1, -100)
_default_ops = [
_unary_tilde,
Operator("~", 2, -100),
Operator("+", 2, 100),
Operator("-", 2, 100),
Operator("*", 2, 200),
Operator("/", 2, 200),
Operator(":", 2, 300),
Operator("**", 2, 500),
Operator("+", 1, 100),
Operator("-", 1, 100),
]
def parse_formula(code, extra_operators=[]):
if not code.strip():
code = "~ 1"
for op in extra_operators:
if op.precedence < 0:
raise ValueError("all operators must have precedence >= 0")
operators = _default_ops + extra_operators
operator_strings = [op.token_type for op in operators]
from nose.tools import assert_raises
assert_raises(ValueError, LinearConstraint.combine, [])
assert_raises(ValueError, LinearConstraint.combine,
[LinearConstraint(["a"], [1]), LinearConstraint(["b"], [1])])
_ops = [
Operator(",", 2, -100),
Operator("=", 2, 0),
Operator("+", 1, 100),
Operator("-", 1, 100),
Operator("+", 2, 100),
Operator("-", 2, 100),
Operator("*", 2, 200),
Operator("/", 2, 200),
]
_atomic = ["NUMBER", "VARIABLE"]
def _token_maker(type, string):
def make_token(scanner, token_string):
if type == "__OP__":
actual_type = token_string
else:
actual_type = type
return Token(actual_type,
Origin(string, *scanner.match.span()),
token_string)
# Tests for LinearConstraint.combine(): it stacks constraint rows, and
# rejects an empty list or mismatched variable_names. The first three lines
# are restored from the patsy source so the fragment is self-contained.
def test_LinearConstraint_combine():
    comb = LinearConstraint.combine([LinearConstraint(["a", "b"], [1, 0]),
                                     LinearConstraint(["a", "b"], [0, 1], [1])])
    assert comb.variable_names == ["a", "b"]
    from numpy.testing import assert_equal  # numpy.testing.utils is deprecated
    assert_equal(comb.coefs, [[1, 0], [0, 1]])
    assert_equal(comb.constants, [[0], [1]])

    from nose.tools import assert_raises
    assert_raises(ValueError, LinearConstraint.combine, [])
    assert_raises(ValueError, LinearConstraint.combine,
                  [LinearConstraint(["a"], [1]), LinearConstraint(["b"], [1])])
# Core shunting-yard machinery from patsy/infix_parser.py. The start of the
# Operator class (its __init__ signature and first two assignments) is
# restored from the patsy source; the excerpt began mid-method.
class Operator(object):
    def __init__(self, token_type, arity, precedence):
        self.token_type = token_type
        self.arity = arity
        self.precedence = precedence

    def __repr__(self):
        return "%s(%r, %r, %r)" % (self.__class__.__name__,
                                   self.token_type, self.arity,
                                   self.precedence)

    __getstate__ = no_pickling

class _StackOperator(object):
    # Pairs an Operator with the Token that produced it, for error reporting.
    def __init__(self, op, token):
        self.op = op
        self.token = token

    __getstate__ = no_pickling

# Sentinel pushed for "(": its absurdly low precedence means no ordinary
# operator can pop it; only the matching ")" does.
_open_paren = Operator(Token.LPAREN, -1, -9999999)

class _ParseContext(object):
    def __init__(self, unary_ops, binary_ops, atomic_types, trace):
        self.op_stack = []
        self.noun_stack = []
        self.unary_ops = unary_ops
        self.binary_ops = binary_ops
        self.atomic_types = atomic_types
        self.trace = trace

    __getstate__ = no_pickling

def _read_noun_context(token, c):
    # "Noun context": we expect an operand, an open-paren, or a unary op.
    if token.type == Token.LPAREN:
        if c.trace:
            print("Pushing open-paren")
        c.op_stack.append(_StackOperator(_open_paren, token))
        return True
    # (remaining branches truncated in this excerpt)
_unary_tilde = Operator("~", 1, -100)
_default_ops = [
_unary_tilde,
Operator("~", 2, -100),
Operator("+", 2, 100),
Operator("-", 2, 100),
Operator("*", 2, 200),
Operator("/", 2, 200),
Operator(":", 2, 300),
Operator("**", 2, 500),
Operator("+", 1, 100),
Operator("-", 1, 100),
]
def parse_formula(code, extra_operators=[]):
if not code.strip():
code = "~ 1"
for op in extra_operators:
if op.precedence < 0:
raise ValueError("all operators must have precedence >= 0")
operators = _default_ops + extra_operators
operator_strings = [op.token_type for op in operators]
tree = infix_parse(_tokenize_formula(code, operator_strings),
operators,
_atomic_token_types)
if not isinstance(tree, ParseNode) or tree.type != "~":