# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
and constraint_like
and isinstance(constraint_like[0], str)):
constraints = []
for code in constraint_like:
if not isinstance(code, str):
raise ValueError("expected a string, not %r" % (code,))
tree = parse_constraint(code, variable_names)
evaluator = _EvalConstraint(variable_names)
constraints.append(evaluator.eval(tree, constraint=True))
return LinearConstraint.combine(constraints)
if isinstance(constraint_like, tuple):
if len(constraint_like) != 2:
raise ValueError("constraint tuple must have length 2")
coef, constants = constraint_like
return LinearConstraint(variable_names, coef, constants)
# assume a raw ndarray
coefs = np.asarray(constraint_like, dtype=float)
return LinearConstraint(variable_names, coefs)
assert lc.coefs.dtype == np.dtype(float)
assert lc.constants.dtype == np.dtype(float)
# statsmodels wants to be able to create degenerate constraints like this,
# see:
# https://github.com/pydata/patsy/issues/89
# We used to forbid it, but I guess it's harmless, so why not.
lc = LinearConstraint(["a"], [[0]])
assert_equal(lc.coefs, [[0]])
from nose.tools import assert_raises
assert_raises(ValueError, LinearConstraint, ["a"], [[1, 2]])
assert_raises(ValueError, LinearConstraint, ["a"], [[[1]]])
assert_raises(ValueError, LinearConstraint, ["a"], [[1, 2]], [3, 4])
assert_raises(ValueError, LinearConstraint, ["a", "b"], [[1, 2]], [3, 4])
assert_raises(ValueError, LinearConstraint, ["a"], [[1]], [[]])
assert_raises(ValueError, LinearConstraint, ["a", "b"], [])
assert_raises(ValueError, LinearConstraint, ["a", "b"],
np.zeros((0, 2)))
assert_no_pickling(lc)
def test_linear_constraint():
    """Exercise linear_constraint() with LinearConstraint and dict inputs."""
    from nose.tools import assert_raises
    from patsy.compat import OrderedDict

    check = _check_lincon
    # An existing LinearConstraint passes through unchanged...
    check(LinearConstraint(["a", "b"], [2, 3]), ["a", "b"], [[2, 3]], [[0]])
    # ...but only if its variable names match exactly.
    assert_raises(ValueError, linear_constraint,
                  LinearConstraint(["b", "a"], [2, 3]), ["a", "b"])
    # Dicts map a variable name (or positional index) to a constant value.
    check({"a": 2}, ["a", "b"], [[1, 0]], [[2]])
    check(OrderedDict([("a", 2), ("b", 3)]), ["a", "b"],
          [[1, 0], [0, 1]], [[2], [3]])
    check(OrderedDict([("a", 2), ("b", 3)]), ["b", "a"],
          [[0, 1], [1, 0]], [[2], [3]])
    check({0: 2}, ["a", "b"], [[1, 0]], [[2]])
    check(OrderedDict([(0, 2), (1, 3)]), ["a", "b"],
          [[1, 0], [0, 1]], [[2], [3]])
    check(OrderedDict([("a", 2), (1, 3)]), ["a", "b"],
          [[1, 0], [0, 1]], [[2], [3]])
    # Unknown variable names are rejected, as are mixed-key duplicates that
    # constrain the same variable twice.
    assert_raises(ValueError, linear_constraint, {"q": 1}, ["a", "b"])
    assert_raises(ValueError, linear_constraint, {"a": 1, 0: 2}, ["a", "b"])
if not isinstance(code, str):
raise ValueError("expected a string, not %r" % (code,))
tree = parse_constraint(code, variable_names)
evaluator = _EvalConstraint(variable_names)
constraints.append(evaluator.eval(tree, constraint=True))
return LinearConstraint.combine(constraints)
if isinstance(constraint_like, tuple):
if len(constraint_like) != 2:
raise ValueError("constraint tuple must have length 2")
coef, constants = constraint_like
return LinearConstraint(variable_names, coef, constants)
# assume a raw ndarray
coefs = np.asarray(constraint_like, dtype=float)
return LinearConstraint(variable_names, coefs)
used = set()
for i, (name, value) in enumerate(six.iteritems(constraint_like)):
if name in variable_names:
idx = variable_names.index(name)
elif isinstance(name, six.integer_types):
idx = name
else:
raise ValueError("unrecognized variable name/index %r"
% (name,))
if idx in used:
raise ValueError("duplicated constraint on %r"
% (variable_names[idx],))
used.add(idx)
coefs[i, idx] = 1
constants[i] = value
return LinearConstraint(variable_names, coefs, constants)
if isinstance(constraint_like, str):
constraint_like = [constraint_like]
# fall-through
if (isinstance(constraint_like, list)
and constraint_like
and isinstance(constraint_like[0], str)):
constraints = []
for code in constraint_like:
if not isinstance(code, str):
raise ValueError("expected a string, not %r" % (code,))
tree = parse_constraint(code, variable_names)
evaluator = _EvalConstraint(variable_names)
constraints.append(evaluator.eval(tree, constraint=True))
return LinearConstraint.combine(constraints)
def test_LinearConstraint_combine():
    """LinearConstraint.combine() stacks rows; bad inputs raise ValueError."""
    combined = LinearConstraint.combine(
        [LinearConstraint(["a", "b"], [1, 0]),
         LinearConstraint(["a", "b"], [0, 1], [1])])
    assert combined.variable_names == ["a", "b"]
    try:
        from numpy.testing import assert_equal
    except ImportError:
        # Very old numpy releases exposed assert_equal from a different module.
        from numpy.testing.utils import assert_equal
    assert_equal(combined.coefs, [[1, 0], [0, 1]])
    assert_equal(combined.constants, [[0], [1]])
    from nose.tools import assert_raises
    # An empty list and mismatched variable names are both errors.
    assert_raises(ValueError, LinearConstraint.combine, [])
    assert_raises(ValueError, LinearConstraint.combine,
                  [LinearConstraint(["a"], [1]), LinearConstraint(["b"], [1])])
def test_LinearConstraint():
    """Basic LinearConstraint construction: defaults, dtypes, and errors."""
    try:
        from numpy.testing import assert_equal
    except ImportError:
        # Very old numpy releases exposed assert_equal from a different module.
        from numpy.testing.utils import assert_equal
    # Constants default to zero when omitted.
    constraint = LinearConstraint(["foo", "bar"], [1, 1])
    assert constraint.variable_names == ["foo", "bar"]
    assert_equal(constraint.coefs, [[1, 1]])
    assert_equal(constraint.constants, [[0]])
    # Multiple coefficient rows with explicit constants.
    constraint = LinearConstraint(["foo", "bar"], [[1, 1], [2, 3]], [10, 20])
    assert_equal(constraint.coefs, [[1, 1], [2, 3]])
    assert_equal(constraint.constants, [[10], [20]])
    # Inputs are coerced to float arrays.
    assert constraint.coefs.dtype == np.dtype(float)
    assert constraint.constants.dtype == np.dtype(float)
    # statsmodels wants to be able to create degenerate constraints like this,
    # see:
    # https://github.com/pydata/patsy/issues/89
    # We used to forbid it, but I guess it's harmless, so why not.
    constraint = LinearConstraint(["a"], [[0]])
    assert_equal(constraint.coefs, [[0]])
    from nose.tools import assert_raises
    # Coefficient row length must match the number of variables.
    assert_raises(ValueError, LinearConstraint, ["a"], [[1, 2]])
def test_LinearConstraint():
    # NOTE(review): this re-definition shadows an earlier test of the same
    # name elsewhere in the file and appears to be a truncated duplicate
    # (it ends in a dangling comment) — confirm and remove one copy.
    try:
        from numpy.testing import assert_equal
    except ImportError:
        # Fallback for very old numpy module layouts.
        from numpy.testing.utils import assert_equal
    # Constants default to zero when omitted.
    lc = LinearConstraint(["foo", "bar"], [1, 1])
    assert lc.variable_names == ["foo", "bar"]
    assert_equal(lc.coefs, [[1, 1]])
    assert_equal(lc.constants, [[0]])
    # Multiple coefficient rows with explicit constants.
    lc = LinearConstraint(["foo", "bar"], [[1, 1], [2, 3]], [10, 20])
    assert_equal(lc.coefs, [[1, 1], [2, 3]])
    assert_equal(lc.constants, [[10], [20]])
    # Inputs are coerced to float arrays.
    assert lc.coefs.dtype == np.dtype(float)
    assert lc.constants.dtype == np.dtype(float)
    # statsmodels wants to be able to create degenerate constraints like this,
    # see:
    # https://github.com/pydata/patsy/issues/89
    # We used to forbid it, but I guess it's harmless, so why not.
def _check_lincon(input, varnames, coefs, constants):
    """Assert that linear_constraint(input, varnames) yields the constraint
    built directly from (varnames, coefs, constants), with float arrays."""
    try:
        from numpy.testing import assert_equal
    except ImportError:
        # Very old numpy releases exposed assert_equal from a different module.
        from numpy.testing.utils import assert_equal
    actual = linear_constraint(input, varnames)
    print("got", actual)
    reference = LinearConstraint(varnames, coefs, constants)
    print("expected", reference)
    assert_equal(actual.variable_names, reference.variable_names)
    assert_equal(actual.coefs, reference.coefs)
    assert_equal(actual.constants, reference.constants)
    # Both arrays must have been coerced to float.
    assert_equal(actual.coefs.dtype, np.dtype(float))
    assert_equal(actual.constants.dtype, np.dtype(float))