# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def constr_func(aff_obj):
    """Return constraints placing ``aff_obj`` in the convex hull of ``values``.

    Creates one scalar weight variable per element of ``values`` (a
    closure variable), requires the weights to be nonnegative and sum
    to one, and equates ``aff_obj`` with the weighted combination.

    Parameters
    ----------
    aff_obj : LinOp
        The affine expression constrained to lie in the hull.

    Returns
    -------
    list
        The list of linear constraints.
    """
    # One scalar weight variable per value in the hull.
    weights = [lu.create_var((1, 1)) for _ in values]
    weighted_terms = []
    for val, weight in zip(values, weights):
        val_obj = val.canonical_form[0]
        weighted_terms.append(lu.mul_expr(val_obj, weight, val_obj.size))
    unity = lu.create_const(1, (1, 1))
    constraints = [
        lu.create_eq(aff_obj, lu.sum_expr(weighted_terms)),
        lu.create_eq(lu.sum_expr(weights), unity),
    ]
    # Each weight must be nonnegative.
    constraints.extend(lu.create_geq(weight) for weight in weights)
    return constraints
super(ConvexHull, self).__init__(rows,cols,constr_func)
Returns
-------
tuple
(LinOp for objective, list of constraints)
"""
axis = data[0]
keepdims = data[1]
if axis is None:
obj = lu.sum_entries(arg_objs[0], shape=shape)
elif axis == 1:
if keepdims:
const_shape = (arg_objs[0].shape[1], 1)
else:
const_shape = (arg_objs[0].shape[1],)
ones = lu.create_const(np.ones(const_shape), const_shape)
obj = lu.rmul_expr(arg_objs[0], ones, shape)
else: # axis == 0
if keepdims:
const_shape = (1, arg_objs[0].shape[0])
else:
const_shape = (arg_objs[0].shape[0],)
ones = lu.create_const(np.ones(const_shape), const_shape)
obj = lu.mul_expr(ones, arg_objs[0], shape)
return (obj, [])
def __init__(self, args, constr_id=None):
    """Initialize the constraint with its argument expressions.

    Parameters
    ----------
    args : list
        The expressions the constraint applies to.
    constr_id : int, optional
        Unique id for the constraint; a fresh id is generated via
        ``lu.get_id()`` when omitted.
    """
    # TODO cast constants.
    # self.args = [cvxtypes.expression().cast_to_const(arg) for arg in args]
    self.args = args
    self.constr_id = lu.get_id() if constr_id is None else constr_id
    self._construct_dual_variables(args)
    super(Constraint, self).__init__()
Parameters
----------
arg_objs : list
LinExpr for each argument.
shape : tuple
The shape of the resulting expression.
data :
Additional data required by the atom.
Returns
-------
tuple
(LinOp for objective, list of constraints)
"""
return (lu.diag_mat(arg_objs[0]), [])
def canonicalize(self):
    """Returns the graph implementation of the object.

    Marks the top level constraint as the dual_holder,
    so the dual value will be saved to the EqConstraint.

    Returns:
        A tuple of (affine expression, [constraints]).
    """
    expr_obj, expr_constrs = self.args[0].canonical_form
    # The top-level equality carries this constraint's id so its dual
    # value is recorded against it.
    dual_holder = lu.create_eq(expr_obj, constr_id=self.id)
    return (None, expr_constrs + [dual_holder])
def graph_implementation(self, arg_objs):
    """Multiply the last expression by each preceding constant.

    Folds ``lu.mul_expr`` right-to-left over ``arg_objs``, starting
    from the final argument, accumulating any side constraints.
    """
    result = arg_objs[-1]
    side_constraints = []
    # Walk the preceding arguments from right to left.
    for lhs in reversed(arg_objs[:-1]):
        result, new_constrs = lu.mul_expr(lhs, result, self.size)
        side_constraints.extend(new_constrs)
    return (result, side_constraints)
mat_cache : MatrixCache
The cached version of the matrix-vector pair.
caching : bool
Is the data being cached?
"""
active_constr = []
constr_offsets = []
vert_offset = 0
for constr in mat_cache.constraints:
# Process the constraint if it has a parameter and not caching
# or it doesn't have a parameter and caching.
has_param = len(lu.get_expr_params(constr.expr)) > 0
if (has_param and not caching) or (not has_param and caching):
# If parameterized, convert the parameters into constant nodes.
if has_param:
constr = lu.copy_constr(constr,
lu.replace_params_with_consts)
active_constr.append(constr)
constr_offsets.append(vert_offset)
vert_offset += np.prod(constr.shape, dtype=int)
# Convert the constraints into a matrix and vector offset
# and add them to the matrix cache.
expr_list = [con.expr for con in active_constr]
if len(active_constr) > 0:
V, I, J, const_vec = canonInterface.get_problem_matrix(
expr_list,
self.sym_data.var_offsets,
constr_offsets
)
# Convert the constant offset to the correct data type.
conv_vec = self.vec_intf.const_to_matrix(const_vec,
convert_scalars=True)
def canonicalize(self):
    """Negates the target expression's objective.

    Returns:
        A tuple of (negated affine expression, [constraints]).
    """
    inner_obj, inner_constrs = self.args[0].canonical_form
    return (lu.neg_expr(inner_obj), inner_constrs)
def _dummy_constr(self):
    """Return a single placeholder equality constraint on the objective."""
    objective_expr = self.sym_data.objective
    return [lu.create_eq(objective_expr)]
Parameters
----------
arg_objs : list
LinExpr for each argument.
shape : tuple
The shape of the resulting expression.
data :
Additional data required by the atom.
Returns
-------
tuple
(LinOp for objective, list of constraints)
"""
for i, arg in enumerate(arg_objs):
if arg.shape != shape and lu.is_scalar(arg):
arg_objs[i] = lu.promote(arg, shape)
return (lu.sum_expr(arg_objs), [])