"""Test for amplmodel module."""
from nlp.model.amplmodel import AmplModel
import numpy as np
import sys
nargs = len(sys.argv)
if nargs < 2:
    sys.stderr.write('Please specify problem name\n')
    sys.exit(1)
problem_name = sys.argv[1]
# Create a model
print 'Problem', problem_name
model = AmplModel(problem_name)
# Query the model
x0 = model.x0
pi0 = model.pi0
nvar = model.nvar
ncon = model.ncon
print 'There are %d variables and %d constraints' % (nvar, ncon)
np.set_printoptions(precision=3, linewidth=79, threshold=10, edgeitems=3)
print 'Initial point: ', x0
print 'Lower bounds on x: ', model.Lvar
print 'Upper bounds on x: ', model.Uvar
print 'f(x0) = ', model.obj(x0)
g0 = model.grad(x0)
print '∇f(x0) = ', g0
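# For constrained problems, the same interface also exposes constraint values
# and bounds. A minimal sketch, assuming the standard `cons`, `Lcon` and `Ucon`
# accessors that AmplModel inherits from NLPModel:
if ncon > 0:
    c0 = model.cons(x0)
    print 'c(x0) = ', c0
    print 'Lower bounds on c: ', model.Lcon
    print 'Upper bounds on c: ', model.Ucon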
def igrad(self, i, x):
    """Evaluate `AmplModel.igrad` and perturb it with random noise."""
    gi = AmplModel.igrad(self, i, x)
    noise = _random_array(self.n)
    return gi + self.noise_amplitude * noise
nprobs = len(sys.argv) - 1
if nprobs == 0:
    raise ValueError("Please supply problem name as argument")

# Create root logger.
logger = config_logger("nlp",
                       "%(name)-3s %(levelname)-5s %(message)s")

# Create TRUNK logger.
slv_log = config_logger("nlp.trunk",
                        "%(name)-9s %(levelname)-5s %(message)s",
                        level=logging.WARN if nprobs > 1 else logging.INFO)

logger.info("%10s %5s %8s %7s %5s %5s %4s %s",
            "name", "nvar", "f", u"‖∇f‖", "#f", u"#∇f", "stat", "time")

for problem in sys.argv[1:]:
    model = AmplModel(problem)
    trunk = Trunk(model, TrustRegion(), TruncatedCG,
                  ny=True, inexact=True, maxiter=500)
    trunk.solve()
    logger.info("%10s %5d %8.1e %7.1e %5d %5d %4s %.3f",
                model.name, model.nvar, trunk.f, trunk.gNorm,
                model.obj.ncalls, model.grad.ncalls,
                trunk.status, trunk.tsolve)
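# The solver can also be driven for a single model outside the loop; a hedged
# sketch reading the same attributes logged above. The problem file name is an
# illustrative placeholder.
model = AmplModel("rosenbr.nl")
trunk = Trunk(model, TrustRegion(), TruncatedCG,
              ny=True, inexact=True, maxiter=500)
trunk.solve()
logger.info(u"f = %8.1e  ‖∇f‖ = %7.1e  status = %s  time = %.3fs",
            trunk.f, trunk.gNorm, trunk.status, trunk.tsolve)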
def hess(self, *args, **kwargs):
    """Evaluate Lagrangian Hessian."""
    vals, rows, cols = super(SciPyNLPModel, self).hess(*args, **kwargs)
    return sp.coo_matrix((vals, (rows, cols)),
                         shape=(self.nvar, self.nvar))

def jac(self, *args, **kwargs):
    """Evaluate sparse constraints Jacobian."""
    if self.ncon == 0:  # SciPy cannot create sparse matrix of size 0.
        return linop_from_ndarray(np.empty((0, self.nvar), dtype=np.float))
    vals, rows, cols = super(SciPyNLPModel, self).jac(*args, **kwargs)
    return sp.coo_matrix((vals, (rows, cols)),
                         shape=(self.ncon, self.nvar))
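# With `hess` and `jac` returning scipy.sparse COO matrices, standard SciPy
# sparse tooling applies directly. A minimal sketch; the import path, the
# problem file name and the assumption that the AMPL-backed class exposes
# these COO-returning methods are all illustrative.
import scipy.sparse.linalg as spla
from nlp.model.scipymodel import SciPyAmplModel  # assumed module path

model = SciPyAmplModel("hs010.nl")      # placeholder .nl file
x = model.x0
H = model.hess(x, model.pi0).tocsc()    # COO -> CSC before factorizing
dx = spla.spsolve(H, -model.grad(x))    # Newton-like step on the Hessian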
class SciPyAmplModel(AmplModel):
    """`AmplModel` with sparse matrices in SciPy coordinate (COO) format."""

    # MRO: 1. SciPyAmplModel
    #      2. AmplModel
    #      3. NLPModel

    def A(self, *args, **kwargs):
        """Evaluate sparse Jacobian of the linear part of the constraints.

        Useful to obtain the constraint matrix when the problem is a linear
        programming problem.
        """
        vals, rows, cols = super(SciPyAmplModel, self).A(*args, **kwargs)
        return sp.coo_matrix((vals, (rows, cols)),
                             shape=(self.ncon, self.nvar))
def hess(self, *args, **kwargs):
    """Evaluate Lagrangian Hessian."""
    vals, rows, cols = super(PySparseNLPModel,
                             self).hess(*args, **kwargs)
    H = psp(nrow=self.nvar, ncol=self.nvar,
            sizeHint=vals.size, symmetric=True)
    H.put(vals, rows, cols)
    return H

def jac(self, *args, **kwargs):
    """Evaluate constraints Jacobian at x."""
    vals, rows, cols = super(PySparseNLPModel,
                             self).jac(*args, **kwargs)
    J = psp(nrow=self.ncon, ncol=self.nvar,
            sizeHint=vals.size, symmetric=False)
    J.put(vals, rows, cols)
    return J
try:
    from nlp.model.amplmodel import AmplModel

    class PySparseAmplModel(PySparseNLPModel, AmplModel):
        # MRO: 1. PySparseAmplModel
        #      2. PySparseNLPModel
        #      3. AmplModel
        #      4. NLPModel
        #
        # Here, `jac` and `hess` are inherited directly from PySparseNLPModel.
        #

        def __init__(self, *args, **kwargs):
            super(PySparseAmplModel, self).__init__(*args, **kwargs)

        def A(self, *args, **kwargs):
            """Evaluate sparse Jacobian of the linear part of the constraints.

            Useful to obtain the constraint matrix when the problem is a
            linear programming problem.
            """
            vals, rows, cols = super(PySparseAmplModel,
                                     self).A(*args, **kwargs)
            A = psp(nrow=self.ncon, ncol=self.nvar,
                    sizeHint=vals.size, symmetric=False)
            A.put(vals, rows, cols)
            return A

except ImportError:
    # The AMPL interface is optional.
    pass
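# The triplet-to-PySparse conversion used by `jac`, `hess` and `A` above
# follows this pattern. A minimal sketch; the import path and the small COO
# triplet data below are illustrative assumptions.
import numpy as np
from pysparse.sparse.pysparseMatrix import PysparseMatrix as psp

vals = np.array([1.0, 2.0, 3.0])
rows = np.array([0, 0, 1])
cols = np.array([0, 2, 1])
B = psp(nrow=2, ncol=3, sizeHint=vals.size, symmetric=False)
B.put(vals, rows, cols)   # scatter the COO triplets into the sparse matrix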
def hess(self, *args, **kwargs):
    """Evaluate Lagrangian Hessian."""
    vals, rows, cols = super(CySparseNLPModel, self).hess(*args, **kwargs)
    H = LLSparseMatrix(nrow=self.nvar, ncol=self.nvar,
                       size_hint=vals.size, store_symmetric=True,
                       itype=types.INT64_T, dtype=types.FLOAT64_T)
    H.put_triplet(rows, cols, vals)
    return H

def jac(self, *args, **kwargs):
    """Evaluate constraints Jacobian at x."""
    vals, rows, cols = super(CySparseNLPModel, self).jac(*args, **kwargs)
    J = LLSparseMatrix(nrow=self.ncon, ncol=self.nvar,
                       size_hint=vals.size, store_symmetric=False,
                       itype=types.INT64_T, dtype=types.FLOAT64_T)
    J.put_triplet(rows, cols, vals)
    return J
try:
    from nlp.model.amplmodel import AmplModel

    class CySparseAmplModel(CySparseNLPModel, AmplModel):
        # MRO: 1. CySparseAmplModel
        #      2. CySparseNLPModel
        #      3. AmplModel
        #      4. NLPModel
        #
        # Here, `jac` and `hess` are inherited directly from CySparseNLPModel.
        #

        def A(self, *args, **kwargs):
            """Evaluate sparse Jacobian of the linear part of the constraints.

            Useful to obtain the constraint matrix when the problem is a
            linear programming problem.
            """
            vals, rows, cols = super(CySparseAmplModel,
                                     self).A(*args, **kwargs)
            A = LLSparseMatrix(nrow=self.ncon, ncol=self.nvar,
                               size_hint=vals.size, store_symmetric=False,
                               itype=types.INT64_T, dtype=types.FLOAT64_T)
            A.put_triplet(rows, cols, vals)
            return A

except ImportError:
    # The AMPL interface is optional.
    pass
def display_basic_info(self):
    """Display vital statistics about the current model."""
    super(AmplModel, self).display_basic_info()

    # Display info that wasn't available in NLPModel.
    write = self.logger.info
    write('Number of nonzeros in Jacobian: %d\n' % self.nnzj)
    write('Number of nonzeros in Lagrangian Hessian: %d\n' % self.nnzh)
    if self.islp():
        write('This problem is a linear program.\n')
    return
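# Example usage sketch: read a model from an AMPL .nl file and print the
# statistics above; the problem file name is an illustrative placeholder.
import logging
from nlp.model.amplmodel import AmplModel

logging.basicConfig(level=logging.INFO)  # so self.logger.info output is shown
model = AmplModel("hs006.nl")
model.display_basic_info()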
nprobs = len(sys.argv) - 1

# Create root logger.
logger = config_logger("nlp", "%(name)-3s %(levelname)-5s %(message)s")

# Create TRON logger.
tron_logger = config_logger("nlp.tron",
                            "%(name)-8s %(levelname)-5s %(message)s",
                            level=logging.WARN if nprobs > 1 else logging.INFO)

if nprobs > 1:
    logger.info("%12s %5s %6s %8s %8s %6s %6s %5s %7s",
                "name", "nvar", "iter", "f", u"‖P∇f‖", "#f", u"#∇f", "stat",
                "time")

for problem in sys.argv[1:]:
    model = AmplModel(problem)
    model.compute_scaling_obj()

    # Skip inequality- or equality-constrained problems.
    if model.m > 0:
        msg = '%s has %d linear or nonlinear constraints'
        logger.error(msg, model.name, model.m)
        continue

    tron = TRON(model, TruncatedCG, maxiter=100)
    try:
        tron.solve()
        status = tron.status
        niter, fcalls, gcalls, pgnorm, tsolve = tron_stats(tron)
    except:
        msg = sys.exc_info()[1].message
        status = msg if len(msg) > 0 else "xfail"  # unknown failure
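# The `tron_stats` helper used above is not shown in this excerpt. A minimal
# sketch, assuming TRON exposes `iter`, `pgnorm` and `tsolve` attributes,
# keeps a reference to its model, and that call counts are tracked via
# `obj.ncalls` / `grad.ncalls` as in the TRUNK driver above:
def tron_stats(tron):
    """Collect per-problem statistics from a TRON instance (hedged sketch)."""
    return (tron.iter, tron.model.obj.ncalls, tron.model.grad.ncalls,
            tron.pgnorm, tron.tsolve)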
from nlp.model.amplmodel import AmplModel
from numpy.random import random as random_array
import random


def _random():
    """Return a random number in [-1,1)."""
    return 2 * random.random() - 1


def _random_array(n):
    """Return a random array of length n with elements in [-1,1)."""
    return 2 * random_array(n) - 1


class NoisyAmplModel(AmplModel):

    def __init__(self, model, noise_amplitude=1.0, **kwargs):
        """
        A noisy nonlinear problem in which only first derivatives can be
        evaluated. For help on individual methods, see `AmplModel`.
        """
        super(NoisyAmplModel, self).__init__(model, **kwargs)
        self.noise_amplitude = noise_amplitude

    def obj(self, x):
        f = AmplModel.obj(self, x)
        noise = _random()
        return f + self.noise_amplitude * noise

    def grad(self, x):
        g = AmplModel.grad(self, x)
        noise = _random_array(self.n)
        return g + self.noise_amplitude * noise
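# Usage sketch: compare a noisy gradient with the exact one; the problem file
# name is an illustrative placeholder.
import numpy as np

noisy = NoisyAmplModel("rosenbr.nl", noise_amplitude=1.0e-3)
exact = AmplModel("rosenbr.nl")
x = noisy.x0
print 'noise in gradient: %g' % np.linalg.norm(noisy.grad(x) - exact.grad(x))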