# Imports used by the test methods below; the enclosing test class (which
# defines self.model_name) is not part of this snippet.
import logging

import numpy as np

from nlp.model.amplmodel import AmplModel
from nlp.tools.dercheck import DerivativeChecker
from nlp.tools.logs import config_logger


def test_cons_scaling(self):
    model = AmplModel(self.model_name)
    log = config_logger("nlp.der",
                        "%(name)-10s %(levelname)-8s %(message)s",
                        level=logging.DEBUG)
    dcheck = DerivativeChecker(model, model.x0, tol=1e-5)
    dcheck.check(hess=True, chess=True)
    assert len(dcheck.jac_errs) == 0
    assert len(dcheck.hess_errs) == 0
    for j in xrange(model.ncon):
        assert len(dcheck.chess_errs[j]) == 0

    model.compute_scaling_cons(g_max=40.)
    assert np.allclose(model.scale_con, np.array([1.]))
    assert np.allclose(model.cons(model.x0), np.array([29.0]))
    assert np.allclose(model.jop(model.x0).to_array(),
                       np.array([[40., 4.]]))

    model.compute_scaling_cons(reset=True)
    assert model.scale_con is None
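# Where the reference values in test_cons_scaling come from: the numbers are
# consistent with Hock-Schittkowski problem hs007, i.e. c(x) = (1 + x1^2)^2 + x2^2
# with x0 = (2, 2); that identification is an assumption inferred from the
# values, not stated by the test itself.  Then c(x0) = 25 + 4 = 29 and
# J(x0) = [4*x1*(1 + x1^2), 2*x2] = [40, 4]; with g_max=40 the largest Jacobian
# entry already equals g_max, so the constraint scaling factor is 1 and the
# scaled Jacobian row is unchanged.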
def test_obj_scaling(self):
    model = AmplModel(self.model_name)
    log = config_logger("nlp.der",
                        "%(name)-10s %(levelname)-8s %(message)s",
                        level=logging.DEBUG)
    dcheck = DerivativeChecker(model, model.x0, tol=1e-5)
    dcheck.check(hess=True, chess=True)
    assert len(dcheck.grad_errs) == 0
    assert len(dcheck.hess_errs) == 0
    for j in xrange(model.ncon):
        assert len(dcheck.chess_errs[j]) == 0

    model.compute_scaling_obj(g_max=1.)
    assert model.obj(model.x0) == -0.39056208756589972
    assert np.allclose(model.grad(model.x0), np.array([0.8, -1.]))
    assert model.scale_obj == 1.

    model.compute_scaling_obj(reset=True)
    assert model.scale_obj is None
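# A minimal stand-alone check of the objective values asserted above, under
# the same (assumed) hs007 reading: f(x) = log(1 + x1^2) - x2 at x0 = (2, 2).
import numpy as np

x0 = np.array([2.0, 2.0])
f0 = np.log(1.0 + x0[0]**2) - x0[1]                    # log(5) - 2 = -0.3905...
g0 = np.array([2.0 * x0[0] / (1.0 + x0[0]**2), -1.0])  # [0.8, -1.0]
assert abs(f0 - (-0.39056208756589972)) < 1e-12
assert np.allclose(g0, np.array([0.8, -1.0]))
# With g_max=1 and max(abs(g0)) equal to 1, the objective scaling factor is 1,
# which is consistent with the unscaled objective value being returned.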
# Fragment of a TRON driver; the helper's opening lines are missing, so the
# header and the success test below are assumed (the name is hypothetical).
def tron_stats(tron):
    if tron.status == "opt":  # assumed success flag
        it = tron.iter
        fc, gc = tron.model.obj.ncalls, tron.model.grad.ncalls
        pg = tron.pgnorm
        ts = tron.tsolve
    else:
        # Failed runs report all statistics negated.
        it = -tron.iter
        fc, gc = -tron.model.obj.ncalls, -tron.model.grad.ncalls
        pg = -1.0 if tron.pgnorm is None else -tron.pgnorm
        ts = -1.0 if tron.tsolve is None else -tron.tsolve
    return (it, fc, gc, pg, ts)
# Imports needed by the rest of this fragment (its original preamble is not
# part of the snippet).
import logging
import sys

from nlp.model.amplmodel import AmplModel
from nlp.tools.logs import config_logger

nprobs = len(sys.argv) - 1
if nprobs == 0:
    raise ValueError("Please supply problem name as argument")

# Create root logger.
logger = config_logger("nlp", "%(name)-3s %(levelname)-5s %(message)s")

# Create TRON logger.
tron_logger = config_logger("nlp.tron",
                            "%(name)-8s %(levelname)-5s %(message)s",
                            level=logging.WARN if nprobs > 1 else logging.INFO)

if nprobs > 1:
    logger.info("%12s %5s %6s %8s %8s %6s %6s %5s %7s",
                "name", "nvar", "iter", "f", u"‖P∇f‖", "#f", u"#∇f", "stat",
                "time")

for problem in sys.argv[1:]:
    model = AmplModel(problem)
    model.compute_scaling_obj()

    # Check for inequality- or equality-constrained problem.
    if model.m > 0:
        msg = '%s has %d linear or nonlinear constraints'
        logger.error(msg, model.name, model.m)
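        # Plausible continuation (an assumption): TRON handles bound
        # constraints only, so generally constrained problems are skipped.
        continue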
"""Simple AMPL driver for L-BFGS."""
import logging
import sys
from nlp.model.amplmodel import QNAmplModel
from nlp.optimize.lbfgs import LBFGS
from nlp.tools.logs import config_logger
from pykrylov.linop import InverseLBFGSOperator
nprobs = len(sys.argv) - 1
if nprobs == 0:
raise ValueError("Please supply problem name as argument")
# Create root logger.
logger = config_logger("nlp",
"%(name)-3s %(levelname)-5s %(message)s")
# Create LBFGS logger.
slv_log = config_logger("nlp.lbfgs",
"%(name)-9s %(levelname)-5s %(message)s",
level=logging.WARN if nprobs > 1 else logging.INFO)
logger.info("%10s %5s %8s %7s %5s %5s %4s %s",
"name", "nvar", "f", u"‖∇f‖", "#f", u"#∇f", "stat", "time")
for problem in sys.argv[1:]:
model = QNAmplModel(problem, H=InverseLBFGSOperator, scaling=True)
lbfgs = LBFGS(model, maxiter=300)
lbfgs.solve()
logger.info("%10s %5d %8.1e %7.1e %5d %5d %4s %.3f",
model.name, model.nvar, lbfgs.f, lbfgs.gNorm,
model.obj.ncalls, model.grad.ncalls,
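# Example invocation (the script name is hypothetical):
#
#     python lbfgs_driver.py problem1.nl problem2.nl
#
# With more than one problem on the command line, the "nlp.lbfgs" logger is
# set to WARN so only the one-line summary per problem is printed; with a
# single problem the solver's own INFO-level log is shown as well.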
import logging
import sys

from nlp.model.amplmodel import AmplModel
from nlp.tr.trustregion import TrustRegion
from nlp.optimize.trunk import Trunk
from nlp.optimize.pcg import TruncatedCG
from nlp.tools.logs import config_logger

nprobs = len(sys.argv) - 1
if nprobs == 0:
    raise ValueError("Please supply problem name as argument")

# Create root logger.
logger = config_logger("nlp",
                       "%(name)-3s %(levelname)-5s %(message)s")

# Create TRUNK logger.
slv_log = config_logger("nlp.trunk",
                        "%(name)-9s %(levelname)-5s %(message)s",
                        level=logging.WARN if nprobs > 1 else logging.INFO)

logger.info("%10s %5s %8s %7s %5s %5s %4s %s",
            "name", "nvar", "f", u"‖∇f‖", "#f", u"#∇f", "stat", "time")

for problem in sys.argv[1:]:
    model = AmplModel(problem)
    trunk = Trunk(model, TrustRegion(), TruncatedCG,
                  ny=True, inexact=True, maxiter=500)
    trunk.solve()
    logger.info("%10s %5d %8.1e %7.1e %5d %5d %4s %.3f",
                model.name, model.nvar, trunk.f, trunk.gNorm,
                model.obj.ncalls, model.grad.ncalls,
                trunk.status, trunk.tsolve)
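# Note on the solver set up in the loop above: Trunk is a trust-region
# truncated-Newton method, and the TruncatedCG argument selects the
# Steihaug-Toint conjugate-gradient solver for the trust-region subproblem.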
# Fragment of a RegSQP driver; the argument parser construction and the
# imports are missing from this snippet.  The preamble below is an assumed
# minimal reconstruction (option names inferred from the attributes used
# further down).
import logging
from argparse import ArgumentParser

from nlp.model.pysparsemodel import PySparseAmplModel
from nlp.tools.logs import config_logger

parser = ArgumentParser(description="RegSQP driver")
parser.add_argument("--quasi_newton", action="store_true")

(args, other) = parser.parse_known_args()

# Translate options to input arguments.
opts = {}

if args.quasi_newton:
    from new_regsqp_BFGS import RegSQPBFGSIterativeSolver as RegSQP
else:
    from new_regsqp import RegSQPSolver as RegSQP

nprobs = len(other)
if nprobs == 0:
    raise ValueError("Please supply problem name as argument")

# Create root logger.
log = config_logger('nlp', '%(name)-3s %(levelname)-5s %(message)s')

# Configure the solver logger.
reg_logger = config_logger("nlp.regsqp",
                           "%(name)-8s %(levelname)-5s %(message)s",
                           level=logging.WARN if nprobs > 1 else logging.DEBUG)

log.info('%12s %5s %5s %6s %8s %8s %8s %6s %6s %6s %5s %7s',
         'name', 'nvar', 'ncons', 'iter', 'f', u'‖c‖', u'‖∇L‖',
         '#f', '#g', '#jprod', 'stat', 'time')

# Solve each problem in turn.
for problem in other:
    verbose = True
    model = PySparseAmplModel(problem, **opts)
#!/usr/bin/env python
"""Simple AMPL driver for the derivative checker."""
from nlp.model.pysparsemodel import PySparseAmplModel
from nlp.tools.dercheck import DerivativeChecker
from nlp.tools.logs import config_logger
import sys
if len(sys.argv) == 1:
    raise ValueError("Please supply problem name as argument")
# Create root logger.
log = config_logger("nlp.der", "%(name)-10s %(levelname)-8s %(message)s")
nlp = PySparseAmplModel(sys.argv[1])
dcheck = DerivativeChecker(nlp, nlp.x0)
dcheck.check()
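# A possible extension of the driver above (a sketch, not part of the original
# script): also run the second-derivative checks and report error counts,
# using the same DerivativeChecker attributes that the tests earlier in this
# file inspect.
dcheck.check(hess=True, chess=True)
log.info("gradient errors: %d", len(dcheck.grad_errs))
log.info("Jacobian errors: %d", len(dcheck.jac_errs))
log.info("Hessian errors:  %d", len(dcheck.hess_errs))
for j in xrange(nlp.ncon):
    log.info("constraint %d Hessian errors: %d", j, len(dcheck.chess_errs[j]))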
opts["H"] = QNOperator
opts["npairs"] = args.npairs
opts["scaling"] = True
else:
from nlp.model.pysparsemodel import PySparseAmplModel as Model
from nlp.optimize.funnel import Funnel
nprobs = len(other)
if nprobs == 0:
raise ValueError("Please supply problem name as argument")
# Create root logger.
logger = config_logger("nlp", "%(name)-9s %(levelname)-5s %(message)s")
# Create Funnel logger.
funnel_logger = config_logger("funnel",
"%(name)-9s %(levelname)-5s %(message)s",
level=logging.WARN if nprobs > 1 else logging.DEBUG)
qn_logger = config_logger("qn",
"%(name)-9s %(levelname)-5s %(message)s",
level=logging.WARN if nprobs > 1 else logging.DEBUG)
if nprobs > 1:
logger.info("%12s %5s %5s %8s %8s %8s %6s %6s %6s %7s",
"name", "nvar", "iter", "f", u"‖c‖", u"‖g+Jᵀy‖", "#f", u"#∇f",
"stat", "time")
for problem in other:
model = Model(problem, logger=qn_logger, **opts)
# Check for inequality-constrained problem.
if model.nlowerC > 0 or model.nupperC > 0 or model.nrangeC > 0:
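        # Plausible continuation (an assumption, mirroring the constrained-
        # problem check in the TRON driver earlier in this file): skip
        # problems with inequality constraints.
        logger.error("%s has inequality constraints", model.name)
        continue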