44 #ifndef ROL_PROJECTEDNEWTONKRYLOVSTEP_H 45 #define ROL_PROJECTEDNEWTONKRYLOVSTEP_H 76 Teuchos::RCP<Vector<Real> >
gp_;
77 Teuchos::RCP<Vector<Real> >
d_;
92 const Teuchos::RCP<Objective<Real> >
obj_;
93 const Teuchos::RCP<BoundConstraint<Real> >
bnd_;
94 const Teuchos::RCP<Vector<Real> >
x_;
95 const Teuchos::RCP<Vector<Real> >
g_;
96 Teuchos::RCP<Vector<Real> >
v_;
104 : obj_(obj), bnd_(bnd), x_(x), g_(g), eps_(eps) {
109 bnd_->pruneActive(*v_,*g_,*x_,eps_);
110 obj_->hessVec(Hv,*v_,*x_,tol);
111 bnd_->pruneActive(Hv,*g_,*x_,eps_);
113 bnd_->pruneInactive(*v_,*g_,*x_,eps_);
120 const Teuchos::RCP<Objective<Real> >
obj_;
122 const Teuchos::RCP<BoundConstraint<Real> >
bnd_;
123 const Teuchos::RCP<Vector<Real> >
x_;
124 const Teuchos::RCP<Vector<Real> >
g_;
125 Teuchos::RCP<Vector<Real> >
v_;
134 : obj_(obj), bnd_(bnd), x_(x), g_(g), eps_(eps), useSecant_(false) {
142 : secant_(secant), bnd_(bnd), x_(x), g_(g), eps_(eps), useSecant_(true) {
150 bnd_->pruneActive(*v_,*g_,*x_,eps_);
152 secant_->applyH(Hv,*v_);
155 obj_->precond(Hv,*v_,*x_,tol);
157 bnd_->pruneActive(Hv,*g_,*x_,eps_);
159 bnd_->pruneInactive(*v_,*g_,*x_,eps_);
178 :
Step<Real>(), secant_(Teuchos::null), krylov_(Teuchos::null),
179 gp_(Teuchos::null), d_(Teuchos::null),
180 iterKrylov_(0), flagKrylov_(0), verbosity_(0),
181 computeObj_(computeObj), useSecantPrecond_(false) {
183 Teuchos::ParameterList& Glist = parlist.sublist(
"General");
184 useSecantPrecond_ = Glist.sublist(
"Secant").get(
"Use as Preconditioner",
false);
185 useProjectedGrad_ = Glist.get(
"Projected Gradient Criticality Measure",
false);
186 verbosity_ = Glist.get(
"Print Verbosity",0);
188 krylovName_ = Glist.sublist(
"Krylov").get(
"Type",
"Conjugate Gradients");
190 krylov_ = KrylovFactory<Real>(parlist);
192 secantName_ = Glist.sublist(
"Secant").get(
"Type",
"Limited-Memory BFGS");
194 if ( useSecantPrecond_ ) {
195 secant_ = SecantFactory<Real>(parlist);
212 const bool computeObj =
true)
213 :
Step<Real>(), secant_(secant), krylov_(krylov),
215 gp_(Teuchos::null), d_(Teuchos::null),
216 iterKrylov_(0), flagKrylov_(0), verbosity_(0),
217 computeObj_(computeObj), useSecantPrecond_(false) {
219 Teuchos::ParameterList& Glist = parlist.sublist(
"General");
220 useSecantPrecond_ = Glist.sublist(
"Secant").get(
"Use as Preconditioner",
false);
221 useProjectedGrad_ = Glist.get(
"Projected Gradient Criticality Measure",
false);
222 verbosity_ = Glist.get(
"Print Verbosity",0);
224 if ( useSecantPrecond_ ) {
225 if (secant_ == Teuchos::null ) {
226 secantName_ = Glist.sublist(
"Secant").get(
"Type",
"Limited-Memory BFGS");
228 secant_ = SecantFactory<Real>(parlist);
231 secantName_ = Glist.sublist(
"Secant").get(
"User Defined Secant Name",
232 "Unspecified User Defined Secant Method");
236 if ( krylov_ == Teuchos::null ) {
237 krylovName_ = Glist.sublist(
"Krylov").get(
"Type",
"Conjugate Gradients");
239 krylov_ = KrylovFactory<Real>(parlist);
258 Teuchos::RCP<Objective<Real> > obj_ptr = Teuchos::rcpFromRef(obj);
259 Teuchos::RCP<BoundConstraint<Real> > bnd_ptr = Teuchos::rcpFromRef(bnd);
260 Teuchos::RCP<LinearOperator<Real> > hessian
262 step_state->gradientVec,algo_state.
gnorm));
263 Teuchos::RCP<LinearOperator<Real> > precond;
264 if (useSecantPrecond_) {
265 precond = Teuchos::rcp(
new PrecondPNK(secant_,bnd_ptr,
269 precond = Teuchos::rcp(
new PrecondPNK(obj_ptr,bnd_ptr,
275 krylov_->run(s,*hessian,*(step_state->gradientVec),*precond,iterKrylov_,flagKrylov_);
278 if ( flagKrylov_ == 2 && iterKrylov_ <= 1 ) {
279 s.
set((step_state->gradientVec)->dual());
287 Real tol = std::sqrt(ROL_EPSILON<Real>()), one(1);
295 (step_state->descentVec)->
set(x);
296 (step_state->descentVec)->axpy(-one,*d_);
300 if ( useSecantPrecond_ ) {
301 gp_->set(*(step_state->gradientVec));
308 obj.
gradient(*(step_state->gradientVec),x,tol);
312 if ( useSecantPrecond_ ) {
313 secant_->updateStorage(x,*(step_state->gradientVec),*gp_,s,algo_state.
snorm,algo_state.
iter+1);
318 if ( useProjectedGrad_ ) {
319 gp_->set(*(step_state->gradientVec));
321 algo_state.
gnorm = gp_->norm();
325 d_->axpy(-one,(step_state->gradientVec)->dual());
328 algo_state.
gnorm = d_->norm();
333 std::stringstream hist;
336 hist << std::string(109,
'-') <<
"\n";
338 hist <<
" status output definitions\n\n";
339 hist <<
" iter - Number of iterates (steps taken) \n";
340 hist <<
" value - Objective function value \n";
341 hist <<
" gnorm - Norm of the gradient\n";
342 hist <<
" snorm - Norm of the step (update to optimization vector)\n";
343 hist <<
" #fval - Cumulative number of times the objective function was evaluated\n";
344 hist <<
" #grad - Number of times the gradient was computed\n";
345 hist <<
" iterCG - Number of Krylov iterations used to compute search direction\n";
346 hist <<
" flagCG - Krylov solver flag" <<
"\n";
347 hist << std::string(109,
'-') <<
"\n";
351 hist << std::setw(6) << std::left <<
"iter";
352 hist << std::setw(15) << std::left <<
"value";
353 hist << std::setw(15) << std::left <<
"gnorm";
354 hist << std::setw(15) << std::left <<
"snorm";
355 hist << std::setw(10) << std::left <<
"#fval";
356 hist << std::setw(10) << std::left <<
"#grad";
357 hist << std::setw(10) << std::left <<
"iterCG";
358 hist << std::setw(10) << std::left <<
"flagCG";
363 std::stringstream hist;
366 if ( useSecantPrecond_ ) {
367 hist <<
" with " << secantName_ <<
" preconditioning";
373 std::stringstream hist;
374 hist << std::scientific << std::setprecision(6);
375 if ( algo_state.
iter == 0 ) {
378 if ( print_header ) {
381 if ( algo_state.
iter == 0 ) {
383 hist << std::setw(6) << std::left << algo_state.
iter;
384 hist << std::setw(15) << std::left << algo_state.
value;
385 hist << std::setw(15) << std::left << algo_state.
gnorm;
390 hist << std::setw(6) << std::left << algo_state.
iter;
391 hist << std::setw(15) << std::left << algo_state.
value;
392 hist << std::setw(15) << std::left << algo_state.
gnorm;
393 hist << std::setw(15) << std::left << algo_state.
snorm;
394 hist << std::setw(10) << std::left << algo_state.
nfval;
395 hist << std::setw(10) << std::left << algo_state.
ngrad;
Provides the interface to evaluate objective functions.
bool useSecantPrecond_
Whether or not a secant approximation is used for preconditioning inexact Newton. ...
virtual void scale(const Real alpha)=0
Compute \f$ y \leftarrow \alpha y \f$ where \f$ y = \mathtt{*this} \f$.
void applyInverse(Vector< Real > &Hv, const Vector< Real > &v, Real &tol) const
Apply inverse of linear operator.
void apply(Vector< Real > &Hv, const Vector< Real > &v, Real &tol) const
Apply linear operator.
void update(Vector< Real > &x, const Vector< Real > &s, Objective< Real > &obj, BoundConstraint< Real > &bnd, AlgorithmState< Real > &algo_state)
Update step, if successful.
virtual void plus(const Vector &x)=0
Compute \f$ y \leftarrow y + x \f$, where \f$ y = \mathtt{*this} \f$.
PrecondPNK(const Teuchos::RCP< Secant< Real > > &secant, const Teuchos::RCP< BoundConstraint< Real > > &bnd, const Teuchos::RCP< Vector< Real > > &x, const Teuchos::RCP< Vector< Real > > &g, Real eps=0)
Teuchos::RCP< Vector< Real > > v_
virtual Real value(const Vector< Real > &x, Real &tol)=0
Compute value.
Provides the interface to compute optimization steps.
std::string printName(void) const
Print step name.
Teuchos::RCP< StepState< Real > > getState(void)
Contains definitions of custom data types in ROL.
void compute(Vector< Real > &s, const Vector< Real > &x, Objective< Real > &obj, BoundConstraint< Real > &bnd, AlgorithmState< Real > &algo_state)
Compute step.
Teuchos::RCP< Vector< Real > > gp_
virtual Teuchos::RCP< Vector > clone() const =0
Clone to make a new (uninitialized) vector.
ESecant StringToESecant(std::string s)
std::string EDescentToString(EDescent tr)
const Teuchos::RCP< Secant< Real > > secant_
Defines the linear algebra or vector space interface.
Provides the interface to compute optimization steps with projected inexact Newton's method using a Krylov solver ...
EKrylov
Enumeration of Krylov methods.
EKrylov StringToEKrylov(std::string s)
State for algorithm class. Will be used for restarts.
virtual void gradient(Vector< Real > &g, const Vector< Real > &x, Real &tol)
Compute gradient.
bool useProjectedGrad_
Whether or not to use the projected gradient criticality measure.
virtual const Vector & dual() const
Return dual representation of \f$ \mathtt{*this} \f$, for example, the result of applying a Riesz map, or change of basis, or change of memory layout...
void apply(Vector< Real > &Hv, const Vector< Real > &v, Real &tol) const
Apply linear operator.
ESecant
Enumeration of secant update algorithms.
void initialize(Vector< Real > &x, const Vector< Real > &s, const Vector< Real > &g, Objective< Real > &obj, BoundConstraint< Real > &bnd, AlgorithmState< Real > &algo_state)
Initialize step with bound constraint.
int flagKrylov_
Termination flag for Krylov method (used for inexact Newton)
ProjectedNewtonKrylovStep(Teuchos::ParameterList &parlist, const Teuchos::RCP< Krylov< Real > > &krylov, const Teuchos::RCP< Secant< Real > > &secant, const bool computeObj=true)
Constructor.
Provides interface for and implements limited-memory secant operators.
std::string printHeader(void) const
Print iterate header.
const Teuchos::RCP< Vector< Real > > g_
std::string print(AlgorithmState< Real > &algo_state, bool print_header=false) const
Print iterate status.
const Teuchos::RCP< BoundConstraint< Real > > bnd_
Teuchos::RCP< Secant< Real > > secant_
Secant object (used for quasi-Newton)
Provides definitions for Krylov solvers.
Provides the interface to apply a linear operator.
PrecondPNK(const Teuchos::RCP< Objective< Real > > &obj, const Teuchos::RCP< BoundConstraint< Real > > &bnd, const Teuchos::RCP< Vector< Real > > &x, const Teuchos::RCP< Vector< Real > > &g, Real eps=0)
Provides the interface to apply upper and lower bound constraints.
const Teuchos::RCP< Vector< Real > > x_
void computeProjectedGradient(Vector< Real > &g, const Vector< Real > &x)
Compute projected gradient.
Teuchos::RCP< Vector< Real > > v_
const Teuchos::RCP< Objective< Real > > obj_
int iterKrylov_
Number of Krylov iterations (used for inexact Newton)
Teuchos::RCP< Krylov< Real > > krylov_
Krylov solver object (used for inexact Newton)
virtual void initialize(Vector< Real > &x, const Vector< Real > &g, Objective< Real > &obj, BoundConstraint< Real > &con, AlgorithmState< Real > &algo_state)
Initialize step with bound constraint.
const Teuchos::RCP< Vector< Real > > g_
ProjectedNewtonKrylovStep(Teuchos::ParameterList &parlist, const bool computeObj=true)
Constructor.
Teuchos::RCP< Vector< Real > > iterateVec
virtual void set(const Vector &x)
Set \f$ y \leftarrow x \f$ where \f$ y = \mathtt{*this} \f$.
HessianPNK(const Teuchos::RCP< Objective< Real > > &obj, const Teuchos::RCP< BoundConstraint< Real > > &bnd, const Teuchos::RCP< Vector< Real > > &x, const Teuchos::RCP< Vector< Real > > &g, Real eps=0)
virtual Real norm() const =0
Returns \f$ \| y \| \f$ where \f$ y = \mathtt{*this} \f$.
virtual void update(const Vector< Real > &x, bool flag=true, int iter=-1)
Update objective function.
const Teuchos::RCP< Vector< Real > > x_
const Teuchos::RCP< Objective< Real > > obj_
const Teuchos::RCP< BoundConstraint< Real > > bnd_
Teuchos::RCP< Vector< Real > > d_
virtual void project(Vector< Real > &x)
Project optimization variables onto the bounds.
int verbosity_
Verbosity level.