ROL
ROL_LineSearchStep.hpp
Go to the documentation of this file.
1 // @HEADER
2 // ************************************************************************
3 //
4 // Rapid Optimization Library (ROL) Package
5 // Copyright (2014) Sandia Corporation
6 //
7 // Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
8 // license for use of this work by or on behalf of the U.S. Government.
9 //
10 // Redistribution and use in source and binary forms, with or without
11 // modification, are permitted provided that the following conditions are
12 // met:
13 //
14 // 1. Redistributions of source code must retain the above copyright
15 // notice, this list of conditions and the following disclaimer.
16 //
17 // 2. Redistributions in binary form must reproduce the above copyright
18 // notice, this list of conditions and the following disclaimer in the
19 // documentation and/or other materials provided with the distribution.
20 //
21 // 3. Neither the name of the Corporation nor the names of the
22 // contributors may be used to endorse or promote products derived from
23 // this software without specific prior written permission.
24 //
25 // THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
26 // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
27 // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
28 // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
29 // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
30 // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
31 // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
32 // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
33 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
34 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
35 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
36 //
37 // Questions? Contact lead developers:
38 // Drew Kouri (dpkouri@sandia.gov) and
39 // Denis Ridzal (dridzal@sandia.gov)
40 //
41 // ************************************************************************
42 // @HEADER
43 
44 #ifndef ROL_LINESEARCHSTEP_H
45 #define ROL_LINESEARCHSTEP_H
46 
#include "ROL_Types.hpp"
#include "ROL_HelperFunctions.hpp"
#include "ROL_Step.hpp"
#include "ROL_LineSearch.hpp"

// Unconstrained Methods
#include "ROL_GradientStep.hpp"
#include "ROL_NonlinearCGStep.hpp"
#include "ROL_SecantStep.hpp"
#include "ROL_NewtonStep.hpp"
#include "ROL_NewtonKrylovStep.hpp"

// Bound Constrained Methods
#include "ROL_ProjectedSecantStep.hpp"
#include "ROL_ProjectedNewtonStep.hpp"
#include "ROL_ProjectedNewtonKrylovStep.hpp"

#include <sstream>
#include <iomanip>
#include <string>
#include <algorithm> // std::remove (used by printHeader/print)
66 
136 namespace ROL {
137 
138 template <class Real>
139 class LineSearchStep : public Step<Real> {
140 private:
141 
142  Teuchos::RCP<Step<Real> > desc_;
143  Teuchos::RCP<Secant<Real> > secant_;
144  Teuchos::RCP<Krylov<Real> > krylov_;
145  Teuchos::RCP<NonlinearCG<Real> > nlcg_;
146  Teuchos::RCP<LineSearch<Real> > lineSearch_;
147 
148  Teuchos::RCP<Vector<Real> > d_;
149 
152 
154 
156 
159  Real fval_;
160 
161  Teuchos::ParameterList parlist_;
162 
163  std::string lineSearchName_;
164 
165  Real GradDotStep(const Vector<Real> &g, const Vector<Real> &s,
166  const Vector<Real> &x,
167  BoundConstraint<Real> &bnd, Real eps = 0) {
168  Real gs(0), one(1);
169  if (!bnd.isActivated()) {
170  gs = s.dot(g.dual());
171  }
172  else {
173  d_->set(s);
174  bnd.pruneActive(*d_,g,x,eps);
175  gs = d_->dot(g.dual());
176  d_->set(x);
177  d_->axpy(-one,g.dual());
178  bnd.project(*d_);
179  d_->scale(-one);
180  d_->plus(x);
181  bnd.pruneInactive(*d_,g,x,eps);
182  gs -= d_->dot(g.dual());
183  }
184  return gs;
185  }
186 
187 public:
188 
190  using Step<Real>::compute;
191  using Step<Real>::update;
192 
206  LineSearchStep( Teuchos::ParameterList &parlist,
207  const Teuchos::RCP<LineSearch<Real> > &lineSearch = Teuchos::null,
208  const Teuchos::RCP<Secant<Real> > &secant = Teuchos::null,
209  const Teuchos::RCP<Krylov<Real> > &krylov = Teuchos::null,
210  const Teuchos::RCP<NonlinearCG<Real> > &nlcg = Teuchos::null )
211  : Step<Real>(), desc_(Teuchos::null), secant_(secant),
212  krylov_(krylov), nlcg_(nlcg), lineSearch_(lineSearch),
214  verbosity_(0), computeObj_(true), fval_(0), parlist_(parlist) {
215  // Parse parameter list
216  Teuchos::ParameterList& Llist = parlist.sublist("Step").sublist("Line Search");
217  Teuchos::ParameterList& Glist = parlist.sublist("General");
218  econd_ = StringToECurvatureCondition(Llist.sublist("Curvature Condition").get("Type","Strong Wolfe Conditions") );
219  acceptLastAlpha_ = Llist.get("Accept Last Alpha", false);
220  verbosity_ = Glist.get("Print Verbosity",0);
221  computeObj_ = Glist.get("Recompute Objective Function",true);
222  // Initialize Line Search
223  if (lineSearch_ == Teuchos::null) {
224  lineSearchName_ = Llist.sublist("Line-Search Method").get("Type","Cubic Interpolation");
225  els_ = StringToELineSearch(lineSearchName_);
226  lineSearch_ = LineSearchFactory<Real>(parlist);
227  }
228  else { // User-defined linesearch provided
229  lineSearchName_ = Llist.sublist("Line-Search Method").get("User Defined Line-Search Name",
230  "Unspecified User Defined Line-Search");
231  }
232 
233  }
234 
235  void initialize( Vector<Real> &x, const Vector<Real> &s, const Vector<Real> &g,
237  AlgorithmState<Real> &algo_state ) {
238  d_ = x.clone();
239 
240  // Initialize unglobalized step
241  Teuchos::ParameterList& list
242  = parlist_.sublist("Step").sublist("Line Search").sublist("Descent Method");
243  EDescent edesc = StringToEDescent(list.get("Type","Quasi-Newton Method") );
244  if (bnd.isActivated()) {
245  switch(edesc) {
246  case DESCENT_STEEPEST: {
247  desc_ = Teuchos::rcp(new GradientStep<Real>(parlist_,computeObj_));
248  break;
249  }
250  case DESCENT_NONLINEARCG: {
251  desc_ = Teuchos::rcp(new NonlinearCGStep<Real>(parlist_,nlcg_,computeObj_));
252  break;
253  }
254  case DESCENT_SECANT: {
255  desc_ = Teuchos::rcp(new ProjectedSecantStep<Real>(parlist_,secant_,computeObj_));
256  break;
257  }
258  case DESCENT_NEWTON: {
259  desc_ = Teuchos::rcp(new ProjectedNewtonStep<Real>(parlist_,computeObj_));
260  break;
261  }
262  case DESCENT_NEWTONKRYLOV: {
263  desc_ = Teuchos::rcp(new ProjectedNewtonKrylovStep<Real>(parlist_,krylov_,secant_,computeObj_));
264  break;
265  }
266  default:
267  TEUCHOS_TEST_FOR_EXCEPTION(true,std::invalid_argument,
268  ">>> (LineSearchStep::Initialize): Undefined descent type!");
269  }
270  }
271  else {
272  switch(edesc) {
273  case DESCENT_STEEPEST: {
274  desc_ = Teuchos::rcp(new GradientStep<Real>(parlist_,computeObj_));
275  break;
276  }
277  case DESCENT_NONLINEARCG: {
278  desc_ = Teuchos::rcp(new NonlinearCGStep<Real>(parlist_,nlcg_,computeObj_));
279  break;
280  }
281  case DESCENT_SECANT: {
282  desc_ = Teuchos::rcp(new SecantStep<Real>(parlist_,secant_,computeObj_));
283  break;
284  }
285  case DESCENT_NEWTON: {
286  desc_ = Teuchos::rcp(new NewtonStep<Real>(parlist_,computeObj_));
287  break;
288  }
289  case DESCENT_NEWTONKRYLOV: {
290  desc_ = Teuchos::rcp(new NewtonKrylovStep<Real>(parlist_,krylov_,secant_,computeObj_));
291  break;
292  }
293  default:
294  TEUCHOS_TEST_FOR_EXCEPTION(true,std::invalid_argument,
295  ">>> (LineSearchStep::Initialize): Undefined descent type!");
296  }
297  }
298  desc_->initialize(x,s,g,obj,bnd,algo_state);
299 
300  // Initialize line search
301  lineSearch_->initialize(x,s,g,obj,bnd);
302  //const Teuchos::RCP<const StepState<Real> > desc_state = desc_->getStepState();
303  //lineSearch_->initialize(x,s,*(desc_state->gradientVec),obj,bnd);
304  }
305 
319  void compute( Vector<Real> &s, const Vector<Real> &x,
321  AlgorithmState<Real> &algo_state ) {
322  Real zero(0), one(1);
323  // Compute unglobalized step
324  desc_->compute(s,x,obj,bnd,algo_state);
325 
326  // Ensure that s is a descent direction
327  // ---> If not, then default to steepest descent
328  const Teuchos::RCP<const StepState<Real> > desc_state = desc_->getStepState();
329  Real gs = GradDotStep(*(desc_state->gradientVec),s,x,bnd,algo_state.gnorm);
330  if (gs >= zero) {
331  s.set((desc_state->gradientVec)->dual());
332  s.scale(-one);
333  gs = GradDotStep(*(desc_state->gradientVec),s,x,bnd,algo_state.gnorm);
334  }
335 
336  // Perform line search
337  Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();
338  fval_ = algo_state.value;
339  step_state->nfval = 0; step_state->ngrad = 0;
340  lineSearch_->setData(algo_state.gnorm,*(desc_state->gradientVec));
341  lineSearch_->run(step_state->searchSize,fval_,step_state->nfval,step_state->ngrad,gs,s,x,obj,bnd);
342 
343  // Make correction if maximum function evaluations reached
344  if(!acceptLastAlpha_) {
345  lineSearch_->setMaxitUpdate(step_state->searchSize,fval_,algo_state.value);
346  }
347 
348  // Compute scaled descent direction
349  s.scale(step_state->searchSize);
350  if ( bnd.isActivated() ) {
351  s.plus(x);
352  bnd.project(s);
353  s.axpy(static_cast<Real>(-1),x);
354  }
355  }
356 
368  void update( Vector<Real> &x, const Vector<Real> &s,
370  AlgorithmState<Real> &algo_state ) {
371  Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();
372  algo_state.nfval += step_state->nfval;
373  algo_state.ngrad += step_state->ngrad;
374  desc_->update(x,s,obj,bnd,algo_state);
375  if ( !computeObj_ ) {
376  algo_state.value = fval_;
377  }
378  }
379 
384  std::string printHeader( void ) const {
385  std::string head = desc_->printHeader();
386  head.erase(std::remove(head.end()-3,head.end(),'\n'), head.end());
387  std::stringstream hist;
388  hist.write(head.c_str(),head.length());
389  hist << std::setw(10) << std::left << "ls_#fval";
390  hist << std::setw(10) << std::left << "ls_#grad";
391  hist << "\n";
392  return hist.str();
393  }
394 
399  std::string printName( void ) const {
400  std::string name = desc_->printName();
401  std::stringstream hist;
402  hist << name;
403  hist << "Line Search: " << lineSearchName_;
404  hist << " satisfying " << ECurvatureConditionToString(econd_) << "\n";
405  return hist.str();
406  }
407 
415  std::string print( AlgorithmState<Real> & algo_state, bool print_header = false ) const {
416  const Teuchos::RCP<const StepState<Real> > step_state = Step<Real>::getStepState();
417  std::string desc = desc_->print(algo_state,false);
418  desc.erase(std::remove(desc.end()-3,desc.end(),'\n'), desc.end());
419  std::string name = desc_->printName();
420  size_t pos = desc.find(name);
421  if ( pos != std::string::npos ) {
422  desc.erase(pos, name.length());
423  }
424 
425  std::stringstream hist;
426  if ( algo_state.iter == 0 ) {
427  hist << printName();
428  }
429  if ( print_header ) {
430  hist << printHeader();
431  }
432  hist << desc;
433  if ( algo_state.iter == 0 ) {
434  hist << "\n";
435  }
436  else {
437  hist << std::setw(10) << std::left << step_state->nfval;
438  hist << std::setw(10) << std::left << step_state->ngrad;
439  hist << "\n";
440  }
441  return hist.str();
442  }
443 }; // class LineSearchStep
444 
445 } // namespace ROL
446 
447 #endif
Provides the interface to evaluate objective functions.
virtual void scale(const Real alpha)=0
Compute where .
virtual void plus(const Vector &x)=0
Compute , where .
bool acceptLastAlpha_
For backwards compatibility. When max function evaluations are reached take last step.
virtual void axpy(const Real alpha, const Vector &x)
Compute where .
Definition: ROL_Vector.hpp:145
ELineSearch StringToELineSearch(std::string s)
Definition: ROL_Types.hpp:777
Real GradDotStep(const Vector< Real > &g, const Vector< Real > &s, const Vector< Real > &x, BoundConstraint< Real > &bnd, Real eps=0)
Provides the interface to compute optimization steps with projected Newton's method using line search...
Provides the interface to compute optimization steps.
Definition: ROL_Step.hpp:69
Teuchos::RCP< StepState< Real > > getState(void)
Definition: ROL_Step.hpp:74
Contains definitions of custom data types in ROL.
Teuchos::ParameterList parlist_
void pruneInactive(Vector< Real > &v, const Vector< Real > &x, Real eps=0)
Set variables to zero if they correspond to the -inactive set.
virtual Teuchos::RCP< Vector > clone() const =0
Clone to make a new (uninitialized) vector.
void update(Vector< Real > &x, const Vector< Real > &s, Objective< Real > &obj, BoundConstraint< Real > &bnd, AlgorithmState< Real > &algo_state)
Update step, if successful.
ELineSearch
Enumeration of line-search types.
Definition: ROL_Types.hpp:711
bool usePreviousAlpha_
If true, use the previously accepted step length (if any) as the new initial step length...
Contains definitions for helper functions in ROL.
Defines the linear algebra or vector space interface.
Definition: ROL_Vector.hpp:76
Provides the interface to compute optimization steps with projected inexact Newton's method ...
Provides the interface to compute optimization steps with projected secant method using line search...
virtual Real dot(const Vector &x) const =0
Compute where .
virtual void pruneActive(Vector< Real > &v, const Vector< Real > &x, Real eps=0)
Set variables to zero if they correspond to the -active set.
void initialize(Vector< Real > &x, const Vector< Real > &s, const Vector< Real > &g, Objective< Real > &obj, BoundConstraint< Real > &bnd, AlgorithmState< Real > &algo_state)
Initialize step with bound constraint.
Provides the interface to compute optimization steps with Newton's method globalized using line searc...
EDescent StringToEDescent(std::string s)
Definition: ROL_Types.hpp:452
State for algorithm class. Will be used for restarts.
Definition: ROL_Types.hpp:91
Provides the interface to compute optimization steps with line search.
Teuchos::RCP< Krylov< Real > > krylov_
Krylov solver object (used for inexact Newton)
bool isActivated(void)
Check if bounds are on.
virtual const Vector & dual() const
Return dual representation of , for example, the result of applying a Riesz map, or change of basis...
Definition: ROL_Vector.hpp:215
std::string ECurvatureConditionToString(ECurvatureCondition ls)
Definition: ROL_Types.hpp:804
Provides interface for and implements line searches.
const Teuchos::RCP< const StepState< Real > > getStepState(void) const
Get state for step object.
Definition: ROL_Step.hpp:293
std::string printHeader(void) const
Print iterate header.
LineSearchStep(Teuchos::ParameterList &parlist, const Teuchos::RCP< LineSearch< Real > > &lineSearch=Teuchos::null, const Teuchos::RCP< Secant< Real > > &secant=Teuchos::null, const Teuchos::RCP< Krylov< Real > > &krylov=Teuchos::null, const Teuchos::RCP< NonlinearCG< Real > > &nlcg=Teuchos::null)
Constructor.
Provides interface for and implements limited-memory secant operators.
Definition: ROL_Secant.hpp:70
Teuchos::RCP< LineSearch< Real > > lineSearch_
Line-search object.
ELineSearch els_
enum determines type of line search
Provides definitions for Krylov solvers.
Definition: ROL_Krylov.hpp:57
Provides the interface to apply upper and lower bound constraints.
Teuchos::RCP< Step< Real > > desc_
Unglobalized step object.
void compute(Vector< Real > &s, const Vector< Real > &x, Objective< Real > &obj, BoundConstraint< Real > &bnd, AlgorithmState< Real > &algo_state)
Compute step.
std::string print(AlgorithmState< Real > &algo_state, bool print_header=false) const
Print iterate status.
Provides the interface to compute optimization steps with nonlinear CG.
Provides the interface to compute optimization steps with projected inexact Newton's method using lin...
ECurvatureCondition
Enumeration of line-search curvature conditions.
Definition: ROL_Types.hpp:794
Teuchos::RCP< NonlinearCG< Real > > nlcg_
Nonlinear CG object (used for nonlinear CG)
std::string printName(void) const
Print step name.
Teuchos::RCP< Vector< Real > > d_
virtual void set(const Vector &x)
Set where .
Definition: ROL_Vector.hpp:198
ECurvatureCondition StringToECurvatureCondition(std::string s)
Definition: ROL_Types.hpp:854
Provides the interface to compute optimization steps with the gradient descent method globalized usin...
EDescent
Enumeration of descent direction types.
Definition: ROL_Types.hpp:395
ECurvatureCondition econd_
enum determines type of curvature condition
Teuchos::RCP< Secant< Real > > secant_
Secant object (used for quasi-Newton)
virtual void project(Vector< Real > &x)
Project optimization variables onto the bounds.
Provides the interface to compute optimization steps with a secant method.