ROL_GradientStep.hpp
// @HEADER
// ************************************************************************
//
// Rapid Optimization Library (ROL) Package
// Copyright (2014) Sandia Corporation
//
// Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
// license for use of this work by or on behalf of the U.S. Government.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact lead developers:
//              Drew Kouri   (dpkouri@sandia.gov) and
//              Denis Ridzal (dridzal@sandia.gov)
//
// ************************************************************************
// @HEADER

#ifndef ROL_GRADIENTSTEP_H
#define ROL_GRADIENTSTEP_H

#include "ROL_Types.hpp"
#include "ROL_Step.hpp"
#include "ROL_Secant.hpp"

/** @class ROL::GradientStep
    \brief Provides the interface to compute optimization steps
           with the gradient descent method globalized using line search.
*/
namespace ROL {

template <class Real>
class GradientStep : public Step<Real> {
private:

  int verbosity_;          ///< Verbosity setting
  const bool computeObj_;  ///< Allows line search to compute objective

public:

  using Step<Real>::initialize;
  using Step<Real>::compute;
  using Step<Real>::update;

  /** \brief Constructor.

      Standard constructor to build a GradientStep object.  Algorithmic
      specifications are passed in through a ROL::ParameterList.
  */
  GradientStep( ROL::ParameterList &parlist, const bool computeObj = true )
    : Step<Real>(), verbosity_(0), computeObj_(computeObj) {
    // Parse ParameterList
    verbosity_ = parlist.sublist("General").get("Print Verbosity",0);
  }

  void compute( Vector<Real> &s, const Vector<Real> &x,
                Objective<Real> &obj, BoundConstraint<Real> &bnd,
                AlgorithmState<Real> &algo_state ) {
    Real one(1);
    ROL::Ptr<StepState<Real> > step_state = Step<Real>::getState();

    // Compute search direction
    s.set((step_state->gradientVec)->dual());
    s.scale(-one);
  }

  void update( Vector<Real> &x, const Vector<Real> &s,
               Objective<Real> &obj, BoundConstraint<Real> &con,
               AlgorithmState<Real> &algo_state ) {
    Real tol = std::sqrt(ROL_EPSILON<Real>());
    ROL::Ptr<StepState<Real> > step_state = Step<Real>::getState();

    // Update iterate and store step
    algo_state.iter++;
    x.plus(s);
    (step_state->descentVec)->set(s);
    algo_state.snorm = s.norm();

    // Compute new gradient
    obj.update(x,true,algo_state.iter);
    if ( computeObj_ ) {
      algo_state.value = obj.value(x,tol);
      algo_state.nfval++;
    }
    obj.gradient(*(step_state->gradientVec),x,tol);
    algo_state.ngrad++;

    // Update algorithm state
    (algo_state.iterateVec)->set(x);
    algo_state.gnorm = (step_state->gradientVec)->norm();
  }

  std::string printHeader( void ) const {
    std::stringstream hist;

    if( verbosity_ > 0 ) {
      hist << std::string(109,'-') << "\n";
      hist << EDescentToString(DESCENT_STEEPEST);
      hist << " status output definitions\n\n";
      hist << "  iter     - Number of iterates (steps taken) \n";
      hist << "  value    - Objective function value \n";
      hist << "  gnorm    - Norm of the gradient\n";
      hist << "  snorm    - Norm of the step (update to optimization vector)\n";
      hist << "  #fval    - Cumulative number of times the objective function was evaluated\n";
      hist << "  #grad    - Number of times the gradient was computed\n";
      hist << std::string(109,'-') << "\n";
    }

    hist << "  ";
    hist << std::setw(6)  << std::left << "iter";
    hist << std::setw(15) << std::left << "value";
    hist << std::setw(15) << std::left << "gnorm";
    hist << std::setw(15) << std::left << "snorm";
    hist << std::setw(10) << std::left << "#fval";
    hist << std::setw(10) << std::left << "#grad";
    hist << "\n";
    return hist.str();
  }
  std::string printName( void ) const {
    std::stringstream hist;
    hist << "\n" << EDescentToString(DESCENT_STEEPEST) << "\n";
    return hist.str();
  }
  std::string print( AlgorithmState<Real> &algo_state, bool print_header = false ) const {
    std::stringstream hist;
    hist << std::scientific << std::setprecision(6);
    if ( algo_state.iter == 0 ) {
      hist << printName();
    }
    if ( print_header ) {
      hist << printHeader();
    }
    if ( algo_state.iter == 0 ) {
      hist << "  ";
      hist << std::setw(6)  << std::left << algo_state.iter;
      hist << std::setw(15) << std::left << algo_state.value;
      hist << std::setw(15) << std::left << algo_state.gnorm;
      hist << "\n";
    }
    else {
      hist << "  ";
      hist << std::setw(6)  << std::left << algo_state.iter;
      hist << std::setw(15) << std::left << algo_state.value;
      hist << std::setw(15) << std::left << algo_state.gnorm;
      hist << std::setw(15) << std::left << algo_state.snorm;
      hist << std::setw(10) << std::left << algo_state.nfval;
      hist << std::setw(10) << std::left << algo_state.ngrad;
      hist << "\n";
    }
    return hist.str();
  }
}; // class GradientStep

} // namespace ROL
#endif
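
The step logic above is plain steepest descent: compute() returns s_k = -∇f(x_k), the negative dual of the stored gradient, and update() sets x_{k+1} = x_k + s_k before refreshing the objective value and gradient. Below is a minimal usage sketch, not part of this header: it assumes the classic Step-based ROL::Algorithm and ROL::StatusTest drivers (whose exact constructors vary across ROL versions) and a hypothetical QuadObjective written for illustration.

// Usage sketch (assumptions: classic ROL::Algorithm/StatusTest driver API;
// QuadObjective is a made-up example objective, f(x) = 0.5 x'x).
#include "ROL_GradientStep.hpp"
#include "ROL_StatusTest.hpp"
#include "ROL_Algorithm.hpp"
#include "ROL_StdVector.hpp"
#include <iostream>
#include <vector>

template<class Real>
class QuadObjective : public ROL::Objective<Real> {
public:
  Real value( const ROL::Vector<Real> &x, Real &tol ) {
    return static_cast<Real>(0.5) * x.dot(x);   // f(x) = 0.5 x'x
  }
  void gradient( ROL::Vector<Real> &g, const ROL::Vector<Real> &x, Real &tol ) {
    g.set(x);                                   // grad f(x) = x
  }
};

int main() {
  // "Print Verbosity" > 0 triggers the extended header printed by printHeader().
  ROL::ParameterList parlist;
  parlist.sublist("General").set("Print Verbosity", 1);

  // Assemble step, status test, and the (classic) Algorithm driver.
  auto step   = ROL::makePtr<ROL::GradientStep<double>>(parlist);
  auto status = ROL::makePtr<ROL::StatusTest<double>>(1.e-8, 1.e-12, 100);
  ROL::Algorithm<double> algo(step, status, false);

  // Initial guess x = (1,...,1); the quadratic is minimized at the origin.
  ROL::StdVector<double> x(ROL::makePtr<std::vector<double>>(10, 1.0));
  QuadObjective<double> obj;

  algo.run(x, obj, true, std::cout);  // prints the iter/value/gnorm/... table
  return 0;
}

Since the raw gradient step is not globalized, GradientStep is normally wrapped in a line-search scheme; the computeObj flag exists so that a line search can take over objective evaluations, as noted on the computeObj_ member above.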