ROL_MoreauYosidaObjective.hpp
// @HEADER
// ************************************************************************
//
//               Rapid Optimization Library (ROL) Package
//                 Copyright (2014) Sandia Corporation
//
// Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
// license for use of this work by or on behalf of the U.S. Government.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact lead developers:
//              Drew Kouri   (dpkouri@sandia.gov) and
//              Denis Ridzal (dridzal@sandia.gov)
//
// ************************************************************************
// @HEADER

#ifndef ROL_MOREAUYOSIDAOBJECTIVE_H
#define ROL_MOREAUYOSIDAOBJECTIVE_H

#include "ROL_Objective.hpp"
#include "ROL_BoundConstraint.hpp"
#include "ROL_Vector.hpp"
#include "ROL_Types.hpp"
#include "ROL_Ptr.hpp"
#include <iostream>

/** @class ROL::MoreauYosidaObjective
    \brief Provides the interface to evaluate the Moreau-Yosida penalty function.
*/

namespace ROL {

template <class Real>
class MoreauYosidaObjective : public Objective<Real> {
private:
  const Ptr<Objective<Real>>       obj_;
  const Ptr<BoundConstraint<Real>> bnd_;

  Ptr<Vector<Real>> l_;
  Ptr<Vector<Real>> u_;
  Ptr<Vector<Real>> l1_;
  Ptr<Vector<Real>> u1_;
  Ptr<Vector<Real>> dl1_;
  Ptr<Vector<Real>> du1_;
  Ptr<Vector<Real>> xlam_;
  Ptr<Vector<Real>> v_;
  Ptr<Vector<Real>> dv_;
  Ptr<Vector<Real>> dv2_;
  Ptr<Vector<Real>> lam_;
  Ptr<Vector<Real>> tmp_;

  Ptr<ScalarController<Real,int>> fval_;
  Ptr<VectorController<Real,int>> gradient_;

  Real mu_;
  bool isPenEvaluated_;
  int  nfval_;
  int  ngrad_;
  bool updateMultiplier_;
  bool updatePenalty_;

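  // In effect, computePenalty() forms the shifted iterate xlam = x + lam/mu and,
  // on the active sets of the bound constraint, the bound violations
  //   l1 ~ (l - xlam) restricted to the lower-active set,
  //   u1 ~ (xlam - u) restricted to the upper-active set,
  // together with their dual-space counterparts dl1 and du1.  These quantities
  // drive the penalty terms added in value(), gradient(), and hessVec() below;
  // in particular, value() adds (mu/2)*(||l1||^2 + ||u1||^2) to the objective.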
  void computePenalty(const Vector<Real> &x) {
    if ( bnd_->isActivated() ) {
      Real one = 1.0;
      if ( !isPenEvaluated_ ) {
        xlam_->set(x);
        xlam_->axpy(one/mu_,*lam_);

        if ( bnd_->isFeasible(*xlam_) ) {
          l1_->zero(); dl1_->zero();
          u1_->zero(); du1_->zero();
        }
        else {
          // Compute lower penalty component
          l1_->set(*l_);
          bnd_->pruneLowerInactive(*l1_,*xlam_);
          tmp_->set(*xlam_);
          bnd_->pruneLowerInactive(*tmp_,*xlam_);
          l1_->axpy(-one,*tmp_);

          // Compute upper penalty component
          u1_->set(*xlam_);
          bnd_->pruneUpperInactive(*u1_,*xlam_);
          tmp_->set(*u_);
          bnd_->pruneUpperInactive(*tmp_,*xlam_);
          u1_->axpy(-one,*tmp_);

          // Compute derivative of lower penalty component
          dl1_->set(l1_->dual());
          bnd_->pruneLowerInactive(*dl1_,*xlam_);

          // Compute derivative of upper penalty component
          du1_->set(u1_->dual());
          bnd_->pruneUpperInactive(*du1_,*xlam_);
        }

        isPenEvaluated_ = true;
      }
    }
  }

  void initialize(const Vector<Real> &x,
                  const Vector<Real> &g) {
    fval_     = makePtr<ScalarController<Real,int>>();
    gradient_ = makePtr<VectorController<Real,int>>();

    l_    = x.clone();
    l1_   = x.clone();
    dl1_  = g.clone();
    u_    = x.clone();
    u1_   = x.clone();
    du1_  = g.clone();
    xlam_ = x.clone();
    v_    = x.clone();
    dv_   = g.clone();
    dv2_  = g.clone();
    lam_  = x.clone();
    tmp_  = x.clone();

    l_->set(*bnd_->getLowerBound());
    u_->set(*bnd_->getUpperBound());

    lam_->zero();
    //lam_->set(*u_);
    //lam_->plus(*l_);
    //lam_->scale(0.5);
  }

public:
  MoreauYosidaObjective(const Ptr<Objective<Real>>       &obj,
                        const Ptr<BoundConstraint<Real>> &bnd,
                        const Vector<Real>               &x,
                        const Vector<Real>               &g,
                        const Real mu = 1e1,
                        const bool updateMultiplier = true,
                        const bool updatePenalty = true)
    : obj_(obj), bnd_(bnd), mu_(mu),
      isPenEvaluated_(false), nfval_(0), ngrad_(0),
      updateMultiplier_(updateMultiplier), updatePenalty_(updatePenalty) {
    initialize(x,g);
  }

  MoreauYosidaObjective(const Ptr<Objective<Real>>       &obj,
                        const Ptr<BoundConstraint<Real>> &bnd,
                        const Vector<Real>               &x,
                        const Vector<Real>               &g,
                        const Vector<Real>               &lam,
                        const Real mu = 1e1,
                        const bool updateMultiplier = true,
                        const bool updatePenalty = true)
    : MoreauYosidaObjective(obj,bnd,x,g,mu,updateMultiplier,updatePenalty) {
    lam_->set(lam);
  }

  MoreauYosidaObjective(const Ptr<Objective<Real>>       &obj,
                        const Ptr<BoundConstraint<Real>> &bnd,
                        const Vector<Real>               &x,
                        const Vector<Real>               &g,
                        ParameterList                    &parlist)
    : obj_(obj), bnd_(bnd),
      isPenEvaluated_(false), nfval_(0), ngrad_(0) {
    initialize(x,g);
    ParameterList &list = parlist.sublist("Step").sublist("Moreau-Yosida Penalty");
    updateMultiplier_ = list.get("Update Multiplier",true);
    updatePenalty_    = list.get("Update Penalty",true);
    mu_               = list.get("Initial Penalty Parameter",1e1);
  }

  MoreauYosidaObjective(const Ptr<Objective<Real>>       &obj,
                        const Ptr<BoundConstraint<Real>> &bnd,
                        const Vector<Real>               &x,
                        const Vector<Real>               &g,
                        const Vector<Real>               &lam,
                        ParameterList                    &parlist)
    : MoreauYosidaObjective(obj,bnd,x,g,parlist) {
    lam_->set(lam);
  }

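  // The multiplier update below is a first-order rule read off the code:
  // using the current penalty components, lam <- mu*(u1 - l1); the penalty
  // parameter mu_ is then (optionally) replaced by the value passed in, and
  // the evaluation counters and penalty cache are reset.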
  void updateMultipliers(Real mu, const Vector<Real> &x) {
    if ( bnd_->isActivated() ) {
      if ( updateMultiplier_ ) {
        const Real one(1);
        computePenalty(x);
        lam_->set(*u1_);
        lam_->axpy(-one,*l1_);
        lam_->scale(mu_);
      }
      if ( updatePenalty_ ) {
        mu_ = mu;
      }
    }
    nfval_ = 0; ngrad_ = 0;
    isPenEvaluated_ = false;
  }

  void reset(const Real mu) {
    lam_->zero();
    mu_ = mu;
    nfval_ = 0; ngrad_ = 0;
    isPenEvaluated_ = false;
  }

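  // testComplementarity() returns a merit quantity that is (approximately) zero
  // when x is feasible and complementary with the current multiplier estimate:
  // the maximum of ||x - P(x)||, where P projects onto the bound set, and the
  // scaled products mu*|<x-l, l1>| and mu*|<x-u, u1>|.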
  Real testComplementarity(const Vector<Real> &x) {
    Real val(0);
    if (bnd_->isActivated()) {
      computePenalty(x);

      tmp_->set(x);
      tmp_->axpy(static_cast<Real>(-1), *l_);
      Real lower = mu_*std::abs(tmp_->dot(*l1_));

      tmp_->set(x);
      tmp_->axpy(static_cast<Real>(-1), *u_);
      Real upper = mu_*std::abs(tmp_->dot(*u1_));

      tmp_->set(x);
      bnd_->project(*tmp_);
      tmp_->axpy(static_cast<Real>(-1), x);
      Real xnorm = tmp_->norm();

      val = std::max(xnorm,std::max(lower,upper));
    }
    return val;
  }
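  // The two helpers below appear to cache the most recently computed objective
  // value and gradient through the ScalarController/VectorController members,
  // so repeated evaluations at the same iterate reuse stored data; nfval_ and
  // ngrad_ count only the true calls to the wrapped objective.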

  Real getObjectiveValue(const Vector<Real> &x, Real &tol) {
    int key(0);
    Real val(0);
    bool isComputed = fval_->get(val,key);
    if (!isComputed) {
      val = obj_->value(x,tol); nfval_++;
      fval_->set(val,key);
    }
    return val;
  }

  void getObjectiveGradient(Vector<Real> &g, const Vector<Real> &x, Real &tol) {
    int key(0);
    bool isComputed = gradient_->get(g,key);
    if (!isComputed) {
      obj_->gradient(g,x,tol); ngrad_++;
      gradient_->set(g,key);
    }
  }

  int getNumberFunctionEvaluations(void) const {
    return nfval_;
  }

  int getNumberGradientEvaluations(void) const {
    return ngrad_;
  }

  /** \brief Update Moreau-Yosida penalty function.

      @param[in] x     is the current iterate.
      @param[in] type  is the type of update requested.
      @param[in] iter  is the outer algorithm iteration count.
  */
  void update( const Vector<Real> &x, UpdateType type, int iter = -1 ) {
    obj_->update(x,type,iter);
    fval_->objectiveUpdate(type);
    gradient_->objectiveUpdate(type);
    // Need to do something smart here
    isPenEvaluated_ = false;
  }

  /** \brief Compute value.

      @param[in] x    is the current iterate.
      @param[in] tol  is a tolerance for inexact objective function computation.
  */
  Real value( const Vector<Real> &x, Real &tol ) {
    // Compute objective function value
    Real fval = getObjectiveValue(x,tol);
    // Add value of the Moreau-Yosida penalty
    if ( bnd_->isActivated() ) {
      const Real half(0.5);
      computePenalty(x);
      fval += half*mu_*(l1_->dot(*l1_) + u1_->dot(*u1_));
    }
    return fval;
  }

  /** \brief Compute gradient.

      @param[out] g    is the gradient.
      @param[in]  x    is the current iterate.
      @param[in]  tol  is a tolerance for inexact objective function computation.
  */
  void gradient( Vector<Real> &g, const Vector<Real> &x, Real &tol ) {
    // Compute gradient of objective function
    getObjectiveGradient(g,x,tol);
    // Add gradient of the Moreau-Yosida penalty
    if ( bnd_->isActivated() ) {
      computePenalty(x);
      g.axpy(-mu_,*dl1_);
      g.axpy(mu_,*du1_);
    }
  }

  /** \brief Apply Hessian approximation to vector.

      @param[out] hv   is the Hessian applied to the direction.
      @param[in]  v    is the direction.
      @param[in]  x    is the current iterate.
      @param[in]  tol  is a tolerance for inexact objective function computation.
  */
  void hessVec( Vector<Real> &hv, const Vector<Real> &v, const Vector<Real> &x, Real &tol ) {
    // Apply objective Hessian to a vector
    obj_->hessVec(hv,v,x,tol);
    // Add Hessian of the Moreau-Yosida penalty
    if ( bnd_->isActivated() ) {
      const Real one(1);
      computePenalty(x);

      v_->set(v);
      bnd_->pruneLowerActive(*v_,*xlam_);
      v_->scale(-one);
      v_->plus(v);
      dv_->set(v_->dual());
      dv2_->set(*dv_);
      bnd_->pruneLowerActive(*dv_,*xlam_);
      dv_->scale(-one);
      dv_->plus(*dv2_);
      hv.axpy(mu_,*dv_);

      v_->set(v);
      bnd_->pruneUpperActive(*v_,*xlam_);
      v_->scale(-one);
      v_->plus(v);
      dv_->set(v_->dual());
      dv2_->set(*dv_);
      bnd_->pruneUpperActive(*dv_,*xlam_);
      dv_->scale(-one);
      dv_->plus(*dv2_);
      hv.axpy(mu_,*dv_);
    }
  }
}; // class MoreauYosidaObjective

} // namespace ROL

#endif
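
For orientation, the sketch below shows one way the class above might be exercised on a small bound-constrained quadratic. It is not part of the header: the QuadObjective class, the file name, and the use of ROL::StdVector and ROL::Bounds are illustrative assumptions about the surrounding ROL API, so treat it as a minimal usage sketch rather than a definitive example.

// moreau_yosida_example.cpp -- illustrative sketch only (not part of ROL).
#include "ROL_MoreauYosidaObjective.hpp"
#include "ROL_StdVector.hpp"
#include "ROL_Bounds.hpp"
#include <iostream>
#include <vector>

// A simple quadratic objective f(x) = 0.5*<x,x> used only for illustration.
template<class Real>
class QuadObjective : public ROL::Objective<Real> {
public:
  Real value(const ROL::Vector<Real> &x, Real &tol) override {
    return static_cast<Real>(0.5)*x.dot(x);
  }
  void gradient(ROL::Vector<Real> &g, const ROL::Vector<Real> &x, Real &tol) override {
    g.set(x); // gradient of 0.5*<x,x>, assuming an identity Riesz map
  }
  void hessVec(ROL::Vector<Real> &hv, const ROL::Vector<Real> &v,
               const ROL::Vector<Real> &x, Real &tol) override {
    hv.set(v); // Hessian is the identity
  }
};

int main() {
  using RealT = double;
  const int dim = 5;
  // Optimization vector, gradient workspace, and bounds l <= x <= u.
  ROL::Ptr<std::vector<RealT>> x_data = ROL::makePtr<std::vector<RealT>>(dim, 2.0);
  ROL::Ptr<std::vector<RealT>> g_data = ROL::makePtr<std::vector<RealT>>(dim, 0.0);
  ROL::Ptr<std::vector<RealT>> l_data = ROL::makePtr<std::vector<RealT>>(dim, 0.5);
  ROL::Ptr<std::vector<RealT>> u_data = ROL::makePtr<std::vector<RealT>>(dim, 1.0);
  ROL::Ptr<ROL::Vector<RealT>> x = ROL::makePtr<ROL::StdVector<RealT>>(x_data);
  ROL::Ptr<ROL::Vector<RealT>> g = ROL::makePtr<ROL::StdVector<RealT>>(g_data);
  ROL::Ptr<ROL::Vector<RealT>> l = ROL::makePtr<ROL::StdVector<RealT>>(l_data);
  ROL::Ptr<ROL::Vector<RealT>> u = ROL::makePtr<ROL::StdVector<RealT>>(u_data);

  ROL::Ptr<ROL::Objective<RealT>>       obj = ROL::makePtr<QuadObjective<RealT>>();
  ROL::Ptr<ROL::BoundConstraint<RealT>> bnd = ROL::makePtr<ROL::Bounds<RealT>>(l,u);

  // Moreau-Yosida regularization of the bound constraint (default mu = 1e1).
  ROL::MoreauYosidaObjective<RealT> myObj(obj,bnd,*x,*g);

  RealT tol = 1e-8;
  std::cout << "penalized value = " << myObj.value(*x,tol)           << std::endl;
  std::cout << "complementarity = " << myObj.testComplementarity(*x) << std::endl;
  myObj.gradient(*g,*x,tol);
  std::cout << "gradient norm   = " << g->norm()                     << std::endl;
  return 0;
}

Since the initial iterate x = 2 violates the upper bound u = 1, the penalized value exceeds the plain objective value and the complementarity measure is nonzero; an outer loop would typically call updateMultipliers() between inner solves to refine the multiplier estimate lam_ and the penalty parameter mu_.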