ROL_MoreauYosidaPenalty.hpp
// @HEADER
// ************************************************************************
//
//               Rapid Optimization Library (ROL) Package
//                 Copyright (2014) Sandia Corporation
//
// Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
// license for use of this work by or on behalf of the U.S. Government.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact lead developers:
//              Drew Kouri   (dpkouri@sandia.gov) and
//              Denis Ridzal (dridzal@sandia.gov)
//
// ************************************************************************
// @HEADER

#ifndef ROL_MOREAUYOSIDAPENALTY_H
#define ROL_MOREAUYOSIDAPENALTY_H

#include "ROL_Objective.hpp"
#include "ROL_BoundConstraint.hpp"
#include "ROL_Vector.hpp"
#include "ROL_Types.hpp"
#include "ROL_Ptr.hpp"
#include <iostream>

/** \class ROL::MoreauYosidaPenalty
    \brief Provides the interface to evaluate the Moreau-Yosida penalty function.
*/

namespace ROL {

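// Note: reading computePenalty(), value(), and gradient() below, the penalized
// objective for the bound constraint l <= x <= u is, in effect,
//
//   M(x) = f(x) + (mu/2) * ( || (l - (x + lam/mu))_+ ||^2
//                          + || ((x + lam/mu) - u)_+ ||^2 ),
//
// where (.)_+ is the componentwise positive part, lam is the current
// multiplier estimate, and mu is the penalty parameter.  The members l1_ and
// u1_ hold these two positive parts at the shifted point xlam_ = x + lam_/mu_,
// and dl1_, du1_ hold their dual-space representations.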
template <class Real>
class MoreauYosidaPenalty : public Objective<Real> {
private:
  const ROL::Ptr<Objective<Real> >       obj_;
  const ROL::Ptr<BoundConstraint<Real> > bnd_;

  ROL::Ptr<Vector<Real> > g_;
  ROL::Ptr<Vector<Real> > l_;
  ROL::Ptr<Vector<Real> > u_;
  ROL::Ptr<Vector<Real> > l1_;
  ROL::Ptr<Vector<Real> > u1_;
  ROL::Ptr<Vector<Real> > dl1_;
  ROL::Ptr<Vector<Real> > du1_;
  ROL::Ptr<Vector<Real> > xlam_;
  ROL::Ptr<Vector<Real> > v_;
  ROL::Ptr<Vector<Real> > dv_;
  ROL::Ptr<Vector<Real> > dv2_;
  ROL::Ptr<Vector<Real> > lam_;
  ROL::Ptr<Vector<Real> > tmp_;

  Real mu_;
  Real fval_;
  bool isConEvaluated_;
  int  nfval_;
  int  ngval_;
  bool updateMultiplier_;
  bool updatePenalty_;

  void computePenalty(const Vector<Real> &x) {
    if ( bnd_->isActivated() ) {
      Real one = 1.0;
      if ( !isConEvaluated_ ) {
        xlam_->set(x);
        xlam_->axpy(one/mu_,*lam_);

        if ( bnd_->isFeasible(*xlam_) ) {
          l1_->zero(); dl1_->zero();
          u1_->zero(); du1_->zero();
        }
        else {
          // Compute lower penalty component
          l1_->set(*l_);
          bnd_->pruneLowerInactive(*l1_,*xlam_);
          tmp_->set(*xlam_);
          bnd_->pruneLowerInactive(*tmp_,*xlam_);
          l1_->axpy(-one,*tmp_);

          // Compute upper penalty component
          u1_->set(*xlam_);
          bnd_->pruneUpperInactive(*u1_,*xlam_);
          tmp_->set(*u_);
          bnd_->pruneUpperInactive(*tmp_,*xlam_);
          u1_->axpy(-one,*tmp_);

          // Compute derivative of lower penalty component
          dl1_->set(l1_->dual());
          bnd_->pruneLowerInactive(*dl1_,*xlam_);

          // Compute derivative of upper penalty component
          du1_->set(u1_->dual());
          bnd_->pruneUpperInactive(*du1_,*xlam_);
        }

        isConEvaluated_ = true;
      }
    }
  }

  void initialize(const ROL::Vector<Real> &x,
                  const ROL::Ptr<ROL::BoundConstraint<Real> > &bnd) {
    g_    = x.dual().clone();
    l_    = x.clone();
    l1_   = x.clone();
    dl1_  = x.dual().clone();
    u_    = x.clone();
    u1_   = x.clone();
    du1_  = x.dual().clone();
    xlam_ = x.clone();
    v_    = x.clone();
    dv_   = x.dual().clone();
    dv2_  = x.dual().clone();
    lam_  = x.clone();
    tmp_  = x.clone();

    l_->set(*bnd_->getLowerBound());
    u_->set(*bnd_->getUpperBound());

    lam_->zero();
    //lam_->set(*u_);
    //lam_->plus(*l_);
    //lam_->scale(0.5);
  }

public:

  MoreauYosidaPenalty(const ROL::Ptr<Objective<Real> > &obj,
                      const ROL::Ptr<BoundConstraint<Real> > &bnd,
                      const ROL::Vector<Real> &x,
                      const Real mu = 1e1,
                      const bool updateMultiplier = true,
                      const bool updatePenalty = true)
    : obj_(obj), bnd_(bnd), mu_(mu),
      fval_(0), isConEvaluated_(false), nfval_(0), ngval_(0),
      updateMultiplier_(updateMultiplier), updatePenalty_(updatePenalty) {
    initialize(x,bnd);
  }

  MoreauYosidaPenalty(const ROL::Ptr<Objective<Real> > &obj,
                      const ROL::Ptr<BoundConstraint<Real> > &bnd,
                      const ROL::Vector<Real> &x,
                      ROL::ParameterList &parlist)
    : obj_(obj), bnd_(bnd),
      fval_(0), isConEvaluated_(false), nfval_(0), ngval_(0) {
    initialize(x,bnd);
    ROL::ParameterList &list = parlist.sublist("Step").sublist("Moreau-Yosida Penalty");
    updateMultiplier_ = list.get("Update Multiplier",true);
    updatePenalty_    = list.get("Update Penalty",true);
    mu_               = list.get("Initial Penalty Parameter",1e1);
  }

  MoreauYosidaPenalty(const ROL::Ptr<Objective<Real> > &obj,
                      const ROL::Ptr<BoundConstraint<Real> > &bnd,
                      const ROL::Vector<Real> &x,
                      const ROL::Vector<Real> &lam,
                      ROL::ParameterList &parlist)
    : obj_(obj), bnd_(bnd),
      fval_(0), isConEvaluated_(false), nfval_(0), ngval_(0) {
    initialize(x,bnd);
    lam_->set(lam);
    ROL::ParameterList &list = parlist.sublist("Step").sublist("Moreau-Yosida Penalty");
    updateMultiplier_ = list.get("Update Multiplier",true);
    updatePenalty_    = list.get("Update Penalty",true);
    mu_               = list.get("Initial Penalty Parameter",1e1);
  }
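  // Example (illustrative only): the ParameterList-based constructors above
  // read their options from a sublist of the form
  //
  //   ROL::ParameterList parlist;
  //   ROL::ParameterList &mylist
  //     = parlist.sublist("Step").sublist("Moreau-Yosida Penalty");
  //   mylist.set("Initial Penalty Parameter", 1.e1);
  //   mylist.set("Update Multiplier",         true);
  //   mylist.set("Update Penalty",            true);
  //
  // Keys that are absent fall back to the defaults used in the get() calls above.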

  void updateMultipliers(Real mu, const ROL::Vector<Real> &x) {
    if ( bnd_->isActivated() ) {
      if ( updateMultiplier_ ) {
        const Real one(1);
        computePenalty(x);
        lam_->set(*u1_);
        lam_->axpy(-one,*l1_);
        lam_->scale(mu_);
      }
      if ( updatePenalty_ ) {
        mu_ = mu;
      }
    }
    nfval_ = 0; ngval_ = 0;
    isConEvaluated_ = false;
  }
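  // In effect, with l1_ = (l - (x + lam/mu))_+ and u1_ = ((x + lam/mu) - u)_+
  // evaluated at the current iterate, the update above refreshes the
  // multiplier estimate as
  //
  //   lam <- mu_old * (u1_ - l1_),
  //
  // and only afterwards replaces the penalty parameter mu_ by the new value mu.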

  void reset(const Real mu) {
    lam_->zero();
    mu_ = mu;
    nfval_ = 0; ngval_ = 0;
    isConEvaluated_ = false;
  }

  Real testComplementarity(const ROL::Vector<Real> &x) {
    Real val(0);
    if (bnd_->isActivated()) {
      computePenalty(x);

      tmp_->set(x);
      tmp_->axpy(static_cast<Real>(-1), *l_);
      Real lower = mu_*std::abs(tmp_->dot(*l1_));

      tmp_->set(x);
      tmp_->axpy(static_cast<Real>(-1), *u_);
      Real upper = mu_*std::abs(tmp_->dot(*u1_));

      tmp_->set(x);
      bnd_->project(*tmp_);
      tmp_->axpy(static_cast<Real>(-1), x);
      Real xnorm = tmp_->norm();

      val = std::max(xnorm,std::max(lower,upper));
    }
    return val;
  }

  Real getObjectiveValue(void) const {
    return fval_;
  }

  ROL::Ptr<Vector<Real> > getGradient(void) const {
    return g_;
  }

  int getNumberFunctionEvaluations(void) const {
    return nfval_;
  }

  int getNumberGradientEvaluations(void) const {
    return ngval_;
  }

  /** \brief Update Moreau-Yosida penalty function.

      This function updates the Moreau-Yosida penalty function at new iterations.
      @param[in]          x      is the new iterate.
      @param[in]          flag   is true if the iterate has changed.
      @param[in]          iter   is the outer algorithm iterations count.
  */
  void update( const Vector<Real> &x, bool flag = true, int iter = -1 ) {
    obj_->update(x,flag,iter);
    isConEvaluated_ = false;
  }

  /** \brief Compute value.

      This function returns the Moreau-Yosida penalty value.
      @param[in]          x   is the current iterate.
      @param[in]          tol is a tolerance for inexact Moreau-Yosida penalty computation.
  */
  Real value( const Vector<Real> &x, Real &tol ) {
    Real half = 0.5;
    // Compute objective function value
    fval_ = obj_->value(x,tol);
    nfval_++;
    // Add value of the Moreau-Yosida penalty
    Real fval = fval_;
    if ( bnd_->isActivated() ) {
      computePenalty(x);
      fval += half*mu_*(l1_->dot(*l1_) + u1_->dot(*u1_));
    }
    return fval;
  }

  /** \brief Compute gradient.

      This function returns the Moreau-Yosida penalty gradient.
      @param[out]         g   is the gradient.
      @param[in]          x   is the current iterate.
      @param[in]          tol is a tolerance for inexact Moreau-Yosida penalty computation.
  */
  void gradient( Vector<Real> &g, const Vector<Real> &x, Real &tol ) {
    // Compute gradient of objective function
    obj_->gradient(*g_,x,tol);
    ngval_++;
    g.set(*g_);
    // Add gradient of the Moreau-Yosida penalty
    if ( bnd_->isActivated() ) {
      computePenalty(x);
      g.axpy(-mu_,*dl1_);
      g.axpy(mu_,*du1_);
    }
  }

  /** \brief Apply Hessian approximation to vector.

      This function applies the Hessian of the Moreau-Yosida penalty to the vector \f$v\f$.
      @param[out]         hv  is the action of the Hessian on \f$v\f$.
      @param[in]          v   is the direction vector.
      @param[in]          x   is the current iterate.
      @param[in]          tol is a tolerance for inexact Moreau-Yosida penalty computation.
  */
  void hessVec( Vector<Real> &hv, const Vector<Real> &v, const Vector<Real> &x, Real &tol ) {
    // Apply objective Hessian to a vector
    obj_->hessVec(hv,v,x,tol);
    // Add Hessian of the Moreau-Yosida penalty
    if ( bnd_->isActivated() ) {
      Real one = 1.0;
      computePenalty(x);

      // Lower-bound contribution
      v_->set(v);
      bnd_->pruneLowerActive(*v_,*xlam_);
      v_->scale(-one);
      v_->plus(v);
      dv_->set(v_->dual());
      dv2_->set(*dv_);
      bnd_->pruneLowerActive(*dv_,*xlam_);
      dv_->scale(-one);
      dv_->plus(*dv2_);
      hv.axpy(mu_,*dv_);

      // Upper-bound contribution
      v_->set(v);
      bnd_->pruneUpperActive(*v_,*xlam_);
      v_->scale(-one);
      v_->plus(v);
      dv_->set(v_->dual());
      dv2_->set(*dv_);
      bnd_->pruneUpperActive(*dv_,*xlam_);
      dv_->scale(-one);
      dv_->plus(*dv2_);
      hv.axpy(mu_,*dv_);
    }
  }
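  // In effect, the penalty contributes mu_ times the components of v that are
  // active at the shifted point xlam_ = x + lam_/mu_:
  //
  //   hv = hessVec(f)*v + mu * ( P_lower_active(v) + P_upper_active(v) ),
  //
  // where each P_* keeps the active components of v (mapped to the dual space)
  // and zeroes the rest.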

// Definitions for parametrized (stochastic) objective functions
public:
  void setParameter(const std::vector<Real> &param) {
    Objective<Real>::setParameter(param);
    obj_->setParameter(param);
  }
}; // class MoreauYosidaPenalty

} // namespace ROL

#endif
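
A minimal usage sketch follows (not part of the header above). It assumes ROL::StdVector, ROL::Bounds, and ROL::makePtr from the same library are available; the quadratic objective QuadObj is a hypothetical stand-in for a user-defined ROL::Objective.

#include "ROL_MoreauYosidaPenalty.hpp"
#include "ROL_StdVector.hpp"
#include "ROL_Bounds.hpp"

#include <iostream>
#include <vector>

// Hypothetical test objective: f(x) = 0.5*||x||^2, with exact gradient and Hessian.
template<class Real>
class QuadObj : public ROL::Objective<Real> {
public:
  Real value( const ROL::Vector<Real> &x, Real &tol ) {
    return static_cast<Real>(0.5)*x.dot(x);
  }
  void gradient( ROL::Vector<Real> &g, const ROL::Vector<Real> &x, Real &tol ) {
    g.set(x.dual());
  }
  void hessVec( ROL::Vector<Real> &hv, const ROL::Vector<Real> &v,
                const ROL::Vector<Real> &x, Real &tol ) {
    hv.set(v.dual());
  }
};

int main() {
  using Real = double;
  const int n = 4;

  // Iterate and bounds as StdVector wrappers; x starts above the upper bound.
  auto x_ptr = ROL::makePtr<std::vector<Real>>(n, 2.0);
  auto l_ptr = ROL::makePtr<std::vector<Real>>(n, 0.0);
  auto u_ptr = ROL::makePtr<std::vector<Real>>(n, 1.0);
  auto x  = ROL::makePtr<ROL::StdVector<Real>>(x_ptr);
  auto lo = ROL::makePtr<ROL::StdVector<Real>>(l_ptr);
  auto up = ROL::makePtr<ROL::StdVector<Real>>(u_ptr);

  auto obj = ROL::makePtr<QuadObj<Real>>();
  auto bnd = ROL::makePtr<ROL::Bounds<Real>>(lo,up);

  // Moreau-Yosida penalization of the bound constraint, initial penalty mu = 10.
  ROL::MoreauYosidaPenalty<Real> myp(obj, bnd, *x, 10.0);

  Real tol = 1e-8;
  Real val = myp.value(*x, tol);   // f(x) plus the quadratic bound penalty
  auto g = x->dual().clone();
  myp.gradient(*g, *x, tol);

  std::cout << "penalized value      = " << val                         << std::endl;
  std::cout << "objective value      = " << myp.getObjectiveValue()     << std::endl;
  std::cout << "complementarity test = " << myp.testComplementarity(*x) << std::endl;

  // An outer loop would approximately minimize myp, then refresh the
  // multiplier estimate and (optionally) increase the penalty parameter.
  myp.updateMultipliers(100.0, *x);

  return 0;
}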