// NOTE(review): extraction fragment — the template header and leading
// parameters of this initialize(...) overload, plus its closing brace and a
// few interior lines, are outside this view. The stray leading integers on
// each line are original-file line numbers left by the extraction, not code.
102 std::ostream &outStream) {
// Lazily build a polyhedral projection onto the bound constraint when none
// was supplied (nullPtr is presumably ROL's null smart-pointer sentinel).
104 if (proj_ == nullPtr) {
105 proj_ = makePtr<PolyhedralProjection<Real>>(makePtrFromRef(bnd));
// Tolerance used for the initial objective/gradient evaluations.
111 Real ftol = std::sqrt(ROL_EPSILON<Real>());
// Project the initial guess onto the feasible set and record it as the
// current iterate; nproj counts projection applications.
112 proj_->project(x,outStream); state_->nproj++;
113 state_->iterateVec->set(x);
// Evaluate objective and gradient at the projected point, bumping the
// evaluation counters.
115 state_->value = obj.
value(x,ftol); state_->nfval++;
116 obj.
gradient(*state_->gradientVec,x,ftol); state_->ngrad++;
// Projected-gradient stationarity measure:
//   gnorm = || P(x - g) - x ||   (zero at a stationary point).
// `one` is declared on a line missing from this fragment.
117 state_->stepVec->set(x);
118 state_->stepVec->axpy(-one,state_->gradientVec->dual());
119 proj_->project(*state_->stepVec,outStream); state_->nproj++;
120 state_->stepVec->axpy(-one,x);
121 state_->gnorm = state_->stepVec->norm();
// No step has been taken yet, so the step norm is marked infinite.
122 state_->snorm = ROL_INF<Real>();
// NOTE(review): extraction fragment — the run(...) signature, several
// interior lines, and the closing braces are missing from this view; the
// leading integers are original-file line numbers, not code.
130 std::ostream &outStream ) {
131 const Real half(0.5), one(1);
// Initialize state: project x, evaluate objective/gradient, set gnorm.
133 initialize(x,g,obj,bnd,outStream);
135 Real ftrial(0), gs(0), alphaTmp(0), tol(std::sqrt(ROL_EPSILON<Real>())), gtol(1);
// Build the quasi-Newton (secant-model) subproblem over the same bound
// constraint, warm-start vector xs (declared on a missing line).
137 Ptr<TypeB::Algorithm<Real>> algo;
138 Ptr<PQNObjective<Real>> qobj = makePtr<PQNObjective<Real>>(secant_,x,g);
139 Ptr<Problem<Real>> problem = makePtr<Problem<Real>>(qobj,xs);
140 problem->addBoundConstraint(makePtrFromRef(bnd));
// Carry the linear-equality data held by the polyhedral projection into the
// subproblem (guarding condition appears on a missing line).
142 problem->addLinearConstraint(
"LEC",proj_->getLinearConstraint(),
143 proj_->getMultiplier(),
144 proj_->getResidual());
145 problem->setProjectionAlgorithm(list_);
147 problem->finalize(
false,verbosity_>2,outStream);
// Emit the initial output row; `true` forces the header.
150 if (verbosity_ > 0) writeOutput(outStream,
true);
// gp caches the primal (dual-of-gradient) representation of the gradient.
153 gp->set(state_->gradientVec->dual());
// Main loop: iterate until the status test is satisfied.
154 while (status_->check(*state_)) {
// Re-anchor the quadratic model at the current iterate/gradient and warm
// start the subproblem from the projected gradient step P(x - gp).
156 qobj->setAnchor(x,*state_->gradientVec);
157 xs->set(x); xs->axpy(-one,*gp); proj_->project(*xs,outStream); state_->nproj++;
// Inexact subproblem gradient tolerance, tightened as gnorm shrinks.
158 gtol = std::max(sp_tol_min_,std::min(sp_tol1_,sp_tol2_*state_->gnorm));
159 list_.sublist(
"Status Test").set(
"Gradient Tolerance",gtol);
// Choose the bound-constrained solver for the subproblem by name; the
// spectral gradient algorithm is the fallback.
160 if (algoName_ ==
"Trust Region") algo = makePtr<TypeB::LinMoreAlgorithm<Real>>(list_);
161 else if (algoName_ ==
"Line Search") algo = makePtr<TypeB::GradientAlgorithm<Real>>(list_);
162 else if (algoName_ ==
"Primal Dual Active Set") algo = makePtr<TypeB::PrimalDualActiveSetAlgorithm<Real>>(list_);
163 else if (algoName_ ==
"Moreau-Yosida") algo = makePtr<TypeB::MoreauYosidaAlgorithm<Real>>(list_);
164 else if (algoName_ ==
"Interior Point") algo = makePtr<TypeB::InteriorPointAlgorithm<Real>>(list_);
165 else algo = makePtr<TypeB::SpectralGradientAlgorithm<Real>>(list_);
// Solve the subproblem; the quasi-Newton step is s = xs - x.
166 algo->run(*problem,outStream);
167 s->set(*xs); s->axpy(-one,x);
168 spgIter_ = algo->getState()->iter;
// Fold the subproblem's projection count into ours.
169 state_->nproj += staticPtrCast<const TypeB::AlgorithmState<Real>>(algo->getState())->nproj;
// Backtracking line search along s, starting from unit step length.
172 state_->searchSize = one;
173 x.
set(*state_->iterateVec);
174 x.
axpy(state_->searchSize,*s);
176 ftrial = obj.
value(x,tol); ls_nfval_ = 1;
// Directional derivative <g, s> used in the sufficient-decrease test.
177 gs = state_->gradientVec->apply(*s);
178 if (verbosity_ > 1) {
179 outStream <<
"  In TypeB::QuasiNewtonAlgorithm: Line Search" << std::endl;
180 outStream <<
"    Step size:                        " << state_->searchSize << std::endl;
181 outStream <<
"    Trial objective value:            " << ftrial << std::endl;
182 outStream <<
"    Computed reduction:               " << state_->value-ftrial << std::endl;
183 outStream <<
"    Dot product of gradient and step: " << gs << std::endl;
184 outStream <<
"    Sufficient decrease bound:        " << -gs*state_->searchSize*c1_ << std::endl;
185 outStream <<
"    Number of function evaluations:   " << ls_nfval_ << std::endl;
// Backtrack while the Armijo sufficient-decrease condition fails, using a
// quadratic-interpolation trial step safeguarded to [sigma1*a, sigma2*a].
187 while ( ftrial > state_->value + c1_*state_->searchSize*gs && ls_nfval_ < maxit_ ) {
188 alphaTmp = -half*state_->searchSize*state_->searchSize*gs
189 / (ftrial-state_->value-state_->searchSize*gs);
// Outside the safeguard interval, contract by the fixed factor rhodec_.
190 state_->searchSize = (sigma1_*state_->searchSize <= alphaTmp && alphaTmp <= sigma2_*state_->searchSize)
191 ? alphaTmp : rhodec_*state_->searchSize;
193 x.
set(*state_->iterateVec);
194 x.
axpy(state_->searchSize,*s);
196 ftrial = obj.
value(x,tol); ls_nfval_++;
197 if (verbosity_ > 1) {
198 outStream << std::endl;
199 outStream <<
"    Step size:                        " << state_->searchSize << std::endl;
200 outStream <<
"    Trial objective value:            " << ftrial << std::endl;
201 outStream <<
"    Computed reduction:               " << state_->value-ftrial << std::endl;
202 outStream <<
"    Dot product of gradient and step: " << gs << std::endl;
203 outStream <<
"    Sufficient decrease bound:        " << -gs*state_->searchSize*c1_ << std::endl;
204 outStream <<
"    Number of function evaluations:   " << ls_nfval_ << std::endl;
// Fold line-search evaluations into the cumulative counter.
207 state_->nfval += ls_nfval_;
// Record the accepted (scaled) step and its norm.
210 state_->stepVec->set(*s);
211 state_->stepVec->scale(state_->searchSize);
212 state_->snorm = state_->stepVec->norm();
// Accept the trial point as the new iterate and objective value.
215 state_->iterateVec->set(x);
219 state_->value = ftrial;
// Save the old gradient for the secant update, then recompute gradient.
221 gold->set(*state_->gradientVec);
222 obj.
gradient(*state_->gradientVec,x,tol); state_->ngrad++;
223 gp->set(state_->gradientVec->dual());
// Projected-gradient criticality measure: gnorm = || P(x - gp) - x ||.
226 s->set(x); s->axpy(-one,*gp);
227 proj_->project(*s,outStream); state_->nproj++;
229 state_->gnorm = s->norm();
// Update the secant (quasi-Newton Hessian) approximation with the new pair.
232 secant_->updateStorage(x,*state_->gradientVec,*gold,*state_->stepVec,state_->snorm,state_->iter);
235 if (verbosity_ > 0) writeOutput(outStream,writeHeader_);
// NOTE(review): extraction fragment — the enclosing function's signature
// (presumably writeName, given the legend/header content) and the trailing
// lines that flush `hist` to the stream are outside this view; the leading
// integers are original-file line numbers, not code.
242 std::stringstream hist;
// Verbose mode: print a legend describing each output column.
243 if (verbosity_ > 1) {
244 hist << std::string(114,
'-') << std::endl;
245 hist <<
"Line-Search Projected Quasi-Newton with " << secantName_ <<
" Hessian approximation";
246 hist <<
" status output definitions" << std::endl << std::endl;
247 hist <<
"  iter     - Number of iterates (steps taken)" << std::endl;
248 hist <<
"  value    - Objective function value" << std::endl;
249 hist <<
"  gnorm    - Norm of the gradient" << std::endl;
250 hist <<
"  snorm    - Norm of the step (update to optimization vector)" << std::endl;
251 hist <<
"  alpha    - Line search step length" << std::endl;
252 hist <<
"  #fval    - Cumulative number of times the objective function was evaluated" << std::endl;
253 hist <<
"  #grad    - Cumulative number of times the gradient was computed" << std::endl;
254 hist <<
"  #proj    - Cumulative number of times the projection was computed" << std::endl;
255 hist <<
"  ls_#fval - Number of the times the objective function was evaluated during the line search" << std::endl;
256 hist <<
"  sp_iter  - Number iterations to compute quasi-Newton step" << std::endl;
257 hist << std::string(114,
'-') << std::endl;
// Column headers; widths here must match the row widths in writeOutput.
261 hist << std::setw(6) << std::left <<
"iter";
262 hist << std::setw(15) << std::left <<
"value";
263 hist << std::setw(15) << std::left <<
"gnorm";
264 hist << std::setw(15) << std::left <<
"snorm";
265 hist << std::setw(15) << std::left <<
"alpha";
266 hist << std::setw(10) << std::left <<
"#fval";
267 hist << std::setw(10) << std::left <<
"#grad";
268 hist << std::setw(10) << std::left <<
"#proj";
269 hist << std::setw(10) << std::left <<
"#ls_fval";
270 hist << std::setw(10) << std::left <<
"sp_iter";
// NOTE(review): extraction fragment — the writeOutput(os, write_header)
// signature, the brace/else between the two row variants, and the trailing
// flush of `hist` to `os` are missing from this view; the leading integers
// are original-file line numbers, not code.
284 std::stringstream hist;
// Fixed-precision scientific notation for all numeric columns.
285 hist << std::scientific << std::setprecision(6);
// On the very first iterate, print the name/legend banner first.
286 if ( state_->iter == 0 ) writeName(os);
287 if ( write_header ) writeHeader(os);
// Iteration-0 row: no step, step length, or line-search/subproblem data
// exists yet, so those columns are printed as "---".
288 if ( state_->iter == 0 ) {
290 hist << std::setw(6) << std::left << state_->iter;
291 hist << std::setw(15) << std::left << state_->value;
292 hist << std::setw(15) << std::left << state_->gnorm;
293 hist << std::setw(15) << std::left <<
"---";
294 hist << std::setw(15) << std::left <<
"---";
295 hist << std::setw(10) << std::left << state_->nfval;
296 hist << std::setw(10) << std::left << state_->ngrad;
297 hist << std::setw(10) << std::left << state_->nproj;
298 hist << std::setw(10) << std::left <<
"---";
299 hist << std::setw(10) << std::left <<
"---";
// Subsequent iterations: full row including step norm, step length, and
// the line-search / subproblem iteration counters. Column widths mirror
// the header written by writeName.
304 hist << std::setw(6) << std::left << state_->iter;
305 hist << std::setw(15) << std::left << state_->value;
306 hist << std::setw(15) << std::left << state_->gnorm;
307 hist << std::setw(15) << std::left << state_->snorm;
308 hist << std::setw(15) << std::left << state_->searchSize;
309 hist << std::setw(10) << std::left << state_->nfval;
310 hist << std::setw(10) << std::left << state_->ngrad;
311 hist << std::setw(10) << std::left << state_->nproj;
312 hist << std::setw(10) << std::left << ls_nfval_;
313 hist << std::setw(10) << std::left << spgIter_;