44#ifndef ROL_TYPEP_PROXGRADIENTALGORITHM_DEF_HPP
45#define ROL_TYPEP_PROXGRADIENTALGORITHM_DEF_HPP
50template<
typename Real>
57 ParameterList &lslist = list.sublist(
"Step").sublist(
"Line Search");
58 maxit_ = lslist.get(
"Function Evaluation Limit", 20);
59 alpha0_ = lslist.get(
"Initial Step Size", 1.0);
60 normAlpha_ = lslist.get(
"Normalize Initial Step Size",
false);
61 alpha0bnd_ = lslist.get(
"Lower Bound for Initial Step Size", 1e-4);
62 useralpha_ = lslist.get(
"User Defined Initial Step Size",
false);
63 usePrevAlpha_ = lslist.get(
"Use Previous Step Length as Initial Guess",
false);
64 c1_ = lslist.get(
"Sufficient Decrease Tolerance", 1e-4);
66 useAdapt_ = lslist.get(
"Use Adaptive Step Size Selection",
true);
67 initProx_ = lslist.get(
"Apply Prox to Initial Guess",
false);
68 rhodec_ = lslist.sublist(
"Line-Search Method").get(
"Backtracking Rate", 0.5);
69 rhoinc_ = lslist.sublist(
"Line-Search Method").get(
"Increase Rate" , 2.0);
70 t0_ = list.sublist(
"Status Test").get(
"Gradient Scale" , 1.0);
71 verbosity_ = list.sublist(
"General").get(
"Output Level", 0);
75template<
typename Real>
82 std::ostream &outStream) {
110 Real snew = sobj.
value(px,ftol);
128template<
typename Real>
133 std::ostream &outStream ) {
139 Real strial(0), ntrial(0), Ftrial(0), Qk(0);
140 Real strialP(0), ntrialP(0), FtrialP(0), alphaP(0);
141 Real snorm(
state_->snorm), searchSize(
state_->searchSize);
143 bool incAlpha =
false, accept =
true;
152 state_->searchSize = searchSize;
158 Ftrial = strial + ntrial;
162 incAlpha = (Ftrial -
state_->value <=
c1_*Qk);
164 outStream <<
" In TypeP::GradientAlgorithm: Line Search" << std::endl;
165 outStream <<
" Step size: " <<
state_->searchSize << std::endl;
166 outStream <<
" Trial smooth value: " << strial << std::endl;
167 outStream <<
" Trial nonsmooth value: " << ntrial << std::endl;
168 outStream <<
" Computed reduction: " <<
state_->value-Ftrial << std::endl;
169 outStream <<
" Dot product of gradient and step: " << Qk << std::endl;
170 outStream <<
" Sufficient decrease bound: " << -Qk*
c1_ << std::endl;
171 outStream <<
" Number of function evaluations: " << ls_nfval << std::endl;
172 outStream <<
" Increase alpha?: " << incAlpha << std::endl;
177 FtrialP = ntrialP + strialP;
186 pxP->set(*
state_->iterateVec);
187 alphaP =
state_->searchSize;
201 Ftrial = strial + ntrial;
206 outStream << std::endl;
207 outStream <<
" Step size: " <<
state_->searchSize << std::endl;
208 outStream <<
" Trial smooth value: " << strial << std::endl;
209 outStream <<
" Trial nonsmooth value: " << ntrial << std::endl;
210 outStream <<
" Computed reduction: " <<
state_->value-Ftrial << std::endl;
211 outStream <<
" Dot product of gradient and step: " << Qk << std::endl;
212 outStream <<
" Sufficient decrease bound: " << -Qk*
c1_ << std::endl;
213 outStream <<
" Number of function evaluations: " << ls_nfval << std::endl;
216 if (Ftrial -
state_->value >
c1_*Qk || Ftrial > FtrialP) {
217 state_->iterateVec->set(*pxP);
221 state_->searchSize = alphaP;
223 state_->stepVec->axpy(-one,x);
238 Ftrial = strial + ntrial;
243 outStream << std::endl;
244 outStream <<
" Step size: " <<
state_->searchSize << std::endl;
245 outStream <<
" Trial smooth value: " << strial << std::endl;
246 outStream <<
" Trial nonsmooth value: " << ntrial << std::endl;
247 outStream <<
" Computed reduction: " <<
state_->value-Ftrial << std::endl;
248 outStream <<
" Dot product of gradient and step: " << Qk << std::endl;
249 outStream <<
" Sufficient decrease bound: " << -Qk*
c1_ << std::endl;
250 outStream <<
" Number of function evaluations: " << ls_nfval << std::endl;
254 state_->nsval += ls_nfval;
255 state_->nnval += ls_nfval;
278 dg->set(
state_->gradientVec->dual());
281 searchSize =
state_->searchSize;
284 snorm =
state_->stepVec->norm();
285 state_->gnorm = snorm / searchSize;
293template<
typename Real>
295 std::stringstream hist;
297 hist << std::string(109,
'-') << std::endl;
298 hist <<
"Proximal gradient descent";
299 hist <<
" status output definitions" << std::endl << std::endl;
300 hist <<
" iter - Number of iterates (steps taken)" << std::endl;
301 hist <<
" value - Objective function value" << std::endl;
302 hist <<
" gnorm - Norm of the proximal gradient with parameter alpha" << std::endl;
303 hist <<
" snorm - Norm of the step (update to optimization vector)" << std::endl;
304 hist <<
" alpha - Line search step length" << std::endl;
305 hist <<
" #sval - Cumulative number of times the smooth objective function was evaluated" << std::endl;
306 hist <<
" #nval - Cumulative number of times the nonsmooth objective function was evaluated" << std::endl;
307 hist <<
" #grad - Cumulative number of times the gradient was computed" << std::endl;
308 hist <<
" #prox - Cumulative number of times the proximal operator was computed" << std::endl;
309 hist << std::string(109,
'-') << std::endl;
313 hist << std::setw(6) << std::left <<
"iter";
314 hist << std::setw(15) << std::left <<
"value";
315 hist << std::setw(15) << std::left <<
"gnorm";
316 hist << std::setw(15) << std::left <<
"snorm";
317 hist << std::setw(15) << std::left <<
"alpha";
318 hist << std::setw(10) << std::left <<
"#sval";
319 hist << std::setw(10) << std::left <<
"#nval";
320 hist << std::setw(10) << std::left <<
"#grad";
321 hist << std::setw(10) << std::left <<
"#nprox";
326template<
typename Real>
328 std::stringstream hist;
329 hist << std::endl <<
"Proximal Gradient Descent with Bidirectional Line Search (Type P)" << std::endl;
333template<
typename Real>
335 std::stringstream hist;
336 hist << std::scientific << std::setprecision(6);
339 if (
state_->iter == 0 ) {
341 hist << std::setw(6) << std::left <<
state_->iter;
342 hist << std::setw(15) << std::left <<
state_->value;
343 hist << std::setw(15) << std::left <<
state_->gnorm;
344 hist << std::setw(15) << std::left <<
"---";
345 hist << std::setw(15) << std::left <<
"---";
346 hist << std::setw(10) << std::left <<
state_->nsval;
347 hist << std::setw(10) << std::left <<
state_->nnval;
348 hist << std::setw(10) << std::left <<
state_->ngrad;
349 hist << std::setw(10) << std::left <<
state_->nprox;
354 hist << std::setw(6) << std::left <<
state_->iter;
355 hist << std::setw(15) << std::left <<
state_->value;
356 hist << std::setw(15) << std::left <<
state_->gnorm;
357 hist << std::setw(15) << std::left <<
state_->snorm;
358 hist << std::setw(15) << std::left <<
state_->searchSize;
359 hist << std::setw(10) << std::left <<
state_->nsval;
360 hist << std::setw(10) << std::left <<
state_->nnval;
361 hist << std::setw(10) << std::left <<
state_->ngrad;
362 hist << std::setw(10) << std::left <<
state_->nprox;
virtual void initialize(const Vector< Real > &x)
Initialize temporary variables.
Provides the interface to evaluate objective functions.
virtual void prox(Vector< Real > &Pv, const Vector< Real > &v, Real t, Real &tol)
Compute the proximal operator of the objective at v with parameter t.
virtual void gradient(Vector< Real > &g, const Vector< Real > &x, Real &tol)
Compute gradient.
virtual Real value(const Vector< Real > &x, Real &tol)=0
Compute value.
virtual void update(const Vector< Real > &x, UpdateType type, int iter=-1)
Update objective function.
Provides an interface to check status of optimization algorithms.
void pgstep(Vector< Real > &pgiter, Vector< Real > &pgstep, Objective< Real > &nobj, const Vector< Real > &x, const Vector< Real > &dg, Real t, Real &tol) const
const Ptr< AlgorithmState< Real > > state_
virtual void writeExitStatus(std::ostream &os) const
const Ptr< CombinedStatusTest< Real > > status_
void initialize(const Vector< Real > &x, const Vector< Real > &g)
void writeName(std::ostream &os) const override
Print step name.
void writeOutput(std::ostream &os, bool write_header=false) const override
Print iterate status.
void writeHeader(std::ostream &os) const override
Print iterate header.
void initialize(Vector< Real > &x, const Vector< Real > &g, Objective< Real > &sobj, Objective< Real > &nobj, Vector< Real > &px, Vector< Real > &dg, std::ostream &outStream=std::cout)
ProxGradientAlgorithm(ParameterList &list)
void run(Vector< Real > &x, const Vector< Real > &g, Objective< Real > &sobj, Objective< Real > &nobj, std::ostream &outStream=std::cout) override
Run algorithm on problems with smooth plus nonsmooth objectives (Type-P). This general interface supports the use of dual optimization vector spaces, where the user does not define the dual() method.
Defines the linear algebra or vector space interface.
virtual void set(const Vector &x)
Set \(y \leftarrow x\), where \(y = \texttt{*this}\).
virtual ROL::Ptr< Vector > clone() const =0
Clone to make a new (uninitialized) vector.
Real ROL_EPSILON(void)
Platform-dependent machine epsilon.