ROL_TypeG_InteriorPointAlgorithm_Def.hpp
// @HEADER
// ************************************************************************
//
//               Rapid Optimization Library (ROL) Package
//                 Copyright (2014) Sandia Corporation
//
// Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
// license for use of this work by or on behalf of the U.S. Government.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact lead developers:
//              Drew Kouri   (dpkouri@sandia.gov) and
//              Denis Ridzal (dridzal@sandia.gov)
//
// ************************************************************************
// @HEADER

#ifndef ROL_TYPEG_INTERIORPOINTALGORITHM_DEF_H
#define ROL_TYPEG_INTERIORPOINTALGORITHM_DEF_H

#include "ROL_TypeE_AlgorithmFactory.hpp"

namespace ROL {
namespace TypeG {

template<typename Real>
InteriorPointAlgorithm<Real>::InteriorPointAlgorithm( ParameterList &list )
  : TypeG::Algorithm<Real>::Algorithm(),
    list_(list), subproblemIter_(0), print_(false) {
  // Set status test
  status_->reset();
  status_->add(makePtr<ConstraintStatusTest<Real>>(list));

  // Parse parameters
  ParameterList& steplist = list.sublist("Step").sublist("Interior Point");
  state_->searchSize = steplist.get("Initial Barrier Parameter",        1.0);
  mumin_             = steplist.get("Minimum Barrier Parameter",        1e-4);
  mumax_             = steplist.get("Maximum Barrier Parameter",        1e8);
  rho_               = steplist.get("Barrier Penalty Reduction Factor", 0.5);
  useLinearDamping_  = steplist.get("Use Linear Damping",               true);
  kappaD_            = steplist.get("Linear Damping Coefficient",       1.e-4);
  print_             = steplist.sublist("Subproblem").get("Print History", false);
  // Set parameters for step subproblem
  gtol_ = steplist.sublist("Subproblem").get("Initial Optimality Tolerance",  1e-2);
  ctol_ = steplist.sublist("Subproblem").get("Initial Feasibility Tolerance", 1e-2);
  stol_ = static_cast<Real>(1e-6)*std::min(gtol_,ctol_);
  int maxit = steplist.sublist("Subproblem").get("Iteration Limit", 1000);
  list_.sublist("Status Test").set("Iteration Limit", maxit);
  // Subproblem tolerance update parameters
  gtolrate_ = steplist.sublist("Subproblem").get("Optimality Tolerance Reduction Factor",  0.1);
  ctolrate_ = steplist.sublist("Subproblem").get("Feasibility Tolerance Reduction Factor", 0.1);
  mingtol_  = static_cast<Real>(1e-2)*list.sublist("Status Test").get("Gradient Tolerance",   1e-8);
  minctol_  = static_cast<Real>(1e-2)*list.sublist("Status Test").get("Constraint Tolerance", 1e-8);
  // Get step name from parameter list
  stepname_ = steplist.sublist("Subproblem").get("Step Type","Augmented Lagrangian");

  // Output settings
  verbosity_   = list.sublist("General").get("Output Level", 0);
  printHeader_ = verbosity_ > 2;
  print_       = (verbosity_ > 2 ? true : print_);
  list_.sublist("General").set("Output Level",(print_ ? verbosity_ : 0));
}
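// Note: the options above are read from list.sublist("Step").sublist("Interior Point")
// and its nested "Subproblem" sublist; the "Status Test" and "General" sublists of the
// same ParameterList supply the outer stopping tolerances and the output level.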

template<typename Real>
void InteriorPointAlgorithm<Real>::initialize(Vector<Real>                 &x,
                                              const Vector<Real>           &g,
                                              const Vector<Real>           &l,
                                              const Vector<Real>           &c,
                                              InteriorPointObjective<Real> &ipobj,
                                              BoundConstraint<Real>        &bnd,
                                              Constraint<Real>             &con,
                                              Vector<Real>                 &pwa,
                                              Vector<Real>                 &dwa,
                                              std::ostream                 &outStream) {
  hasPolyProj_ = true;
  if (proj_ == nullPtr) {
    proj_ = makePtr<PolyhedralProjection<Real>>(makePtrFromRef(bnd));
    hasPolyProj_ = false;
  }
  proj_->project(x,outStream);
  bnd.projectInterior(x);
  // Initialize data
  TypeG::Algorithm<Real>::initialize(x,g,l,c);
  // Initialize the algorithm state
  state_->nfval = 0;
  state_->ngrad = 0;
  state_->ncval = 0;
  updateState(x,l,ipobj,bnd,con,pwa,dwa,outStream);
}

template<typename Real>
void InteriorPointAlgorithm<Real>::updateState(const Vector<Real>           &x,
                                               const Vector<Real>           &l,
                                               InteriorPointObjective<Real> &ipobj,
                                               BoundConstraint<Real>        &bnd,
                                               Constraint<Real>             &con,
                                               Vector<Real>                 &pwa,
                                               Vector<Real>                 &dwa,
                                               std::ostream                 &outStream) {
  const Real one(1);
  Real zerotol = std::sqrt(ROL_EPSILON<Real>());
  // Update objective and constraint
  if (state_->iter == 0) {
    ipobj.update(x,UpdateType::Initial,state_->iter);
    con.update(x,UpdateType::Initial,state_->iter);
  }
  //else {
  //  ipobj.update(x,UpdateType::Accept,state_->iter);
  //  con.update(x,UpdateType::Accept,state_->iter);
  //}
  // Compute norm of the gradient of the Lagrangian
  state_->value = ipobj.getObjectiveValue(x, zerotol);
  //state_->gradientVec->set(*ipobj.getObjectiveGradient(x, zerotol));
  ipobj.gradient(*state_->gradientVec, x, zerotol);
  con.applyAdjointJacobian(dwa, l, x, zerotol);
  state_->gradientVec->plus(dwa);
  //state_->gnorm = state_->gradientVec->norm();
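  // The optimality measure is the norm of the projected-gradient step,
  // || P(x - grad L(x,l)) - x ||, where P is the polyhedral projection.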
  pwa.set(x);
  pwa.axpy(-one,state_->gradientVec->dual());
  proj_->project(pwa,outStream);
  pwa.axpy(-one,x);
  state_->gnorm = pwa.norm();
  // Compute constraint violation
  con.value(*state_->constraintVec, x, zerotol);
  state_->cnorm = state_->constraintVec->norm();
  // Update state
  state_->nfval++;
  state_->ngrad++;
  state_->ncval++;
}

template<typename Real>
void InteriorPointAlgorithm<Real>::run( Vector<Real>          &x,
                                        const Vector<Real>    &g,
                                        Objective<Real>       &obj,
                                        BoundConstraint<Real> &bnd,
                                        Constraint<Real>      &econ,
                                        Vector<Real>          &emul,
                                        const Vector<Real>    &eres,
                                        std::ostream          &outStream ) {
  const Real one(1);
  Ptr<Vector<Real>> pwa = x.clone(), dwa = g.clone();
  // Initialize interior point data
  InteriorPointObjective<Real> ipobj(makePtrFromRef(obj),makePtrFromRef(bnd),
                                     x,g,useLinearDamping_,kappaD_,
                                     state_->searchSize);
  initialize(x,g,emul,eres,ipobj,bnd,econ,*pwa,*dwa,outStream);
  Ptr<TypeE::Algorithm<Real>> algo;

  // Output
  if (verbosity_ > 0) writeOutput(outStream,true);

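  // Outer interior-point loop: each iteration solves an equality-constrained
  // barrier subproblem with a TypeE algorithm, then (on convergence) reduces
  // the barrier parameter and tightens the subproblem tolerances.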
  while (status_->check(*state_)) {
    // Solve interior point subproblem
    list_.sublist("Status Test").set("Gradient Tolerance",   gtol_);
    list_.sublist("Status Test").set("Constraint Tolerance", ctol_);
    list_.sublist("Status Test").set("Step Tolerance",       stol_);
    algo = TypeE::AlgorithmFactory<Real>(list_);
    if (hasPolyProj_) algo->run(x,g,ipobj,econ,emul,eres,
                                *proj_->getLinearConstraint(),
                                *proj_->getMultiplier(),
                                *proj_->getResidual(),outStream);
    else              algo->run(x,g,ipobj,econ,emul,eres,outStream);
    subproblemIter_ = algo->getState()->iter;
    state_->nfval  += algo->getState()->nfval;
    state_->ngrad  += algo->getState()->ngrad;
    state_->ncval  += algo->getState()->ncval;

    // Compute step
    state_->stepVec->set(x);
    state_->stepVec->axpy(-one,*state_->iterateVec);
    state_->lagmultVec->axpy(-one,emul);
    state_->snorm  = state_->stepVec->norm();
    state_->snorm += state_->lagmultVec->norm();

    // Update iterate and Lagrange multiplier
    state_->iterateVec->set(x);
    state_->lagmultVec->set(emul);

    // Update iteration count
    state_->iter++;

    // Update barrier parameter and subproblem tolerances
    if (algo->getState()->statusFlag == EXITSTATUS_CONVERGED) {
      if( (rho_ < one && state_->searchSize > mumin_) || (rho_ > one && state_->searchSize < mumax_) ) {
        state_->searchSize *= rho_;
        ipobj.updatePenalty(state_->searchSize);
      }
      gtol_ *= gtolrate_; gtol_ = std::max(gtol_,mingtol_);
      ctol_ *= ctolrate_; ctol_ = std::max(ctol_,minctol_);
      stol_  = static_cast<Real>(1e-6)*std::min(gtol_,ctol_);
    }

    // Update state
    updateState(x,emul,ipobj,bnd,econ,*pwa,*dwa);

    // Update output
    if (verbosity_ > 0) writeOutput(outStream,printHeader_);
  }
  if (verbosity_ > 0) TypeG::Algorithm<Real>::writeExitStatus(outStream);
}

template<typename Real>
void InteriorPointAlgorithm<Real>::writeHeader( std::ostream& os ) const {
  std::stringstream hist;
  if (verbosity_ > 1) {
    hist << std::string(109,'-') << std::endl;
    hist << "Interior Point Solver";
    hist << " status output definitions" << std::endl << std::endl;
    hist << "  iter    - Number of iterates (steps taken)" << std::endl;
    hist << "  fval    - Objective function value" << std::endl;
    hist << "  cnorm   - Norm of the constraint" << std::endl;
    hist << "  gLnorm  - Norm of the gradient of the Lagrangian" << std::endl;
    hist << "  snorm   - Norm of the step (update to optimization vector)" << std::endl;
    hist << "  penalty - Penalty parameter for bound constraints" << std::endl;
    hist << "  #fval   - Cumulative number of times the objective function was evaluated" << std::endl;
    hist << "  #grad   - Cumulative number of times the gradient was computed" << std::endl;
    hist << "  #cval   - Cumulative number of times the constraint was evaluated" << std::endl;
    hist << "  optTol  - Subproblem optimality tolerance" << std::endl;
    hist << "  feasTol - Subproblem feasibility tolerance" << std::endl;
    hist << "  subiter - Number of subproblem iterations" << std::endl;
    hist << std::string(109,'-') << std::endl;
  }

  hist << "  ";
  hist << std::setw(6)  << std::left << "iter";
  hist << std::setw(15) << std::left << "fval";
  hist << std::setw(15) << std::left << "cnorm";
  hist << std::setw(15) << std::left << "gLnorm";
  hist << std::setw(15) << std::left << "snorm";
  hist << std::setw(10) << std::left << "penalty";
  hist << std::setw(8)  << std::left << "#fval";
  hist << std::setw(8)  << std::left << "#grad";
  hist << std::setw(8)  << std::left << "#cval";
  hist << std::setw(10) << std::left << "optTol";
  hist << std::setw(10) << std::left << "feasTol";
  hist << std::setw(8)  << std::left << "subIter";
  hist << std::endl;
  os << hist.str();
}

template<typename Real>
void InteriorPointAlgorithm<Real>::writeName( std::ostream& os ) const {
  std::stringstream hist;
  hist << std::endl << "Interior Point Solver (Type G, General Constraints)";
  hist << std::endl;
  hist << "Subproblem Solver: " << stepname_ << std::endl;
  os << hist.str();
}

template<typename Real>
void InteriorPointAlgorithm<Real>::writeOutput( std::ostream& os, const bool print_header ) const {
  std::stringstream hist;
  hist << std::scientific << std::setprecision(6);
  if ( state_->iter == 0 ) writeName(os);
  if ( print_header )      writeHeader(os);
  if ( state_->iter == 0 ) {
    hist << "  ";
    hist << std::setw(6)  << std::left << state_->iter;
    hist << std::setw(15) << std::left << state_->value;
    hist << std::setw(15) << std::left << state_->cnorm;
    hist << std::setw(15) << std::left << state_->gnorm;
    hist << std::setw(15) << std::left << "---";
    hist << std::scientific << std::setprecision(2);
    hist << std::setw(10) << std::left << state_->searchSize;
    hist << std::setw(8)  << std::left << state_->nfval;
    hist << std::setw(8)  << std::left << state_->ngrad;
    hist << std::setw(8)  << std::left << state_->ncval;
    hist << std::setw(10) << std::left << "---";
    hist << std::setw(10) << std::left << "---";
    hist << std::setw(8)  << std::left << "---";
    hist << std::endl;
  }
  else {
    hist << "  ";
    hist << std::setw(6)  << std::left << state_->iter;
    hist << std::setw(15) << std::left << state_->value;
    hist << std::setw(15) << std::left << state_->cnorm;
    hist << std::setw(15) << std::left << state_->gnorm;
    hist << std::setw(15) << std::left << state_->snorm;
    hist << std::scientific << std::setprecision(2);
    hist << std::setw(10) << std::left << state_->searchSize;
    hist << std::scientific << std::setprecision(6);
    hist << std::setw(8)  << std::left << state_->nfval;
    hist << std::setw(8)  << std::left << state_->ngrad;
    hist << std::setw(8)  << std::left << state_->ncval;
    hist << std::scientific << std::setprecision(2);
    hist << std::setw(10) << std::left << gtol_;
    hist << std::setw(10) << std::left << ctol_;
    hist << std::scientific << std::setprecision(6);
    hist << std::setw(8)  << std::left << subproblemIter_;
    hist << std::endl;
  }
  os << hist.str();
}

} // namespace TypeG
} // namespace ROL

#endif
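
Usage sketch. The run() method defined in this file can be driven directly rather than through ROL's higher-level driver layers. The following standalone program is a minimal, hypothetical example and is not part of this header: it assumes the standard ROL utility classes StdVector, StdObjective, StdConstraint, and Bounds, poses the toy problem min 0.5*sum_i (x_i - 2)^2 subject to sum_i x_i = 1 and 0 <= x <= 1, and the names QuadObj, SumConstraint, and the file name are invented for illustration; include paths and option defaults may differ across ROL versions.

// example_interior_point.cpp -- hypothetical driver, not part of ROL.
#include "ROL_StdVector.hpp"
#include "ROL_StdObjective.hpp"
#include "ROL_StdConstraint.hpp"
#include "ROL_Bounds.hpp"
#include "ROL_TypeG_InteriorPointAlgorithm.hpp"
#include <iostream>
#include <vector>

using RealT = double;

// Objective: f(x) = 0.5 * sum_i (x_i - 2)^2
struct QuadObj : public ROL::StdObjective<RealT> {
  RealT value(const std::vector<RealT> &x, RealT &tol) override {
    RealT v(0);
    for (RealT xi : x) v += RealT(0.5)*(xi - RealT(2))*(xi - RealT(2));
    return v;
  }
  void gradient(std::vector<RealT> &g, const std::vector<RealT> &x, RealT &tol) override {
    for (size_t i = 0; i < x.size(); ++i) g[i] = x[i] - RealT(2);
  }
};

// Equality constraint: c(x) = sum_i x_i - 1
struct SumConstraint : public ROL::StdConstraint<RealT> {
  void value(std::vector<RealT> &c, const std::vector<RealT> &x, RealT &tol) override {
    c[0] = RealT(-1);
    for (RealT xi : x) c[0] += xi;
  }
  void applyJacobian(std::vector<RealT> &jv, const std::vector<RealT> &v,
                     const std::vector<RealT> &x, RealT &tol) override {
    jv[0] = RealT(0);
    for (RealT vi : v) jv[0] += vi;
  }
  void applyAdjointJacobian(std::vector<RealT> &ajv, const std::vector<RealT> &v,
                            const std::vector<RealT> &x, RealT &tol) override {
    for (size_t i = 0; i < ajv.size(); ++i) ajv[i] = v[0];
  }
};

int main() {
  const int n = 5;
  // Optimization vector, bounds, equality multiplier, and constraint-space vector
  ROL::StdVector<RealT> x(ROL::makePtr<std::vector<RealT>>(n, 0.5));
  auto lo = ROL::makePtr<ROL::StdVector<RealT>>(ROL::makePtr<std::vector<RealT>>(n, 0.0));
  auto up = ROL::makePtr<ROL::StdVector<RealT>>(ROL::makePtr<std::vector<RealT>>(n, 1.0));
  ROL::Bounds<RealT>    bnd(lo, up);
  ROL::StdVector<RealT> emul(ROL::makePtr<std::vector<RealT>>(1, 0.0));
  ROL::StdVector<RealT> eres(ROL::makePtr<std::vector<RealT>>(1, 0.0));
  QuadObj obj;
  SumConstraint econ;

  // Options read by the constructor and status test defined in this file
  ROL::ParameterList list;
  list.sublist("Step").sublist("Interior Point").set("Initial Barrier Parameter", 1.0);
  list.sublist("Status Test").set("Gradient Tolerance",   1e-8);
  list.sublist("Status Test").set("Constraint Tolerance", 1e-8);
  list.sublist("Status Test").set("Iteration Limit",      50);
  list.sublist("General").set("Output Level", 1);

  // Construct and run the Type-G interior point algorithm
  ROL::TypeG::InteriorPointAlgorithm<RealT> algo(list);
  algo.run(x, x.dual(), obj, bnd, econ, emul, eres, std::cout);
  return 0;
}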