This article collects typical usage examples of the C++ method BoundConstraint::computeProjectedGradient. If you are unsure what BoundConstraint::computeProjectedGradient does, how to call it, or want to see it in context, the curated examples below may help. You can also explore further usage of its enclosing class, BoundConstraint.
Two code examples of BoundConstraint::computeProjectedGradient are shown below, ordered by popularity.
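Before turning to the examples, a minimal sketch of the call pattern may help. computeProjectedGradient takes a gradient vector and the current iterate and removes, in place, the gradient components associated with active bounds; the norm of the result is a standard stationarity measure for bound-constrained problems. The helper below is a hypothetical illustration (the name projectedGradientNorm is ours, not ROL's); it assumes already-constructed objects from the Teuchos-era ROL interface used in the examples.

#include "Teuchos_RCP.hpp"
#include "ROL_Vector.hpp"
#include "ROL_BoundConstraint.hpp"

// Hypothetical helper: returns the norm of the projected gradient at x.
// g is the gradient vector, as passed to computeProjectedGradient in the
// examples below.
template<class Real>
Real projectedGradientNorm( const ROL::Vector<Real> &x,
                            const ROL::Vector<Real> &g,
                            ROL::BoundConstraint<Real> &bnd ) {
  Teuchos::RCP<ROL::Vector<Real> > gp = g.clone(); // working copy
  gp->set(g);
  bnd.computeProjectedGradient(*gp, x); // prune components blocked by active bounds
  return gp->norm();
}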
Example 1: update
/** \brief Update step, if successful.

    Given a trial step, \f$s_k\f$, this function updates \f$x_{k+1}=x_k+s_k\f$.
    This function also updates the secant approximation.

    @param[in,out]   x           is the updated iterate
    @param[in]       s           is the computed trial step
    @param[in]       obj         is the objective function
    @param[in]       con         are the bound constraints
    @param[in]       algo_state  contains the current state of the algorithm
*/
void update( Vector<Real> &x, const Vector<Real> &s, Objective<Real> &obj, BoundConstraint<Real> &con,
             AlgorithmState<Real> &algo_state ) {
  Real tol = std::sqrt(ROL_EPSILON);
  Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();
  // Update iterate
  algo_state.iter++;
  x.axpy(1.0, s);
  // Compute new gradient
  if ( edesc_ == DESCENT_SECANT ||
      (edesc_ == DESCENT_NEWTONKRYLOV && useSecantPrecond_) ) {
    gp_->set(*(step_state->gradientVec));
  }
  obj.gradient(*(step_state->gradientVec),x,tol);
  algo_state.ngrad++;
  // Update Secant Information
  if ( edesc_ == DESCENT_SECANT ||
      (edesc_ == DESCENT_NEWTONKRYLOV && useSecantPrecond_) ) {
    secant_->update(*(step_state->gradientVec),*gp_,s,algo_state.snorm,algo_state.iter+1);
  }
  // Update algorithm state
  (algo_state.iterateVec)->set(x);
  if ( con.isActivated() ) {
    if ( useProjectedGrad_ ) {
      gp_->set(*(step_state->gradientVec));
      con.computeProjectedGradient( *gp_, x );
      algo_state.gnorm = gp_->norm();
    }
    else {
      d_->set(x);
      d_->axpy(-1.0,(step_state->gradientVec)->dual());
      con.project(*d_);
      d_->axpy(-1.0,x);
      algo_state.gnorm = d_->norm();
    }
  }
  else {
    algo_state.gnorm = (step_state->gradientVec)->norm();
  }
}
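Note the two stationarity measures in the bound-constrained branch above. With useProjectedGrad_ enabled, gnorm is the norm of the projected gradient produced by computeProjectedGradient; otherwise it is the norm of x - P(x - g), where P denotes projection onto the bound set, which vanishes exactly at a stationary point. Below is a hypothetical standalone version of the second measure (the name projectedStepNorm is ours; g is assumed to be the primal representation of the gradient, i.e. gradientVec->dual() in the example).

// Hypothetical helper: computes ||x - P(x - g)||, the projected
// gradient-step criticality measure used in the else-branch above.
template<class Real>
Real projectedStepNorm( const ROL::Vector<Real> &x,
                        const ROL::Vector<Real> &g,
                        ROL::BoundConstraint<Real> &bnd ) {
  const Real one(1);
  Teuchos::RCP<ROL::Vector<Real> > d = x.clone();
  d->set(x);
  d->axpy(-one, g);  // d = x - g
  bnd.project(*d);   // d = P(x - g)
  d->axpy(-one, x);  // d = P(x - g) - x
  return d->norm();  // ||P(x - g) - x|| == ||x - P(x - g)||
}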
Example 2: update
void update( Vector<Real> &x, const Vector<Real> &s,
             Objective<Real> &obj, BoundConstraint<Real> &bnd,
             AlgorithmState<Real> &algo_state ) {
  Real tol = std::sqrt(ROL_EPSILON<Real>()), one(1);
  Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();
  // Update iterate and store previous step
  algo_state.iter++;
  d_->set(x);
  x.plus(s);
  bnd.project(x);
  (step_state->descentVec)->set(x);
  (step_state->descentVec)->axpy(-one,*d_);
  algo_state.snorm = s.norm();
  // Compute new gradient
  gp_->set(*(step_state->gradientVec));
  obj.update(x,true,algo_state.iter);
  if ( computeObj_ ) {
    algo_state.value = obj.value(x,tol);
    algo_state.nfval++;
  }
  obj.gradient(*(step_state->gradientVec),x,tol);
  algo_state.ngrad++;
  // Update Secant Information
  secant_->updateStorage(x,*(step_state->gradientVec),*gp_,s,algo_state.snorm,algo_state.iter+1);
  // Update algorithm state
  (algo_state.iterateVec)->set(x);
  if ( useProjectedGrad_ ) {
    gp_->set(*(step_state->gradientVec));
    bnd.computeProjectedGradient( *gp_, x );
    algo_state.gnorm = gp_->norm();
  }
  else {
    d_->set(x);
    d_->axpy(-one,(step_state->gradientVec)->dual());
    bnd.project(*d_);
    d_->axpy(-one,x);
    algo_state.gnorm = d_->norm();
  }
}
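Compared with Example 1, this variant keeps the iterate feasible: it applies the trial step with x.plus(s), projects the result back onto the bound set with bnd.project(x), and stores the actually taken step in descentVec. It also re-evaluates the objective when computeObj_ is set and refreshes the secant data with updateStorage rather than update. The final call to computeProjectedGradient serves the same purpose as in Example 1, though here the projected-gradient/projected-step choice is made without an isActivated() guard.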