C++ BoundConstraint Class Code Examples

This article collects typical usage examples of the C++ BoundConstraint class, drawn from open-source projects. If you are unsure what the BoundConstraint class does, how to use it, or what real usage looks like, the curated examples below should help.


Fifteen BoundConstraint code examples are shown below, sorted by popularity by default. You can upvote the examples you find useful; your feedback helps the site recommend better C++ code examples.

Example 1: ObjectiveFromBoundConstraint

 ObjectiveFromBoundConstraint( const BoundConstraint<Real> &bc ) :
   lo_( bc.getLowerVectorRCP() ),
   up_( bc.getUpperVectorRCP() )
   { 
     a_ = lo_->clone();
     b_ = up_->clone();
   }
Developer ID: agrippa, Project: Trilinos, Lines: 7, Source: ROL_ObjectiveFromBoundConstraint.hpp
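ObjectiveFromBoundConstraint builds an objective function purely from the bound data that the constructor above clones; in ROL a barrier-style penalty on the feasible box is one such use. The following is a rough, self-contained sketch of that idea on plain std::vector data, not ROL's actual implementation; the logarithmic barrier form and the helper name logBarrierFromBounds are assumptions for illustration.

#include <cmath>
#include <cstddef>
#include <limits>
#include <vector>

// Sketch: a barrier objective built only from bound data,
// phi(x) = -sum_i [ log(x_i - lo_i) + log(up_i - x_i) ].
// Returns +infinity if x leaves the open box (lo, up).
double logBarrierFromBounds(const std::vector<double>& x,
                            const std::vector<double>& lo,
                            const std::vector<double>& up) {
  double val = 0.0;
  for (std::size_t i = 0; i < x.size(); ++i) {
    if (x[i] <= lo[i] || x[i] >= up[i])
      return std::numeric_limits<double>::infinity();
    val -= std::log(x[i] - lo[i]) + std::log(up[i] - x[i]);
  }
  return val;
}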

Example 2: updateIterate

 void updateIterate(Vector<Real> &xnew, const Vector<Real> &x, const Vector<Real> &s, Real alpha,
                    BoundConstraint<Real> &con ) {
   xnew.set(x);
   xnew.axpy(alpha,s);
   if ( con.isActivated() ) {
     con.project(xnew);
   }
 }
Developer ID: abhishek4747, Project: trilinos, Lines: 8, Source: ROL_LineSearch.hpp
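This helper forms the projected trial iterate x_new = P(x + alpha*s), where P is whatever projection the bound constraint implements. For simple box bounds, P is a componentwise clamp; the self-contained sketch below mirrors the same logic on plain std::vector data (the box form of the projection and the name updateIterateBox are assumptions, since a general BoundConstraint may project onto other sets).

#include <algorithm>
#include <cstddef>
#include <vector>

// Sketch: xnew = clamp(x + alpha*s, lo, up), the box-constrained
// analogue of updateIterate() above. Requires C++17 for std::clamp.
void updateIterateBox(std::vector<double>& xnew,
                      const std::vector<double>& x,
                      const std::vector<double>& s, double alpha,
                      const std::vector<double>& lo,
                      const std::vector<double>& up) {
  xnew.resize(x.size());
  for (std::size_t i = 0; i < x.size(); ++i)
    xnew[i] = std::clamp(x[i] + alpha * s[i], lo[i], up[i]);
}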

Example 3: Left

bool BoundConjunctiveConstraint::Subsume(BoundConstraint* that) const
{
    BoundConstraint* left = Left();
    BoundConstraint* right = Right();
    if (that->IsBinaryConstraint())
    {
        BoundBinaryConstraint* thatBinaryConstraint = static_cast<BoundBinaryConstraint*>(that);
        BoundConstraint* thatLeft = thatBinaryConstraint->Left();
        BoundConstraint* thatRight = thatBinaryConstraint->Right();
        bool leftSubsumeThatLeft = left->Subsume(thatLeft);
        bool rightSubsumeThatLeft = right->Subsume(thatLeft);
        bool leftSubsumeThatRight = left->Subsume(thatRight);
        bool rightSubsumeThatRight = right->Subsume(thatRight);
        bool leftOrRightSubsumeThatLeft = leftSubsumeThatLeft || rightSubsumeThatLeft;
        bool leftOrRightSubsumeThatRight = leftSubsumeThatRight || rightSubsumeThatRight;
        if (that->IsConjunctiveConstraint())
        {
            return leftOrRightSubsumeThatLeft && leftOrRightSubsumeThatRight;
        }
        else if (that->IsDisjunctiveConstraint())
        {
            return leftOrRightSubsumeThatLeft || leftOrRightSubsumeThatRight;
        }
        else // assert(false)
        {
            return false;
        }
    }
    else
    {
        bool leftSubsumeThat = left->Subsume(that);
        bool rightSubsumeThat = right->Subsume(that);
        return leftSubsumeThat || rightSubsumeThat;
    }
}
Developer ID: slaakko, Project: cmajor, Lines: 35, Source: BoundConcept.cpp

Example 4: compute

  void compute( Vector<Real> &s, const Vector<Real> &x,
                Objective<Real> &obj, BoundConstraint<Real> &bnd,
                AlgorithmState<Real> &algo_state ) {
    Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();

    // Compute projected secant step
    // ---> Apply inactive-inactive block of inverse secant to gradient
    gp_->set(*(step_state->gradientVec));
    bnd.pruneActive(*gp_,*(step_state->gradientVec),x,algo_state.gnorm);
    secant_->applyH(s,*gp_);
    bnd.pruneActive(s,*(step_state->gradientVec),x,algo_state.gnorm);
    // ---> Add in active gradient components
    gp_->set(*(step_state->gradientVec));
    bnd.pruneInactive(*d_,*(step_state->gradientVec),x,algo_state.gnorm);
    s.plus(gp_->dual());
    s.scale(-1.0);
  }
Developer ID: Russell-Jones-OxPhys, Project: Trilinos, Lines: 17, Source: ROL_ProjectedSecantStep.hpp
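The step treats active and inactive components differently: the inverse secant approximation is applied only on the inactive block (pruneActive zeroes the active components before and after applyH), while active components receive the plain gradient, and the result is negated. Example 6 below follows the same pattern with the true inverse Hessian. Below is a self-contained sketch of this split for box constraints; the diagonal inverse-Hessian model and the eps-active test are simplifying assumptions for illustration.

#include <cstddef>
#include <vector>

// Sketch: projected (quasi-)Newton step with an active/inactive split.
// Inactive components get -(Hinv*g)_i; eps-active components get -g_i.
std::vector<double> projectedStep(const std::vector<double>& x,
                                  const std::vector<double>& g,
                                  const std::vector<double>& hinvDiag, // diagonal model of H^{-1}
                                  const std::vector<double>& lo,
                                  const std::vector<double>& up,
                                  double eps) {
  std::vector<double> s(x.size());
  for (std::size_t i = 0; i < x.size(); ++i) {
    const bool active = (x[i] <= lo[i] + eps) || (x[i] >= up[i] - eps);
    s[i] = active ? -g[i] : -hinvDiag[i] * g[i];
  }
  return s;
}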

Example 5: computeCriticalityMeasure

  /** \brief Compute the gradient-based criticality measure.

             The criticality measure is 
             \f$\|x_k - P_{[a,b]}(x_k-\nabla f(x_k))\|_{\mathcal{X}}\f$.
             Here, \f$P_{[a,b]}\f$ denotes the projection onto the
             bound constraints.
 
             @param[in]    x     is the current iterate
             @param[in]    obj   is the objective function
             @param[in]    con   are the bound constraints
             @param[in]    tol   is a tolerance for inexact evaluations of the objective function
  */ 
  Real computeCriticalityMeasure(Vector<Real> &x, Objective<Real> &obj, BoundConstraint<Real> &con, Real tol) {
    Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();
    obj.gradient(*(step_state->gradientVec),x,tol);
    xtmp_->set(x);
    xtmp_->axpy(-1.0,(step_state->gradientVec)->dual());
    con.project(*xtmp_);
    xtmp_->axpy(-1.0,x);
    return xtmp_->norm();
  }
Developer ID: rainiscold, Project: trilinos, Lines: 21, Source: ROL_PrimalDualActiveSetStep.hpp
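For box constraints the measure specializes to ||x - clamp(x - grad f(x), lo, up)||, which vanishes exactly at stationary points of the bound-constrained problem. A self-contained sketch on std::vector data follows; the box projection and the Euclidean norm are assumptions here, since ROL works with abstract vectors and a general projection.

#include <algorithm>
#include <cmath>
#include <cstddef>
#include <vector>

// Sketch: ||x - P_[lo,up](x - g)||_2, the projected-gradient
// criticality measure for box constraints. Requires C++17 for std::clamp.
double criticalityMeasureBox(const std::vector<double>& x,
                             const std::vector<double>& g,
                             const std::vector<double>& lo,
                             const std::vector<double>& up) {
  double nrm2 = 0.0;
  for (std::size_t i = 0; i < x.size(); ++i) {
    const double diff = x[i] - std::clamp(x[i] - g[i], lo[i], up[i]);
    nrm2 += diff * diff;
  }
  return std::sqrt(nrm2);
}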

Example 6: compute

  void compute( Vector<Real> &s, const Vector<Real> &x,
                Objective<Real> &obj, BoundConstraint<Real> &bnd,
                AlgorithmState<Real> &algo_state ) {
    Real tol = std::sqrt(ROL_EPSILON<Real>());
    Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();

    // Compute projected Newton step
    // ---> Apply inactive-inactive block of inverse hessian to gradient
    gp_->set(*(step_state->gradientVec));
    bnd.pruneActive(*gp_,*(step_state->gradientVec),x,algo_state.gnorm);
    obj.invHessVec(s,*gp_,x,tol);
    bnd.pruneActive(s,*(step_state->gradientVec),x,algo_state.gnorm);
    // ---> Add in active gradient components
    gp_->set(*(step_state->gradientVec));
    bnd.pruneInactive(*d_,*(step_state->gradientVec),x,algo_state.gnorm);
    s.plus(gp_->dual());
    s.scale(-1.0);
  }
Developer ID: Russell-Jones-OxPhys, Project: Trilinos, Lines: 18, Source: ROL_ProjectedNewtonStep.hpp

Example 7: update

  /** \brief Update step, if successful.

      Given a trial step, \f$s_k\f$, this function updates \f$x_{k+1}=x_k+s_k\f$. 
      This function also updates the secant approximation.

      @param[in,out]   x          is the updated iterate
      @param[in]       s          is the computed trial step
      @param[in]       obj        is the objective function
      @param[in]       con        are the bound constraints
      @param[in]       algo_state contains the current state of the algorithm
  */
  void update( Vector<Real> &x, const Vector<Real> &s, Objective<Real> &obj, BoundConstraint<Real> &con,
               AlgorithmState<Real> &algo_state ) {
    Real tol = std::sqrt(ROL_EPSILON);
    Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();

    

    // Update iterate
    algo_state.iter++;
    x.axpy(1.0, s);
    // Compute new gradient
    if ( edesc_ == DESCENT_SECANT || 
        (edesc_ == DESCENT_NEWTONKRYLOV && useSecantPrecond_) ) {
      gp_->set(*(step_state->gradientVec));
    }
    obj.gradient(*(step_state->gradientVec),x,tol);
    algo_state.ngrad++;

    // Update Secant Information
    if ( edesc_ == DESCENT_SECANT || 
        (edesc_ == DESCENT_NEWTONKRYLOV && useSecantPrecond_) ) {
      secant_->update(*(step_state->gradientVec),*gp_,s,algo_state.snorm,algo_state.iter+1);
    }

    // Update algorithm state
    (algo_state.iterateVec)->set(x);
    if ( con.isActivated() ) {
      if ( useProjectedGrad_ ) {
        gp_->set(*(step_state->gradientVec));
        con.computeProjectedGradient( *gp_, x );
        algo_state.gnorm = gp_->norm();
      }
      else {
        d_->set(x);
        d_->axpy(-1.0,(step_state->gradientVec)->dual());
        con.project(*d_);
        d_->axpy(-1.0,x);
        algo_state.gnorm = d_->norm();
      }
    }
    else {
      algo_state.gnorm = (step_state->gradientVec)->norm();
    }
  }
Developer ID: ChiahungTai, Project: Trilinos, Lines: 55, Source: ROL_LineSearchStep.hpp

Example 8: update

  void update( Vector<Real> &x, const Vector<Real> &s,
               Objective<Real> &obj, BoundConstraint<Real> &bnd,
               AlgorithmState<Real> &algo_state ) {
    Real tol = std::sqrt(ROL_EPSILON<Real>()), one(1);
    Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();

    // Update iterate and store previous step
    algo_state.iter++;
    d_->set(x);
    x.plus(s);
    bnd.project(x);
    (step_state->descentVec)->set(x);
    (step_state->descentVec)->axpy(-one,*d_);
    algo_state.snorm = s.norm();

    // Compute new gradient
    gp_->set(*(step_state->gradientVec));
    obj.update(x,true,algo_state.iter);
    if ( computeObj_ ) {
      algo_state.value = obj.value(x,tol);
      algo_state.nfval++;
    }
    obj.gradient(*(step_state->gradientVec),x,tol);
    algo_state.ngrad++;

    // Update Secant Information
    secant_->updateStorage(x,*(step_state->gradientVec),*gp_,s,algo_state.snorm,algo_state.iter+1);

    // Update algorithm state
    (algo_state.iterateVec)->set(x);
    if ( useProjectedGrad_ ) {
      gp_->set(*(step_state->gradientVec));
      bnd.computeProjectedGradient( *gp_, x );
      algo_state.gnorm = gp_->norm();
    }
    else {
      d_->set(x);
      d_->axpy(-one,(step_state->gradientVec)->dual());
      bnd.project(*d_);
      d_->axpy(-one,x);
      algo_state.gnorm = d_->norm();
    }
  }
Developer ID: agrippa, Project: Trilinos, Lines: 43, Source: ROL_ProjectedSecantStep.hpp

Example 9: GradDotStep

 Real GradDotStep(const Vector<Real> &g, const Vector<Real> &s,
                  const Vector<Real> &x,
                  BoundConstraint<Real> &bnd, Real eps = 0) {
   Real gs(0), one(1);
   if (!bnd.isActivated()) {
     gs = s.dot(g.dual());
   }
   else {
     d_->set(s);
     bnd.pruneActive(*d_,g,x,eps);
     gs = d_->dot(g.dual());
     d_->set(x);
     d_->axpy(-one,g.dual());
     bnd.project(*d_);
     d_->scale(-one);
     d_->plus(x);
     bnd.pruneInactive(*d_,g,x,eps);
     gs -= d_->dot(g.dual());
   }
   return gs;
 }
Developer ID: agrippa, Project: Trilinos, Lines: 21, Source: ROL_LineSearchStep.hpp
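Without active bounds this is simply the directional derivative g's. With bounds, the inactive components contribute g_i*s_i directly, while each active component is replaced by a projected-gradient correction. A box-constraint sketch of the same quantity (the componentwise eps-active test is an assumption):

#include <algorithm>
#include <cstddef>
#include <vector>

// Sketch of GradDotStep for box constraints:
// gs = sum_{i inactive} g_i*s_i
//      - sum_{i active} g_i*(x_i - clamp(x_i - g_i, lo_i, up_i)).
double gradDotStepBox(const std::vector<double>& g,
                      const std::vector<double>& s,
                      const std::vector<double>& x,
                      const std::vector<double>& lo,
                      const std::vector<double>& up, double eps) {
  double gs = 0.0;
  for (std::size_t i = 0; i < x.size(); ++i) {
    const bool active = (x[i] <= lo[i] + eps) || (x[i] >= up[i] - eps);
    if (!active) gs += g[i] * s[i];
    else         gs -= g[i] * (x[i] - std::clamp(x[i] - g[i], lo[i], up[i]));
  }
  return gs;
}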

Example 10: initialize

 void initialize( Vector<Real> &x, const Vector<Real> &s, const Vector<Real> &g, 
                  Objective<Real> &obj, BoundConstraint<Real> &con, 
                  AlgorithmState<Real> &algo_state ) {
   Step<Real>::initialize(x,s,g,obj,con,algo_state);
   Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();
   lineSearch_->initialize(x, s, *(step_state->gradientVec),obj,con);
   if ( edesc_ == DESCENT_NEWTONKRYLOV || edesc_ == DESCENT_NEWTON || edesc_ == DESCENT_SECANT ) {
     Teuchos::RCP<Objective<Real> > obj_ptr = Teuchos::rcp(&obj, false);
     Teuchos::RCP<BoundConstraint<Real> > con_ptr = Teuchos::rcp(&con, false);
     hessian_ = Teuchos::rcp(
       new ProjectedHessian<Real>(secant_,obj_ptr,con_ptr,algo_state.iterateVec,step_state->gradientVec,
                                  useSecantHessVec_));
     precond_ = Teuchos::rcp(
       new ProjectedPreconditioner<Real>(secant_,obj_ptr,con_ptr,algo_state.iterateVec,
                                         step_state->gradientVec,useSecantPrecond_));
   }
   if ( con.isActivated() ) {
     d_ = s.clone();
   }
   if ( con.isActivated() || edesc_ == DESCENT_SECANT 
                          || (edesc_ == DESCENT_NEWTONKRYLOV && useSecantPrecond_) ) {
     gp_ = g.clone();
   }
 }
Developer ID: ChiahungTai, Project: Trilinos, Lines: 24, Source: ROL_LineSearchStep.hpp

Example 11: initialize

  /** \brief Initialize step.  

             This includes projecting the initial guess onto the constraints, 
             computing the initial objective function value and gradient, 
             and initializing the dual variables.

             @param[in,out]    x           is the initial guess 
             @param[in]        obj         is the objective function
             @param[in]        con         are the bound constraints
             @param[in]        algo_state  is the current state of the algorithm
  */
  void initialize( Vector<Real> &x, const Vector<Real> &s, const Vector<Real> &g, 
                   Objective<Real> &obj, BoundConstraint<Real> &con, 
                   AlgorithmState<Real> &algo_state ) {
    Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();
    // Initialize state descent direction and gradient storage
    step_state->descentVec  = s.clone();
    step_state->gradientVec = g.clone();
    step_state->searchSize  = 0.0;
    // Initialize additional storage
    xlam_ = x.clone(); 
    x0_   = x.clone();
    xbnd_ = x.clone();
    As_   = s.clone(); 
    xtmp_ = x.clone(); 
    res_  = g.clone();
    Ag_   = g.clone(); 
    rtmp_ = g.clone(); 
    gtmp_ = g.clone(); 
    // Project x onto constraint set
    con.project(x);
    // Update objective function, get value, and get gradient
    Real tol = std::sqrt(ROL_EPSILON);
    obj.update(x,true,algo_state.iter);
    algo_state.value = obj.value(x,tol);
    algo_state.nfval++;
    algo_state.gnorm = computeCriticalityMeasure(x,obj,con,tol);
    algo_state.ngrad++;
    // Initialize dual variable
    lambda_ = s.clone(); 
    lambda_->set((step_state->gradientVec)->dual());
    lambda_->scale(-1.0);
    //con.setVectorToLowerBound(*lambda_);
    // Initialize Hessian and preconditioner
    Teuchos::RCP<Objective<Real> > obj_ptr = Teuchos::rcp(&obj, false);
    Teuchos::RCP<BoundConstraint<Real> > con_ptr = Teuchos::rcp(&con, false);
    hessian_ = Teuchos::rcp( 
      new PrimalDualHessian<Real>(secant_,obj_ptr,con_ptr,algo_state.iterateVec,xlam_,useSecantHessVec_) );
    precond_ = Teuchos::rcp( 
      new PrimalDualPreconditioner<Real>(secant_,obj_ptr,con_ptr,algo_state.iterateVec,xlam_,
                                         useSecantPrecond_) );
  }
Developer ID: rainiscold, Project: trilinos, Lines: 52, Source: ROL_PrimalDualActiveSetStep.hpp

Example 12: update

  /** \brief Update step, if successful.

             This function updates \f$x_{k+1} = x_k + s_k\f$.
             It also updates secant information if being used.

             @param[in,out]    x           is the new iterate
             @param[in]        s           is the step computed via PDAS
             @param[in]        obj         is the objective function
             @param[in]        con         are the bound constraints
             @param[in]        algo_state  is the current state of the algorithm
  */
  void update( Vector<Real> &x, const Vector<Real> &s, Objective<Real> &obj, BoundConstraint<Real> &con,
               AlgorithmState<Real> &algo_state ) {
    Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();

    x.plus(s);
    feasible_ = con.isFeasible(x);
    algo_state.snorm = s.norm();
    algo_state.iter++;
    Real tol = std::sqrt(ROL_EPSILON);
    obj.update(x,true,algo_state.iter);
    algo_state.value = obj.value(x,tol);
    algo_state.nfval++;
    
    if ( secant_ != Teuchos::null ) {
      gtmp_->set(*(step_state->gradientVec));
    }
    algo_state.gnorm = computeCriticalityMeasure(x,obj,con,tol);
    algo_state.ngrad++;

    if ( secant_ != Teuchos::null ) {
      secant_->update(*(step_state->gradientVec),*gtmp_,s,algo_state.snorm,algo_state.iter+1);
    }
    (algo_state.iterateVec)->set(x);
  }
Developer ID: rainiscold, Project: trilinos, Lines: 35, Source: ROL_PrimalDualActiveSetStep.hpp

Example 13: initialize

  void initialize( Vector<Real> &x, const Vector<Real> &s, const Vector<Real> &g, 
                   Objective<Real> &obj, BoundConstraint<Real> &bnd, 
                   AlgorithmState<Real> &algo_state ) {
    d_ = x.clone();

    // Initialize unglobalized step
    Teuchos::ParameterList& list
      = parlist_.sublist("Step").sublist("Line Search").sublist("Descent Method");
    EDescent edesc = StringToEDescent(list.get("Type","Quasi-Newton Method") );
    if (bnd.isActivated()) {
      switch(edesc) {
        case DESCENT_STEEPEST: {
          desc_ = Teuchos::rcp(new GradientStep<Real>(parlist_,computeObj_));
          break;
        }
        case DESCENT_NONLINEARCG: {
          desc_ = Teuchos::rcp(new NonlinearCGStep<Real>(parlist_,nlcg_,computeObj_));
          break;
        }
        case DESCENT_SECANT: {
          desc_ = Teuchos::rcp(new ProjectedSecantStep<Real>(parlist_,secant_,computeObj_));
          break;
        }
        case DESCENT_NEWTON: {
          desc_ = Teuchos::rcp(new ProjectedNewtonStep<Real>(parlist_,computeObj_));
          break;
        }
        case DESCENT_NEWTONKRYLOV: {
          desc_ = Teuchos::rcp(new ProjectedNewtonKrylovStep<Real>(parlist_,krylov_,secant_,computeObj_));
          break;
        }
        default:
          TEUCHOS_TEST_FOR_EXCEPTION(true,std::invalid_argument,
            ">>> (LineSearchStep::Initialize): Undefined descent type!");
      }
    }
    else {
      switch(edesc) {
        case DESCENT_STEEPEST: {
          desc_ = Teuchos::rcp(new GradientStep<Real>(parlist_,computeObj_));
          break;
        }
        case DESCENT_NONLINEARCG: {
          desc_ = Teuchos::rcp(new NonlinearCGStep<Real>(parlist_,nlcg_,computeObj_));
          break;
        }
        case DESCENT_SECANT: {
          desc_ = Teuchos::rcp(new SecantStep<Real>(parlist_,secant_,computeObj_));
          break;
        }
        case DESCENT_NEWTON: {
          desc_ = Teuchos::rcp(new NewtonStep<Real>(parlist_,computeObj_));
          break;
        }
        case DESCENT_NEWTONKRYLOV: {
          desc_ = Teuchos::rcp(new NewtonKrylovStep<Real>(parlist_,krylov_,secant_,computeObj_));
          break;
        }
        default:
          TEUCHOS_TEST_FOR_EXCEPTION(true,std::invalid_argument,
            ">>> (LineSearchStep::Initialize): Undefined descent type!");
      }
    }
    desc_->initialize(x,s,g,obj,bnd,algo_state);

    // Initialize line search
    lineSearch_->initialize(x,s,g,obj,bnd);
    //Teuchos::RCP<const StepState<Real> > desc_state = desc_->getStepState();
    //lineSearch_->initialize(x,s,*(desc_state->gradientVec),obj,bnd);
  }
Developer ID: agrippa, Project: Trilinos, Lines: 70, Source: ROL_LineSearchStep.hpp
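The descent method is read from the "Descent Method" sublist, and the bound-constrained branch substitutes the projected variant of each step. A minimal sketch of building that sublist with Teuchos::ParameterList follows; only the sublist path and the default string "Quasi-Newton Method" are taken from the code above, and other valid type strings depend on the ROL version.

#include "Teuchos_ParameterList.hpp"

// Sketch: build the sublist that initialize() above reads to pick
// the unglobalized descent step.
Teuchos::ParameterList makeLineSearchList() {
  Teuchos::ParameterList parlist;
  parlist.sublist("Step").sublist("Line Search").sublist("Descent Method")
         .set("Type", "Quasi-Newton Method"); // selects DESCENT_SECANT here
  return parlist;
}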

Example 14: compute

  /** \brief Compute step.

      Computes a trial step, \f$s_k\f$ as defined by the enum EDescent.  Once the 
      trial step is determined, this function determines an approximate minimizer 
      of the 1D function \f$\phi_k(t) = f(x_k+ts_k)\f$.  This approximate 
      minimizer must satisfy sufficient decrease and curvature conditions.

      @param[out]      s          is the computed trial step
      @param[in]       x          is the current iterate
      @param[in]       obj        is the objective function
      @param[in]       con        are the bound constraints
      @param[in]       algo_state contains the current state of the algorithm
  */
  void compute( Vector<Real> &s, const Vector<Real> &x, Objective<Real> &obj, BoundConstraint<Real> &con, 
                AlgorithmState<Real> &algo_state ) {
    Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();

    Real tol = std::sqrt(ROL_EPSILON);

    // Set active set parameter
    Real eps = 0.0;
    if ( con.isActivated() ) {
      eps = algo_state.gnorm;
    }
    lineSearch_->setData(eps);
    if ( hessian_ != Teuchos::null ) {
      hessian_->setData(eps);
    }
    if ( precond_ != Teuchos::null ) {
      precond_->setData(eps);
    }

    // Compute step s
    switch(edesc_) {
      case DESCENT_NEWTONKRYLOV:
        flagKrylov_ = 0;
        krylov_->run(s,*hessian_,*(step_state->gradientVec),*precond_,iterKrylov_,flagKrylov_);
        break;
      case DESCENT_NEWTON:
      case DESCENT_SECANT:
        hessian_->applyInverse(s,*(step_state->gradientVec),tol);
        break;
      case DESCENT_NONLINEARCG:
        nlcg_->run(s,*(step_state->gradientVec),x,obj);
        break;
      case DESCENT_STEEPEST:
        s.set(step_state->gradientVec->dual());
        break;
      default: break;
    }

    // Compute g.dot(s)
    Real gs = 0.0;
    if ( !con.isActivated() ) {
      gs = -s.dot((step_state->gradientVec)->dual());
    }
    else {
      if ( edesc_ == DESCENT_STEEPEST ) {
        d_->set(x);
        d_->axpy(-1.0,s);
        con.project(*d_);
        d_->scale(-1.0);
        d_->plus(x);
        //d->set(s);
        //con.pruneActive(*d,s,x,eps);
        //con.pruneActive(*d,*(step_state->gradientVec),x,eps);
        gs = -d_->dot((step_state->gradientVec)->dual());
      }
      else {
        d_->set(s);
        con.pruneActive(*d_,*(step_state->gradientVec),x,eps);
        gs = -d_->dot((step_state->gradientVec)->dual());
        d_->set(x);
        d_->axpy(-1.0,(step_state->gradientVec)->dual());
        con.project(*d_);
        d_->scale(-1.0);
        d_->plus(x);
        con.pruneInactive(*d_,*(step_state->gradientVec),x,eps);
        gs -= d_->dot((step_state->gradientVec)->dual());
      }
    }

    // Check if s is a descent direction i.e., g.dot(s) < 0
    if ( gs >= 0.0 || (flagKrylov_ == 2 && iterKrylov_ <= 1) ) {
      s.set((step_state->gradientVec)->dual());
      if ( con.isActivated() ) {
        d_->set(s);
        con.pruneActive(*d_,s,x);
        gs = -d_->dot((step_state->gradientVec)->dual());
      }
      else {
        gs = -s.dot((step_state->gradientVec)->dual());
      }
    }
    s.scale(-1.0);

    // Perform line search
    Real fnew  = algo_state.value;
    ls_nfval_ = 0;
    ls_ngrad_ = 0;
//......... remainder of the code omitted here .........
Developer ID: ChiahungTai, Project: Trilinos, Lines: 101, Source: ROL_LineSearchStep.hpp
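The omitted tail of this function runs the line search on the one-dimensional restriction phi(t) = f(x_k + t*s_k) described in the doc comment. A self-contained sketch of that restriction (the functor name Phi and the std::vector representation are assumptions):

#include <cstddef>
#include <functional>
#include <vector>

// Sketch: the 1D function phi(t) = f(x + t*s) that the line search
// approximately minimizes subject to sufficient decrease and curvature.
struct Phi {
  std::function<double(const std::vector<double>&)> f;
  std::vector<double> x, s;
  double operator()(double t) const {
    std::vector<double> xt(x.size());
    for (std::size_t i = 0; i < x.size(); ++i) xt[i] = x[i] + t * s[i];
    return f(xt);
  }
};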

Example 15: status

  virtual bool status( const ELineSearch type, int &ls_neval, int &ls_ngrad, const Real alpha, 
                       const Real fold, const Real sgold, const Real fnew, 
                       const Vector<Real> &x, const Vector<Real> &s, 
                       Objective<Real> &obj, BoundConstraint<Real> &con ) { 
    Real tol = std::sqrt(ROL_EPSILON);

    // Check Armijo Condition
    bool armijo = false;
    if ( con.isActivated() ) {
      Real gs = 0.0;
      if ( edesc_ == DESCENT_STEEPEST ) {
        updateIterate(*d_,x,s,alpha,con);
        d_->scale(-1.0);
        d_->plus(x);
        gs = -s.dot(*d_);
      }
      else {
        d_->set(s);
        d_->scale(-1.0);
        con.pruneActive(*d_,*(grad_),x,eps_);
        gs = alpha*(grad_)->dot(*d_);
        d_->zero();
        updateIterate(*d_,x,s,alpha,con);
        d_->scale(-1.0);
        d_->plus(x);
        con.pruneInactive(*d_,*(grad_),x,eps_);
        gs += d_->dot(grad_->dual());
      }
      if ( fnew <= fold - c1_*gs ) {
        armijo = true;
      }
    }
    else {
      if ( fnew <= fold + c1_*alpha*sgold ) {
        armijo = true;
      }
    }

    // Check Maximum Iteration
    bool itcond = false;
    if ( ls_neval >= maxit_ ) { 
      itcond = true;
    }

    // Check Curvature Condition
    bool curvcond = false;
    if ( armijo && ((type != LINESEARCH_BACKTRACKING && type != LINESEARCH_CUBICINTERP) ||
                    (edesc_ == DESCENT_NONLINEARCG)) ) {
      if (econd_ == CURVATURECONDITION_GOLDSTEIN) {
        if (fnew >= fold + (1.0-c1_)*alpha*sgold) {
          curvcond = true;
        }
      }
      else if (econd_ == CURVATURECONDITION_NULL) {
        curvcond = true;
      }
      else { 
        updateIterate(*xtst_,x,s,alpha,con);
        obj.update(*xtst_);
        obj.gradient(*g_,*xtst_,tol);
        Real sgnew = 0.0;
        if ( con.isActivated() ) {
          d_->set(s);
          d_->scale(-alpha);
          con.pruneActive(*d_,s,x);
          sgnew = -d_->dot(g_->dual());
        }
        else {
          sgnew = s.dot(g_->dual());
        }
        ls_ngrad++;
   
        if (    ((econd_ == CURVATURECONDITION_WOLFE)       
                     && (sgnew >= c2_*sgold))
             || ((econd_ == CURVATURECONDITION_STRONGWOLFE) 
                     && (std::abs(sgnew) <= c2_*std::abs(sgold)))
             || ((econd_ == CURVATURECONDITION_GENERALIZEDWOLFE) 
                     && (c2_*sgold <= sgnew && sgnew <= -c3_*sgold))
             || ((econd_ == CURVATURECONDITION_APPROXIMATEWOLFE) 
                     && (c2_*sgold <= sgnew && sgnew <= (2.0*c1_ - 1.0)*sgold)) ) {
          curvcond = true;
        }
      }
    }

    if (type == LINESEARCH_BACKTRACKING || type == LINESEARCH_CUBICINTERP) {
      if (edesc_ == DESCENT_NONLINEARCG) {
        return ((armijo && curvcond) || itcond);
      }
      else {
        return (armijo || itcond);
      }
    }
    else {
      return ((armijo && curvcond) || itcond);
    }
  }
Developer ID: abhishek4747, Project: trilinos, Lines: 97, Source: ROL_LineSearch.hpp
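In the unconstrained branch, the Armijo test accepts alpha when f(x + alpha*s) <= f(x) + c1*alpha*sgold (with sgold = g's < 0), and the Wolfe-type curvature tests bound the new directional derivative sgnew from below (or in absolute value, for strong Wolfe). A self-contained sketch of those two tests follows; the defaults c1 = 1e-4 and c2 = 0.9 are conventional choices, not values taken from this code.

#include <cmath>

// Sketch: unconstrained Armijo and (weak/strong) Wolfe tests,
// mirroring the branches of status() above.
bool armijoOK(double fnew, double fold, double alpha, double sgold,
              double c1 = 1e-4) {
  return fnew <= fold + c1 * alpha * sgold; // sgold < 0 for descent
}

bool wolfeOK(double sgnew, double sgold, double c2 = 0.9,
             bool strong = false) {
  return strong ? (std::abs(sgnew) <= c2 * std::abs(sgold))
                : (sgnew >= c2 * sgold);
}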


Note: The BoundConstraint class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by their respective authors, who retain all copyrights; consult the corresponding project's license before distributing or using the code. Do not reproduce without permission.