C++ SX::numel Method Code Examples

This article collects typical usage examples of the C++ SX::numel method. If you have been wondering how to use SX::numel in C++, what it does, or what calling it looks like in practice, the selected code examples below may help. You can also browse further usage examples of the SX class to which this method belongs.


Two code examples of the SX::numel method are shown below, sorted by popularity by default.
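
Before the full examples, a minimal sketch of what SX::numel reports may be useful. This is an illustration under assumptions, not code taken from the projects below: it uses the legacy CasADi symbolic API (ssym/SX) that the examples rely on, and the header path and namespace may differ between CasADi versions.

// Minimal sketch (assumptions: legacy CasADi API with ssym; header/namespace may vary by version).
// SX::numel() returns the total number of scalar entries in a symbolic matrix, i.e. rows*cols.
#include <casadi/casadi.hpp>
#include <iostream>
#include <vector>

using namespace casadi;
using namespace std;

int main(){
  // A 3-by-4 matrix of symbolic primitives
  SX Z = ssym("Z", 3, 4);

  // Total number of entries: 3*4 = 12
  cout << "Z.numel() = " << Z.numel() << endl;

  // Typical use, as in Example 1 below: size a numeric vector with one value per entry
  vector<double> xinit(Z.numel(), 0.0);
  cout << "xinit.size() = " << xinit.size() << endl;

  return 0;
}

A pattern that recurs in both examples below is exactly this: call numel() on a symbolic expression to size the numeric vectors (initial guesses, bounds) that are passed to the solver.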

Example 1: main


//......... part of the code omitted here .........
  SX Z = ssym("Z",N,K+1);
  
  // State at final time
// SX ZF("ZF");
  
  // All variables
  SX x;
  x << vec(trans(Z));
  // x << vec(ZF);  
  cout << "x = " << x << endl;
  
  // Construct the "NLP"
  SX g;
  for(int i=0; i<N; ++i){
    for(int k=1; k<=K; ++k){
      
      // Add collocation equations to NLP
      SX rhs = 0;
      for(int j=0; j<=K; ++j)
        rhs += Z(i,j)*C[j][k];
      g << (h*F.eval(SX(Z(i,k))) - rhs);
    }
    
    // Add continuity equation to NLP
    SX rhs = 0;
    for(int j=0; j<=K; ++j)
      rhs += D[j]*Z(i,j);

    if(i<N-1)
      g << (SX(Z(i+1,0)) - rhs);
/*  else
      g << (ZF - rhs);*/

  }
  cout << "g = " << g << endl;
    
  SXFunction gfcn(x,g);

  // Dummy objective function
  SXFunction obj(x, Z(0,0)*Z(0,0));
  
  // ----
  // SOLVE THE NLP
  // ----
  
  // Allocate an NLP solver
  IpoptSolver solver(obj,gfcn);

  // Set options
  solver.setOption("tol",1e-10);
  solver.setOption("hessian_approximation","limited-memory");
//   pass_nonlinear_variables

  // initialize the solver
  solver.init();

  // Initial condition
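  // x.numel() gives the total number of decision variables in the NLP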
  vector<double> xinit(x.numel(),0);
  solver.setInput(xinit,"x0");

  // Bounds on x
  vector<double> lbx(x.numel(),-100);
  vector<double> ubx(x.numel(), 100);
  lbx[0] = ubx[0] = z0;
  solver.setInput(lbx,"lbx");
  solver.setInput(ubx,"ubx");
  
  // Bounds on the constraints
  vector<double> lubg(g.numel(),0);
  solver.setInput(lubg,"lbg");
  solver.setInput(lubg,"ubg");
  
  // Solve the problem
  solver.solve();
  
  // Print the time points
  vector<double> t_opt(N*(K+1)+1);
  for(int i=0; i<N; ++i)
    for(int j=0; j<=K; ++j)
      t_opt[j + (K+1)*i] = h*(i + tau_root[j]);
  t_opt.back() = 1;
  
  cout << "time points: " << t_opt << endl;
  resfile << t_opt << endl;
  
  // Print the optimal cost
  cout << "optimal cost: " << solver.output(NLP_SOLVER_F) << endl;

  // Print the optimal solution
  vector<double> xopt(x.numel());
  solver.getOutput(xopt,"x");
  cout << "optimal solution: " << xopt << endl;
  resfile << xopt << endl;
  
  }
 
 resfile.close();
  
  return 0;
}
Developer ID: tmmsartor, Project: casadi, Lines of code: 101, Source file: biegler_10_1.cpp

Example 2: init

void LiftedSQPInternal::init(){
  // Call the init method of the base class
  NlpSolverInternal::init();

  // Number of lifted variables
  nv = getOption("num_lifted");
  if(verbose_){
    cout << "Initializing SQP method with " << nx_ << " variables and " << ng_ << " constraints." << endl;
    cout << "Lifting " << nv << " variables." << endl;
    if(gauss_newton_){
      cout << "Gauss-Newton objective with " << F_.input().numel() << " terms." << endl;
    }
  }
  
  // Read options
  max_iter_ = getOption("max_iter");
  max_iter_ls_ = getOption("max_iter_ls");
  toldx_ = getOption("toldx");
  tolgl_ = getOption("tolgl");
  sigma_ = getOption("sigma");
  rho_ = getOption("rho");
  mu_safety_ = getOption("mu_safety");
  eta_ = getOption("eta");
  tau_ = getOption("tau");
    
  // Assume SXFunction for now
  SXFunction ffcn = shared_cast<SXFunction>(F_);
  casadi_assert(!ffcn.isNull());
  SXFunction gfcn = shared_cast<SXFunction>(G_);
  casadi_assert(!gfcn.isNull());
  
  // Extract the free variables and split into independent and dependent variables
  SX x = ffcn.inputExpr(0);
  int nx = x.size();
  nu = nx-nv;
  SX u = x[Slice(0,nu)];
  SX v = x[Slice(nu,nu+nv)];

  // Extract the constraint equations and split into constraints and definitions of dependent variables
  SX f1 = ffcn.outputExpr(0);
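  // f1.numel() below counts the scalar entries of this output expression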
  int nf1 = f1.numel();
  SX g = gfcn.outputExpr(0);
  int nf2 = g.numel()-nv;
  SX v_eq = g(Slice(0,nv));
  SX f2 = g(Slice(nv,nv+nf2));
  
  // Definition of v
  SX v_def = v_eq + v;

  // Objective function
  SX f;
  
  // Multipliers
  SX lam_x, lam_g, lam_f2;
  if(gauss_newton_){
    
    // Least square objective
    f = inner_prod(f1,f1)/2;
    
  } else {
    
    // Scalar objective function
    f = f1;
    
    // Lagrange multipliers for the simple bounds on u
    SX lam_u = ssym("lam_u",nu);
    
    // Lagrange multipliers for the simple bounds on v
    SX lam_v = ssym("lam_v",nv);
    
    // Lagrange multipliers for the simple bounds on x
    lam_x = vertcat(lam_u,lam_v);

    // Lagrange multipliers corresponding to the definition of the dependent variables
    SX lam_v_eq = ssym("lam_v_eq",nv);

    // Lagrange multipliers for the nonlinear constraints that aren't eliminated
    lam_f2 = ssym("lam_f2",nf2);

    if(verbose_){
      cout << "Allocated intermediate variables." << endl;
    }
    
    // Lagrange multipliers for constraints
    lam_g = vertcat(lam_v_eq,lam_f2);
    
    // Lagrangian function
    SX lag = f + inner_prod(lam_x,x);
    if(!f2.empty()) lag += inner_prod(lam_f2,f2);
    if(!v.empty()) lag += inner_prod(lam_v_eq,v_def);
    
    // Gradient of the Lagrangian
    SX lgrad = casadi::gradient(lag,x);
    if(!v.empty()) lgrad -= vertcat(SX::zeros(nu),lam_v_eq); // Put here to ensure that lgrad is of the form "h_extended -v_extended"
    makeDense(lgrad);
    if(verbose_){
      cout << "Generated the gradient of the Lagrangian." << endl;
    }

    // Condensed gradient of the Lagrangian
//......... part of the code omitted here .........
Developer ID: BrechtBa, Project: casadi, Lines of code: 101, Source file: lifted_sqp_internal.cpp


Note: The SX::numel method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are drawn from open-source projects contributed by various developers; copyright remains with the original authors. For distribution and use, please refer to the corresponding project's License. Do not reproduce without permission.