This page collects typical usage examples of the C++ method vectord::begin. If you have been wondering how exactly to call vectord::begin in C++, or what it looks like in practice, the curated code examples below may help. You can also look further into other usage examples of the vectord class.
The following shows 5 code examples of the vectord::begin method, sorted by popularity by default.
Example 1: run_nlopt
double run_nlopt(nlopt::algorithm algo, eval_func fpointer,
                 vectord& Xnext, int maxf, const std::vector<double>& vd,
                 const std::vector<double>& vu, void* objPointer)
{
  double fmin = 0.0;
  size_t n = Xnext.size();
  nlopt::opt opt(algo, n);
  std::vector<double> xstd(n);

  opt.set_lower_bounds(vd);
  opt.set_upper_bounds(vu);
  opt.set_min_objective(fpointer, objPointer);
  opt.set_maxeval(maxf);

  // It seems BOBYQA can be unstable if the same point is tested
  // over and over. NLOPT bug?
  opt.set_ftol_rel(1e-12);
  opt.set_ftol_abs(1e-12);

  std::copy(Xnext.begin(), Xnext.end(), xstd.begin());
  try
    {
      opt.optimize(xstd, fmin);
    }
  catch (nlopt::roundoff_limited& e)
    {
      FILE_LOG(logDEBUG) << "NLOPT Warning: Potential roundoff error. "
                         << "In general, this can be ignored.";
    }
  std::copy(xstd.begin(), xstd.end(), Xnext.begin());
  return fmin;
}
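A minimal usage sketch, not taken from the original source: a quadratic objective with the raw NLOPT callback signature, minimized over the unit box with the run_nlopt helper above (the sketch must be compiled together with that definition). It assumes eval_func aliases double (*)(unsigned, const double*, double*, void*) and that vectord is boost::numeric::ublas::vector<double>; the algorithm, dimension, and evaluation budget are illustrative.

#include <cstdio>
#include <vector>
#include <boost/numeric/ublas/vector.hpp>
#include <nlopt.hpp>

typedef boost::numeric::ublas::vector<double> vectord;   // assumed typedef

// Sphere function f(x) = sum_i x_i^2; gradient is 2*x when requested.
static double sphere(unsigned n, const double* x, double* grad, void* /*data*/)
{
  double f = 0.0;
  for (unsigned i = 0; i < n; ++i)
    {
      if (grad) grad[i] = 2.0 * x[i];
      f += x[i] * x[i];
    }
  return f;
}

int main()
{
  const size_t dim = 2;
  vectord xnext(dim);
  xnext(0) = 0.7;  xnext(1) = 0.3;                  // starting point inside the box
  std::vector<double> lb(dim, 0.0), ub(dim, 1.0);   // lower/upper bounds
  double fmin = run_nlopt(nlopt::LN_BOBYQA, &sphere, xnext, 100, lb, ub, NULL);
  // xnext now holds the best point found by BOBYQA; fmin its objective value.
  std::printf("fmin = %g at (%g, %g)\n", fmin, xnext(0), xnext(1));
  return 0;
}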
Example 2: computeCrossCorrelation
inline void KernelModel::computeCrossCorrelation(const vecOfvec& XX,
                                                 const vectord& query,
                                                 vectord& knx)
{
  std::vector<vectord>::const_iterator x_it = XX.begin();
  vectord::iterator k_it = knx.begin();
  while (x_it != XX.end())
    {
      *k_it++ = (*mKernel)(*x_it++, query);
    }
}
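The same traversal can also be written with std::transform. This is a sketch, not part of the original code, assuming C++11 lambdas are available, that knx has already been sized to XX.size() (which the iterator version requires as well), and that mKernel stays callable as (*mKernel)(x, query):

std::transform(XX.begin(), XX.end(), knx.begin(),
               [this, &query](const vectord& x) { return (*mKernel)(x, query); });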
Example 3: setHyperParameters
void setHyperParameters(const vectord& theta)
{
  if (theta.size() != n_params)
    {
      throw std::invalid_argument("Wrong number of kernel hyperparameters");
    }
  params = theta;   // TODO: assignment only to reserve enough space; make this more efficient.
  std::transform(theta.begin(), theta.end(), params.begin(),
                 (double (*)(double)) exp);
};
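Because the transform exponentiates every entry, the incoming vector is expected to hold the hyperparameters in log-space. A small sketch with hypothetical values, assuming a kernel object (itself hypothetical) whose n_params equals 2:

vectord theta(2);
theta(0) = std::log(0.5);          // log of a length-scale of 0.5 (illustrative)
theta(1) = std::log(2.0);          // log of an amplitude of 2.0 (illustrative)
kernel.setHyperParameters(theta);  // internally stores params = (0.5, 2.0)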
Example 4: setHyperParameters
inline void KernelRegressor::setHyperParameters(const vectord& theta)
{
  using boost::numeric::ublas::subrange;
  if (mLearnAll)
    {
      size_t nk = mKernel.nHyperParameters();
      size_t nm = mMean.nParameters();

      mKernel.setHyperParameters(subrange(theta, 0, nk));

      vectord result(nm);
      std::transform(theta.begin() + nk, theta.begin() + nk + nm,
                     result.begin(), (double (*)(double)) log);
      mMean.setParameters(result);

      mSigma = std::exp(theta(nk + nm));
    }
  else
    {
      mKernel.setHyperParameters(theta);
    }
};
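When mLearnAll is enabled, the combined vector is therefore laid out as nk kernel hyperparameters, followed by nm mean parameters, followed by one final entry holding the log of sigma. A sketch of assembling such a vector, with hypothetical parameter counts, values, and a hypothetical regressor object:

size_t nk = 1, nm = 1;                 // hypothetical parameter counts
vectord theta(nk + nm + 1);
theta(0) = std::log(1.0);              // kernel hyperparameter, passed on to mKernel
theta(1) = 1.0;                        // mean parameter; the code above takes its log
theta(2) = std::log(1e-2);             // log(sigma); the code above exponentiates it
regressor.setHyperParameters(theta);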
Example 5: setLimits
inline void NLOPT_Optimization::setLimits(const vectord& down, const vectord& up)
{
  std::copy(down.begin(), down.end(), mDown.begin());
  std::copy(up.begin(), up.end(), mUp.begin());
}
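Note that std::copy does not resize its destination, so mDown and mUp must already match the dimension of the bounds being passed in. A usage sketch, with a hypothetical dimension and a hypothetical optimizer object created for that same dimension:

const size_t dim = 2;              // hypothetical problem dimension
vectord lb(dim), ub(dim);
for (size_t i = 0; i < dim; ++i)
  {
    lb(i) = 0.0;                   // lower bound per coordinate
    ub(i) = 1.0;                   // upper bound per coordinate
  }
optimizer.setLimits(lb, ub);       // optimizer: an NLOPT_Optimization of dimension dim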