本文整理汇总了C++中Problem::DotProduct方法的典型用法代码示例。如果您正苦于以下问题:C++ Problem::DotProduct方法的具体用法?C++ Problem::DotProduct怎么用?C++ Problem::DotProduct使用的例子?那么,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类Problem的用法示例。
在下文中一共展示了Problem::DotProduct方法的1个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: prevGradient
//! Minimize \p P using a line-search driven descent method.
/*! Runs repeated line searches along a direction supplied by
    getUpdatedDirection() (e.g. a conjugate-gradient update), until either
    the Numerical Recipes relative-change criterion on f(x) fires, the
    maximum iteration count is reached, or the line search itself fails.

    \param P           optimization problem; its current value, function
                       value and gradient norm are updated in place.
    \param endCriteria convergence tolerances and iteration limits.
    \return the EndCriteria::Type describing why the loop terminated.
*/
EndCriteria::Type
LineSearchBasedMethod::minimize(Problem& P,
                                const EndCriteria& endCriteria) {
    // Tolerance for the relative-change exit test on f(x) (see below).
    const RealType ftol = endCriteria.functionEpsilon();
    size_t maxStationaryStateIterations_
        = endCriteria.maxStationaryStateIterations();
    EndCriteria::Type ecType = EndCriteria::None;  // reset end criteria
    P.reset();                                     // reset problem
    DynamicVector<RealType> x_ = P.currentValue(); // store the starting point
    size_t iterationNumber_ = 0;
    // Dimension the line-search direction to the size of the problem.
    lineSearch_->searchDirection() = DynamicVector<RealType>(x_.size());
    // Classical initial value for the line-search step.
    RealType t = 1.0;
    const size_t sz = lineSearch_->searchDirection().size();
    // Gradient at the current point; refreshed at the top of each iteration.
    DynamicVector<RealType> prevGradient(sz);
    // Initialize objective function value, gradient and search direction.
    P.setFunctionValue(P.valueAndGradient(prevGradient, x_));
    P.setGradientNormValue(P.DotProduct(prevGradient, prevGradient));
    lineSearch_->searchDirection() = -prevGradient;

    bool first_time = true;
    // Loop over iterations until convergence or line-search failure.
    for (;;) {
        // Linesearch
        if (!first_time)
            prevGradient = lineSearch_->lastGradient();
        t = (*lineSearch_)(P, ecType, endCriteria, t);
        // Don't throw: the line search can fail merely because its
        // maxIterations was exceeded; report the best point found so far.
        if (!lineSearch_->succeed())
            break;
        // Accept the new point produced by the line search.
        x_ = lineSearch_->lastX();
        const RealType fold = P.functionValue();
        P.setFunctionValue(lineSearch_->lastFunctionValue());
        // Previous squared gradient norm (orthogonalization coefficient
        // for the direction update).
        const RealType gold2 = P.gradientNormValue();
        P.setGradientNormValue(lineSearch_->lastGradientNorm2());
        // Method-specific (e.g. conjugate gradient) search direction.
        const DynamicVector<RealType> direction =
            getUpdatedDirection(P, gold2, prevGradient);
        lineSearch_->searchDirection() = direction;
        // Numerical Recipes exit strategy on f(x) (see NR in C++, p.423):
        // stop when the relative change in the function value is below ftol.
        const RealType fnew = P.functionValue();
        const RealType fdiff = 2.0*std::fabs(fnew - fold) /
            (std::fabs(fnew) + std::fabs(fold) +
             std::numeric_limits<RealType>::epsilon());
        if (fdiff < ftol ||
            endCriteria.checkMaxIterations(iterationNumber_, ecType)) {
            // Record which criterion fired in ecType before returning.
            endCriteria.checkStationaryFunctionValue(0.0, 0.0,
                maxStationaryStateIterations_, ecType);
            endCriteria.checkMaxIterations(iterationNumber_, ecType);
            return ecType;
        }
        P.setCurrentValue(x_); // update problem current value
        ++iterationNumber_;    // increase iteration number
        first_time = false;
    }
    // Line search failed: publish the last accepted point and return the
    // end-criteria state set by the line search.
    P.setCurrentValue(x_);
    return ecType;
}