This article collects typical usage examples of the C++ method Evaluator::get_result. If you are wondering how to use Evaluator::get_result in C++, what it is for, or what it looks like in real code, the hand-picked examples here may help. You can also look further into usage examples of the enclosing class, Evaluator.
One code example of the Evaluator::get_result method is shown below; by default, examples are sorted by popularity.
Example 1: operator()
/* This function does the bulk of the forward-phase heavy lifting */
void operator()() {
  /* Sentinel-initialized variables that determine when to stop */
  const double SENTINEL = std::numeric_limits<double>::min();
  double best_gain = SENTINEL;
  double prev_cost = SENTINEL;
  double best_confidence = SENTINEL;
  /**
   * Add each of the forced candidates to the list of selected candidates
   * and compute an initial model so that we can compute the residual later.
   */
  if (0 < forced.size()) {
    typename SetContainer::iterator first = forced.begin();
    while (first != forced.end()) {
      add_group (*first);
      selected.push_back (*first);
      ++first;
    }
    Solver::solve (test_A, x, y, lambda, M, (N*selected.size()));
    best_gain = prev_cost =
      CostFunctor::cost (test_A, y, x, lambda, M, (N*selected.size()));
    best_confidence = ConfidenceFunctor::confidence
      (test_A, y, y_inv, x, M, N*(1+selected.size()));
  }
  /**
   * Each iteration corresponds to picking one group. We only need to
   * pick the remaining groups --- we have already included all forced ones.
   */
  const int num_iters = MAX_ITERS - selected.size();
  for (int iter = 0; iter < num_iters; ++iter) {
#if USE_PFUNC
    task root_task;
    attribute root_attribute (false /*nested*/, false /*grouped*/);
#endif
    double iter_time = micro_time();

    /* Compute the residual for this iteration */
    CostFunctor::residual (test_A, x, y, r, M, (selected.size()*N));

    /* Evaluate all the candidates and get the best one */
    Evaluator evaluator
      (&A, &r, &kpi_weights, &factorizer, &map, &filter, lambda, M, N);
#if USE_PFUNC
    ReduceType evaluate (my_space, evaluator, *global_taskmgr);
    pfunc::spawn (*global_taskmgr, root_task, root_attribute, evaluate);
    pfunc::wait (*global_taskmgr, root_task);
#else
    evaluator (my_space);
#endif
    /* Reduce the local best with the global minimum */
    value_type local_best = evaluator.get_result();
    value_type global_best = find_global_min (local_best);

    /* Create the new model and compute some statistics about it */
    add_group (global_best.first);
    Solver::solve (test_A, x, y, lambda, M, N*(1+selected.size()));
    const double cost =
      CostFunctor::cost (test_A, x, y, lambda, M, N*(1+selected.size()));
    const double gain = ((SENTINEL==prev_cost) ? (cost) : (prev_cost-cost));
    const double confidence = ConfidenceFunctor::confidence
      (test_A, y, y_inv, x, M, N*(1+selected.size()));

    /* Check whether this is the best confidence so far. If so, record the variance. */
    if ((SENTINEL==best_confidence) ||
        (compare (confidence, best_confidence))) {
      best_confidence = confidence;
      variance = CostFunctor::cost
        (test_A, x, y, 0.0/*NO REGUL*/, M, N*(1+selected.size()));
    }
    /* Check if we need to stop --- if so, don't add the current candidate */
    if (stop (best_gain, gain, confidence)) {
      if (ROOT==mpi_rank && 2<debug) {
        printf ("stopping at iteration %d\n", iter);
        printf ("cost = %lf, gain = %lf, confidence = %lf\n",
                cost, gain, confidence);
      }
      break;
    }

    if (ROOT==mpi_rank && 2<debug)
      printf ("selecting %d with cost = %lf, local best = %d, cost = %lf\n",
              global_best.first, cost, local_best.first, local_best.second);

    /* Add to the list of selected KPIs */
    selected.push_back (global_best.first);

    /* Update gain and prev_cost */
    prev_cost = cost;
    if (gain > best_gain) best_gain = gain;

    iter_time = micro_time() - iter_time;
    if (mpi_rank==ROOT && 2<debug) {
      printf ("Iteration %d: %lf (per sec)\n",
              iter, (mpi_size*(my_space.end()-my_space.begin()))/iter_time);
    }
  }
//......... part of the code is omitted here .........
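The core pattern for Evaluator::get_result in the example above is: construct the Evaluator with the data it needs, apply it over an index range (either directly or wrapped in a PFunc reduce task), and then call get_result() to retrieve the best (candidate index, cost) pair found over that range, which is afterwards reduced across MPI ranks with find_global_min. The sketch below only mimics that interface with a hypothetical CandidateEvaluator and made-up cost data; the real Evaluator additionally carries the data matrix, residual, KPI weights, factorizer, map and filter seen in the example.

#include <cstdio>
#include <limits>
#include <utility>
#include <vector>

// Hypothetical stand-in for the project's Evaluator: it scores each
// candidate index in a range and remembers the best (index, cost) pair.
struct CandidateEvaluator {
  using value_type = std::pair<int, double>;

  explicit CandidateEvaluator(const std::vector<double>& costs)
      : costs_(&costs),
        best_(-1, std::numeric_limits<double>::max()) {}

  // Evaluate every candidate in [begin, end) and keep the minimum cost.
  void operator()(int begin, int end) {
    for (int i = begin; i < end; ++i)
      if ((*costs_)[i] < best_.second) best_ = {i, (*costs_)[i]};
  }

  // After operator() has run, get_result() returns the local best pair,
  // ready to be reduced against the results from other tasks or ranks.
  value_type get_result() const { return best_; }

 private:
  const std::vector<double>* costs_;
  value_type best_;
};

int main() {
  std::vector<double> costs = {3.5, 1.25, 4.0, 0.75, 2.0};
  CandidateEvaluator evaluator(costs);
  evaluator(0, static_cast<int>(costs.size()));  // serial path, like "evaluator (my_space)"
  CandidateEvaluator::value_type local_best = evaluator.get_result();
  std::printf("best candidate = %d, cost = %lf\n",
              local_best.first, local_best.second);
  return 0;
}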