本文整理汇总了C++中BaseLearner::getName方法的典型用法代码示例。如果您正苦于以下问题:C++ BaseLearner::getName方法的具体用法?C++ BaseLearner::getName怎么用?C++ BaseLearner::getName使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类BaseLearner
的用法示例。
在下文中一共展示了BaseLearner::getName方法的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: run
//......... [part of the code omitted here in the original listing] .........
// Fragment of a boosting learner's training loop (FilterBoost-style, judging
// by the filter() calls and _sumAlpha — TODO confirm against the full file).
// NOTE(review): this listing starts mid-function; the declarations of
// pTrainingData, pWeakHypothesis, pConstantWeakHypothesis, energy, edge,
// t, ss, pOutInfo, startTime, etc. are not visible here.
// If filtering left fewer than two usable examples, refill the working set.
if ( pTrainingData->getNumExamples() < 2 )
{
filter( pTrainingData, currentNumberOfUsedData, false );
}
if (_verbose > 1)
{
cout << "--> Size of training data = " << pTrainingData->getNumExamples() << endl;
}
// (Re-)train both the weak and the constant hypothesis on the refreshed set.
energy = pWeakHypothesis->run();
pConstantWeakHypothesis->run();
}
// Estimate the edge on a freshly filtered sample.
filter( pTrainingData, currentNumberOfUsedData, false );
edge = pWeakHypothesis->getEdge(true) / 2.0;
constantEdge = pConstantWeakHypothesis->getEdge() / 2.0;
// Keep whichever hypothesis has the larger edge; free the loser.
if ( constantEdge > edge )
{
delete pWeakHypothesis;
pWeakHypothesis = pConstantWeakHypothesis;
edge = constantEdge;
} else {
delete pConstantWeakHypothesis;
}
// Calculate the confidence: alpha = 1/2 * ln((1 + edge) / (1 - edge)).
AlphaReal alpha = 0.0;
alpha = 0.5 * log( ( 1 + edge ) / ( 1 - edge ) );
pWeakHypothesis->setAlpha( alpha );
_sumAlpha += alpha;
if (_verbose > 1)
cout << "Weak learner: " << pWeakHypothesis->getName()<< endl;
// Output the step-by-step information for iteration t.
pTrainingData->clearIndexSet();
printOutputInfo(pOutInfo, t, pTrainingData, pTestData, pWeakHypothesis);
// Weight update is disabled here (margins are updated below instead).
//AlphaReal gamma = updateWeights(pTrainingData, pWeakHypothesis);
if (_verbose > 1)
{
cout << setprecision(5)
<< "--> Alpha = " << pWeakHypothesis->getAlpha() << endl
<< "--> Edge  = " << edge << endl
<< "--> Energy  = " << energy << endl
// << "--> ConstantEnergy  = " << constantEnergy << endl
// << "--> difference  = " << (energy - constantEnergy) << endl
;
}
// Update the per-example margins with the new hypothesis.
//saveMargins();
updateMargins( pTrainingData, pWeakHypothesis );
// Append the current weak learner to the strong hypothesis file,
// that is, serialize it.
ss.appendHypothesis(t, pWeakHypothesis);
// Add it to the internal list of weak hypotheses.
_foundHypotheses.push_back(pWeakHypothesis);
// Check if the wall-clock time limit has been reached.
if (_maxTime > 0)
{
// NOTE(review): "¤tTime" is an HTML-entity mangling of "&currentTime"
// ("&curren" was decoded as the ¤ sign) — restore before compiling.
time( ¤tTime );
float diff = difftime(currentTime, startTime); // difftime is in seconds
diff /= 60; // = minutes
if (diff > _maxTime)
{
if (_verbose > 0)
cout << "Time limit of " << _maxTime
<< " minutes has been reached!" << endl;
break;
}
} // check for maxtime
// NOTE(review): pWeakHypothesis is deleted here even though it was pushed
// into _foundHypotheses above, leaving a dangling pointer in the vector.
// Verify against the full file; example 2 below does NOT delete it.
delete pWeakHypothesis;
} // loop on iterations
/////////////////////////////////////////////////////////
// Write the footer of the strong hypothesis file.
ss.writeFooter();
// Free the two input data objects (this method owns them at this point).
if (pTrainingData)
delete pTrainingData;
if (pTestData)
delete pTestData;
if (pOutInfo)
delete pOutInfo;
if (_verbose > 0)
cout << "Learning completed." << endl;
}
示例2: run
// -------------------------------------------------------------------------
// Runs the AdaBoost.MH main loop: at every iteration a fresh weak hypothesis
// is created and trained, optionally compared against a constant learner,
// used to re-weight the training examples, and appended to foundHypotheses.
//
// Parameters:
//   args            - parsed command-line options, forwarded to the learners
//   pTrainingData   - training set (borrowed; NOT freed here)
//   baseLearnerName - registered name of the weak-learner type to use
//   numIterations   - number of boosting iterations T
//   foundHypotheses - [out] receives one trained hypothesis per iteration;
//                     ownership of the pointers passes to the caller
void AdaBoostMHLearner::run( const nor_utils::Args& args, InputData* pTrainingData, const string baseLearnerName, const int numIterations, vector<BaseLearner*>& foundHypotheses )
{
    // Get the registered weak learner prototype (type resolved from name).
    BaseLearner* pWeakHypothesisSource =
        BaseLearner::RegisteredLearners().getLearner(baseLearnerName);
    // Initialize learning options here (normally it's done in the strong
    // loop); Product learners need them before input data can be created.
    pWeakHypothesisSource->initLearningOptions(args);
    BaseLearner* pConstantWeakHypothesisSource =
        BaseLearner::RegisteredLearners().getLearner("ConstantLearner");
    if (_verbose == 1)
        cout << "Learning in progress..." << endl;
    ///////////////////////////////////////////////////////////////////////
    // Starting the AdaBoost main loop
    ///////////////////////////////////////////////////////////////////////
    for (int t = 0; t < numIterations; ++t)
    {
        if ((_verbose > 0)&&((t%100)==0))
            cout << "--------------[ Boosting iteration " << (t+1) << " ]--------------" << endl;
        // Create and train this iteration's weak hypothesis.
        BaseLearner* pWeakHypothesis = pWeakHypothesisSource->create();
        pWeakHypothesis->initLearningOptions(args);
        //pTrainingData->clearIndexSet();
        pWeakHypothesis->setTrainingData(pTrainingData);
        AlphaReal energy = pWeakHypothesis->run();
        // Try the constant learner when the user requested it, or when the
        // energy is NaN (energy != energy) and we need a fallback.
        if ( (_withConstantLearner) || ( energy != energy ) )
        {
            BaseLearner* pConstantWeakHypothesis = pConstantWeakHypothesisSource->create() ;
            pConstantWeakHypothesis->initLearningOptions(args);
            pConstantWeakHypothesis->setTrainingData(pTrainingData);
            AlphaReal constantEnergy = pConstantWeakHypothesis->run();
            if ( (constantEnergy <= energy) || ( energy != energy ) )
            {
                // The constant learner is at least as good (or the weak
                // learner diverged): keep the constant one instead.
                delete pWeakHypothesis;
                pWeakHypothesis = pConstantWeakHypothesis;
            }
            else
            {
                // BUGFIX: the rejected constant hypothesis was previously
                // leaked here; release it now that the weak learner won.
                delete pConstantWeakHypothesis;
            }
        }
        if (_verbose > 1)
            cout << "Weak learner: " << pWeakHypothesis->getName()<< endl;
        // Update the example weights; gamma is the resulting edge.
        AlphaReal gamma = updateWeights(pTrainingData, pWeakHypothesis);
        if (_verbose > 1)
        {
            cout << setprecision(5)
            << "--> Alpha = " << pWeakHypothesis->getAlpha() << endl
            << "--> Edge  = " << gamma << endl
            << "--> Energy  = " << energy << endl
            // << "--> ConstantEnergy  = " << constantEnergy << endl
            // << "--> difference  = " << (energy - constantEnergy) << endl
            ;
        }
        // If gamma <= theta the algorithm should stop: with theta == 0 a
        // zero gamma means the weak learner is no better than chance and
        // no further training is possible.
        if (gamma <= _theta)
        {
            if (_verbose > 0)
            {
                cout << "Can't train any further: edge = " << gamma
                << " (with an edge offset (theta)=" << _theta << ")" << endl;
            }
            // NOTE: stopping is deliberately disabled; the loop keeps going.
            // delete pWeakHypothesis;
            // break;
        }
        // Hand the hypothesis over to the caller's list.
        foundHypotheses.push_back(pWeakHypothesis);
    } // loop on iterations
    /////////////////////////////////////////////////////////
    if (_verbose > 0)
        cout << "--------------[ AdaBoost Learning completed. ]--------------" << endl;
}
示例3: run
//......... [part of the code omitted here in the original listing] .........
// Fragment of a filtering-boosting training loop (uses _Cn * log(t+2) as the
// filtered sample size — TODO confirm learner class against the full file).
// NOTE(review): this listing starts mid-function; declarations of
// pWeakHypothesis, pTrainingData, t, ss, pOutInfo, startTime, etc. are
// not visible here.
pWeakHypothesis->initLearningOptions(args);
//pTrainingData->clearIndexSet();
pWeakHypothesis->setTrainingData(pTrainingData);
float energy = pWeakHypothesis->run();
// Declared outside the guard so the comparison block below can use it;
// it is only ever touched when _withConstantLearner is set, so the
// uninitialized pointer is never dereferenced otherwise.
BaseLearner* pConstantWeakHypothesis;
if (_withConstantLearner) // check constant learner if user wants it
{
pConstantWeakHypothesis = pConstantWeakHypothesisSource->create() ;
pConstantWeakHypothesis->initLearningOptions(args);
pConstantWeakHypothesis->setTrainingData(pTrainingData);
float constantEnergy = pConstantWeakHypothesis->run();
}
// Estimate the edge on a freshly filtered sample of size _Cn * log(t+2).
filter( pTrainingData, (int)(_Cn * log(t+2.0)), false );
float edge = pWeakHypothesis->getEdge() / 2.0;
if (_withConstantLearner) // check constant learner if user wants it
{
float constantEdge = pConstantWeakHypothesis->getEdge() / 2.0;
// Keep whichever hypothesis has the larger edge; free the loser.
if ( constantEdge > edge )
{
delete pWeakHypothesis;
pWeakHypothesis = pConstantWeakHypothesis;
edge = constantEdge;
} else {
delete pConstantWeakHypothesis;
}
}
// Calculate the confidence; note this variant uses
// alpha = 1/2 * ln((0.5 + edge) / (0.5 - edge)) (edge scaled differently
// from example 1 above).
float alpha = 0.0;
alpha = 0.5 * log( ( 0.5 + edge ) / ( 0.5 - edge ) );
pWeakHypothesis->setAlpha( alpha );
if (_verbose > 1)
cout << "Weak learner: " << pWeakHypothesis->getName()<< endl;
// Output the step-by-step information for iteration t.
pTrainingData->clearIndexSet();
printOutputInfo(pOutInfo, t, pTrainingData, pTestData, pWeakHypothesis);
// Update the weights; gamma is the resulting edge.
float gamma = updateWeights(pTrainingData, pWeakHypothesis);
if (_verbose > 1)
{
cout << setprecision(5)
<< "--> Alpha = " << pWeakHypothesis->getAlpha() << endl
<< "--> Edge  = " << gamma << endl
<< "--> Energy  = " << energy << endl
// << "--> ConstantEnergy  = " << constantEnergy << endl
// << "--> difference  = " << (energy - constantEnergy) << endl
;
}
// Update the per-example margins with the new hypothesis.
updateMargins( pTrainingData, pWeakHypothesis );
// Append the current weak learner to the strong hypothesis file,
// that is, serialize it.
ss.appendHypothesis(t, pWeakHypothesis);
// Add it to the internal list of weak hypotheses.
_foundHypotheses.push_back(pWeakHypothesis);
// Check if the wall-clock time limit has been reached.
if (_maxTime > 0)
{
// NOTE(review): "¤tTime" is an HTML-entity mangling of "&currentTime"
// ("&curren" was decoded as the ¤ sign) — restore before compiling.
time( ¤tTime );
float diff = difftime(currentTime, startTime); // difftime is in seconds
diff /= 60; // = minutes
if (diff > _maxTime)
{
if (_verbose > 0)
cout << "Time limit of " << _maxTime
<< " minutes has been reached!" << endl;
break;
}
} // check for maxtime
// NOTE(review): pWeakHypothesis is deleted here even though it was pushed
// into _foundHypotheses above, leaving a dangling pointer in the vector.
// Verify against the full file; example 2 above does NOT delete it.
delete pWeakHypothesis;
} // loop on iterations
/////////////////////////////////////////////////////////
// Write the footer of the strong hypothesis file.
ss.writeFooter();
// Free the two input data objects (this method owns them at this point).
if (pTrainingData)
delete pTrainingData;
if (pTestData)
delete pTestData;
if (pOutInfo)
delete pOutInfo;
if (_verbose > 0)
cout << "Learning completed." << endl;
}