This article collects typical usage examples of the C++ method OutputInfo::initialize. If you have been wondering what OutputInfo::initialize does, how to call it, or what code that uses it looks like, the curated examples below may help. You can also explore further usage examples of the containing class, OutputInfo.
The following 7 code examples of OutputInfo::initialize are shown, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C++ code examples.
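Before the individual examples, here is the recurring pattern they all share, condensed into a minimal sketch. It assumes the MultiBoost codebase these examples come from (OutputInfo, InputData); the "err" metric list and the _outputInfoFile member are placeholders taken from Example 1, not documented defaults.

// Minimal sketch of the OutputInfo::initialize lifecycle (assumptions:
// pData is a loaded InputData*; _outputInfoFile names the report file):
OutputInfo* pOutInfo = NULL;
if ( !_outputInfoFile.empty() )
    pOutInfo = new OutputInfo(_outputInfoFile, "err");  // open the report file
if ( pOutInfo )
{
    pOutInfo->initialize( pData );                      // register the data set
    pOutInfo->outputHeader( pData->getClassMap() );     // write the column header
}
// ... per-iteration reporting: outputIteration(t), outputCustom(...), endLine() ...
delete pOutInfo;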
Example 1: saveLikelihoods
void MDDAGClassifier::saveLikelihoods(const string& dataFileName, const string& shypFileName,
const string& outFileName, int numIterations)
{
InputData* pData = loadInputData(dataFileName, shypFileName);
if (_verbose > 0)
cout << "Loading strong hypothesis..." << flush;
// The class that loads the weak hypotheses
UnSerialization us;
// Where to put the weak hypotheses
vector<BaseLearner*> weakHypotheses;
// loads them
us.loadHypotheses(shypFileName, weakHypotheses, pData);
// where the results go
vector< ExampleResults* > results;
if (_verbose > 0)
cout << "Classifying..." << flush;
const int numClasses = pData->getNumClasses();
const int numExamples = pData->getNumExamples();
ofstream outFile(outFileName.c_str());
string exampleName;
if (_verbose > 0)
cout << "Output likelihoods..." << flush;
// get the results
/////////////////////////////////////////////////////////////////////
// computeResults( pData, weakHypotheses, results, numIterations );
assert( !weakHypotheses.empty() );
// Initialize the output info
OutputInfo* pOutInfo = NULL;
if ( !_outputInfoFile.empty() )
pOutInfo = new OutputInfo(_outputInfoFile, "err");
// Creating the results structures. See file Structures.h for the
// PointResults structure
results.clear();
results.reserve(numExamples);
for (int i = 0; i < numExamples; ++i)
results.push_back( new ExampleResults(i, numClasses) );
// sum votes for classes
vector< AlphaReal > votesForExamples( numClasses );
vector< AlphaReal > expVotesForExamples( numClasses );
// iterator over all the weak hypotheses
vector<BaseLearner*>::const_iterator whyIt;
int t;
if ( pOutInfo ) // guard: pOutInfo is NULL when no output-info file was given
pOutInfo->initialize( pData );
// for every weak hypothesis: 1..T
for (whyIt = weakHypotheses.begin(), t = 0;
whyIt != weakHypotheses.end() && t < numIterations; ++whyIt, ++t)
{
BaseLearner* currWeakHyp = *whyIt;
AlphaReal alpha = currWeakHyp->getAlpha();
// for every point
for (int i = 0; i < numExamples; ++i)
{
// a reference for clarity and speed
vector<AlphaReal>& currVotesVector = results[i]->getVotesVector();
// for every class
for (int l = 0; l < numClasses; ++l)
currVotesVector[l] += alpha * currWeakHyp->classify(pData, i, l);
}
// if needed output the step-by-step information
if ( pOutInfo )
{
pOutInfo->outputIteration(t);
pOutInfo->outputCustom(pData, currWeakHyp);
// Margins and edges require an update of the weights,
// so I keep them out for the moment
//outInfo.outputMargins(pData, currWeakHyp);
//outInfo.outputEdge(pData, currWeakHyp);
pOutInfo->endLine();
} // if ( pOutInfo )
// calculate likelihoods from votes
fill( votesForExamples.begin(), votesForExamples.end(), 0.0 );
AlphaReal lLambda = 0.0;
for (int i = 0; i < numExamples; ++i)
{
// a reference for clarity and speed
//......... part of the code omitted here .........
Example 2: computeResults
// Returns the results in the results vector
void MDDAGClassifier::computeResults(InputData* pData, vector<BaseLearner*>& weakHypotheses,
vector< ExampleResults* >& results, int numIterations)
{
assert( !weakHypotheses.empty() );
const int numClasses = pData->getNumClasses();
const int numExamples = pData->getNumExamples();
// Initialize the output info
OutputInfo* pOutInfo = NULL;
if ( !_outputInfoFile.empty() )
{
if ( _args.getNumValues("outputinfo") > 1 )
{
pOutInfo = new OutputInfo(_args);
}
else
{
pOutInfo = new OutputInfo(_outputInfoFile, "e01hamauc", false);
}
}
// Creating the results structures. See file Structures.h for the
// PointResults structure
results.clear();
results.reserve(numExamples);
for (int i = 0; i < numExamples; ++i)
results.push_back( new ExampleResults(i, numClasses) );
// iterator over all the weak hypotheses
vector<BaseLearner*>::const_iterator whyIt;
int t;
if ( pOutInfo )
{
pOutInfo->initialize( pData );
pOutInfo->outputHeader(pData->getClassMap(),
true, // output iterations
false, // output time
true // endline
);
}
// for every weak hypothesis: 1..T
for (whyIt = weakHypotheses.begin(), t = 0;
whyIt != weakHypotheses.end() && t < numIterations; ++whyIt, ++t)
{
BaseLearner* currWeakHyp = *whyIt;
AlphaReal alpha = currWeakHyp->getAlpha();
// for every point
for (int i = 0; i < numExamples; ++i)
{
// a reference for clarity and speed
vector<AlphaReal>& currVotesVector = results[i]->getVotesVector();
// for every class
for (int l = 0; l < numClasses; ++l)
currVotesVector[l] += alpha * currWeakHyp->classify(pData, i, l);
}
// if needed output the step-by-step information
if ( pOutInfo )
{
pOutInfo->outputIteration(t);
// pOutInfo->outputError(pData, currWeakHyp);
// pOutInfo->outTPRFPR(pData);
//pOutInfo->outputBalancedError(pData, currWeakHyp);
// if ( ( t % 1 ) == 0 ) {
// pOutInfo->outputROC(pData);
// }
pOutInfo->outputCustom(pData, currWeakHyp);
// Margins and edges require an update of the weights,
// so I keep them out for the moment
//outInfo.outputMargins(pData, currWeakHyp);
//outInfo.outputEdge(pData, currWeakHyp);
pOutInfo->endLine();
}
}
if (pOutInfo)
delete pOutInfo;
}
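As a complement to Example 2, here is a hedged sketch of how computeResults might be invoked, assembled from the helper calls shown in Example 1 (loadInputData, UnSerialization::loadHypotheses). The classifier instance, the variable names, and the public accessibility of these helpers are assumptions for illustration only.

// Hypothetical call site for MDDAGClassifier::computeResults (not verbatim code):
MDDAGClassifier classifier;                              // assumed constructed elsewhere
InputData* pData = classifier.loadInputData(dataFileName, shypFileName);
UnSerialization us;
vector<BaseLearner*> weakHypotheses;                     // the loaded weak learners
us.loadHypotheses(shypFileName, weakHypotheses, pData);
vector<ExampleResults*> results;                         // filled by computeResults
classifier.computeResults(pData, weakHypotheses, results, numIterations);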
Example 3: run
void FilterBoostLearner::run(const nor_utils::Args& args)
{
// load the arguments
this->getArgs(args);
time_t startTime, currentTime;
time(&startTime);
// get the registered weak learner (type from name)
BaseLearner* pWeakHypothesisSource =
BaseLearner::RegisteredLearners().getLearner(_baseLearnerName);
// initialize the learning options; normally this is done in the strong loop,
// but we also do it here for Product learners, so the input data can be created
pWeakHypothesisSource->initLearningOptions(args);
BaseLearner* pConstantWeakHypothesisSource =
BaseLearner::RegisteredLearners().getLearner("ConstantLearner");
// get the training input data, and load it
InputData* pTrainingData = pWeakHypothesisSource->createInputData();
pTrainingData->initOptions(args);
pTrainingData->load(_trainFileName, IT_TRAIN, _verbose);
const int numClasses = pTrainingData->getNumClasses();
const int numExamples = pTrainingData->getNumExamples();
//initialize the margins variable
_margins.resize( numExamples );
for( int i=0; i<numExamples; i++ )
{
_margins[i].resize( numClasses );
fill( _margins[i].begin(), _margins[i].end(), 0.0 );
}
// get the testing input data, and load it
InputData* pTestData = NULL;
if ( !_testFileName.empty() )
{
pTestData = pWeakHypothesisSource->createInputData();
pTestData->initOptions(args);
pTestData->load(_testFileName, IT_TEST, _verbose);
}
// The output information object
OutputInfo* pOutInfo = NULL;
if ( !_outputInfoFile.empty() )
{
// Baseline: constant classifier - goes into 0th iteration
BaseLearner* pConstantWeakHypothesis = pConstantWeakHypothesisSource->create();
pConstantWeakHypothesis->initLearningOptions(args);
pConstantWeakHypothesis->setTrainingData(pTrainingData);
AlphaReal constantEnergy = pConstantWeakHypothesis->run();
pOutInfo = new OutputInfo(args);
pOutInfo->initialize(pTrainingData);
updateMargins( pTrainingData, pConstantWeakHypothesis );
if (pTestData)
pOutInfo->initialize(pTestData);
pOutInfo->outputHeader(pTrainingData->getClassMap() );
pOutInfo->outputIteration(-1);
pOutInfo->outputCustom(pTrainingData, pConstantWeakHypothesis);
if (pTestData)
{
pOutInfo->separator();
pOutInfo->outputCustom(pTestData, pConstantWeakHypothesis);
}
pOutInfo->outputCurrentTime();
pOutInfo->endLine();
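// NOTE: pOutInfo->initialize is called once more below; this appears to reset
// the per-dataset output state after the 0th-iteration (constant learner) line,
// so the main boosting loop starts from a clean state.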
pOutInfo->initialize(pTrainingData);
if (pTestData)
pOutInfo->initialize(pTestData);
}
// reload the previously found weak learners if -resume is set;
// otherwise this just returns 0
int startingIteration = resumeWeakLearners(pTrainingData);
Serialization ss(_shypFileName, _isShypCompressed );
ss.writeHeader(_baseLearnerName); // this must go after resumeWeakLearners has been called
// perform the resuming if necessary; if not, this just returns
resumeProcess(ss, pTrainingData, pTestData, pOutInfo);
if (_verbose == 1)
cout << "Learning in progress..." << endl;
///////////////////////////////////////////////////////////////////////
// Starting the AdaBoost main loop
//......... part of the code omitted here .........
Example 4: main
//......... part of the code omitted here .........
InputData* pAutoassociativeData = new InputData();
pAutoassociativeData->initOptions(args);
pAutoassociativeData->load(autoassociativeFileName,IT_TRAIN,verbose);
// for the original labels
InputData* pLabelsData = new InputData();
pLabelsData->initOptions(args);
pLabelsData->load(labelsFileName,IT_TRAIN,verbose);
// set up all the InputData members identically to pAutoassociativeData
EncodeData* pOnePoint = new EncodeData();
pOnePoint->initOptions(args);
pOnePoint->load(autoassociativeFileName,IT_TRAIN,verbose);
const int numExamples = pAutoassociativeData->getNumExamples();
BaseLearner* pWeakHypothesisSource =
BaseLearner::RegisteredLearners().getLearner("ParasiteLearner");
pWeakHypothesisSource->declareArguments(args);
ParasiteLearner* pWeakHypothesis;
ofstream outFile(outputFileName.c_str());
if (!outFile.is_open())
{
cerr << "ERROR: Cannot open strong hypothesis file <" << outputFileName << ">!" << endl;
exit(1);
}
for (int i = 0; i < numExamples ; ++i)
{
vector<float> alphas;
alphas.resize(numBaseLearners);
fill(alphas.begin(), alphas.end(), 0);
if (verbose >= 1)
cout << "--> Encoding example no " << (i+1) << endl;
pOnePoint->resetData();
pOnePoint->addExample( pAutoassociativeData->getExample(i) );
AlphaReal energy = 1;
OutputInfo* pOutInfo = NULL;
if ( args.hasArgument("outputinfo") )
{
args.getValue("outputinfo", 0, outputInfoFile);
pOutInfo = new OutputInfo(args);
pOutInfo->initialize(pOnePoint);
}
for (int t = 0; t < numIterations; ++t)
{
pWeakHypothesis = (ParasiteLearner*)pWeakHypothesisSource->create();
pWeakHypothesis->initLearningOptions(args);
pWeakHypothesis->setTrainingData(pOnePoint);
energy *= pWeakHypothesis->run();
// if (verbose >= 2)
// cout << "energy = " << energy << endl << flush;
AdaBoostMHLearner adaBoostMHLearner;
if (i == 0 && t == 0)
{
if ( pWeakHypothesis->getBaseLearners().size() < numBaseLearners )
numBaseLearners = pWeakHypothesis->getBaseLearners().size();
outFile << "%Hidden representation using autoassociative boosting" << endl << endl;
outFile << "@RELATION " << outputFileName << endl << endl;
outFile << "% numBaseLearners" << endl;
for (int j = 0; j < numBaseLearners; ++j)
outFile << "@ATTRIBUTE " << j << "_" <<
pWeakHypothesis->getBaseLearners()[j]->getId() << " NUMERIC" << endl;
outFile << "@ATTRIBUTE class {" << pLabelsData->getClassMap().getNameFromIdx(0);
for (int l = 1; l < pLabelsData->getClassMap().getNumNames(); ++l)
outFile << ", " << pLabelsData->getClassMap().getNameFromIdx(l);
outFile << "}" << endl<< endl<< "@DATA" << endl;
}
alphas[pWeakHypothesis->getSelectedIndex()] +=
pWeakHypothesis->getAlpha() * pWeakHypothesis->getSignOfAlpha();
if ( pOutInfo )
adaBoostMHLearner.printOutputInfo(pOutInfo, t, pOnePoint, NULL, pWeakHypothesis);
adaBoostMHLearner.updateWeights(pOnePoint,pWeakHypothesis);
}
float sumAlphas = 0;
for (int j = 0; j < numBaseLearners; ++j)
sumAlphas += alphas[j];
for (int j = 0; j < numBaseLearners; ++j)
outFile << alphas[j]/sumAlphas << ",";
const vector<Label>& labels = pLabelsData->getLabels(i);
for (int l = 0; l < labels.size(); ++l)
if (labels[l].y > 0)
outFile << pLabelsData->getClassMap().getNameFromIdx(labels[l].idx) << endl;
delete pOutInfo;
}
outFile.close();
}
if (pModel)
delete pModel;
return 0;
}
Example 5: run
void FilterBoostLearner::run(const nor_utils::Args& args)
{
// load the arguments
this->getArgs(args);
time_t startTime, currentTime;
time(&startTime);
// get the registered weak learner (type from name)
BaseLearner* pWeakHypothesisSource =
BaseLearner::RegisteredLearners().getLearner(_baseLearnerName);
// initialize the learning options; normally this is done in the strong loop,
// but we also do it here for Product learners, so the input data can be created
pWeakHypothesisSource->initLearningOptions(args);
BaseLearner* pConstantWeakHypothesisSource =
BaseLearner::RegisteredLearners().getLearner("ConstantLearner");
// get the training input data, and load it
InputData* pTrainingData = pWeakHypothesisSource->createInputData();
pTrainingData->initOptions(args);
pTrainingData->load(_trainFileName, IT_TRAIN, _verbose);
const int numClasses = pTrainingData->getNumClasses();
const int numExamples = pTrainingData->getNumExamples();
//initialize the margins variable
_margins.resize( numExamples );
for( int i=0; i<numExamples; i++ )
{
_margins[i].resize( numClasses );
fill( _margins[i].begin(), _margins[i].end(), 0.0 );
}
// get the testing input data, and load it
InputData* pTestData = NULL;
if ( !_testFileName.empty() )
{
pTestData = pWeakHypothesisSource->createInputData();
pTestData->initOptions(args);
pTestData->load(_testFileName, IT_TEST, _verbose);
}
// The output information object
OutputInfo* pOutInfo = NULL;
if ( !_outputInfoFile.empty() )
{
// Baseline: constant classifier - goes into 0th iteration
BaseLearner* pConstantWeakHypothesis = pConstantWeakHypothesisSource->create();
pConstantWeakHypothesis->initLearningOptions(args);
pConstantWeakHypothesis->setTrainingData(pTrainingData);
float constantEnergy = pConstantWeakHypothesis->run();
pOutInfo = new OutputInfo(_outputInfoFile);
pOutInfo->initialize(pTrainingData);
updateMargins( pTrainingData, pConstantWeakHypothesis );
if (pTestData)
pOutInfo->initialize(pTestData);
pOutInfo->outputHeader();
pOutInfo->outputIteration(-1);
pOutInfo->outputError(pTrainingData, pConstantWeakHypothesis);
if (pTestData)
pOutInfo->outputError(pTestData, pConstantWeakHypothesis);
/*
pOutInfo->outputMargins(pTrainingData, pConstantWeakHypothesis);
pOutInfo->outputEdge(pTrainingData, pConstantWeakHypothesis);
if (pTestData)
pOutInfo->outputMargins(pTestData, pConstantWeakHypothesis);
pOutInfo->outputMAE(pTrainingData);
if (pTestData)
pOutInfo->outputMAE(pTestData);
*/
pOutInfo->outputCurrentTime();
pOutInfo->endLine();
pOutInfo->initialize(pTrainingData);
if (pTestData)
pOutInfo->initialize(pTestData);
}
// reload the previously found weak learners if -resume is set;
// otherwise this just returns 0
int startingIteration = resumeWeakLearners(pTrainingData);
Serialization ss(_shypFileName, _isShypCompressed );
ss.writeHeader(_baseLearnerName); // this must go after resumeWeakLearners has been called
//......... part of the code omitted here .........
Example 6: run
void AdaBoostMHLearner::run(const nor_utils::Args& args)
{
// load the arguments
this->getArgs(args);
// get the registered weak learner (type from name)
BaseLearner* pWeakHypothesisSource =
BaseLearner::RegisteredLearners().getLearner(_baseLearnerName);
// initialize the learning options; normally this is done in the strong loop,
// but we also do it here for Product learners, so the input data can be created
pWeakHypothesisSource->initLearningOptions(args);
BaseLearner* pConstantWeakHypothesisSource =
BaseLearner::RegisteredLearners().getLearner("ConstantLearner");
// get the training input data, and load it
InputData* pTrainingData = pWeakHypothesisSource->createInputData();
pTrainingData->initOptions(args);
pTrainingData->load(_trainFileName, IT_TRAIN, _verbose);
// get the testing input data, and load it
InputData* pTestData = NULL;
if ( !_testFileName.empty() )
{
pTestData = pWeakHypothesisSource->createInputData();
pTestData->initOptions(args);
pTestData->load(_testFileName, IT_TEST, _verbose);
}
// The output information object
OutputInfo* pOutInfo = NULL;
if ( !_outputInfoFile.empty() )
{
// Baseline: constant classifier - goes into 0th iteration
BaseLearner* pConstantWeakHypothesis = pConstantWeakHypothesisSource->create();
pConstantWeakHypothesis->initLearningOptions(args);
pConstantWeakHypothesis->setTrainingData(pTrainingData);
AlphaReal constantEnergy = pConstantWeakHypothesis->run();
//pOutInfo = new OutputInfo(_outputInfoFile);
pOutInfo = new OutputInfo(args);
pOutInfo->initialize(pTrainingData);
if (pTestData)
pOutInfo->initialize(pTestData);
pOutInfo->outputHeader(pTrainingData->getClassMap());
pOutInfo->outputIteration(-1);
pOutInfo->outputCustom(pTrainingData, pConstantWeakHypothesis);
if (pTestData != NULL)
{
pOutInfo->separator();
pOutInfo->outputCustom(pTestData, pConstantWeakHypothesis);
}
pOutInfo->outputCurrentTime();
pOutInfo->endLine();
pOutInfo->initialize(pTrainingData);
if (pTestData)
pOutInfo->initialize(pTestData);
}
//cout << "Before serialization" << endl;
// reload the previously found weak learners if -resume is set;
// otherwise this just returns 0
int startingIteration = resumeWeakLearners(pTrainingData);
Serialization ss(_shypFileName, _isShypCompressed );
ss.writeHeader(_baseLearnerName); // this must go after resumeWeakLearners has been called
// perform the resuming if necessary; if not, this just returns
resumeProcess(ss, pTrainingData, pTestData, pOutInfo);
if (_verbose == 1)
cout << "Learning in progress..." << endl;
// I put the starting time here, but loading a saved model may take a very long time
time_t startTime, currentTime;
time(&startTime);
///////////////////////////////////////////////////////////////////////
// Starting the AdaBoost main loop
///////////////////////////////////////////////////////////////////////
for (int t = startingIteration; t < _numIterations; ++t)
{
if (_verbose > 1)
cout << "------- WORKING ON ITERATION " << (t+1) << " -------" << endl;
BaseLearner* pWeakHypothesis = pWeakHypothesisSource->create();
pWeakHypothesis->initLearningOptions(args);
//pTrainingData->clearIndexSet();
pWeakHypothesis->setTrainingData(pTrainingData);
//......... part of the code omitted here .........
Example 7: run
void SoftCascadeLearner::run(const nor_utils::Args& args)
{
// load the arguments
this->getArgs(args);
// print the cascade properties
if (_verbose > 0) {
cout << "[+] SoftCascade parameters:" << endl
<< "\t --> target detection rate = " << _targetDetectionRate << endl
<< "\t --> alpha (exp param) = " << _alphaExponentialParameter << endl
<< "\t --> bootstrap rate = " << _bootstrapRate << endl
<< endl;
}
// get the registered weak learner (type from name)
BaseLearner* pWeakHypothesisSource =
BaseLearner::RegisteredLearners().getLearner(_baseLearnerName);
// initialize the learning options; normally this is done in the strong loop,
// but we also do it here for Product learners, so the input data can be created
pWeakHypothesisSource->initLearningOptions(args);
// get the training input data, and load it
InputData* pTrainingData = pWeakHypothesisSource->createInputData();
pTrainingData->initOptions(args);
pTrainingData->load(_trainFileName, IT_TRAIN, 5);
InputData* pBootstrapData = NULL;
if (!_bootstrapFileName.empty()) {
pBootstrapData = pWeakHypothesisSource->createInputData();
pBootstrapData->initOptions(args);
pBootstrapData->load(_bootstrapFileName, IT_TRAIN, 5);
}
// get the testing input data, and load it
InputData* pTestData = NULL;
if ( !_testFileName.empty() )
{
pTestData = pWeakHypothesisSource->createInputData();
pTestData->initOptions(args);
pTestData->load(_testFileName, IT_TEST, 5);
}
Serialization ss(_shypFileName, false );
ss.writeHeader(_baseLearnerName);
// outputHeader();
// The output information object
OutputInfo* pOutInfo = NULL;
if ( !_outputInfoFile.empty() )
{
pOutInfo = new OutputInfo(args, true);
pOutInfo->setOutputList("sca", &args);
pOutInfo->initialize(pTrainingData);
if (pTestData)
pOutInfo->initialize(pTestData);
pOutInfo->outputHeader(pTrainingData->getClassMap(), true, true, false);
pOutInfo->outputUserHeader("thresh");
pOutInfo->headerEndLine();
}
// ofstream trainPosteriorsFile;
// ofstream testPosteriorsFile;
const NameMap& namemap = pTrainingData->getClassMap();
_positiveLabelIndex = namemap.getIdxFromName(_positiveLabelName);
// FIXME: output posteriors
// OutputInfo* pTrainPosteriorsOut = NULL;
// OutputInfo* pTestPosteriorsOut = NULL;
// if (! _trainPosteriorsFileName.empty()) {
// pTrainPosteriorsOut = new OutputInfo(_trainPosteriorsFileName, "pos", true);
// pTrainPosteriorsOut->initialize(pTrainingData);
// dynamic_cast<PosteriorsOutput*>( pTrainPosteriorsOut->getOutputInfoObject("pos") )->addClassIndex(_positiveLabelIndex );
// }
// if (! _testPosteriorsFileName.empty() && !_testFileName.empty() ) {
// pTestPosteriorsOut = new OutputInfo(_testPosteriorsFileName, "pos", true);
// pTestPosteriorsOut->initialize(pTestData);
// dynamic_cast<PosteriorsOutput*>( pTestPosteriorsOut->getOutputInfoObject("pos") )->addClassIndex(_positiveLabelIndex );
// }
const int numExamples = pTrainingData->getNumExamples();
vector<BaseLearner*> inWeakHypotheses;
if (_fullRun) {
// TODO: the full training is implemented; testing is needed
AdaBoostMHLearner* sHypothesis = new AdaBoostMHLearner();
sHypothesis->run(args, pTrainingData, _baseLearnerName, _numIterations, inWeakHypotheses );
delete sHypothesis;
//......... part of the code omitted here .........