本文整理汇总了C++中OutputInfo::outputCurrentTime方法的典型用法代码示例。如果您正苦于以下问题:C++ OutputInfo::outputCurrentTime方法的具体用法?C++ OutputInfo::outputCurrentTime怎么用?C++ OutputInfo::outputCurrentTime使用的例子?那么,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类OutputInfo的用法示例。
在下文中一共展示了OutputInfo::outputCurrentTime方法的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: run
// Entry point of a FilterBoost training run: parses command-line arguments,
// loads the training (and optional test) set, zero-initializes the per-example
// per-class margins, and — when an output-info file was requested — writes a
// 0th-iteration baseline row produced by a constant classifier.
// NOTE(review): this excerpt is truncated; the main boosting loop follows the
// omitted portion, so behavior past line "writeHeader" is not visible here.
void FilterBoostLearner::run(const nor_utils::Args& args)
{
// load the arguments
this->getArgs(args);
// wall-clock bookkeeping for the whole run (currentTime is presumably used
// in the omitted loop below — not visible in this excerpt)
time_t startTime, currentTime;
time(&startTime);
// get the registered weak learner (type from name)
BaseLearner* pWeakHypothesisSource =
BaseLearner::RegisteredLearners().getLearner(_baseLearnerName);
// initialize learning options; normally it's done in the strong loop
// also, here we do it for Product learners, so input data can be created
pWeakHypothesisSource->initLearningOptions(args);
// the constant learner provides the 0th-iteration baseline below
BaseLearner* pConstantWeakHypothesisSource =
BaseLearner::RegisteredLearners().getLearner("ConstantLearner");
// get the training input data, and load it
InputData* pTrainingData = pWeakHypothesisSource->createInputData();
pTrainingData->initOptions(args);
pTrainingData->load(_trainFileName, IT_TRAIN, _verbose);
const int numClasses = pTrainingData->getNumClasses();
const int numExamples = pTrainingData->getNumExamples();
//initialize the margins variable: one row per example, one 0.0 entry per class
_margins.resize( numExamples );
for( int i=0; i<numExamples; i++ )
{
_margins[i].resize( numClasses );
fill( _margins[i].begin(), _margins[i].end(), 0.0 );
}
// get the testing input data, and load it (optional: only if -test was given)
InputData* pTestData = NULL;
if ( !_testFileName.empty() )
{
pTestData = pWeakHypothesisSource->createInputData();
pTestData->initOptions(args);
pTestData->load(_testFileName, IT_TEST, _verbose);
}
// The output information object (NULL when no output-info file was requested)
OutputInfo* pOutInfo = NULL;
if ( !_outputInfoFile.empty() )
{
// Baseline: constant classifier - goes into 0th iteration
BaseLearner* pConstantWeakHypothesis = pConstantWeakHypothesisSource->create() ;
pConstantWeakHypothesis->initLearningOptions(args);
pConstantWeakHypothesis->setTrainingData(pTrainingData);
// NOTE(review): constantEnergy is never read afterwards; run() is invoked
// only for its side effect of fitting the constant hypothesis
float constantEnergy = pConstantWeakHypothesis->run();
pOutInfo = new OutputInfo(_outputInfoFile);
pOutInfo->initialize(pTrainingData);
// fold the baseline hypothesis into the stored margins
updateMargins( pTrainingData, pConstantWeakHypothesis );
if (pTestData)
pOutInfo->initialize(pTestData);
pOutInfo->outputHeader();
// iteration -1 marks the pre-boosting baseline row
pOutInfo->outputIteration(-1);
pOutInfo->outputError(pTrainingData, pConstantWeakHypothesis);
if (pTestData)
pOutInfo->outputError(pTestData, pConstantWeakHypothesis);
/*
pOutInfo->outputMargins(pTrainingData, pConstantWeakHypothesis);
pOutInfo->outputEdge(pTrainingData, pConstantWeakHypothesis);
if (pTestData)
pOutInfo->outputMargins(pTestData, pConstantWeakHypothesis);
pOutInfo->outputMAE(pTrainingData);
if (pTestData)
pOutInfo->outputMAE(pTestData);
*/
pOutInfo->outputCurrentTime();
pOutInfo->endLine();
// re-initialize after the baseline row — presumably resets per-dataset
// accumulators before the real iterations; confirm against OutputInfo docs
pOutInfo->initialize(pTrainingData);
if (pTestData)
pOutInfo->initialize(pTestData);
}
// reload the previously found weak learners if -resume is set.
// otherwise just return 0
int startingIteration = resumeWeakLearners(pTrainingData);
Serialization ss(_shypFileName, _isShypCompressed );
ss.writeHeader(_baseLearnerName); // this must go after resumeProcess has been called
//......... part of the code is omitted here .........
示例2: run
// Same FilterBoost entry point as example 1, but from a newer library
// revision: energies use the AlphaReal typedef, OutputInfo is constructed
// from the full argument list, the header takes the class map, and the
// baseline row is written via outputCustom()/separator() instead of
// outputError(). Behavior: parse args, load data, zero the margins, emit a
// 0th-iteration baseline from a constant classifier, then resume/serialize.
// NOTE(review): truncated excerpt — the boosting loop follows the omission.
void FilterBoostLearner::run(const nor_utils::Args& args)
{
// load the arguments
this->getArgs(args);
// wall-clock bookkeeping (currentTime presumably used in the omitted loop)
time_t startTime, currentTime;
time(&startTime);
// get the registered weak learner (type from name)
BaseLearner* pWeakHypothesisSource =
BaseLearner::RegisteredLearners().getLearner(_baseLearnerName);
// initialize learning options; normally it's done in the strong loop
// also, here we do it for Product learners, so input data can be created
pWeakHypothesisSource->initLearningOptions(args);
// the constant learner provides the 0th-iteration baseline below
BaseLearner* pConstantWeakHypothesisSource =
BaseLearner::RegisteredLearners().getLearner("ConstantLearner");
// get the training input data, and load it
InputData* pTrainingData = pWeakHypothesisSource->createInputData();
pTrainingData->initOptions(args);
pTrainingData->load(_trainFileName, IT_TRAIN, _verbose);
const int numClasses = pTrainingData->getNumClasses();
const int numExamples = pTrainingData->getNumExamples();
//initialize the margins variable: one row per example, one 0.0 entry per class
_margins.resize( numExamples );
for( int i=0; i<numExamples; i++ )
{
_margins[i].resize( numClasses );
fill( _margins[i].begin(), _margins[i].end(), 0.0 );
}
// get the testing input data, and load it (optional: only if -test was given)
InputData* pTestData = NULL;
if ( !_testFileName.empty() )
{
pTestData = pWeakHypothesisSource->createInputData();
pTestData->initOptions(args);
pTestData->load(_testFileName, IT_TEST, _verbose);
}
// The output information object (NULL when no output-info file was requested)
OutputInfo* pOutInfo = NULL;
if ( !_outputInfoFile.empty() )
{
// Baseline: constant classifier - goes into 0th iteration
BaseLearner* pConstantWeakHypothesis = pConstantWeakHypothesisSource->create() ;
pConstantWeakHypothesis->initLearningOptions(args);
pConstantWeakHypothesis->setTrainingData(pTrainingData);
// NOTE(review): constantEnergy is never read afterwards; run() is invoked
// only for its side effect of fitting the constant hypothesis
AlphaReal constantEnergy = pConstantWeakHypothesis->run();
pOutInfo = new OutputInfo(args);
pOutInfo->initialize(pTrainingData);
// fold the baseline hypothesis into the stored margins
updateMargins( pTrainingData, pConstantWeakHypothesis );
if (pTestData)
pOutInfo->initialize(pTestData);
pOutInfo->outputHeader(pTrainingData->getClassMap() );
// iteration -1 marks the pre-boosting baseline row
pOutInfo->outputIteration(-1);
pOutInfo->outputCustom(pTrainingData, pConstantWeakHypothesis);
if (pTestData)
{
// separator() splits the train and test columns in the same row
pOutInfo->separator();
pOutInfo->outputCustom(pTestData, pConstantWeakHypothesis);
}
pOutInfo->outputCurrentTime();
pOutInfo->endLine();
// re-initialize after the baseline row — presumably resets per-dataset
// accumulators before the real iterations; confirm against OutputInfo docs
pOutInfo->initialize(pTrainingData);
if (pTestData)
pOutInfo->initialize(pTestData);
}
// reload the previously found weak learners if -resume is set.
// otherwise just return 0
int startingIteration = resumeWeakLearners(pTrainingData);
Serialization ss(_shypFileName, _isShypCompressed );
ss.writeHeader(_baseLearnerName); // this must go after resumeProcess has been called
// perform the resuming if necessary. If not it will just return
resumeProcess(ss, pTrainingData, pTestData, pOutInfo);
if (_verbose == 1)
cout << "Learning in progress..." << endl;
///////////////////////////////////////////////////////////////////////
// Starting the AdaBoost main loop
//......... part of the code is omitted here .........
示例3: run
// Entry point of an AdaBoost.MH training run: parses command-line arguments,
// loads the training (and optional test) set, optionally writes a
// 0th-iteration baseline row from a constant classifier, resumes a saved
// model if requested, and then enters the boosting loop (truncated here).
// Unlike the FilterBoost variant above, no margin bookkeeping is needed.
void AdaBoostMHLearner::run(const nor_utils::Args& args)
{
// load the arguments
this->getArgs(args);
// get the registered weak learner (type from name)
BaseLearner* pWeakHypothesisSource =
BaseLearner::RegisteredLearners().getLearner(_baseLearnerName);
// initialize learning options; normally it's done in the strong loop
// also, here we do it for Product learners, so input data can be created
pWeakHypothesisSource->initLearningOptions(args);
// the constant learner provides the 0th-iteration baseline below
BaseLearner* pConstantWeakHypothesisSource =
BaseLearner::RegisteredLearners().getLearner("ConstantLearner");
// get the training input data, and load it
InputData* pTrainingData = pWeakHypothesisSource->createInputData();
pTrainingData->initOptions(args);
pTrainingData->load(_trainFileName, IT_TRAIN, _verbose);
// get the testing input data, and load it (optional: only if -test was given)
InputData* pTestData = NULL;
if ( !_testFileName.empty() )
{
pTestData = pWeakHypothesisSource->createInputData();
pTestData->initOptions(args);
pTestData->load(_testFileName, IT_TEST, _verbose);
}
// The output information object (NULL when no output-info file was requested)
OutputInfo* pOutInfo = NULL;
if ( !_outputInfoFile.empty() )
{
// Baseline: constant classifier - goes into 0th iteration
BaseLearner* pConstantWeakHypothesis = pConstantWeakHypothesisSource->create() ;
pConstantWeakHypothesis->initLearningOptions(args);
pConstantWeakHypothesis->setTrainingData(pTrainingData);
// NOTE(review): constantEnergy is never read afterwards; run() is invoked
// only for its side effect of fitting the constant hypothesis
AlphaReal constantEnergy = pConstantWeakHypothesis->run();
//pOutInfo = new OutputInfo(_outputInfoFile);
pOutInfo = new OutputInfo(args);
pOutInfo->initialize(pTrainingData);
if (pTestData)
pOutInfo->initialize(pTestData);
pOutInfo->outputHeader(pTrainingData->getClassMap());
// iteration -1 marks the pre-boosting baseline row
pOutInfo->outputIteration(-1);
pOutInfo->outputCustom(pTrainingData, pConstantWeakHypothesis);
if (pTestData != NULL)
{
// separator() splits the train and test columns in the same row
pOutInfo->separator();
pOutInfo->outputCustom(pTestData, pConstantWeakHypothesis);
}
pOutInfo->outputCurrentTime();
pOutInfo->endLine();
// re-initialize after the baseline row — presumably resets per-dataset
// accumulators before the real iterations; confirm against OutputInfo docs
pOutInfo->initialize(pTrainingData);
if (pTestData)
pOutInfo->initialize(pTestData);
}
//cout << "Before serialization" << endl;
// reload the previously found weak learners if -resume is set.
// otherwise just return 0
int startingIteration = resumeWeakLearners(pTrainingData);
Serialization ss(_shypFileName, _isShypCompressed );
ss.writeHeader(_baseLearnerName); // this must go after resumeProcess has been called
// perform the resuming if necessary. If not it will just return
resumeProcess(ss, pTrainingData, pTestData, pOutInfo);
if (_verbose == 1)
cout << "Learning in progress..." << endl;
//I put here the starting time, but it may take very long time to load the saved model
time_t startTime, currentTime;
time(&startTime);
///////////////////////////////////////////////////////////////////////
// Starting the AdaBoost main loop
///////////////////////////////////////////////////////////////////////
for (int t = startingIteration; t < _numIterations; ++t)
{
if (_verbose > 1)
cout << "------- WORKING ON ITERATION " << (t+1) << " -------" << endl;
// a fresh weak hypothesis is created and trained each iteration
BaseLearner* pWeakHypothesis = pWeakHypothesisSource->create();
pWeakHypothesis->initLearningOptions(args);
//pTrainingData->clearIndexSet();
pWeakHypothesis->setTrainingData(pTrainingData);
//......... part of the code is omitted here .........