当前位置: 首页>>代码示例>>C++>>正文


C++ BaseLearner类代码示例

本文整理汇总了C++中BaseLearner的典型用法代码示例。如果您正苦于以下问题:C++ BaseLearner类的具体用法?C++ BaseLearner怎么用?C++ BaseLearner使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。


在下文中一共展示了BaseLearner类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。

示例1: exit

		void BanditSingleSparseStump::init() {
			// Initializes the bandit algorithm: one arm per attribute, each arm
			// primed with the reward obtained by training a
			// SingleSparseStumpLearner on that attribute alone.
			// (FIX: removed the unused locals 'numClasses' and 'energy'.)
			const int numColumns = _pTrainingData->getNumAttributes();
			const int armNumber = _banditAlgo->getArmNumber();

			// there must be at least as many attributes as arms
			if ( numColumns < armNumber )
			{
				// FIX: corrected the "colums" typo in the error message
				cerr << "The number of columns smaller than the number of the arms!!!!!!" << endl;
				exit( -1 );
			}

			// factory used to clone one stump learner per attribute
			BaseLearner* pWeakHypothesisSource = 
				BaseLearner::RegisteredLearners().getLearner("SingleSparseStumpLearner");

			_banditAlgo->setArmNumber( numColumns );

			vector<AlphaReal> initialValues( numColumns );

			for( int i=0; i < numColumns; i++ )
			{
				SingleSparseStumpLearner* singleStump = dynamic_cast<SingleSparseStumpLearner*>( pWeakHypothesisSource->create());

				singleStump->setTrainingData(_pTrainingData);
				// run() must still be called for its training side effects;
				// its returned energy is not needed here
				singleStump->run( i );
				AlphaReal edge = singleStump->getEdge();
				AlphaReal reward = getRewardFromEdge( (AlphaReal) edge );

				initialValues[i] = reward;

				delete singleStump;
			}

			_banditAlgo->initialize( initialValues );

		}
开发者ID:busarobi,项目名称:MDDAG2,代码行数:35,代码来源:BanditSingleSparseStump.cpp

示例2: inFile

	void DataReader::loadInputData(const string& dataFileName, const string& testDataFileName, const string& testDataFileName2, const string& shypFileName)
	{
		// open file
		ifstream inFile(shypFileName.c_str());
		if (!inFile.is_open())
		{
			cerr << "ERROR: Cannot open strong hypothesis file <" << shypFileName << ">!" << endl;
			exit(1);
		}
		
		// Declares the stream tokenizer
		nor_utils::StreamTokenizer st(inFile, "<>\n\r\t");
		
		// Move until it finds the multiboost tag
		if ( !UnSerialization::seekSimpleTag(st, "multiboost") )
		{
			// no multiboost tag found: this is not the correct file!
			cerr << "ERROR: Not a valid MultiBoost Strong Hypothesis file: " << shypFileName << endl;
			exit(1);
		}
		
		// Move until it finds the algo tag
		string basicLearnerName = UnSerialization::seekAndParseEnclosedValue<string>(st, "algo");
		
		// Check if the weak learner exists
		if ( !BaseLearner::RegisteredLearners().hasLearner(basicLearnerName) )
		{
			cerr << "ERROR: Weak learner <" << basicLearnerName << "> not registered!!" << endl;
			exit(1);
		}
		
		// get the training input data, and load it
		BaseLearner* baseLearner = BaseLearner::RegisteredLearners().getLearner(basicLearnerName);
		baseLearner->initLearningOptions(_args);
		_pTrainData = baseLearner->createInputData();
		
		// set the non-default arguments of the input data
		_pTrainData->initOptions(_args);
		// load the data
		_pTrainData->load(dataFileName, IT_TEST, _verbose);				
		_pCurrentData = _pTrainData;
		
		_pTestData = baseLearner->createInputData();
		
		// set the non-default arguments of the input data
		_pTestData->initOptions(_args);
		// load the data
		_pTestData->load(testDataFileName, IT_TEST, _verbose);				
        
        
        _pTestData2 = NULL;
        if (!testDataFileName2.empty()) {
            _pTestData2 = baseLearner->createInputData();
            
            // set the non-default arguments of the input data
            _pTestData2->initOptions(_args);
            // load the data
            _pTestData2->load(testDataFileName2, IT_TEST, _verbose);				            
        }        
	}				
开发者ID:DjalelBBZ,项目名称:MDDAG-Trigger,代码行数:60,代码来源:AdaBoostMDPClassifierAdv.cpp

示例3: calculateEdgeImprovement

	// -----------------------------------------------------------------------
	// Extends the given tree node: trains a ConstantLearner baseline on the
	// node's example subset and, if the subset is not pure (not all one
	// class), also trains the scalar weak learner, recording both edges and
	// the edge improvement (learner edge minus constant edge) used to rank
	// candidate splits.
	void TreeLearner::calculateEdgeImprovement( NodePoint& node ) {
		node._extended = true;
		// restrict the training-data view to this node's examples
		_pTrainingData->loadIndexSet( node._learnerIdxSet );
		
		// run constant
		BaseLearner* pConstantWeakHypothesisSource =
		BaseLearner::RegisteredLearners().getLearner("ConstantLearner");
		
		// NOTE(review): dynamic_cast results are not NULL-checked here —
		// the registered learners are presumably ScalarLearners; confirm
		node._constantLearner = dynamic_cast<ScalarLearner*>( pConstantWeakHypothesisSource->create());
		node._constantLearner->setTrainingData(_pTrainingData);
		node._constantEnergy = node._constantLearner->run();
		
		node._constantEdge = node._constantLearner->getEdge(false);
		node._learner = NULL;
		
		if ( ! _pTrainingData->isSamplesFromOneClass() ) {
			node._learner = dynamic_cast<ScalarLearner*>(_pScalaWeakHypothesisSource->create());
			_pScalaWeakHypothesisSource->subCopyState(node._learner);
			node._learner->setTrainingData(_pTrainingData);
			
			node._learnerEnergy = node._learner->run();
			// self-comparison is the classic NaN test: x == x is false only
			// when x is NaN, so this branch runs when run() succeeded
			if ( node._learnerEnergy == node._learnerEnergy ) { // isnan
				node._edge = node._learner->getEdge(false);
				node._edgeImprovement = node._edge - node._constantEdge;								
			} else {
				// training failed: flag the edge as NaN and make this node
				// the least attractive split candidate
				node._edge = numeric_limits<AlphaReal>::signaling_NaN();
				node._edgeImprovement = -numeric_limits<AlphaReal>::max();
			}
		} else {
			// pure node: splitting cannot improve anything
			node._edge = numeric_limits<AlphaReal>::signaling_NaN();
			node._edgeImprovement = 0.0;			
		}
		
	}
开发者ID:busarobi,项目名称:MDDAG2,代码行数:35,代码来源:TreeLearner.cpp

示例4: assert

// Continue returns the results into ptRes for savePosteriors
// must be called the computeResult first!!!
// Resumes accumulation of the per-example, per-class weighted votes in
// 'results' over the weak hypotheses in the half-open range
// [fromIteration, toIteration). computeResults must have been called first
// so that every ExampleResults vote vector is initialized.
void MDDAGClassifier::continueComputingResults(InputData* pData, vector<BaseLearner*>& weakHypotheses,
        vector< ExampleResults* >& results, int fromIteration, int toIteration)
{
    assert( !weakHypotheses.empty() );

    const int numClasses = pData->getNumClasses();
    const int numExamples = pData->getNumExamples();

    // advance past the iterations that were already accumulated
    vector<BaseLearner*>::const_iterator hypIt = weakHypotheses.begin();
    int iter = 0;
    while (hypIt != weakHypotheses.end() && iter < fromIteration)
    {
        ++hypIt;
        ++iter;
    }

    // accumulate the alpha-weighted votes of the remaining iterations
    while (hypIt != weakHypotheses.end() && iter < toIteration)
    {
        BaseLearner* pWeakHyp = *hypIt;
        const AlphaReal alpha = pWeakHyp->getAlpha();

        for (int i = 0; i < numExamples; ++i)
        {
            // a reference for clarity and speed
            vector<AlphaReal>& votes = results[i]->getVotesVector();

            for (int l = 0; l < numClasses; ++l)
                votes[l] += alpha * pWeakHyp->classify(pData, i, l);
        }

        ++hypIt;
        ++iter;
    }
}
开发者ID:busarobi,项目名称:MDDAG2,代码行数:37,代码来源:MDDAGClassifier.cpp

示例5: exit

	// -----------------------------------------------------------------------
	// -----------------------------------------------------------------------
	// -----------------------------------------------------------------------	
	void UnSerialization::loadHypothesis(nor_utils::StreamTokenizer& st, 
										 vector<BaseLearner*>& weakHypotheses,
										 InputData* pTrainingData, int verbose)
	{
		// Reads one serialized weak hypothesis from the token stream and
		// appends it to weakHypotheses; truncated entries are discarded.
		const string basicLearnerName = seekAndParseEnclosedValue<string>(st, "weakLearner");
		
		// refuse learner names that were never registered
		if ( !BaseLearner::RegisteredLearners().hasLearner(basicLearnerName) ) {
			cerr << "ERROR: Weak learner <" << basicLearnerName << "> not registered!!" << endl;
			exit(1);
		}
		
		// clone a fresh learner of that type, bind it to the data, and
		// deserialize its state from the stream
		BaseLearner* pHypothesis =
		BaseLearner::RegisteredLearners().getLearner(basicLearnerName)->create();
		pHypothesis->setTrainingData(pTrainingData);
		pHypothesis->load(st);
		
		// a well-formed entry leaves at least the closing </weakhyp> token
		// behind; an exhausted stream means the entry was truncated
		if ( !st.has_token() ) {
			cerr << "WARNING: Incomplete weak hypothesis file found. Check the shyp file!" << endl;
			delete pHypothesis;
			return;
		}
		
		weakHypotheses.push_back(pHypothesis);
		
		// emit progress dots while loading large files
		if (verbose > 1 && weakHypotheses.size() % 1000 == 0)
			cout << "." << flush;
	}
开发者ID:DjalelBBZ,项目名称:MDDAG-Trigger,代码行数:37,代码来源:Serialization.cpp

示例6: calculateHypothesesMatrix

	void DataReader::calculateHypothesesMatrix()
	{		
		// Precomputes, for every example of the current data set, the
		// alpha-weighted per-class vote of each weak-hypothesis stage, so
		// later classification can use a table lookup instead of
		// re-evaluating the learners.
		// (FIX: removed the dead commented-out progress code and the counter
		// 't' that was incremented but never read.)
		cout << "[+] Calculate weak hyp matrix..." << endl;
		const int numExamples = _pCurrentData->getNumExamples();
		const int numClasses = _pCurrentData->getNumClasses();

		// matrix layout: allOutputs[example][stage][class]
		hypermat& allOutputs = _weakHypothesesMatrices[_pCurrentData];
		allOutputs.resize(numExamples);

		cout << "Memory allocation for " << numExamples << " examples, " << _numIterations << " classifiers, and " << numClasses << " classes..." << flush;
		for (int i = 0; i < numExamples; ++i)
		{
			allOutputs[i].resize(_numIterations);
			for (int j = 0; j < _numIterations; ++j) {
				allOutputs[i][j].resize(numClasses, 0.);
			}
		}
		cout << "Done." << endl;

		cout << "Computing the weak hyp outputs... " << flush;
		for (int wHypInd = 0; wHypInd < _numIterations; ++wHypInd)
		{
			// each stage may contain several base learners whose weighted
			// votes are summed into the same matrix slot
			vector<BaseLearner*>::iterator whypIt;
			for (whypIt = _weakHypotheses[wHypInd].begin(); whypIt != _weakHypotheses[wHypInd].end(); ++whypIt)
			{
				BaseLearner* currWeakHyp = *whypIt;
				AlphaReal alpha = currWeakHyp->getAlpha();

				for (int i = 0; i < numExamples; ++i)
				{
					for (int l = 0; l < numClasses; ++l)
					{
						allOutputs[i][wHypInd][l] += alpha * currWeakHyp->classify(_pCurrentData, i, l);
					}
				}
			}
		}

		cout << "Done." << endl;
	}
开发者ID:DjalelBBZ,项目名称:MDDAG-Trigger,代码行数:57,代码来源:AdaBoostMDPClassifierAdv.cpp

示例7: ExampleResults

	// -----------------------------------------------------------------------
	// -----------------------------------------------------------------------
	bool AdaBoostMDPClassifier::classifyTestMDP( int i )
	{
		double acc=0.0;
		const int numClasses = _pData->getNumClasses();
		const int numExamples = _pTestData->getNumExamples();
		
		
		ExampleResults* tmpResult = new ExampleResults( i, numClasses );			
		vector<AlphaReal>& currVotesVector = tmpResult->getVotesVector();
		
		for( int j=0; j<_weakHypotheses.size(); j++ )
		{
			
			if (_history[j]) {
				BaseLearner* currWeakHyp = _weakHypotheses[j];
				float alpha = currWeakHyp->getAlpha();
				
				// for every class
				for (int l = 0; l < numClasses; ++l)
					currVotesVector[l] += alpha * currWeakHyp->classify(_pTestData, i, l);
			}
		}
		
		
		vector<Label>::const_iterator lIt;
		
		const vector<Label>& labels = _pTestData->getLabels(i);
		
		
		// the vote of the winning negative class
		float maxNegClass = -numeric_limits<float>::max();
		// the vote of the winning positive class
		float minPosClass = numeric_limits<float>::max();
		
		
		for ( lIt = labels.begin(); lIt != labels.end(); ++lIt )
		{
			// get the negative winner class
			if ( lIt->y < 0 && currVotesVector[lIt->idx] > maxNegClass )
				maxNegClass = currVotesVector[lIt->idx];
			
			// get the positive winner class
			if ( lIt->y > 0 && currVotesVector[lIt->idx] < minPosClass )
				minPosClass = currVotesVector[lIt->idx];
		}
		
		// if the vote for the worst positive label is lower than the
		// vote for the highest negative label -> error
		if (minPosClass <= maxNegClass)
			return false;
		else {
			return true;
		}
		
	}
开发者ID:busarobi,项目名称:MDDAG,代码行数:57,代码来源:AdaBoostMDPClassifier.cpp

示例8: rand

	// ------------------------------------------------------------------------------
	void BanditSingleStumpLearner::estimatePayoffs( vector<AlphaReal>& payoffs )
	{
		// Estimates a payoff (reward) for every attribute by training a
		// SingleStumpLearner per attribute on a random subset of the
		// examples. Entries of 'payoffs' that are already positive are kept.
		// The training data's index set is saved and restored around the
		// subsampling.
		set<int> oldIndexSet;
		set<int> randomIndexSet;
		const int numExamples = _pTrainingData->getNumExamples();
		const int numColumns = _pTrainingData->getNumAttributes();

		// remember the current index set so it can be restored at the end
		_pTrainingData->getIndexSet( oldIndexSet );
		int numSubset = static_cast<int>( static_cast<double>(numExamples) * _percentage );
		
		if ( numSubset < 2 ) {
			//use the whole dataset, do nothing
		} else {
			// FIX(comment): this selects ~numSubset EXAMPLES (not columns)
			// uniformly without replacement by sequential sampling
			for (int j = 0; j < numExamples; ++j)
			{
				int rest = numExamples - j;
				AlphaReal r = rand()/static_cast<AlphaReal>(RAND_MAX);

				if ( static_cast<AlphaReal>(numSubset) / rest > r ) 
				{
					--numSubset;
					randomIndexSet.insert( j );
				}
			}
			_pTrainingData->loadIndexSet( randomIndexSet );
		}
		
		payoffs.resize( numColumns );

		BaseLearner* pWeakHypothesisSource = 
			BaseLearner::RegisteredLearners().getLearner("SingleStumpLearner");		
		
		for( int i=0; i < numColumns; i++ )
		{
			// skip arms whose payoff is already estimated
			if ( payoffs[i] > 0.0 ) continue;

			SingleStumpLearner* singleStump = dynamic_cast<SingleStumpLearner*>( pWeakHypothesisSource->create());
			
			singleStump->setTrainingData(_pTrainingData);
			// run() must still be called for its training side effects; its
			// returned energy is not needed (FIX: removed the unused local)
			singleStump->run( i );
			AlphaReal edge = singleStump->getEdge();
			// FIX: cast through AlphaReal (not float) to avoid losing
			// precision — consistent with BanditSingleSparseStump::init
			AlphaReal reward = getRewardFromEdge( (AlphaReal) edge );
			
			payoffs[i] = reward;			
			delete singleStump;
		}

		//restore the database
		_pTrainingData->loadIndexSet( oldIndexSet );
	}
开发者ID:busarobi,项目名称:MDDAG,代码行数:53,代码来源:BanditSingleStumpLearner.cpp

示例9: _verbose

	// -----------------------------------------------------------------------
	// -----------------------------------------------------------------------
	// Builds a DataReader from the "traintestmdp" command-line argument:
	// loads the train/test arff files and the strong-hypothesis (shyp) file,
	// truncates the loaded weak-hypothesis list to _numIterations, and
	// precomputes the sum of all alpha coefficients.
	DataReader::DataReader(const nor_utils::Args& args, int verbose) : _verbose(verbose), _args(args)
	{				
		// argument layout: 0 = train file, 1 = test file, 2 = #iterations,
		// 3 = shyp file, 4 = extra file name
		string mdpTrainFileName = _args.getValue<string>("traintestmdp", 0);				
		string testFileName = _args.getValue<string>("traintestmdp", 1);				
		string shypFileName = _args.getValue<string>("traintestmdp", 3);
		_numIterations = _args.getValue<int>("traintestmdp", 2);				
		
		// NOTE(review): tmpFname is never used below — presumably kept only
		// so a missing 5th argument is detected; confirm before removing
		string tmpFname = _args.getValue<string>("traintestmdp", 4);
		
		
		
		if (_verbose > 0)
			cout << "Loading arff data for MDP learning..." << flush;
		
		// load the arff
		// NOTE(review): this call passes 3 arguments while another
		// DataReader::loadInputData overload in this collection takes 4 —
		// these snippets come from different project versions
		loadInputData(mdpTrainFileName, testFileName, shypFileName);
		
		if (_verbose > 0)
			cout << "Done." << endl << flush;
		
		
		if (_verbose > 0)
			cout << "Loading strong hypothesis..." << flush;
		
		// The class that loads the weak hypotheses
		UnSerialization us;
		
		// loads them
		us.loadHypotheses(shypFileName, _weakHypotheses, _pTrainData);			
		// keep only the first _numIterations weak hypotheses
		if (_numIterations<_weakHypotheses.size())
			_weakHypotheses.resize(_numIterations);
		
		if (_verbose > 0)
			cout << "Done." << endl << flush;			
		
		// after the optional truncation this can only fail if the shyp file
		// contained fewer hypotheses than requested
		assert( _weakHypotheses.size() >= _numIterations );
		
		// calculate the sum of alphas
		vector<BaseLearner*>::iterator it;
		_sumAlphas=0.0;
		for( it = _weakHypotheses.begin(); it != _weakHypotheses.end(); ++it )
		{
			BaseLearner* currBLearner = *it;
			_sumAlphas += currBLearner->getAlpha();
		}
		
	}
开发者ID:busarobi,项目名称:MDDAG,代码行数:49,代码来源:AdaBoostMDPClassifierAdv.cpp

示例10: scoreVector

    vector<AlphaReal> DataReader::getWhypClassification( const int wHypInd, const int instance )
	{
		// Returns the per-class alpha-weighted vote of stage wHypInd on a
		// single instance, without touching any accumulated results.
		const int numClasses = _pCurrentData->getNumClasses();
		
		vector<AlphaReal> votes(numClasses);
		
		vector<BaseLearner*>::iterator it;
		for (it = _weakHypotheses[wHypInd].begin(); it != _weakHypotheses[wHypInd].end(); ++it)
		{
			BaseLearner* pHyp = *it;
			const AlphaReal weight = pHyp->getAlpha();
			
			for (int l = 0; l < numClasses; ++l)
				votes[l] += weight * pHyp->classify(_pCurrentData, instance, l);
		}
		return votes;
	}

示例11: assert

	// -----------------------------------------------------------------------
	// -----------------------------------------------------------------------
	// Adds the votes of weak-hypothesis stage wHypInd on 'instance' to the
	// running vote vector inside exampleResult, and returns the sign
	// (+1 / 0 / -1) of the updated cumulative score for each class.
	vector<int> DataReader::classifyKthWeakLearner( const int wHypInd, const int instance, ExampleResults* exampleResult )
	{		
		if (_verbose>3) {
			//cout << "Classifiying: " << wHypInd << endl;
		}
		
		// an out-of-range stage index is a programming error
		if ( wHypInd >= _numIterations ) {
            assert(false);
        }
		
		const int numClasses = _pCurrentData->getNumClasses();				
		
		// a reference for clarity and speed
		vector<AlphaReal>& currVotesVector = exampleResult->getVotesVector();
        
        // ternary (sign) representation of the cumulative per-class votes
        vector<int> ternaryPhis(numClasses);
        
		AlphaReal alpha;
		
		// for every class
		if (_isDataStorageMatrix)
		{
			// fast path: per-stage votes were precomputed into
			// _pCurrentMatrix by calculateHypothesesMatrix()
			for (int l = 0; l < numClasses; ++l) {
				currVotesVector[l] += (*_pCurrentMatrix)[instance][wHypInd][l];
                ternaryPhis[l] = (currVotesVector[l] > 0) ? 1 : ((currVotesVector[l] < 0) ? -1 : 0) ;
            }
		}
        else
		{
			// slow path: evaluate each base learner of the stage on the fly
            vector<BaseLearner*>::iterator whypIt;
            for (whypIt = _weakHypotheses[wHypInd].begin(); whypIt != _weakHypotheses[wHypInd].end(); ++whypIt) {
                BaseLearner* currWeakHyp = *whypIt;
                alpha = currWeakHyp->getAlpha();
                
                for (int l = 0; l < numClasses; ++l) {
                    int vote = currWeakHyp->classify(_pCurrentData, instance, l);
                    currVotesVector[l] += alpha * vote;
                    
                    // recomputed after every base learner; only the value
                    // after the last one is returned
                    ternaryPhis[l] = (currVotesVector[l] > 0) ? 1 : ((currVotesVector[l] < 0) ? -1 : 0) ;
                }
            }
		}
		
		return ternaryPhis;
	}
开发者ID:DjalelBBZ,项目名称:MDDAG-Trigger,代码行数:47,代码来源:AdaBoostMDPClassifierAdv.cpp

示例12: initLearningOptions

	void ProductLearner::initLearningOptions(const nor_utils::Args& args)
	{
		// Reads the base-learner type and count from "baselearnertype" and
		// instantiates that many base learners, each configured from args.
		BaseLearner::initLearningOptions(args);

		string baseLearnerName;
		args.getValue("baselearnertype", 0, baseLearnerName);   
		args.getValue("baselearnertype", 1, _numBaseLearners);   

		// the registered learner acts purely as a factory prototype
		BaseLearner* pPrototype = 
			BaseLearner::RegisteredLearners().getLearner(baseLearnerName);
		pPrototype->initLearningOptions(args);

		for (int i = 0; i < _numBaseLearners; ++i)
		{
			BaseLearner* pBase = pPrototype->create();
			pBase->initLearningOptions(args);
			_baseLearners.push_back(pBase);
		}
	}
开发者ID:busarobi,项目名称:MDDAG2,代码行数:18,代码来源:ProductLearner.cpp

示例13: fill

 // Sums the alpha-weighted votes of every weak hypothesis for the positive
 // class, producing one posterior score per example in oPosteriors.
 void SoftCascadeLearner::computePosteriors(InputData* pData, vector<BaseLearner*> & weakHypotheses, vector<AlphaReal> & oPosteriors, int positiveLabelIndex)
 {
     const int numExamples = pData->getNumExamples();

     // start from a zero vector of the right size
     oPosteriors.assign(numExamples, 0.);

     for (size_t w = 0; w < weakHypotheses.size(); ++w)
     {
         BaseLearner* pHyp = weakHypotheses[w];
         const AlphaReal alpha = pHyp->getAlpha();

         for (int i = 0; i < numExamples; ++i)
             oPosteriors[i] += alpha * pHyp->classify(pData, i, positiveLabelIndex);
     }
 }
开发者ID:junjiek,项目名称:cmu-exp,代码行数:20,代码来源:SoftCascadeLearner.cpp

示例14: classifyKthWeakLearner

	// -----------------------------------------------------------------------
	// -----------------------------------------------------------------------
	double DataReader::classifyKthWeakLearner( const int wHypInd, const int instance, ExampleResults* exampleResult )		
	{		
		if (_verbose>3) {
			//cout << "Classifiying: " << wHypInd << endl;
		}
		
		if ( wHypInd >= _numIterations ) return -1.0; // indicating error						
		
		const int numClasses = _pCurrentData->getNumClasses();
		
		BaseLearner* currWeakHyp = _weakHypotheses[wHypInd];
		float alpha = currWeakHyp->getAlpha();
		
		// a reference for clarity and speed
		vector<AlphaReal>& currVotesVector = exampleResult->getVotesVector();
		
		// for every class
		for (int l = 0; l < numClasses; ++l)
			currVotesVector[l] += alpha * currWeakHyp->classify(_pCurrentData, instance, l);
		
		return alpha;
	}
开发者ID:busarobi,项目名称:MDDAG,代码行数:24,代码来源:AdaBoostMDPClassifierAdv.cpp

示例15: initLearningOptions

// Parses this learner's command-line options: the base-learner type and
// count ("baselearnertype <name> <N>") and the optional UCT "updaterule"
// (edge | alphas | edgesquare, defaulting to edge).
void TreeLearnerUCT::initLearningOptions(const nor_utils::Args& args)
{
    BaseLearner::initLearningOptions(args);

    string baseLearnerName;
    args.getValue("baselearnertype", 0, baseLearnerName);
    args.getValue("baselearnertype", 1, _numBaseLearners);

    // get the registered weak learner (type from name)
    BaseLearner* pWeakHypothesisSource =
        BaseLearner::RegisteredLearners().getLearner(baseLearnerName);

    for( int ib = 0; ib < _numBaseLearners; ++ib ) {
        _baseLearners.push_back(pWeakHypothesisSource->create());
        _baseLearners[ib]->initLearningOptions(args);

        // each tree node starts with two unset (-1) child indices
        vector< int > tmpVector( 2, -1 );
        _idxPairs.push_back( tmpVector );
    }

    string updateRule = "";
    if ( args.hasArgument( "updaterule" ) )
        args.getValue("updaterule", 0, updateRule );

    if ( updateRule.compare( "edge" ) == 0 )
        _updateRule = EDGE_SQUARE;
    else if ( updateRule.compare( "alphas" ) == 0 )
        _updateRule = ALPHAS;
    else if ( updateRule.compare( "edgesquare" ) == 0 )
        _updateRule = ESQUARE;
    else {
        // FIX: the message named the wrong class (ProductLearnerUCT) and
        // was missing its closing parenthesis
        cerr << "Unknown update rule in TreeLearnerUCT (set to default [edge])" << endl;
        _updateRule = EDGE_SQUARE;
    }

}
开发者ID:junjiek,项目名称:cmu-exp,代码行数:36,代码来源:TreeLearnerUCT.cpp


注:本文中的BaseLearner类示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。