

C++ BaseLearner::create Method Code Examples

This article collects typical usage examples of the C++ BaseLearner::create method, extracted from open-source projects. If you have been wondering what exactly BaseLearner::create does, how to call it, or where it is used in practice, the curated examples below should help. You can also explore further usage examples of the BaseLearner class, in which this method is defined.


Twelve code examples of the BaseLearner::create method are shown below, sorted by popularity.
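Most of the examples share the same prototype/factory pattern: look up a registered learner prototype by name, clone it with create(), train the clone on the current data, and delete it when done. The following minimal sketch assembles that lifecycle from the calls used in the examples below (pTrainingData stands in for an already-loaded InputData*; treat this as an illustration of the API, not code from any of the projects):

BaseLearner* pSource =
    BaseLearner::RegisteredLearners().getLearner("SingleStumpLearner");

BaseLearner* pHypothesis = pSource->create();   // clone the registered prototype
pHypothesis->setTrainingData(pTrainingData);    // attach the training data
AlphaReal energy = pHypothesis->run();          // train the weak hypothesis
AlphaReal edge   = pHypothesis->getEdge();      // measure its quality

delete pHypothesis;                             // the caller owns the clone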

Example 1: init

		void BanditSingleSparseStump::init() {
			const int numClasses = _pTrainingData->getNumClasses();
			const int numColumns = _pTrainingData->getNumAttributes();
			const int armNumber = _banditAlgo->getArmNumber();

			if ( numColumns < armNumber )
			{
				cerr << "The number of colums smaller than the number of the arms!!!!!!" << endl;
				exit( -1 );
			}

			BaseLearner* pWeakHypothesisSource = 
				BaseLearner::RegisteredLearners().getLearner("SingleSparseStumpLearner");

			_banditAlgo->setArmNumber( numColumns );

			vector<AlphaReal> initialValues( numColumns );

			for( int i=0; i < numColumns; i++ )
			{
				SingleSparseStumpLearner* singleStump = dynamic_cast<SingleSparseStumpLearner*>( pWeakHypothesisSource->create());

				singleStump->setTrainingData(_pTrainingData);
				AlphaReal energy = singleStump->run( i );
				AlphaReal edge = singleStump->getEdge();
				AlphaReal reward = getRewardFromEdge( (AlphaReal) edge );

				initialValues[i] = reward;

				delete singleStump;
			}

			_banditAlgo->initialize( initialValues );

		}
Developer: busarobi, Project: MDDAG2, Lines: 35, Source: BanditSingleSparseStump.cpp

Example 2: calculateEdgeImprovement

	// -----------------------------------------------------------------------
	void TreeLearner::calculateEdgeImprovement( NodePoint& node ) {
		node._extended = true;
		_pTrainingData->loadIndexSet( node._learnerIdxSet );
		
		// run constant
		BaseLearner* pConstantWeakHypothesisSource =
		BaseLearner::RegisteredLearners().getLearner("ConstantLearner");
		
		node._constantLearner = dynamic_cast<ScalarLearner*>( pConstantWeakHypothesisSource->create());
		node._constantLearner->setTrainingData(_pTrainingData);
		node._constantEnergy = node._constantLearner->run();
		
		node._constantEdge = node._constantLearner->getEdge(false);
		node._learner = NULL;
		
		if ( ! _pTrainingData->isSamplesFromOneClass() ) {
			node._learner = dynamic_cast<ScalarLearner*>(_pScalaWeakHypothesisSource->create());
			_pScalaWeakHypothesisSource->subCopyState(node._learner);
			node._learner->setTrainingData(_pTrainingData);
			
			node._learnerEnergy = node._learner->run();
			if ( node._learnerEnergy == node._learnerEnergy ) { // true exactly when the energy is not NaN (NaN != NaN)
				node._edge = node._learner->getEdge(false);
				node._edgeImprovement = node._edge - node._constantEdge;								
			} else {
				node._edge = numeric_limits<AlphaReal>::signaling_NaN();
				node._edgeImprovement = -numeric_limits<AlphaReal>::max();
			}
		} else {
			node._edge = numeric_limits<AlphaReal>::signaling_NaN();
			node._edgeImprovement = 0.0;			
		}
		
	}
Developer: busarobi, Project: MDDAG2, Lines: 35, Source: TreeLearner.cpp
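The condition node._learnerEnergy == node._learnerEnergy above is the IEEE 754 self-comparison idiom: NaN is the only value that compares unequal to itself, so the test succeeds exactly when run() returned a valid energy. A more explicit, standards-based spelling of the same check would be (a sketch, not code from the project):

#include <cmath>   // std::isnan

// Equivalent to the self-comparison used in calculateEdgeImprovement:
if (!std::isnan(node._learnerEnergy)) {
    // run() produced a valid energy; compute edge and edge improvement
} else {
    // run() failed; mark the edge as NaN and the improvement as -max
}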

Example 3: estimatePayoffs

	// ------------------------------------------------------------------------------
	void BanditSingleStumpLearner::estimatePayoffs( vector<AlphaReal>& payoffs )
	{
		set<int> oldIndexSet;
		set<int> randomIndexSet;
		const int numExamples = _pTrainingData->getNumExamples();
		const int numColumns = _pTrainingData->getNumAttributes();

		_pTrainingData->getIndexSet( oldIndexSet );
		int numSubset = static_cast<int>( static_cast<double>(numExamples) * _percentage );
		
		if ( numSubset < 2 ) {
			//use the whole dataset, do nothing
		} else {
			for (int j = 0; j < numExamples; ++j)
			{
				// Selection sampling: accept example j with probability numSubset/rest,
				// which selects exactly numSubset examples uniformly at random
				int rest = numExamples - j;
				AlphaReal r = rand()/static_cast<AlphaReal>(RAND_MAX);

				if ( static_cast<AlphaReal>(numSubset) / rest > r ) 
				{
					--numSubset;
					randomIndexSet.insert( j );
				}
			}
			_pTrainingData->loadIndexSet( randomIndexSet );
		}
		
		
		payoffs.resize( numColumns );

		BaseLearner* pWeakHypothesisSource = 
			BaseLearner::RegisteredLearners().getLearner("SingleStumpLearner");		
		
		for( int i=0; i < numColumns; i++ )
		{
			if ( payoffs[i] > 0.0 ) continue; // skip arms whose payoff was already estimated

			SingleStumpLearner* singleStump = dynamic_cast<SingleStumpLearner*>( pWeakHypothesisSource->create());
			
			singleStump->setTrainingData(_pTrainingData);
			AlphaReal energy = singleStump->run( i );
			AlphaReal edge = singleStump->getEdge();
			AlphaReal reward = getRewardFromEdge( (float) edge );
			
			payoffs[i] = reward;			
			delete singleStump;
		}

		// restore the original index set
		_pTrainingData->loadIndexSet( oldIndexSet );
	}
Developer: busarobi, Project: MDDAG, Lines: 53, Source: BanditSingleStumpLearner.cpp
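The subset-selection loop in estimatePayoffs is Knuth's selection sampling (Algorithm S): scanning the examples once and accepting example j with probability (draws still needed) / (items still remaining) yields a uniform random subset of exactly the requested size. A self-contained sketch of the same idea, with hypothetical names and the same plain rand() as the original:

#include <cstdlib>  // rand, RAND_MAX
#include <set>

// Selection sampling (Knuth's Algorithm S): pick numSubset indices
// uniformly at random out of numExamples in a single left-to-right pass.
void sampleIndices(int numExamples, int numSubset, std::set<int>& outSet)
{
    for (int j = 0; j < numExamples && numSubset > 0; ++j)
    {
        int remaining = numExamples - j;
        double r = std::rand() / static_cast<double>(RAND_MAX);
        if (static_cast<double>(numSubset) / remaining > r)
        {
            --numSubset;        // one fewer draw still needed
            outSet.insert(j);   // example j joins the subset
        }
    }
}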

Example 4: initLearningOptions

	void ProductLearner::initLearningOptions(const nor_utils::Args& args)
	{
		BaseLearner::initLearningOptions(args);

		string baseLearnerName;
		args.getValue("baselearnertype", 0, baseLearnerName);   
		args.getValue("baselearnertype", 1, _numBaseLearners);   

		// get the registered weak learner (type from name)
		BaseLearner* pWeakHypothesisSource = 
			BaseLearner::RegisteredLearners().getLearner(baseLearnerName);
		pWeakHypothesisSource->initLearningOptions(args);

		for( int ib = 0; ib < _numBaseLearners; ++ib ) {
			_baseLearners.push_back(pWeakHypothesisSource->create());
			_baseLearners[ib]->initLearningOptions(args);
		}
	}
Developer: busarobi, Project: MDDAG2, Lines: 18, Source: ProductLearner.cpp

Example 5: initLearningOptions

void TreeLearnerUCT::initLearningOptions(const nor_utils::Args& args)
{
    BaseLearner::initLearningOptions(args);

    string baseLearnerName;
    args.getValue("baselearnertype", 0, baseLearnerName);
    args.getValue("baselearnertype", 1, _numBaseLearners);

    // get the registered weak learner (type from name)
    BaseLearner* pWeakHypothesisSource =
        BaseLearner::RegisteredLearners().getLearner(baseLearnerName);

    for( int ib = 0; ib < _numBaseLearners; ++ib ) {
        _baseLearners.push_back(pWeakHypothesisSource->create());
        _baseLearners[ib]->initLearningOptions(args);

        vector< int > tmpVector( 2, -1 );
        _idxPairs.push_back( tmpVector );
    }

    string updateRule = "";
    if ( args.hasArgument( "updaterule" ) )
        args.getValue("updaterule", 0, updateRule );

    if ( updateRule.compare( "edge" ) == 0 )
        _updateRule = EDGE_SQUARE;
    else if ( updateRule.compare( "alphas" ) == 0 )
        _updateRule = ALPHAS;
    else if ( updateRule.compare( "edgesquare" ) == 0 )
        _updateRule = ESQUARE;
    else {
        cerr << "Unknown update rule in ProductLearnerUCT (set to default [edge]" << endl;
        _updateRule = EDGE_SQUARE;
    }

}
Developer: junjiek, Project: cmu-exp, Lines: 36, Source: TreeLearnerUCT.cpp

Example 6: run

    void FilterBoostLearner::run(const nor_utils::Args& args)
    {
        // load the arguments
        this->getArgs(args);

        time_t startTime, currentTime;
        time(&startTime);

        // get the registered weak learner (type from name)
        BaseLearner* pWeakHypothesisSource = 
            BaseLearner::RegisteredLearners().getLearner(_baseLearnerName);
        // initialize learning options; normally it's done in the strong loop
        // also, here we do it for Product learners, so input data can be created
        pWeakHypothesisSource->initLearningOptions(args);

        BaseLearner* pConstantWeakHypothesisSource = 
            BaseLearner::RegisteredLearners().getLearner("ConstantLearner");

        // get the training input data, and load it

        InputData* pTrainingData = pWeakHypothesisSource->createInputData();
        pTrainingData->initOptions(args);
        pTrainingData->load(_trainFileName, IT_TRAIN, _verbose);

        const int numClasses = pTrainingData->getNumClasses();
        const int numExamples = pTrainingData->getNumExamples();
                
        //initialize the margins variable
        _margins.resize( numExamples );
        for( int i=0; i<numExamples; i++ )
        {
            _margins[i].resize( numClasses );
            fill( _margins[i].begin(), _margins[i].end(), 0.0 );
        }


        // get the testing input data, and load it
        InputData* pTestData = NULL;
        if ( !_testFileName.empty() )
        {
            pTestData = pWeakHypothesisSource->createInputData();
            pTestData->initOptions(args);
            pTestData->load(_testFileName, IT_TEST, _verbose);
        }

        // The output information object
        OutputInfo* pOutInfo = NULL;


        if ( !_outputInfoFile.empty() ) 
        {
            // Baseline: constant classifier - goes into 0th iteration

            BaseLearner* pConstantWeakHypothesis = pConstantWeakHypothesisSource->create() ;
            pConstantWeakHypothesis->initLearningOptions(args);
            pConstantWeakHypothesis->setTrainingData(pTrainingData);
            AlphaReal constantEnergy = pConstantWeakHypothesis->run();

            pOutInfo = new OutputInfo(args);
            pOutInfo->initialize(pTrainingData);

            updateMargins( pTrainingData, pConstantWeakHypothesis );

            if (pTestData)
                pOutInfo->initialize(pTestData);
            pOutInfo->outputHeader(pTrainingData->getClassMap() );

            pOutInfo->outputIteration(-1);
            pOutInfo->outputCustom(pTrainingData, pConstantWeakHypothesis);

            if (pTestData)
            {
                pOutInfo->separator();
                pOutInfo->outputCustom(pTestData, pConstantWeakHypothesis);
            }
                        
            pOutInfo->outputCurrentTime();

            pOutInfo->endLine();
            pOutInfo->initialize(pTrainingData);
                        
            if (pTestData)
                pOutInfo->initialize(pTestData);
        }
        // reload the previously found weak learners if -resume is set. 
        // otherwise just return 0
        int startingIteration = resumeWeakLearners(pTrainingData);


        Serialization ss(_shypFileName, _isShypCompressed );
        ss.writeHeader(_baseLearnerName); // this must go after resumeProcess has been called

        // perform the resuming if necessary. If not it will just return
        resumeProcess(ss, pTrainingData, pTestData, pOutInfo);

        if (_verbose == 1)
            cout << "Learning in progress..." << endl;
                                
        ///////////////////////////////////////////////////////////////////////
        // Starting the AdaBoost main loop
//......... (part of the code omitted) .........
Developer: junjiek, Project: cmu-exp, Lines: 101, Source: FilterBoostLearner.cpp

Example 7: main


//......... (part of the code omitted) .........
		args.getValue("verbose", 0, verbose);
	
	//////////////////////////////////////////////////////////////////////////////////////////  
	//////////////////////////////////////////////////////////////////////////////////////////
	
	// defines the seed
	if (args.hasArgument("seed"))
	{
		unsigned int seed = args.getValue<unsigned int>("seed", 0);
		srand(seed);
	}
	
	//////////////////////////////////////////////////////////////////////////////////////////  
	//////////////////////////////////////////////////////////////////////////////////////////
	
	GenericStrongLearner* pModel = NULL;
	
	if ( args.hasArgument("train") ||
        args.hasArgument("traintest") || 
	    args.hasArgument("trainvalidtest") ) // for Viola-Jones Cascade
	{
		
		// get the name of the learner
		string baseLearnerName = defaultLearner;
		if ( args.hasArgument("learnertype") )
			args.getValue("learnertype", 0, baseLearnerName);
		
		checkBaseLearner(baseLearnerName);
		if (verbose > 1)    
			cout << "--> Using learner: " << baseLearnerName << endl;
		
		// This should be changed: the user should decide the strong learner
		BaseLearner*  pWeakHypothesisSource = BaseLearner::RegisteredLearners().getLearner(baseLearnerName);
		pModel = pWeakHypothesisSource->createGenericStrongLearner( args );
		
		pModel->run(args);
	}
	//////////////////////////////////////////////////////////////////////////////////////////
	//////////////////////////////////////////////////////////////////////////////////////////
	else if ( args.hasArgument("traintestmddag") )
	{
		// the strong hypothesis (shyp) file is the third argument of -traintestmddag
		string shypFileName = args.getValue<string>("traintestmddag", 2);
		
		string baseLearnerName = UnSerialization::getWeakLearnerName(shypFileName);
		
		BaseLearner*  pWeakHypothesisSource = BaseLearner::RegisteredLearners().getLearner(baseLearnerName);
		pModel = pWeakHypothesisSource->createGenericStrongLearner( args );
		
		pModel->run(args);
		
	}		
	//////////////////////////////////////////////////////////////////////////////////////////
	//////////////////////////////////////////////////////////////////////////////////////////
	else if ( args.hasArgument("test") )
	{
		// -test <dataFile> <shypFile> <numIters>
		string shypFileName = args.getValue<string>("test", 1);
		
		string baseLearnerName = UnSerialization::getWeakLearnerName(shypFileName);
                
		BaseLearner*  pWeakHypothesisSource = BaseLearner::RegisteredLearners().getLearner(baseLearnerName);
		pModel = pWeakHypothesisSource->createGenericStrongLearner( args );
		
		pModel->classify(args);
	}
Developer: busarobi, Project: MDDAG2, Lines: 67, Source: main.cpp

Example 8: run

	void FilterBoostLearner::run(const nor_utils::Args& args)
	{
		// load the arguments
		this->getArgs(args);

		time_t startTime, currentTime;
		time(&startTime);

		// get the registered weak learner (type from name)
		BaseLearner* pWeakHypothesisSource = 
			BaseLearner::RegisteredLearners().getLearner(_baseLearnerName);
		// initialize learning options; normally it's done in the strong loop
		// also, here we do it for Product learners, so input data can be created
		pWeakHypothesisSource->initLearningOptions(args);

		BaseLearner* pConstantWeakHypothesisSource = 
			BaseLearner::RegisteredLearners().getLearner("ConstantLearner");

		// get the training input data, and load it

		InputData* pTrainingData = pWeakHypothesisSource->createInputData();
		pTrainingData->initOptions(args);
		pTrainingData->load(_trainFileName, IT_TRAIN, _verbose);

		const int numClasses = pTrainingData->getNumClasses();
		const int numExamples = pTrainingData->getNumExamples();
		
		//initialize the margins variable
		_margins.resize( numExamples );
		for( int i=0; i<numExamples; i++ )
		{
			_margins[i].resize( numClasses );
			fill( _margins[i].begin(), _margins[i].end(), 0.0 );
		}


		// get the testing input data, and load it
		InputData* pTestData = NULL;
		if ( !_testFileName.empty() )
		{
			pTestData = pWeakHypothesisSource->createInputData();
			pTestData->initOptions(args);
			pTestData->load(_testFileName, IT_TEST, _verbose);
		}

		// The output information object
		OutputInfo* pOutInfo = NULL;


		if ( !_outputInfoFile.empty() ) 
		{
			// Baseline: constant classifier - goes into 0th iteration

			BaseLearner* pConstantWeakHypothesis = pConstantWeakHypothesisSource->create() ;
			pConstantWeakHypothesis->initLearningOptions(args);
			pConstantWeakHypothesis->setTrainingData(pTrainingData);
			float constantEnergy = pConstantWeakHypothesis->run();

			pOutInfo = new OutputInfo(_outputInfoFile);
			pOutInfo->initialize(pTrainingData);

			updateMargins( pTrainingData, pConstantWeakHypothesis );

			if (pTestData)
				pOutInfo->initialize(pTestData);
			pOutInfo->outputHeader();

			pOutInfo->outputIteration(-1);
			pOutInfo->outputError(pTrainingData, pConstantWeakHypothesis);

			if (pTestData)
				pOutInfo->outputError(pTestData, pConstantWeakHypothesis);
			/*
			pOutInfo->outputMargins(pTrainingData, pConstantWeakHypothesis);
			
			pOutInfo->outputEdge(pTrainingData, pConstantWeakHypothesis);

			if (pTestData)
				pOutInfo->outputMargins(pTestData, pConstantWeakHypothesis);

			pOutInfo->outputMAE(pTrainingData);

			if (pTestData)
				pOutInfo->outputMAE(pTestData);
			*/
			pOutInfo->outputCurrentTime();

			pOutInfo->endLine();
			pOutInfo->initialize(pTrainingData);
			
			if (pTestData)
				pOutInfo->initialize(pTestData);
		}
		// reload the previously found weak learners if -resume is set. 
		// otherwise just return 0
		int startingIteration = resumeWeakLearners(pTrainingData);


		Serialization ss(_shypFileName, _isShypCompressed );
		ss.writeHeader(_baseLearnerName); // this must go after resumeProcess has been called
//......... (part of the code omitted) .........
Developer: ShenWei, Project: src, Lines: 101, Source: FilterBoostLearner.cpp

Example 9: calculateChildrenAndEnergies

void TreeLearnerUCT::calculateChildrenAndEnergies( NodePointUCT& bLearner, int depthIndex ) {
    bLearner._extended = true;
    _pTrainingData->loadIndexSet( bLearner._learnerIdxSet );

    //separate the dataset
    set< int > idxPos, idxNeg;
    idxPos.clear();
    idxNeg.clear();
    float phix;

    for (int i = 0; i < _pTrainingData->getNumExamples(); ++i) {
        // this returns the phi value of classifier
        phix = bLearner._learner->classify(_pTrainingData,i,0);
        if ( phix <  0 )
            idxNeg.insert( _pTrainingData->getRawIndex( i ) );
        else if ( phix > 0 ) { // have to redo the multiplications, haven't been tested
            idxPos.insert( _pTrainingData->getRawIndex( i ) );
        }
    }

    if ( (idxPos.size() < 1 ) || (idxNeg.size() < 1 ) ) {
        //retval.clear();
        bLearner._extended = false;
        //return retval;
    }

    _pTrainingData->loadIndexSet( idxPos );

    if ( ! _pTrainingData->isSamplesFromOneClass() ) {
        BaseLearner* posLearner = _baseLearners[0]->copyState();

        //posLearner->run();
        dynamic_cast<FeaturewiseLearner*>(posLearner)->run( depthIndex );
        //
        //float posEdge = getEdge( posLearner, _pTrainingData );
        posLearner->setTrainingData( _pTrainingData );
        bLearner._leftEdge = posLearner->getEdge();

        //tmpPair.first = posEdge;
        //tmpPair.second.first.first = posLearner;
        bLearner._leftChild = posLearner;
        //set the parent idx to zero
        //tmpPair.second.first.second.first = 0;
        //this means that it will be a left child in the tree
        //tmpPair.second.first.second.second = 0;
        //tmpPair.second.second = idxPos;
        bLearner._leftChildIdxSet = idxPos;
    } else {
        BaseLearner* pConstantWeakHypothesisSource =
            BaseLearner::RegisteredLearners().getLearner("ConstantLearner");

        BaseLearner* posLearner = pConstantWeakHypothesisSource->create();
        posLearner->setTrainingData(_pTrainingData);
        //float constantEnergy = posLearner->run();
        dynamic_cast<FeaturewiseLearner*>(posLearner)->run( depthIndex );

        //BaseLearner* posLearner = _baseLearners[0]->copyState();
        //float posEdge = getEdge( posLearner, _pTrainingData );
        posLearner->setTrainingData( _pTrainingData );
        bLearner._leftEdge = posLearner->getEdge();

        //tmpPair.first = posEdge;
        //tmpPair.second.first.first = posLearner;
        bLearner._leftChild = posLearner;
        //set the parent idx to zero
        //tmpPair.second.first.second.first = 0;
        //this means that it will be a left child in the tree
        //tmpPair.second.first.second.second = 0;
        //tmpPair.second.second = idxPos;
        bLearner._leftChildIdxSet = idxPos;
    }

    //retval.push_back( tmpPair );

    _pTrainingData->loadIndexSet( idxNeg );

    if ( ! _pTrainingData->isSamplesFromOneClass() ) {
        BaseLearner* negLearner = _baseLearners[0]->copyState();


        //negLearner->run();
        dynamic_cast<FeaturewiseLearner*>(negLearner)->run( depthIndex );
        //float negEdge = getEdge( negLearner, _pTrainingData );

        negLearner->setTrainingData( _pTrainingData );
        bLearner._rightEdge = negLearner->getEdge();
        //tmpPair.first = negEdge;
        //tmpPair.second.first.first = negLearner;
        bLearner._rightChild = negLearner;
        //set the parent idx to zero
        //tmpPair.second.first.second.first = 0;
        //this means that it will be a right child in the tree
        //tmpPair.second.first.second.second = 1;
        //tmpPair.second.second = idxNeg;
        bLearner._rightChildIdxSet = idxNeg;
    } else {
        BaseLearner* pConstantWeakHypothesisSource =
            BaseLearner::RegisteredLearners().getLearner("ConstantLearner");

        BaseLearner* negLearner =  pConstantWeakHypothesisSource->create();
//......... (part of the code omitted) .........
Developer: junjiek, Project: cmu-exp, Lines: 101, Source: TreeLearnerUCT.cpp
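Examples 9 and 11 both start by partitioning the node's current index set according to the sign of the node's classifier output. Stripped of the surrounding bookkeeping, the split looks like this sketch (pLearner and pTrainingData are placeholders for the node's learner and the loaded data; classify() and getRawIndex() mirror the API used above):

// Partition the node's examples by the sign of its classifier output.
set<int> idxPos, idxNeg;
for (int i = 0; i < pTrainingData->getNumExamples(); ++i) {
    float phi = pLearner->classify(pTrainingData, i, 0);
    if (phi < 0)
        idxNeg.insert(pTrainingData->getRawIndex(i));
    else if (phi > 0)
        idxPos.insert(pTrainingData->getRawIndex(i));
    // phi == 0: the example is assigned to neither child
}
// A node with an empty child cannot be extended further:
bool extended = !idxPos.empty() && !idxNeg.empty();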

Example 10: run

	// -------------------------------------------------------------------------
	void AdaBoostMHLearner::run( const nor_utils::Args& args, InputData* pTrainingData, const string baseLearnerName, const int numIterations, vector<BaseLearner*>& foundHypotheses )
	{
		
		// get the registered weak learner (type from name)
		BaseLearner* pWeakHypothesisSource = 
		BaseLearner::RegisteredLearners().getLearner(baseLearnerName);
		// initialize learning options; normally it's done in the strong loop
		// also, here we do it for Product learners, so input data can be created
		pWeakHypothesisSource->initLearningOptions(args);
		
		BaseLearner* pConstantWeakHypothesisSource = 
		BaseLearner::RegisteredLearners().getLearner("ConstantLearner");
		
							
		if (_verbose == 1)
			cout << "Learning in progress..." << endl;
		
		
		///////////////////////////////////////////////////////////////////////
		// Starting the AdaBoost main loop
		///////////////////////////////////////////////////////////////////////
		for (int t = 0; t < numIterations; ++t)
		{
			if ((_verbose > 0)&&((t%100)==0))
				cout << "--------------[ Boosting iteration " << (t+1) << " ]--------------" << endl;				
			
			BaseLearner* pWeakHypothesis = pWeakHypothesisSource->create();
			pWeakHypothesis->initLearningOptions(args);
			//pTrainingData->clearIndexSet();
			
			pWeakHypothesis->setTrainingData(pTrainingData);
			
			AlphaReal energy = pWeakHypothesis->run();
			
			//float gamma = pWeakHypothesis->getEdge();
			//cout << gamma << endl;
			
			if ( (_withConstantLearner) || ( energy != energy ) ) // try the constant learner if the user asked for it (if the energy is NaN, we fall back to it)
			{
				BaseLearner* pConstantWeakHypothesis = pConstantWeakHypothesisSource->create() ;
				pConstantWeakHypothesis->initLearningOptions(args);
				pConstantWeakHypothesis->setTrainingData(pTrainingData);
				AlphaReal constantEnergy = pConstantWeakHypothesis->run();
				
				if ( (constantEnergy <= energy) || ( energy != energy ) ) {
					delete pWeakHypothesis;
					pWeakHypothesis = pConstantWeakHypothesis;
				}
			}
			
			if (_verbose > 1)
				cout << "Weak learner: " << pWeakHypothesis->getName()<< endl;
			
			// Updates the weights and returns the edge
			AlphaReal gamma = updateWeights(pTrainingData, pWeakHypothesis);
			
			if (_verbose > 1)
			{
				cout << setprecision(5)
				<< "--> Alpha = " << pWeakHypothesis->getAlpha() << endl
				<< "--> Edge  = " << gamma << endl
				<< "--> Energy  = " << energy << endl
				//            << "--> ConstantEnergy  = " << constantEnergy << endl
				//            << "--> difference  = " << (energy - constantEnergy) << endl
				;
			}
			
			// If gamma <= theta the algorithm must stop.
			// If theta == 0 and gamma is 0, it means that the weak learner is no better than chance
			// and no further training is possible.
			if (gamma <= _theta)
			{
				if (_verbose > 0)
				{
					cout << "Can't train any further: edge = " << gamma 
					<< " (with and edge offset (theta)=" << _theta << ")" << endl;
				}
				
				//          delete pWeakHypothesis;
				//          break; 
			}
						
			// Add it to the internal list of weak hypotheses
			foundHypotheses.push_back(pWeakHypothesis); 
			
		}  // loop on iterations
		/////////////////////////////////////////////////////////
		
		if (_verbose > 0)
			cout << "--------------[ AdaBoost Learning completed. ]--------------" << endl;
	}
Developer: busarobi, Project: MDDAG2, Lines: 92, Source: AdaBoostMHLearner.cpp
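Every iteration of the loop above is guarded by a constant-learner fallback: if the trained weak hypothesis is worse than the constant classifier, or its energy is NaN, the constant hypothesis is kept instead. The core of that pattern, extracted from the loop (a sketch under the same API assumptions as the examples; the initLearningOptions calls are omitted for brevity):

// Train the real weak hypothesis and the constant baseline, keep the better one.
BaseLearner* pWeakHypothesis = pWeakHypothesisSource->create();
pWeakHypothesis->setTrainingData(pTrainingData);
AlphaReal energy = pWeakHypothesis->run();

BaseLearner* pConstant = pConstantWeakHypothesisSource->create();
pConstant->setTrainingData(pTrainingData);
AlphaReal constantEnergy = pConstant->run();

if (constantEnergy <= energy || energy != energy) { // energy != energy <=> NaN
    delete pWeakHypothesis;
    pWeakHypothesis = pConstant;    // fall back to the constant learner
} else {
    delete pConstant;               // keep the trained weak hypothesis
}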

Example 11: calculateChildrenAndEnergies

	void BanditTreeLearner::calculateChildrenAndEnergies( NodePoint& bLearner ) {
		bLearner._extended = true;
		_pTrainingData->loadIndexSet( bLearner._learnerIdxSet );

		//separate the dataset
		set< int > idxPos, idxNeg;
		idxPos.clear();
		idxNeg.clear();
		float phix;
		float energy;

		for (int i = 0; i < _pTrainingData->getNumExamples(); ++i) {
			// this returns the phi value of classifier
			phix = bLearner._learner->classify(_pTrainingData,i,0);
			if ( phix <  0 )
				idxNeg.insert( _pTrainingData->getRawIndex( i ) );
			else if ( phix > 0 ) { // have to redo the multiplications, haven't been tested
				idxPos.insert( _pTrainingData->getRawIndex( i ) );
			}
		}

		if ( (idxPos.size() < 1 ) || (idxNeg.size() < 1 ) ) {
			bLearner._extended = false;
		}

		_pTrainingData->loadIndexSet( idxPos );
		energy = numeric_limits<float>::signaling_NaN();	

		if ( ! _pTrainingData->isSamplesFromOneClass() ) {
			ScalarLearner* posLearner = dynamic_cast<ScalarLearner* >(_baseLearners[0]->copyState());

			energy = dynamic_cast<FeaturewiseLearner* >(posLearner)->run( _armsForPulling );
			if ( energy == energy ) {
				bLearner._leftEdge = posLearner->getEdge();

				bLearner._leftChild = posLearner;
				bLearner._leftChildIdxSet = idxPos;
			} else {
				delete posLearner;
			}
		}

		if ( energy != energy ) { // we didn't find a usable column; this can occur with sparse data
			BaseLearner* pConstantWeakHypothesisSource = 
				BaseLearner::RegisteredLearners().getLearner("ConstantLearner");

			ScalarLearner* posLearner = dynamic_cast<ScalarLearner* >(pConstantWeakHypothesisSource->create());
			posLearner->setTrainingData(_pTrainingData);
			float constantEnergy = posLearner->run();

			bLearner._leftEdge = posLearner->getEdge();
			bLearner._leftChild = posLearner;
			bLearner._leftChildIdxSet = idxPos;
		}

		_pTrainingData->loadIndexSet( idxNeg );
		energy = numeric_limits<float>::signaling_NaN();

		if ( ! _pTrainingData->isSamplesFromOneClass() ) {
			ScalarLearner* negLearner = dynamic_cast<ScalarLearner* >(_baseLearners[0]->copyState());


			energy = dynamic_cast< FeaturewiseLearner* >(negLearner)->run( _armsForPulling );
			if ( energy == energy ) 
			{
				bLearner._rightEdge = negLearner->getEdge();
				bLearner._rightChild = negLearner;
				bLearner._rightChildIdxSet = idxNeg;
			} else {
				delete negLearner;
			}
		}

		if ( energy != energy ) 
		{
			BaseLearner* pConstantWeakHypothesisSource = 
				BaseLearner::RegisteredLearners().getLearner("ConstantLearner");

			ScalarLearner* negLearner =  dynamic_cast<ScalarLearner* >(pConstantWeakHypothesisSource->create());
			negLearner->setTrainingData(_pTrainingData);
			float constantEnergy = negLearner->run();

			bLearner._rightEdge = negLearner->getEdge();
			bLearner._rightChild = negLearner;

			bLearner._rightChildIdxSet = idxNeg;
		}

	}
Developer: ShenWei, Project: src, Lines: 89, Source: BanditTreeLearner.cpp

Example 12: run

	float BanditProductLearner::run()
	{
		if ( ! this->_banditAlgo->isInitialized() ) {
			init();
		}
		// the bandit algorithm selects the subset the tree learner is allowed to use
		// the armindexes will be stored in _armsForPulling
		getArms();

		const int numClasses = _pTrainingData->getNumClasses();
		const int numExamples = _pTrainingData->getNumExamples();

		// Backup original labels
		for (int i = 0; i < numExamples; ++i) {
			const vector<Label>& labels = _pTrainingData->getLabels(i);
			vector<char> exampleLabels;
			for (int l = 0; l < numClasses; ++l)
				exampleLabels.push_back(labels[l].y);
			_savedLabels.push_back(exampleLabels);
		}

		for(int ib = 0; ib < _numBaseLearners; ++ib)
			_baseLearners[ib]->setTrainingData(_pTrainingData);

		float energy = numeric_limits<float>::max();
		float previousEnergy, hx, previousAlpha;
		BaseLearner* pPreviousBaseLearner = 0;

		bool firstLoop = true;
		int ib = -1;
		while (1) {
			ib += 1;
			if (ib >= _numBaseLearners) {
				ib = 0;
				firstLoop = false;
			}
			previousEnergy = energy;
			previousAlpha = _alpha;
			if (pPreviousBaseLearner)
				delete pPreviousBaseLearner;
			if ( !firstLoop ) {
				// take the old learner off the labels
				for (int i = 0; i < numExamples; ++i) {
					vector<Label>& labels = _pTrainingData->getLabels(i);
					for (int l = 0; l < numClasses; ++l) {
						// Here we could have the option of using confidence rated setting so the
						// real valued output of classify instead of its sign
						hx = _baseLearners[ib]->classify(_pTrainingData,i,l);
						if ( hx < 0 )
							labels[l].y *= -1;
						else if ( hx == 0 ) { // have to redo the multiplications, haven't been tested
							for(int ib1 = 0; ib1 < _numBaseLearners && labels[l].y != 0; ++ib1) {
								if (ib != ib1) {
									hx = _baseLearners[ib1]->classify(_pTrainingData,i,l);
									if (hx < 0)
										labels[l].y *= -1;
									else if (hx == 0)
										labels[l].y = 0;
								}
							}
						}
					}
				}
			}
			pPreviousBaseLearner = _baseLearners[ib]->copyState();
			energy = dynamic_cast< FeaturewiseLearner* >(_baseLearners[ib])->run(_armsForPulling );
			// check whether the energy is NaN (i.e., run() failed)
			if ( energy != energy )
			{
				if (_verbose > 2) {
					cout << "Cannot find weak hypothesis, constant learner is used!!" << endl;
				}
				BaseLearner* pConstantWeakHypothesisSource = 
					BaseLearner::RegisteredLearners().getLearner("ConstantLearner");
				BaseLearner* pConstantWeakHypothesis = pConstantWeakHypothesisSource->create() ;
				pConstantWeakHypothesis->setTrainingData( _pTrainingData );
				energy = pConstantWeakHypothesis->run();
				
				delete _baseLearners[ib];
				_baseLearners[ib] = pConstantWeakHypothesis;
				
			}
			_alpha = _baseLearners[ib]->getAlpha();
			if (_verbose > 2) {
				cout << "E[" << (ib+1) <<  "] = " << energy << endl << flush;
				cout << "alpha[" << (ib+1) <<  "] = " << _alpha << endl << flush;
			}
			for (int i = 0; i < numExamples; ++i) {
				vector<Label>& labels = _pTrainingData->getLabels(i);
				for (int l = 0; l < numClasses; ++l) {
					// Here we could have the option of using confidence rated setting so the
					// real valued output of classify instead of its sign
					if (labels[l].y != 0) { // perhaps replace it by nor_utils::is_zero(labels[l].y)
						hx = _baseLearners[ib]->classify(_pTrainingData,i,l);
						if ( hx < 0 )
							labels[l].y *= -1;
						else if ( hx == 0 )
							labels[l].y = 0;
					}
				}
//......... (part of the code omitted) .........
Developer: ShenWei, Project: src, Lines: 101, Source: BanditProductLearner.cpp


Note: the BaseLearner::create examples in this article were collected from open-source projects hosted on GitHub, MSDocs, and similar platforms. The snippets are taken from projects contributed by their original authors, who retain the copyright; consult the corresponding project's license before using or redistributing the code.