本文整理汇总了C++中nor_utils::Args::hasArgument方法的典型用法代码示例。如果您正苦于以下问题:C++ Args::hasArgument方法的具体用法?C++ Args::hasArgument怎么用?C++ Args::hasArgument使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类nor_utils::Args
的用法示例。
在下文中一共展示了Args::hasArgument方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: initLearningOptions
// Reads the learning options common to every base learner:
// the verbosity level and the edge offset (theta).
void BaseLearner::initLearningOptions(const nor_utils::Args& args)
{
    // -verbose <level>
    if (args.hasArgument("verbose")) {
        args.getValue("verbose", 0, _verbose);
    }

    // -edgeoffset <value>: sets theta
    if (args.hasArgument("edgeoffset")) {
        args.getValue("edgeoffset", 0, _theta);
    }
}
示例2:
// Builds the Viola-Jones cascade classifier from command-line options.
// The "positivelabel" option is mandatory: without it the constructor
// aborts the program, since the cascade cannot run without knowing
// which class label counts as positive.
VJCascadeClassifier::VJCascadeClassifier(const nor_utils::Args &args, int verbose)
: _verbose(verbose), _args(args), _positiveLabelIndex(-1)
{
    // The file with the step-by-step information
    if ( args.hasArgument("outputinfo") )
        args.getValue("outputinfo", 0, _outputInfoFile);

    if ( args.hasArgument("positivelabel") )
    {
        args.getValue("positivelabel", 0, _positiveLabelName);
    } else {
        // FIX: report the fatal error on stderr (was stdout), consistent
        // with error reporting in the rest of the code base.
        cerr << "The name of positive label has to be given!!!" << endl;
        exit(-1);
    }
}
示例3: getArgs
// Parses FilterBoost-specific options on top of the AdaBoost.MH ones.
void FilterBoostLearner::getArgs(const nor_utils::Args& args)
{
    // Let the parent class consume its own options first.
    AdaBoostMHLearner::getArgs(args);

    // -Cn <size>: number of examples drawn at each resampling step.
    if (args.hasArgument("Cn")) {
        args.getValue("Cn", 0, _Cn);
        if (_verbose > 1) {
            cout << "--> Resampling size: " << _Cn << endl;
        }
    }

    // --onlinetraining: switch the weak learners to online training.
    if (args.hasArgument("onlinetraining")) {
        _onlineWeakLearning = true;
    }
}
示例4:
// Stores the option parser and verbosity; optionally picks up the name
// of the file receiving the step-by-step information.
MDDAGClassifier::MDDAGClassifier(const nor_utils::Args &args, int verbose)
: _verbose(verbose), _args(args)
{
    if (args.hasArgument("outputinfo")) {
        args.getValue("outputinfo", 0, _outputInfoFile);
    }
}
示例5:
//----------------------------------------------------------------
//----------------------------------------------------------------
// Reads the Exp3 options: the gamma parameter, when supplied.
void Exp3::initLearningOptions(const nor_utils::Args& args)
{
    // -gamma <value> (presumably the Exp3 mixing/exploration
    // parameter — confirm against the Exp3 update rule).
    if (args.hasArgument("gamma")) {
        _gamma = args.getValue<double>("gamma", 0);
    }
}
示例6: initLearningOptions
// Reads the options controlling stochastic (gradient-based) training:
// initial step size, step-size decay period, iteration cap, gradient
// method (sgd/bgd) and target function (exploss/edge).
void StochasticLearner::initLearningOptions(const nor_utils::Args& args)
{
    BaseLearner::initLearningOptions(args);

    // -initgamma <value>: initial gradient step size.
    if (args.hasArgument("initgamma"))
        args.getValue("initgamma", 0, _initialGammat);

    // -gammdivperiod <n>: period of step-size division.
    if (args.hasArgument("gammdivperiod"))
        args.getValue("gammdivperiod", 0, _gammdivperiod);

    // -graditer <n>: maximum number of gradient iterations.
    if (args.hasArgument("graditer"))
        args.getValue("graditer", 0, _maxIter);

    // -gradmethod {sgd|bgd}
    if (args.hasArgument("gradmethod"))
    {
        string gradMethod;
        args.getValue("gradmethod", 0, gradMethod);

        if ( gradMethod.compare( "sgd" ) == 0 )
            _gMethod = OPT_SGD;
        else if ( gradMethod.compare( "bgd" ) == 0 )
            _gMethod = OPT_BGD;
        else {
            // FIX: message previously named the wrong class
            // (SigmoidSingleStumpLearner) — copy-paste error.
            cerr << "StochasticLearner::Unknown update gradient method" << endl;
            exit( -1 );
        }
    }

    // -tfunc {exploss|edge}
    if (args.hasArgument("tfunc"))
    {
        string targetFunction;
        args.getValue("tfunc", 0, targetFunction);

        if ( targetFunction.compare( "exploss" ) == 0 )
            _tFunction = TF_EXPLOSS;
        else if ( targetFunction.compare( "edge" ) == 0 )
            _tFunction = TF_EDGE;
        else {
            // FIX: same wrong-class-name copy-paste error as above.
            cerr << "StochasticLearner::Unknown target function" << endl;
            exit( -1 );
        }
    }
}
示例7: initLearningOptions
// Reads the base-learner options, then the EnumLearnerSA-specific
// -uoffset option.
void EnumLearnerSA::initLearningOptions(const nor_utils::Args& args)
{
    BaseLearner::initLearningOptions(args);

    if (args.hasArgument("uoffset")) {
        args.getValue("uoffset", 0, _uOffset);
    }
}
示例8: initLearningOptions
// Reads the abstainable-learner options, then the optional cap on the
// number of feature dimensions to examine.
void FeaturewiseLearner::initLearningOptions(const nor_utils::Args& args)
{
    AbstainableLearner::initLearningOptions(args);

    // Default: no cap, examine every dimension.
    _maxNumOfDimensions = numeric_limits<int>::max();

    // -rsample <n>: sample at most n dimensions.
    if (args.hasArgument("rsample")) {
        _maxNumOfDimensions = args.getValue<int>("rsample", 0);
    }
}
示例9: getArgs
// Parses the MDDAG options, then the MultiMDDAG-specific update
// percentage.
void MultiMDDAGLearner::getArgs(const nor_utils::Args& args)
{
    MDDAGLearner::getArgs(args);

    // -updateperc <n>
    if (args.hasArgument("updateperc")) {
        args.getValue("updateperc", 0, _randomNPercent);
    }
}
示例10: initLearningOptions
// Reads the base-learner options, the mandatory pool description
// (-pool <file> <numBaseLearners>) and the optional -closed flag.
void ParasiteLearner::initLearningOptions(const nor_utils::Args& args)
{
    BaseLearner::initLearningOptions(args);

    // -pool <file> <n>: pool file and number of base learners in it.
    args.getValue("pool", 0, _nameBaseLearnerFile);
    args.getValue("pool", 1, _numBaseLearners);

    // -closed: restrict to the closed setting.
    if (args.hasArgument("closed")) {
        _closed = 1;
    }
}
示例11: doConfusionMatrix
// Prints or saves the confusion matrix of a trained strong hypothesis,
// depending on which of the two mutually exclusive options was given.
void FilterBoostLearner::doConfusionMatrix(const nor_utils::Args& args)
{
    FilterBoostClassifier classifier(args, _verbose);

    // -cmatrix <dataFile> <shypFile>: print to stdout.
    if ( args.hasArgument("cmatrix") )
    {
        string testFileName = args.getValue<string>("cmatrix", 0);
        string shypFileName = args.getValue<string>("cmatrix", 1);

        classifier.printConfusionMatrix(testFileName, shypFileName);
    }
    // -cmatrixfile <dataFile> <shypFile> <outFile>: save to a file.
    else if ( args.hasArgument("cmatrixfile") )
    {
        // FIX: this branch used to read its values from "cmatrix",
        // which is not present in this branch — read "cmatrixfile".
        string testFileName = args.getValue<string>("cmatrixfile", 0);
        string shypFileName = args.getValue<string>("cmatrixfile", 1);
        string outResFileName = args.getValue<string>("cmatrixfile", 2);

        classifier.saveConfusionMatrix(testFileName, shypFileName, outResFileName);
    }
}
示例12: resumeProcess
// Resumes the learning process: (re)creates the policy object and
// returns the number of policies restored (currently always 0).
int MultiMDDAGLearner::resumeProcess(const nor_utils::Args& args, InputData* pTestData)
{
    int numPolicies = 0;

    // NOTE(review): policyAlpha is parsed but never handed to the policy
    // constructed below — looks like a latent bug; confirm whether
    // -policyalpha is meant to reach AdaBoostArrayOfPolicyArray.
    AlphaReal policyAlpha = 0.0;
    if (args.hasArgument("policyalpha")) {
        args.getValue("policyalpha", 0, policyAlpha);
    }

    _policy = new AdaBoostArrayOfPolicyArray(args, _actionNumber);

    return numPolicies;
}
示例13: initLearningOptions
// Reads the base-learner options, then the abstention mode
// (-abstention {greedy|full|real|classwise}). Aborts on an
// unrecognized mode.
void AbstainableLearner::initLearningOptions(const nor_utils::Args& args)
{
    BaseLearner::initLearningOptions(args);

    if (args.hasArgument("abstention"))
    {
        const string mode = args.getValue<string>("abstention", 0);

        if (mode == "greedy") {
            _abstention = ABST_GREEDY;
        } else if (mode == "full") {
            _abstention = ABST_FULL;
        } else if (mode == "real") {
            _abstention = ABST_REAL;
        } else if (mode == "classwise") {
            _abstention = ABST_CLASSWISE;
        } else {
            cerr << "ERROR: Invalid type of abstention <" << mode << ">!!" << endl;
            exit(1);
        }
    }
}
示例14: initLearningOptions
// Reads the tree-learner options: instantiates the requested number of
// inner base learners of the requested type, and selects the UCT
// update rule (-updaterule {edge|alphas|edgesquare}, default edge).
void TreeLearnerUCT::initLearningOptions(const nor_utils::Args& args)
{
    BaseLearner::initLearningOptions(args);

    // -baselearnertype <name> <num>
    string baseLearnerName;
    args.getValue("baselearnertype", 0, baseLearnerName);
    args.getValue("baselearnertype", 1, _numBaseLearners);

    // Get the registered weak learner (type from name).
    BaseLearner* pWeakHypothesisSource =
        BaseLearner::RegisteredLearners().getLearner(baseLearnerName);

    for( int ib = 0; ib < _numBaseLearners; ++ib ) {
        _baseLearners.push_back(pWeakHypothesisSource->create());
        _baseLearners[ib]->initLearningOptions(args);

        // One (parent, child) index pair per base learner, -1 = unset.
        vector< int > tmpVector( 2, -1 );
        _idxPairs.push_back( tmpVector );
    }

    string updateRule = "";
    if ( args.hasArgument( "updaterule" ) )
        args.getValue("updaterule", 0, updateRule );

    if ( updateRule.compare( "edge" ) == 0 )
        _updateRule = EDGE_SQUARE;
    else if ( updateRule.compare( "alphas" ) == 0 )
        _updateRule = ALPHAS;
    else if ( updateRule.compare( "edgesquare" ) == 0 )
        _updateRule = ESQUARE;
    else {
        // FIX: message previously named the wrong class (ProductLearnerUCT,
        // copy-paste error) and was missing its closing parenthesis.
        cerr << "Unknown update rule in TreeLearnerUCT (set to default [edge])" << endl;
        _updateRule = EDGE_SQUARE;
    }
}
示例15: getArgs
void SoftCascadeLearner::getArgs(const nor_utils::Args& args)
{
if ( args.hasArgument("verbose") )
args.getValue("verbose", 0, _verbose);
///////////////////////////////////////////////////
// get the output strong hypothesis file name, if given
if ( args.hasArgument("shypname") )
args.getValue("shypname", 0, _shypFileName);
else
_shypFileName = string(SHYP_NAME);
_shypFileName = nor_utils::addAndCheckExtension(_shypFileName, SHYP_EXTENSION);
///////////////////////////////////////////////////
//TODO : create an abstract classe for cascade compliant base learners and accept only its offspring!
// get the name of the learner
_baseLearnerName = defaultLearner;
if ( args.hasArgument("learnertype") )
args.getValue("learnertype", 0, _baseLearnerName);
// cout << "! Only HaarSingleStumpeLearner is allowed.\n";
// -train <dataFile> <nInterations>
if ( args.hasArgument("train") )
{
args.getValue("train", 0, _trainFileName);
args.getValue("train", 1, _numIterations);
}
// -traintest <trainingDataFile> <testDataFile> <nInterations>
else if ( args.hasArgument("traintest") )
{
args.getValue("traintest", 0, _trainFileName);
args.getValue("traintest", 1, _testFileName);
args.getValue("traintest", 2, _numIterations);
}
// The file with the step-by-step information
if ( args.hasArgument("outputinfo") )
args.getValue("outputinfo", 0, _outputInfoFile);
else
_outputInfoFile = OUTPUT_NAME;
// --constant: check constant learner in each iteration
if ( args.hasArgument("constant") )
_withConstantLearner = true;
if ( args.hasArgument("positivelabel") )
{
args.getValue("positivelabel", 0, _positiveLabelName);
} else {
cout << "Error : The name of positive label must to given. \n Type --h softcascade to know the mandatory options." << endl;
exit(-1);
}
if (args.hasArgument("trainposteriors")) {
args.getValue("trainposteriors", 0, _trainPosteriorsFileName);
}
if (args.hasArgument("testposteriors")) {
args.getValue("testposteriors", 0, _testPosteriorsFileName);
}
if (args.hasArgument("detectionrate")) {
args.getValue("detectionrate", 0, _targetDetectionRate);
}
else {
cout << "Error : the target detection rate must be given. \n Type --h softcascade to know the mandatory options.";
exit(-1);
}
if (args.hasArgument("expalpha")) {
args.getValue("expalpha", 0, _alphaExponentialParameter);
}
else {
cout << "Error : the parameter used to initialize the rejection distribution vector must be given. \n Type --h softcascade to know the mandatory options.";
exit(-1);
}
if (args.hasArgument("calibrate")) {
args.getValue("calibrate", 0, _unCalibratedShypFileName);
if (args.getNumValues("calibrate") > 1) {
args.getValue("calibrate", 0, _inShypLimit);
}
}
else {
_fullRun = true;
_unCalibratedShypFileName = "shypToBeCalibrated.xml";
cout << "The strong hypothesis file will be seved into the file " << _unCalibratedShypFileName;
//cout << "Error : the shyp file of the uncalibrated trained classifier must be given ! \n";
//exit(-1);
}
if (args.hasArgument("bootstrap")) {
cout << "Warning ! The bootstrapping set and the training set must come from the same superset. \n";
args.getValue("bootstrap", 0, _bootstrapFileName);
args.getValue("bootstrap", 1, _bootstrapRate);
//.........这里部分代码省略.........