This page collects typical usage examples of the C++ method tmva::Factory::TrainAllMethodsForClassification. If you are wondering what Factory::TrainAllMethodsForClassification does, how to call it, or are looking for examples of its use, the hand-picked code samples below may help. You can also browse further usage examples of the containing class, tmva::Factory.
The section below shows 3 code examples of the Factory::TrainAllMethodsForClassification method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ code examples.
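Before the full examples, here is a minimal sketch of the usual Factory workflow in which TrainAllMethodsForClassification is called. The file name toy.root, the tree names TreeS/TreeB, and the variable names var1/var2 are placeholder assumptions; the method itself belongs to older TMVA releases (newer releases use TrainAllMethods instead).
#include "TFile.h"
#include "TTree.h"
#include "TMVA/Factory.h"
#include "TMVA/Types.h"

void MinimalTrain()
{
   // Output file that will receive the training results and evaluation histograms.
   TFile* outputFile = TFile::Open( "tmva_minimal.root", "RECREATE" );
   TMVA::Factory* factory = new TMVA::Factory( "Minimal", outputFile, "!V:!Silent" );

   // Discriminating variables (placeholder names).
   factory->AddVariable( "var1", 'F' );
   factory->AddVariable( "var2", 'F' );

   // Input trees (placeholder file and tree names).
   TFile* input = TFile::Open( "toy.root" );
   factory->AddSignalTree    ( (TTree*)input->Get("TreeS"), 1.0 );
   factory->AddBackgroundTree( (TTree*)input->Get("TreeB"), 1.0 );
   factory->PrepareTrainingAndTestTree( "", "", "SplitMode=Random:!V" );

   // Book at least one classifier, then run the three standard steps.
   factory->BookMethod( TMVA::Types::kBDT, "BDT", "!H:!V:NTrees=200" );
   factory->TrainAllMethodsForClassification(); // train every booked method
   factory->TestAllMethods();                   // apply them to the test sample
   factory->EvaluateAllMethods();               // compare their performance

   outputFile->Close();
   delete factory;
}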
Example 1: Boost
void Boost(){
TString outfileName = "boost.root";
TFile* outputFile = TFile::Open( outfileName, "RECREATE" );
TMVA::Factory *factory = new TMVA::Factory( "TMVAClassification", outputFile,
"!V:!Silent:Color:DrawProgressBar:Transformations=I;D;P;G,D" );
factory->AddVariable( "var0", 'F' );
factory->AddVariable( "var1", 'F' );
TFile *input(0);
TString fname = "./data.root";
if (!gSystem->AccessPathName( fname )) {
// first we try to find data.root in the local directory
std::cout << "--- BOOST : Accessing " << fname << std::endl;
input = TFile::Open( fname );
}
else {
gROOT->LoadMacro( "../development/createData.C");
create_circ(20000);
std::cout << " created data.root with signal and background events arranged in half circles" << std::endl;
input = TFile::Open( fname );
}
if (!input) {
std::cout << "ERROR: could not open data file" << std::endl;
exit(1);
}
TTree *signal = (TTree*)input->Get("TreeS");
TTree *background = (TTree*)input->Get("TreeB");
Double_t signalWeight = 1.0;
Double_t backgroundWeight = 1.0;
gROOT->cd( outfileName+TString(":/") );
factory->AddSignalTree ( signal, signalWeight );
factory->AddBackgroundTree( background, backgroundWeight );
factory->PrepareTrainingAndTestTree( "", "",
"nTrain_Signal=0:nTrain_Background=0:SplitMode=Random:NormMode=NumEvents:!V" );
TString fisher="H:!V";
factory->BookMethod( TMVA::Types::kFisher, "Fisher", fisher );
factory->BookMethod( TMVA::Types::kFisher, "FisherBoost", fisher+":Boost_Num=100:Boost_Type=AdaBoost" );
factory->BookMethod( TMVA::Types::kFisher, "FisherBoostLog", fisher+":Boost_Num=100:Boost_Transform=log:Boost_Type=AdaBoost:Boost_AdaBoostBeta=1.0" );
factory->BookMethod( TMVA::Types::kFisher, "FisherBoostLog2", fisher+":Boost_Num=100:Boost_Transform=log:Boost_Type=AdaBoost:Boost_AdaBoostBeta=2.0" );
factory->BookMethod( TMVA::Types::kFisher, "FisherBoostStep", fisher+":Boost_Num=100:Boost_Transform=step:Boost_Type=AdaBoost:Boost_AdaBoostBeta=1.0" );
factory->BookMethod( TMVA::Types::kFisher, "FisherBoostStep2", fisher+":Boost_Num=100:Boost_Transform=step:Boost_Type=AdaBoost:Boost_AdaBoostBeta=1.2" );
factory->BookMethod( TMVA::Types::kFisher, "FisherBoostStep3", fisher+":Boost_Num=100:Boost_Transform=step:Boost_Type=AdaBoost:Boost_AdaBoostBeta=1.5" );
// Train MVAs using the set of training events
factory->TrainAllMethodsForClassification();
// ---- Evaluate all MVAs using the set of test events
factory->TestAllMethods();
// ----- Evaluate and compare performance of all configured MVAs
factory->EvaluateAllMethods();
// --------------------------------------------------------------
// Save the output
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVAClassification is done!" << std::endl;
delete factory;
// Launch the GUI for the root macros
if (!gROOT->IsBatch()) TMVAGui( outfileName );
}
Example 2: Example_Eric
//......... part of the code omitted here .........
factory->BookMethod( TMVA::Types::kFDA, "FDA_SA",
"H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=SA:MaxCalls=15000:KernelTemp=IncAdaptive:InitialTemp=1e+6:MinTemp=1e-6:Eps=1e-10:UseDefaultScale" );
if (Use["FDA_MT"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_MT",
"H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=2:UseImprove:UseMinos:SetBatch" );
if (Use["FDA_GAMT"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_GAMT",
"H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=GA:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:Cycles=1:PopSize=5:Steps=5:Trim" );
if (Use["FDA_MCMT"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_MCMT",
"H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=MC:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:SampleSize=20" );
// TMVA ANN: MLP (recommended ANN) -- all ANNs in TMVA are Multilayer Perceptrons
if (Use["MLP"])
factory->BookMethod( TMVA::Types::kMLP, "MLP", "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5" );
if (Use["MLPBFGS"])
factory->BookMethod( TMVA::Types::kMLP, "MLPBFGS", "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5:TrainingMethod=BFGS" );
// CF(Clermont-Ferrand)ANN
if (Use["CFMlpANN"])
factory->BookMethod( TMVA::Types::kCFMlpANN, "CFMlpANN", "!H:!V:NCycles=2000:HiddenLayers=N+1,N" ); // n_cycles:#nodes:#nodes:...
// Tmlp(Root)ANN
if (Use["TMlpANN"])
factory->BookMethod( TMVA::Types::kTMlpANN, "TMlpANN", "!H:!V:NCycles=200:HiddenLayers=N+1,N:LearningMethod=BFGS:ValidationFraction=0.3" ); // n_cycles:#nodes:#nodes:...
// Support Vector Machine
if (Use["SVM"])
factory->BookMethod( TMVA::Types::kSVM, "SVM", "Gamma=0.25:Tol=0.001:VarTransform=Norm" );
// Boosted Decision Trees
if (Use["BDTG"]) // Gradient Boost
factory->BookMethod( TMVA::Types::kBDT, "BDTG",
"!H:!V:NTrees=1000:BoostType=Grad:Shrinkage=0.30:UseBaggedGrad:GradBaggingFraction=0.6:SeparationType=GiniIndex:nCuts=20:NNodesMax=5" );
if (Use["BDT"]) // Adaptive Boost
factory->BookMethod( TMVA::Types::kBDT, "BDT",
"!H:!V:NTrees=400:nEventsMin=400:MaxDepth=3:BoostType=AdaBoost:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning" );
if (Use["BDTB"]) // Bagging
factory->BookMethod( TMVA::Types::kBDT, "BDTB",
"!H:!V:NTrees=400:BoostType=Bagging:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning" );
if (Use["BDTD"]) // Decorrelation + Adaptive Boost
factory->BookMethod( TMVA::Types::kBDT, "BDTD",
"!H:!V:NTrees=400:nEventsMin=400:MaxDepth=3:BoostType=AdaBoost:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning:VarTransform=Decorrelate" );
// RuleFit -- TMVA implementation of Friedman's method
if (Use["RuleFit"])
factory->BookMethod( TMVA::Types::kRuleFit, "RuleFit",
"H:!V:RuleFitModule=RFTMVA:Model=ModRuleLinear:MinImp=0.001:RuleMinDist=0.001:NTrees=20:fEventsMin=0.01:fEventsMax=0.5:GDTau=-1.0:GDTauPrec=0.01:GDStep=0.01:GDNSteps=10000:GDErrScale=1.02" );
// --------------------------------------------------------------------------------------------------
// As an example how to use the ROOT plugin mechanism, book BDT via
// plugin mechanism
if (Use["Plugin"]) {
//
// first the plugin has to be defined, which can happen either through the following line in the local or global .rootrc:
//
// # plugin handler            plugin name(regexp)  class to be instantiated  library  constructor format
// Plugin.TMVA@@MethodBase:    ^BDT                 TMVA::MethodBDT           TMVA.1   "MethodBDT(TString,TString,DataSet&,TString)"
//
// or by telling the global plugin manager directly
gPluginMgr->AddHandler("TMVA@@MethodBase", "BDT", "TMVA::MethodBDT", "TMVA.1", "MethodBDT(TString,TString,DataSet&,TString)");
factory->BookMethod( TMVA::Types::kPlugins, "BDT",
"!H:!V:NTrees=400:BoostType=AdaBoost:SeparationType=GiniIndex:nCuts=20:PruneMethod=CostComplexity:PruneStrength=50" );
}
// --------------------------------------------------------------------------------------------------
// ---- Now you can tell the factory to train, test, and evaluate the MVAs
// Train MVAs using the set of training events
factory->TrainAllMethodsForClassification();
// ---- Evaluate all MVAs using the set of test events
factory->TestAllMethods();
// ----- Evaluate and compare performance of all configured MVAs
factory->EvaluateAllMethods();
// --------------------------------------------------------------
// Save the output
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVAClassification is done!" << std::endl;
delete factory;
// Launch the GUI for the root macros
if (!gROOT->IsBatch()) TMVAGui( outfileName );
}
Example 3: main
// Headers needed by this standalone program. MICROTUPLES_FOLDER is assumed to be
// defined elsewhere (e.g. in a project header that is not shown here).
#include <iostream>
#include <map>
#include <string>

#include "TFile.h"
#include "TTree.h"
#include "TString.h"

#include "TMVA/Factory.h"
#include "TMVA/Tools.h"
#include "TMVA/Types.h"

using std::string;

int main()
{
// this loads the library
TMVA::Tools::Instance();
//---------------------------------------------------------------
// default MVA methods to be trained + tested
std::map<std::string,int> Use;
Use["Cuts"] =1;
Use["BDT"] =1;
// ---------------------------------------------------------------
std::cout << std::endl;
std::cout << "==> Start TMVAClassification" << std::endl;
// Create a new root output file.
TString outfileName( "TMVA_output.root" );
TFile* outputFile = TFile::Open( outfileName, "RECREATE" );
TMVA::Factory *factory = new TMVA::Factory( "TMVAClassification", outputFile,
"!V:!Silent:Color:DrawProgressBar:Transformations=I;D;P;G,D" );
// Add the variables you want to consider
//factory->AddVariable( "MT := MT", 'F' );
//factory->AddVariable( "nJets := nJets", 'F' );
factory->AddVariable( "MET := MET", 'F' );
factory->AddVariable( "MT2W := MT2W", 'F' );
factory->AddVariable( "dPhiMETjet := dPhiMETjet", 'F' );
factory->AddVariable( "HTratio := HTratio", 'F' );
factory->AddVariable( "HadronicChi2 := HadronicChi2", 'F' );
factory->AddVariable( "nWTag := nWTag", 'I' );
// Open samples
TFile* f_signal = TFile::Open((string(MICROTUPLES_FOLDER)+"signal.root").c_str());
TFile* f_ttbar = TFile::Open((string(MICROTUPLES_FOLDER)+"ttbar.root" ).c_str());
//TFile* f_W2Jets = TFile::Open((string(MICROTUPLES_FOLDER)+"W2Jets.root").c_str());
//TFile* f_W3Jets = TFile::Open((string(MICROTUPLES_FOLDER)+"W3Jets.root").c_str());
//TFile* f_W4Jets = TFile::Open((string(MICROTUPLES_FOLDER)+"W4Jets.root").c_str());
TTree* signal = (TTree*) f_signal->Get("microTuple");
TTree* ttbar = (TTree*) f_ttbar ->Get("microTuple");
//TTree* W2Jets = (TTree*) f_W2Jets->Get("microTuple");
//TTree* W3Jets = (TTree*) f_W3Jets->Get("microTuple");
//TTree* W4Jets = (TTree*) f_W4Jets->Get("microTuple");
// Register the trees
// float weightSignal = 1.0 * 20000.0 / getNumberOfEvent(signal);
// float weightBackground = 225.2 * 20000.0 / getNumberOfEvent(ttbar);
float weightSignal = 1.0;
float weightBackground = 1.0;
factory->AddSignalTree ( signal, weightSignal );
factory->AddBackgroundTree( ttbar, weightBackground);
/*
cout << " signal ; w = " << 1.0 * 20000.0 / getNumberOfEvent(signal) << endl;
factory->AddSignalTree ( signal, 1.0 * 20000.0 / getNumberOfEvent(signal));
cout << " ttbar ; w = " << 225.2 * 20000.0 / getNumberOfEvent(ttbar) << endl;
factory->AddBackgroundTree( ttbar, 234.0 * 20000.0 / getNumberOfEvent(ttbar));
cout << " W2Jets ; w = " << 2159 * 20000.0 / getNumberOfEvent(W2Jets) << endl;
factory->AddBackgroundTree( W2Jets, 2159 * 20000.0 / getNumberOfEvent(W2Jets));
cout << " W3Jets ; w = " << 640 * 20000.0 / getNumberOfEvent(W3Jets) << endl;
factory->AddBackgroundTree( W3Jets, 640 * 20000.0 / getNumberOfEvent(W3Jets));
cout << " W4Jets ; w = " << 264 * 20000.0 / getNumberOfEvent(W4Jets) << endl;
factory->AddBackgroundTree( W4Jets, 264 * 20000.0 / getNumberOfEvent(W4Jets));
*/
// Add preselection cuts
std::string preselectionCutsSig("nJets > 4 && MET > 80 && MT > 100");
std::string preselectionCutsBkg("nJets > 4 && MET > 80 && MT > 100");
// Prepare the training
factory->PrepareTrainingAndTestTree( preselectionCutsSig.c_str(), preselectionCutsBkg.c_str(),
"nTrain_Signal=40000:nTrain_Background=300000:nTest_Signal=40000:nTest_Background=300000:SplitMode=Random:NormMode=EqualNumEvents:!V" );
// Cut optimisation
//if (Use["Cuts"]) factory->BookMethod( TMVA::Types::kCuts, "Cuts",
// "!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp=FSmart" );
if (Use["BDT"]) factory->BookMethod( TMVA::Types::kBDT, "BDT",
"!H:!V:NTrees=400:nEventsMin=400:MaxDepth=3:BoostType=AdaBoost:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning");
// --------------------------------------------------------------
// Train MVAs using the set of training events
factory->TrainAllMethodsForClassification();
// Evaluate all MVAs using the set of test events
factory->TestAllMethods();
// Evaluate and compare performance of all configured MVAs
factory->EvaluateAllMethods();
// --------------------------------------------------------------
//......... part of the code omitted here .........