This page collects typical usage examples of the C++ method tmva::Factory::TrainAllMethods. If you are unsure what Factory::TrainAllMethods does in practice, how to call it, or where to find working code, the curated samples below should help; you can also read further about its containing class, tmva::Factory.
Below, 15 code examples of Factory::TrainAllMethods are shown, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C++ samples.
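All of the examples below share the same classic (pre-DataLoader) TMVA workflow: create a Factory, register variables and trees, prepare the training/test split, book one or more methods, and then call TrainAllMethods (usually followed by TestAllMethods and EvaluateAllMethods). As a minimal sketch of that common pattern — the file, tree, and variable names here are placeholders, not taken from any example below:
// Minimal sketch of the classic (pre-DataLoader) TMVA training workflow.
// All names ("input.root", "sig", "var1", ...) are placeholders.
#include "TCut.h"
#include "TFile.h"
#include "TTree.h"
#include "TMVA/Factory.h"
#include "TMVA/Tools.h"
#include "TMVA/Types.h"
void MinimalTraining()
{
   TMVA::Tools::Instance();                                  // load the TMVA library
   TFile* out = TFile::Open("TMVA.root", "RECREATE");
   TMVA::Factory* factory =
      new TMVA::Factory("Minimal", out, "!V:!Silent:Color:DrawProgressBar");
   factory->AddVariable("var1", 'F');                        // training inputs
   factory->AddVariable("var2", 'F');
   TFile* in = TFile::Open("input.root");
   factory->AddSignalTree    ((TTree*)in->Get("sig"), 1.0);  // global weights = 1
   factory->AddBackgroundTree((TTree*)in->Get("bkg"), 1.0);
   factory->PrepareTrainingAndTestTree(TCut(""), TCut(""),
      "nTrain_Signal=0:nTrain_Background=0:SplitMode=Random:!V");
   factory->BookMethod(TMVA::Types::kBDT, "BDT", "!H:!V:NTrees=200");
   factory->TrainAllMethods();                               // train every booked method
   factory->TestAllMethods();
   factory->EvaluateAllMethods();
   out->Close();
   delete factory;
}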
Example 1: TMVAClassification
void TMVAClassification()
{
TMVA::Tools::Instance();
std::cout << "==> Start TMVAClassification" << std::endl;
TString outfileName( "TMVA.root" );
TFile* outputFile = TFile::Open( outfileName, "RECREATE" );
TMVA::Factory *factory = new TMVA::Factory( "TMVAClassification", outputFile,
"AnalysisType=Classification" );
factory->AddVariable("LifeTime", 'F');
factory->AddVariable("FlightDistance", 'F');
factory->AddVariable("pt", 'F');
TString fname = "../tau_data/training.root";
TFile *input = TFile::Open( fname );
TTree *tree = (TTree*)input->Get("data");
factory->AddTree(tree, "Signal", 1., "signal == 1", "Training");
factory->AddTree(tree, "Signal", 1., "signal == 1", "Test");
factory->AddTree(tree, "Background", 1., "signal == 0", "Training");
factory->AddTree(tree, "Background", 1., "signal == 0", "Test");
// gradient boosting training
factory->BookMethod(TMVA::Types::kBDT, "GBDT",
"NTrees=40:BoostType=Grad:Shrinkage=0.01:MaxDepth=7:UseNvars=6:nCuts=20:MinNodeSize=10");
factory->TrainAllMethods();
input->Close();
outputFile->Close();
delete factory;
}
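Example 1 stops after TrainAllMethods(), but the trained weights are still written out (with the old API, by default to weights/TMVAClassification_GBDT.weights.xml, named after the job and method titles). A hedged sketch of how such a classifier is typically applied afterwards with TMVA::Reader; the weight-file path is an assumption about the TMVA defaults:
#include "TMVA/Reader.h"
// Sketch: apply the GBDT trained in Example 1 to one candidate.
// Variables must be registered with the same names and order as in training.
float EvaluateCandidate(float lifeTime, float flightDistance, float pt)
{
   static TMVA::Reader reader("!Color:!Silent");
   static float vLifeTime, vFlightDistance, vPt;
   static bool booked = false;
   if (!booked) {
      reader.AddVariable("LifeTime",       &vLifeTime);
      reader.AddVariable("FlightDistance", &vFlightDistance);
      reader.AddVariable("pt",             &vPt);
      reader.BookMVA("GBDT", "weights/TMVAClassification_GBDT.weights.xml"); // assumed default path
      booked = true;
   }
   vLifeTime = lifeTime; vFlightDistance = flightDistance; vPt = pt;
   return reader.EvaluateMVA("GBDT"); // BDT response; larger means more signal-like
}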
Example 2: regressphi
void regressphi() {
TMVA::Tools::Instance();
std::cout << "==> Start TMVAClassification" << std::endl;
// Create a ROOT output file where TMVA will store ntuples, histograms, etc.
TString outfileName( "TMVA.root" );
TFile* outputFile = TFile::Open( outfileName, "RECREATE" );
TMVA::Factory *factory = new TMVA::Factory( "mva", outputFile,
"!V:!Silent:Color:DrawProgressBar" );
factory->AddVariable( "npv" , 'F' );
factory->AddVariable( "u" , 'F' );
factory->AddVariable( "uphi" , 'F' );
factory->AddVariable( "chsumet/sumet" , 'F' );
factory->AddVariable( "tku" , 'F' );
factory->AddVariable( "tkuphi" , 'F' );
factory->AddVariable( "nopusumet/sumet" , 'F' );
factory->AddVariable( "nopuu" , 'F' );
factory->AddVariable( "nopuuphi" , 'F' );
factory->AddVariable( "pusumet/sumet" , 'F' );
factory->AddVariable( "pumet" , 'F' );
factory->AddVariable( "pumetphi" , 'F' );
factory->AddVariable( "pucsumet/sumet" , 'F' );
factory->AddVariable( "pucu" , 'F' );
factory->AddVariable( "pucuphi" , 'F' );
factory->AddVariable( "jspt_1" , 'F' );
factory->AddVariable( "jseta_1" , 'F' );
factory->AddVariable( "jsphi_1" , 'F' );
factory->AddVariable( "jspt_2" , 'F' );
factory->AddVariable( "jseta_2" , 'F' );
factory->AddVariable( "jsphi_2" , 'F' );
factory->AddVariable( "nalljet" , 'I' );
factory->AddVariable( "njet" , 'I' );
factory->AddTarget( "rphi_z-uphi+ 2.*TMath::Pi()*(rphi_z-uphi < -TMath::Pi()) - 2.*TMath::Pi()*(rphi_z-uphi > TMath::Pi()) " );
TString lName = "../Jets/r11-dimu_nochs_v2.root"; TFile *lInput = TFile::Open(lName);
TTree *lRegress = (TTree*)lInput ->Get("Flat");
Double_t lRWeight = 1.0;
factory->AddRegressionTree( lRegress , lRWeight );
TCut lCut = "nbtag == 0"; // cut to remove real MET
// stronger cut to remove real MET: "(rpt_z < 40 || (rpt_z > 40 && rpt_z+u1 < 40)) && nbtag == 0"
factory->PrepareTrainingAndTestTree( lCut,
"nTrain_Regression=0:nTest_Regression=0:SplitMode=Block:NormMode=NumEvents:!V" );
// Boosted Decision Trees
factory->BookMethod( TMVA::Types::kBDT, "RecoilPhiRegress_data_clean2_njet",
"!H:!V:VarTransform=None:nEventsMin=200:NTrees=100:BoostType=Grad:Shrinkage=0.1:MaxDepth=100:NNodesMax=100000:UseYesNoLeaf=F:nCuts=2000");//MaxDepth=100
factory->TrainAllMethods();
factory->TestAllMethods();
factory->EvaluateAllMethods();
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVAClassification is done!" << std::endl;
delete factory;
//if (!gROOT->IsBatch()) TMVAGui( outfileName );
}
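For a regression training like the one above, the target is read back with TMVA::Reader::EvaluateRegression rather than EvaluateMVA. A hedged sketch, showing only three of the 23 inputs (the rest must be registered exactly as in training; the weight-file path follows the assumed old-API naming):
#include "TMVA/Reader.h"
// Sketch: evaluate the recoil-phi regression on one event.
float EvaluateRecoilPhi(float npvVal, float uVal, float uphiVal)
{
   static TMVA::Reader reader("!Color:!Silent");
   static float npv, u, uphi;
   static bool booked = false;
   if (!booked) {
      reader.AddVariable("npv",  &npv);
      reader.AddVariable("u",    &u);
      reader.AddVariable("uphi", &uphi);
      // ... AddVariable for the remaining 20 inputs, exactly as in training ...
      reader.BookMVA("RecoilPhiRegress_data_clean2_njet",
                     "weights/mva_RecoilPhiRegress_data_clean2_njet.weights.xml"); // assumed path
      booked = true;
   }
   npv = npvVal; u = uVal; uphi = uphiVal;
   // EvaluateRegression returns one value per target; this training has a single target
   return reader.EvaluateRegression("RecoilPhiRegress_data_clean2_njet")[0];
}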
Example 3: TMVAClassification
void TMVAClassification(char* trainFile, char* tree,
char* mycuts, char* mycutb, char* inputVars[], int size)
{
// this loads the library
TMVA::Tools::Instance();
// Create a new root output file.
TFile* outputFile = TFile::Open( "TMVA.root", "RECREATE" );
// Create the factory object.
TMVA::Factory *factory = new TMVA::Factory( "TMVAClassification", outputFile,
"!V:!Silent:Color:DrawProgressBar:Transformations=I;D;P;G,D" );
// ---------- input variables
for (int ivar = 0; ivar < size; ++ivar) {
factory->AddVariable(inputVars[ivar], 'F');
}
// read training and test data
TFile *input = TFile::Open( trainFile);
TTree *signal = (TTree*)input->Get(tree);
TTree *background = (TTree*)input->Get(tree);
// global event weights per tree
Double_t signalWeight = 1.0;
Double_t backgroundWeight = 1.0;
// ====== register trees ====================================================
// you can add an arbitrary number of signal or background trees
factory->AddSignalTree ( signal, signalWeight );
factory->AddBackgroundTree( background, backgroundWeight );
// tell the factory to use all remaining events in the trees after training for testing:
factory->PrepareTrainingAndTestTree( TCut(mycuts), TCut(mycutb),
"nTrain_Signal=0:nTrain_Background=0:SplitMode=Random:NormMode=NumEvents:!V" );
// If no numbers of events are given, half of the events in the tree are used for training, and
// the other half for testing:
// ---- Use BDT: Adaptive Boost
factory->BookMethod( TMVA::Types::kBDT, "BDT",
"!H:!V:NTrees=400:nEventsMin=400:MaxDepth=3:BoostType=AdaBoost:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning" );
// ---- Train MVAs using the set of training events
factory->TrainAllMethods();
// ---- Evaluate all MVAs using the set of test events
factory->TestAllMethods();
// ----- Evaluate and compare performance of all configured MVAs
factory->EvaluateAllMethods();
// Save the output
outputFile->Close();
std::cout << "==> TMVAClassification is done!" << std::endl;
delete factory;
}
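Since this variant takes the whole sample configuration as arguments, a hypothetical invocation could look like the following (file name, tree name, cuts, and variable names are placeholders, not from the original):
// Hypothetical call of the parameterized macro above.
char* vars[] = { (char*)"pt", (char*)"eta", (char*)"iso" };
TMVAClassification((char*)"train.root", (char*)"events",
                   (char*)"type == 1",  (char*)"type == 0",
                   vars, 3); // trains an AdaBoost BDT on pt, eta, iso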
Example 4: process
void process(const std::vector<std::string>& inputFiles, const std::string& name, const std::string& outputFile) {
TChain* signal = loadChain(inputFiles, "signal");
TChain* background = loadChain(inputFiles, "background");
TFile* output = TFile::Open(outputFile.c_str(), "recreate");
TMVA::Factory* factory = new TMVA::Factory(name.c_str(), output, "V");
factory->AddSignalTree(signal, 1.);
factory->AddBackgroundTree(background, 1.);
//{
//factory->AddVariable("lightJet1p2_Pt");
//factory->AddVariable("leptonic_B_Pt");
//factory->AddVariable("leptonic_Top_Pt");
//factory->AddVariable("leptonic_Top_M");
//factory->AddVariable("hadronic_B_Pt");
//factory->AddVariable("hadronic_W_M");
//factory->AddVariable("hadronic_Top_Pt");
//factory->AddVariable("hadronic_Top_M");
//factory->AddVariable("delta_R_tops");
//factory->AddVariable("delta_R_lightjets");
//factory->AddVariable("leptonic_B_CSV");
//factory->AddVariable("hadronic_B_CSV");
//}
// chi^2 style
{
factory->AddVariable("leptonic_Top_M");
factory->AddVariable("hadronic_W_M");
factory->AddVariable("hadronic_Top_M");
factory->AddVariable("ht_fraction");
}
factory->SetWeightExpression("weight");
factory->PrepareTrainingAndTestTree("", "", "V:VerboseLevel=Info:nTrain_Signal=100000:nTrain_Background=100000:nTest_Signal=100000:nTest_Background=100000");
factory->BookMethod(TMVA::Types::kBDT, "BDT", "V:BoostType=AdaBoost:nCuts=20:VarTransform=D");
factory->BookMethod(TMVA::Types::kMLP, "NN", "V:VarTransform=D");
//factory->BookMethod(TMVA::Types::kPDERS, "PDERS", "V");
factory->TrainAllMethods();
factory->TestAllMethods();
factory->EvaluateAllMethods();
output->Close();
delete output;
delete signal;
delete background;
}
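loadChain is not defined in this snippet; a plausible helper matching the call sites, assuming every input file contains a tree of the given name, would be (this is an assumption, not the original author's code):
#include <string>
#include <vector>
#include "TChain.h"
// Hypothetical helper: chain the named tree from every input file.
TChain* loadChain(const std::vector<std::string>& inputFiles, const std::string& treeName)
{
   TChain* chain = new TChain(treeName.c_str());
   for (const std::string& file : inputFiles)
      chain->Add(file.c_str());
   return chain;
}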
Example 5: test_train
void test_train(TString signalName = "WW",
TString bkgName = "DY")
{
TFile *outFile = new TFile("myAnalysisFile.root","RECREATE");
TMVA::Factory *factory = new TMVA::Factory(signalName, outFile,"");
TString directory = "../rootFiles/SF/MediumIDTighterIP/";
//signalName = directory + signalName;
//defining WW signal
TFile *MySignalFile = new TFile("../rootFiles/SF/MediumIDTighterIP/WW.root","READ");
TTree* sigTree = (TTree*)MySignalFile->Get("nt");
factory->AddSignalTree(sigTree,1);
//defining DY background
TFile *MyBkgFile = new TFile("../rootFiles/SF/MediumIDTighterIP/DY.root","READ");
TTree* bkgTree = (TTree*)MyBkgFile->Get("nt");
factory->AddBackgroundTree(bkgTree,1);
factory->SetWeightExpression("baseW");
//************************************ FACTORY
factory->AddVariable("fullpmet");
factory->AddVariable("trkpmet");
factory->AddVariable("ratioMet");
factory->AddVariable("ptll");
factory->AddVariable("mth");
factory->AddVariable("jetpt1");
factory->AddVariable("ptWW");
factory->AddVariable("dphilljet");
factory->AddVariable("dphillmet");
factory->AddVariable("dphijet1met");
factory->AddVariable("nvtx");
factory->PrepareTrainingAndTestTree("",500,500,500,500);
cout<<"I've prepared trees"<<endl;
//factory->BookMethod(TMVA::Types::kFisher, "Fisher","");
factory->BookMethod(TMVA::Types::kBDT, "BDT","");
cout<<"I've booked method"<<endl;
factory->TrainAllMethods();
factory->TestAllMethods();
cout<<"I've tested all methods"<<endl;
factory->EvaluateAllMethods();
cout<<"I've evaluated all methods"<<endl;
outFile->Close(); // write the TMVA output and release the file
delete factory;
}
Example 6: Loop
void REG::Loop() {
if (fChain == 0) return;
Long64_t nentries = fChain->GetEntriesFast();
Long64_t nbytes = 0, nb = 0;
TMVA::Tools::Instance();
std::cout << std::endl;
std::cout << "==> Start TMVARegression" << std::endl;
TString outfileName( "TMVAReg.root" );
TFile* outputFile = TFile::Open( outfileName, "RECREATE" );
TMVA::Factory *factory = new TMVA::Factory( "TMVARegression", outputFile,
"!V:!Silent:Color:DrawProgressBar" );
factory->AddVariable( "l1Pt", "Variable 1", "units", 'F' );
factory->AddVariable( "l1Eta", "Variable 2", "units", 'F' );
factory->AddVariable( "l1Phi", "Variable 1", "units", 'F' );
// factory->AddVariable( "RhoL1", "Variable 2", "units", 'F' );
factory->AddTarget( "ratio" );
factory->AddRegressionTree(fChain, 1.0 );
TCut mycut = "";
factory->PrepareTrainingAndTestTree( mycut,
"nTrain_Regression=10000:nTest_Regression=0:SplitMode=Block:NormMode=NumEvents:!V" );
factory->BookMethod( TMVA::Types::kBDT, "BDT",
"!H:!V:NTrees=100:nEventsMin=5:BoostType=AdaBoostR2:SeparationType=RegressionVariance:nCuts=20:PruneMethod=CostComplexity:PruneStrength=30" );
factory->TrainAllMethods();
// ---- Evaluate all MVAs using the set of test events
// factory->TestAllMethods();
// ----- Evaluate and compare performance of all configured MVAs
// factory->EvaluateAllMethods();
// --------------------------------------------------------------
// Save the output
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVARegression is done!" << std::endl;
delete factory;
}
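The REG class with its fChain member looks like a wrapper generated by TTree::MakeClass; a minimal sketch of the assumed surrounding skeleton (the real generated header would also declare branch buffers and SetBranchAddress calls):
#include "TTree.h"
// Assumed MakeClass-style skeleton around the Loop() shown above.
class REG {
public:
   TTree* fChain;                     // the analyzed tree or chain
   REG(TTree* tree) : fChain(tree) {}
   void Loop();                       // the training routine above
};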
Example 7: trainBDT
void trainBDT(void)
{
// Open input file and get tree
TFile *infile = new TFile("l3bdt.root");
TTree *l3tree = (TTree*)infile->Get("l3tree");
if(l3tree == NULL){
cout << "Couldn't open \"l3bdt.root\"!" << endl;
return;
}
// Open output root file (for TMVA)
TFile *outfile = new TFile("l3BDT_out.root", "RECREATE");
TMVA::Factory *fac = new TMVA::Factory("L3",outfile,"");
// Specify input tree that contains both signal and background
TCut signalCut("is_good==1");
TCut backgroundCut("is_good==0");
fac->SetInputTrees(l3tree, signalCut, backgroundCut);
// Add variables
fac->AddVariable("Nstart_counter", 'I');
fac->AddVariable("Ntof", 'I');
fac->AddVariable("Nbcal_points", 'I');
fac->AddVariable("Nbcal_clusters", 'I');
fac->AddVariable("Ebcal_points", 'F');
fac->AddVariable("Ebcal_clusters", 'F');
fac->AddVariable("Nfcal_clusters", 'I');
fac->AddVariable("Efcal_clusters", 'F');
fac->AddVariable("Ntrack_candidates", 'I');
fac->AddVariable("Ptot_candidates", 'F');
TCut preSelectCut("");
fac->PrepareTrainingAndTestTree(preSelectCut,"");
fac->BookMethod(TMVA::Types::kBDT, "BDT", "");
fac->TrainAllMethods();
fac->TestAllMethods();
fac->EvaluateAllMethods();
delete fac;
outfile->Close();
delete outfile;
}
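After training, the TestTree that the old (DataLoader-free) TMVA writes at the top level of l3BDT_out.root can be used to compare the BDT response of signal and background; a hedged sketch, assuming that layout:
#include "TFile.h"
#include "TTree.h"
// Sketch: overlay the BDT response for signal (classID==0) and background (classID==1).
void inspectBDT()
{
   TFile* f = TFile::Open("l3BDT_out.root");
   TTree* test = (TTree*)f->Get("TestTree");
   test->Draw("BDT", "classID == 0");         // signal response
   test->Draw("BDT", "classID == 1", "same"); // background, overlaid
}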
Example 8: trainBJetIdMVA
void trainBJetIdMVA(TString SELECTION)
{
// the training is done using a dedicated tree format
TFile *src = TFile::Open("bjetId_"+SELECTION+".root");
TTree *tr = (TTree*)src->Get("jets");
TFile *outf = new TFile("bjetId_"+SELECTION+"_MVA.root","RECREATE");
TCut signalCut = "abs(partonId) == 5";
TCut bkgCut = "abs(partonId) != 5";
TCut preselectionCut = "btagIdx<4 && etaIdx<4 && etaIdx>-1 && ptIdx<4";
int N = 100000;
cout<<"NUMBER OF TRAINING EVENTS = "<<N<<endl;
TMVA::Factory* factory = new TMVA::Factory("factory_"+SELECTION+"_",outf,"!V:!Silent:Color:DrawProgressBar:Transformations=I;G:AnalysisType=Classification" );
factory->SetInputTrees(tr,signalCut,bkgCut);
factory->AddVariable("btagIdx",'I');
factory->AddVariable("etaIdx" ,'I');
factory->AddVariable("btag" ,'F');
factory->AddVariable("eta" ,'F');
char name[1000];
sprintf(name,"nTrain_Signal=%d:nTrain_Background=%d:nTest_Signal=%d:nTest_Background=%d",N,N,N,N);
factory->PrepareTrainingAndTestTree(preselectionCut,name);
// specify the training methods
factory->BookMethod(TMVA::Types::kLikelihood,"Likelihood");
//factory->BookMethod(TMVA::Types::kBDT,"BDT_DEF");
//factory->BookMethod(TMVA::Types::kBDT,"BDT_ADA","NTrees=600:AdaBoostBeta=0.1:nCuts=35");
//factory->BookMethod(TMVA::Types::kBDT,"BDT_GRAD1","NTrees=600:nCuts=40:BoostType=Grad:Shrinkage=0.5");
factory->BookMethod(TMVA::Types::kBDT,"BDT_GRAD2","NTrees=600:nCuts=25:BoostType=Grad:Shrinkage=0.2");
factory->TrainAllMethods();
factory->TestAllMethods();
factory->EvaluateAllMethods();
outf->Close(); // flush the MVA output file
delete factory;
}
Example 9: TMVAtest
void TMVAtest(){
//gSystem->Load("../lib/slc5_amd64_gcc462/libTAMUWWMEPATNtuple.so");
gSystem->Load("libPhysics");
//gSystem->Load("EvtTreeForAlexx_h.so");
gSystem->Load("libTMVA.1");
gSystem->Load("AutoDict_vector_TLorentzVector__cxx.so");
TMVA::Tools::Instance();
TFile* outputFile = TFile::Open("TMVA1.root", "RECREATE");
TMVA::Factory *factory = new TMVA::Factory( "TMVAClassification", outputFile, "V=true:Color:DrawProgressBar"); // optionally add ":Transformations=I;D;P;G,D"
TFile* signal = TFile::Open("/uscms_data/d2/aperloff/Spring12ME7TeV/MEResults/microNtuples_oldStructure/microWW_EPDv01.root");
TFile* bkg = TFile::Open("/uscms_data/d2/aperloff/Spring12ME7TeV/MEResults/microNtuples_oldStructure/microWJets_EPDv01.root");
TTree* stree = (TTree*)signal->Get("METree");
TTree* btree = (TTree*)bkg->Get("METree");
factory->AddSignalTree(stree,1.0);
factory->AddBackgroundTree(btree,1.0);
factory->SetSignalWeightExpression("1.0");
factory->SetBackgroundWeightExpression("1.0");
factory->AddVariable("tEventProb[0]");
factory->AddVariable("tEventProb[1]");
factory->AddVariable("tEventProb[2]");
//factory->AddVariable("tEventProb0 := tEventProb[0]",'F');
//factory->AddVariable("tEventProb1 := tEventProb[1]",'F');
//factory->AddVariable("tEventProb2 := tEventProb[2]",'F');
TCut test("Entry$>-2 && jLV[1].Pt()>30");
TCut mycuts (test);
factory->PrepareTrainingAndTestTree(mycuts,mycuts,"nTrain_Signal=0:nTrain_Background=0:nTest_Signal=0:nTest_Background=0:SplitMode=Random:NormMode=None:V=true:VerboseLevel=DEBUG");
factory->BookMethod( TMVA::Types::kBDT, "BDT","!H:!V:NTrees=400:nEventsMin=400:MaxDepth=3:BoostType=AdaBoost:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning" );
factory->TrainAllMethods();
factory->TestAllMethods();
factory->EvaluateAllMethods();
outputFile->Close();
delete factory;
}
Example 10: TMVAClassification
//......... part of the code omitted here .........
factory->AddVariable("deltaEta := deta", 'F');
factory->AddVariable("deltaPhi := dphi", 'F');
factory->AddVariable("sigmaIetaIeta := sieie", 'F');
factory->AddVariable("HoverE := hoe", 'F');
factory->AddVariable("trackIso := trackiso", 'F');
factory->AddVariable("ecalIso := ecaliso", 'F');
factory->AddVariable("hcalIso := hcaliso", 'F');
//factory->AddVariable("nMissingHits := misshits", 'I');
// You can add so-called "Spectator variables", which are not used in the MVA training,
// but will appear in the final "TestTree" produced by TMVA. This TestTree will contain the
// input variables, the response values of all trained MVAs, and the spectator variables
factory->AddSpectator( "et", 'F' );
factory->AddSpectator( "eta", 'F' );
factory->AddSpectator( "phi", 'F' );
// read training and test data
TFile *input = TFile::Open( "SigElectrons.root" );
TFile *inputB = TFile::Open( "BkgElectrons.root" );
std::cout << "--- TMVAClassification : Using input file: " << input->GetName() << std::endl;
TTree *signal = (TTree*)input->Get("ntuple");
TTree *background = (TTree*)inputB->Get("ntuple");
factory->AddSignalTree ( signal, 1.0 );
factory->AddBackgroundTree( background, 1.0 );
// This would set individual event weights (the variables defined in the
// expression need to exist in the original TTree)
// for signal : factory->SetSignalWeightExpression("weight1*weight2");
// for background: factory->SetBackgroundWeightExpression("weight1*weight2");
//factory->SetBackgroundWeightExpression("weight");
// Apply additional cuts on the signal and background samples (can be different)
TCut mycuts = "";
TCut mycutb = "";
// tell the factory to use all remaining events in the trees after training for testing:
factory->PrepareTrainingAndTestTree( mycuts, mycutb,
"nTrain_Signal=0:nTrain_Background=0:SplitMode=Random:NormMode=NumEvents:!V" );
// If no numbers of events are given, half of the events in the tree are used for training, and
// the other half for testing:
// factory->PrepareTrainingAndTestTree( mycut, "SplitMode=random:!V" );
// To also specify the number of testing events, use:
// factory->PrepareTrainingAndTestTree( mycut,
// "NSigTrain=3000:NBkgTrain=3000:NSigTest=3000:NBkgTest=3000:SplitMode=Random:!V" );
// ---- Book MVA methods
//
// please lookup the various method configuration options in the corresponding cxx files, eg:
// src/MethoCuts.cxx, etc, or here: http://tmva.sourceforge.net/optionRef.html
// it is possible to preset ranges in the option string in which the cut optimisation should be done:
// "...:CutRangeMin[2]=-1:CutRangeMax[2]=1"...", where [2] is the third input variable
// Cut optimisation
if (Use["Cuts"])
factory->BookMethod( TMVA::Types::kCuts, "Cuts",
"!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp=FSmart" );
// Likelihood
if (Use["Likelihood"])
factory->BookMethod( TMVA::Types::kLikelihood, "Likelihood",
"H:!V:!TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmoothBkg[1]=10:NSmooth=1:NAvEvtPerBin=50" );
// --------------------------------------------------------------------------------------------------
// ---- Now you can tell the factory to train, test, and evaluate the MVAs
// Train MVAs using the set of training events
factory->TrainAllMethods();
// ---- Evaluate all MVAs using the set of test events
factory->TestAllMethods();
// ----- Evaluate and compare performance of all configured MVAs
factory->EvaluateAllMethods();
// --------------------------------------------------------------
// Save the output
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVAClassification is done!" << std::endl;
delete factory;
// gROOT->ProcessLine(".x /usr/local/bin/root/tmva/test/correlations.C");
gROOT->ProcessLine(".x /usr/local/bin/root/tmva/test/variables.C");
}
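The elided preamble of this and the following examples evidently fills a map named Use that switches individual methods on and off; the omitted code stays omitted, but the pattern (familiar from the standard TMVAClassification.C tutorial) would look like this hypothetical sketch:
#include <map>
#include <string>
// Hypothetical reconstruction of the method-selection map behind the
// if (Use["..."]) blocks; not the author's original (elided) code.
std::map<std::string, int> Use = {
   {"Cuts",       1},  // book the cut-optimisation method
   {"Likelihood", 1},  // book the projective likelihood
   {"BDT",        0}   // other methods stay switched off
};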
Example 11: classifyBDT
void classifyBDT(TString inputVariables = "trainingVars.txt",
TString signalName = "/mnt/hscratch/dabercro/skims2/BDT_Signal.root",
TString backName = "/mnt/hscratch/dabercro/skims2/BDT_Background.root") {
TMVA::Tools::Instance();
std::cout << "==> Start TMVAClassification" << std::endl;
// Create a ROOT output file where TMVA will store ntuples, histograms, etc.
TString outfileName( "TMVA/TMVA.root" );
TFile* outputFile = TFile::Open( outfileName, "RECREATE" );
TMVA::Factory *factory = new TMVA::Factory( "TMVAClassificationCategory", outputFile,
"!V:!Silent:Color:DrawProgressBar:Transformations=I;N" );
// A fairly simple MVA (comment variables in or out as you like); as a rule of thumb, 10-20 variables is where people start to worry about the total count
ifstream configFile;
configFile.open(inputVariables.Data());
TString tempFormula;
configFile >> tempFormula; // Is the name of the BDT
while (configFile >> tempFormula) { // loop on extraction, not eof(), so the last token is not processed twice
if (tempFormula != "") {
factory->AddVariable(tempFormula, 'F');
}
}
TString lVars;
// TCut lCut = "jet1qg2<2.&&jet1pt>250.&&jet1pullAngle>-5.";// < 10 && jet1mass_m2 > 60 && jet1mass_m2 < 120";
// TCut lCut = "passZ > 3 && fjet1pt > 250 && fjet1MassPruned < 120 && fatjetid < 2";
TCut lCut = "abs(fjet1PartonId)!=24&&abs(fjet1PartonId)!=23";
// std::string lEventCut = "event % 2 == 1";
// lCut += lEventCut.c_str();
// TCut lSCut = "passT > 0 && fjet1pt > 250 && fjet1MassPruned < 120 && abs(fjet1PartonId) == 24&& fatjetid < 2";
TCut lSCut = "abs(fjet1PartonId)==24||abs(fjet1PartonId)==23";
// lSCut += lEventCut.c_str();
TCut cleanCut = "fjet1QGtagSub2 > -10 && fjet1PullAngle > -4 && abs(fjet1pt/fjet1MassTrimmed)<200 && abs(fjet1pt/fjet1MassPruned)<200";
TFile *lSAInput = TFile::Open(signalName);
TTree *lSASignal = (TTree*)lSAInput ->Get("DMSTree");
TFile *lSBInput = TFile::Open(backName);
TTree *lSBSignal = (TTree*)lSBInput ->Get("DMSTree");
Double_t lSWeight = 1.0;
Double_t lBWeight = 1.0;
gROOT->cd( outfileName+TString(":/") );
factory->AddSignalTree ( lSASignal, lSWeight );
gROOT->cd( outfileName+TString(":/") );
factory->AddBackgroundTree( lSBSignal, lBWeight );
factory->SetWeightExpression("weight");
std::stringstream pSignal,pBackground;
pSignal << "nTrain_Signal="<< lSASignal->GetEntries() << ":nTrain_Background=" << lSBSignal->GetEntries();
// factory->PrepareTrainingAndTestTree( lSCut, lCut,(pSignal.str()+":SplitMode=Block:NormMode=NumEvents:!V").c_str() );
factory->PrepareTrainingAndTestTree(lSCut&&cleanCut,lCut&&cleanCut,"nTrain_Signal=0:nTrain_Background=0:SplitMode=Alternate:NormMode=NumEvents:!V");
std::string lName = "alpha_VBF";
TString lBDTDef = "!H:!V:NTrees=400:BoostType=Grad:Shrinkage=0.1:UseBaggedGrad=F:nCuts=2000:NNodesMax=10000:MaxDepth=5:UseYesNoLeaf=F:nEventsMin=200";
// TString lBDTDef = "!H:!V:NTrees=400:BoostType=Grad:Shrinkage=0.1:UseBaggedGrad=F:nCuts=2000:MaxDepth=5:UseYesNoLeaf=F:MinNodeSize=0.086:NegWeightTreatment=IgnoreNegWeightsInTraining";
factory->BookMethod(TMVA::Types::kBDT,"BDT_simple_alpha",lBDTDef);
factory->TrainAllMethods();
factory->TestAllMethods();
factory->EvaluateAllMethods();
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVAClassification is done!" << std::endl;
delete factory;
//if (!gROOT->IsBatch()) TMVAGui( outfileName );
//TString lBDTDef = "!H:!V:NTrees=100:BoostType=Grad:Shrinkage=0.10:UseBaggedGrad=F:nCuts=2000:NNodesMax=10000:MaxDepth=3:SeparationType=GiniIndex";
}
Example 12: TMVAClassification
//......... part of the code omitted here .........
if (Use["FDA_MT"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_MT",
"H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=2:UseImprove:UseMinos:SetBatch" );
if (Use["FDA_GAMT"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_GAMT",
"H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=GA:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:Cycles=1:PopSize=5:Steps=5:Trim" );
if (Use["FDA_MCMT"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_MCMT",
"H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=MC:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:SampleSize=20" );
// TMVA ANN: MLP (recommended ANN) -- all ANNs in TMVA are Multilayer Perceptrons
if (Use["MLP"])
// factory->BookMethod( TMVA::Types::kMLP, "MLP", "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5:!UseRegulator" );
factory->BookMethod( TMVA::Types::kMLP, "MLP", "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+8:TestRate=5:!UseRegulator" );
if (Use["MLPBFGS"])
factory->BookMethod( TMVA::Types::kMLP, "MLPBFGS", "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5:TrainingMethod=BFGS:!UseRegulator" );
if (Use["MLPBNN"])
factory->BookMethod( TMVA::Types::kMLP, "MLPBNN", "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5:TrainingMethod=BFGS:UseRegulator" ); // BFGS training with bayesian regulators
// CF(Clermont-Ferrand)ANN
if (Use["CFMlpANN"])
factory->BookMethod( TMVA::Types::kCFMlpANN, "CFMlpANN", "!H:!V:NCycles=2000:HiddenLayers=N+1,N" ); // n_cycles:#nodes:#nodes:...
// Tmlp(Root)ANN
if (Use["TMlpANN"])
factory->BookMethod( TMVA::Types::kTMlpANN, "TMlpANN", "!H:!V:NCycles=200:HiddenLayers=N+1,N:LearningMethod=BFGS:ValidationFraction=0.3" ); // n_cycles:#nodes:#nodes:...
// Support Vector Machine
if (Use["SVM"])
factory->BookMethod( TMVA::Types::kSVM, "SVM", "Gamma=0.25:Tol=0.001:VarTransform=Norm" );
// Boosted Decision Trees
if (Use["BDTG"]) // Gradient Boost
factory->BookMethod( TMVA::Types::kBDT, "BDTG",
"!H:!V:NTrees=1000:BoostType=Grad:Shrinkage=0.10:UseBaggedGrad:GradBaggingFraction=0.5:nCuts=20:NNodesMax=5" );
if (Use["BDT"]) // Adaptive Boost
factory->BookMethod( TMVA::Types::kBDT, "BDT",
"!H:!V:NTrees=800:nEventsMin=50:MaxDepth=2:BoostType=AdaBoost:AdaBoostBeta=1:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning:NNodesMax=5" );
if (Use["BDTB"]) // Bagging
factory->BookMethod( TMVA::Types::kBDT, "BDTB",
"!H:!V:NTrees=400:BoostType=Bagging:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning" );
if (Use["BDTD"]) // Decorrelation + Adaptive Boost
factory->BookMethod( TMVA::Types::kBDT, "BDTD",
"!H:!V:NTrees=400:nEventsMin=400:MaxDepth=3:BoostType=AdaBoost:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning:VarTransform=Decorrelate" );
if (Use["BDTF"]) // Allow Using Fisher discriminant in node splitting for (strong) linearly correlated variables
factory->BookMethod( TMVA::Types::kBDT, "BDTMitFisher",
"!H:!V:NTrees=50:nEventsMin=150:UseFisherCuts:MaxDepth=3:BoostType=AdaBoost:AdaBoostBeta=0.5:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning" );
// RuleFit -- TMVA implementation of Friedman's method
if (Use["RuleFit"])
factory->BookMethod( TMVA::Types::kRuleFit, "RuleFit",
"H:!V:RuleFitModule=RFTMVA:Model=ModRuleLinear:MinImp=0.001:RuleMinDist=0.001:NTrees=20:fEventsMin=0.01:fEventsMax=0.5:GDTau=-1.0:GDTauPrec=0.01:GDStep=0.01:GDNSteps=10000:GDErrScale=1.02" );
// For an example of the category classifier usage, see: TMVAClassificationCategory
// --------------------------------------------------------------------------------------------------
// ---- Now you can optimize the setting (configuration) of the MVAs using the set of training events
// factory->OptimizeAllMethods("SigEffAt001","Scan");
// factory->OptimizeAllMethods("ROCIntegral","GA");
// --------------------------------------------------------------------------------------------------
// ---- Now you can tell the factory to train, test, and evaluate the MVAs
// Train MVAs using the set of training events
std::cout << "Training all methods" << std::endl;
factory->TrainAllMethods();
// ---- Evaluate all MVAs using the set of test events
std::cout << "Testing all methods" << std::endl;
factory->TestAllMethods();
// ----- Evaluate and compare performance of all configured MVAs
std::cout << "Evaluating all methods" << std::endl;
factory->EvaluateAllMethods();
// --------------------------------------------------------------
// Save the output
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVAClassification is done!" << std::endl;
delete factory;
// Launch the GUI for the root macros
if (!gROOT->IsBatch()) TMVAGui( outfileName );
}
Example 13: TMVAClassification
//......... part of the code omitted here .........
if (Use["FDA_SA"]) // can also use Simulated Annealing (SA) algorithm (see Cuts_SA options])
factory->BookMethod( TMVA::Types::kFDA, "FDA_SA",
"H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=SA:MaxCalls=15000:KernelTemp=IncAdaptive:InitialTemp=1e+6:MinTemp=1e-6:Eps=1e-10:UseDefaultScale" );
if (Use["FDA_MT"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_MT",
"H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=2:UseImprove:UseMinos:SetBatch" );
if (Use["FDA_GAMT"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_GAMT",
"H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=GA:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:Cycles=1:PopSize=5:Steps=5:Trim" );
if (Use["FDA_MCMT"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_MCMT",
"H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=MC:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:SampleSize=20" );
// TMVA ANN: MLP (recommended ANN) -- all ANNs in TMVA are Multilayer Perceptrons
if (Use["MLP"])
factory->BookMethod( TMVA::Types::kMLP, "MLP", "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5:!UseRegulator" );
if (Use["MLPBFGS"])
factory->BookMethod( TMVA::Types::kMLP, "MLPBFGS", "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5:TrainingMethod=BFGS:!UseRegulator" );
if (Use["MLPBNN"])
factory->BookMethod( TMVA::Types::kMLP, "MLPBNN", "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5:TrainingMethod=BFGS:UseRegulator" ); // BFGS training with bayesian regulators
// CF(Clermont-Ferrand)ANN
if (Use["CFMlpANN"])
factory->BookMethod( TMVA::Types::kCFMlpANN, "CFMlpANN", "!H:!V:NCycles=2000:HiddenLayers=N+1,N" ); // n_cycles:#nodes:#nodes:...
// Tmlp(Root)ANN
if (Use["TMlpANN"])
factory->BookMethod( TMVA::Types::kTMlpANN, "TMlpANN", "!H:!V:NCycles=200:HiddenLayers=N+1,N:LearningMethod=BFGS:ValidationFraction=0.3" ); // n_cycles:#nodes:#nodes:...
// Support Vector Machine
if (Use["SVM"])
factory->BookMethod( TMVA::Types::kSVM, "SVM", "Gamma=0.25:Tol=0.001:VarTransform=Norm" );
// Boosted Decision Trees
if (Use["BDTG"]) // Gradient Boost
factory->BookMethod( TMVA::Types::kBDT, "BDTG",
"!H:!V:NTrees=1000:MinNodeSize=2.5%:BoostType=Grad:Shrinkage=0.10:UseBaggedBoost:BaggedSampleFraction=0.5:nCuts=20:MaxDepth=2" );
if (Use["BDT"]) // Adaptive Boost
factory->BookMethod( TMVA::Types::kBDT, "BDT",
"!H:!V:NTrees=850:MinNodeSize=2.5%:MaxDepth=3:BoostType=AdaBoost:AdaBoostBeta=0.5:UseBaggedBoost:BaggedSampleFraction=0.5:SeparationType=GiniIndex:nCuts=20" );
if (Use["BDTB"]) // Bagging
factory->BookMethod( TMVA::Types::kBDT, "BDTB",
"!H:!V:NTrees=400:BoostType=Bagging:SeparationType=GiniIndex:nCuts=20" );
if (Use["BDTD"]) // Decorrelation + Adaptive Boost
factory->BookMethod( TMVA::Types::kBDT, "BDTD",
"!H:!V:NTrees=400:MinNodeSize=5%:MaxDepth=3:BoostType=AdaBoost:SeparationType=GiniIndex:nCuts=20:VarTransform=Decorrelate" );
if (Use["BDTF"]) // Allow Using Fisher discriminant in node splitting for (strong) linearly correlated variables
factory->BookMethod( TMVA::Types::kBDT, "BDTMitFisher",
"!H:!V:NTrees=50:MinNodeSize=2.5%:UseFisherCuts:MaxDepth=3:BoostType=AdaBoost:AdaBoostBeta=0.5:SeparationType=GiniIndex:nCuts=-1" );
// RuleFit -- TMVA implementation of Friedman's method
if (Use["RuleFit"])
factory->BookMethod( TMVA::Types::kRuleFit, "RuleFit",
"H:!V:RuleFitModule=RFTMVA:Model=ModRuleLinear:MinImp=0.001:RuleMinDist=0.001:NTrees=20:fEventsMin=0.01:fEventsMax=0.5:GDTau=-1.0:GDTauPrec=0.01:GDStep=0.01:GDNSteps=10000:GDErrScale=1.02" );
// For an example of the category classifier usage, see: TMVAClassificationCategory
// --------------------------------------------------------------------------------------------------
// ---- Now you can optimize the setting (configuration) of the MVAs using the set of training events
// ---- STILL EXPERIMENTAL and only implemented for BDT's !
// factory->OptimizeAllMethods("SigEffAt001","Scan");
// factory->OptimizeAllMethods("ROCIntegral","FitGA");
// --------------------------------------------------------------------------------------------------
// ---- Now you can tell the factory to train, test, and evaluate the MVAs
// Train MVAs using the set of training events
factory->TrainAllMethods();
// ---- Evaluate all MVAs using the set of test events
factory->TestAllMethods();
// ----- Evaluate and compare performance of all configured MVAs
factory->EvaluateAllMethods();
// --------------------------------------------------------------
// Save the output
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVAClassification is done!" << std::endl;
delete factory;
// Launch the GUI for the root macros
if (!gROOT->IsBatch()) TMVAGui( outfileDir + outfileName );
}
Example 14: main
//......... part of the code omitted here .........
}
// Apply additional cuts on the signal and background samples (can be different)
// // If no numbers of events are given, half of the events in the tree are used
// for training, and the other half for testing:
// factory->PrepareTrainingAndTestTree( mycut, "SplitMode=random:!V" );
// ---- Book MVA methods
//
// please lookup the various method configuration options in the corresponding cxx files, eg:
// src/MethoCuts.cxx, etc, or here: http://tmva.sourceforge.net/optionRef.html
// it is possible to preset ranges in the option string in which the cut optimisation should be done:
// "...:CutRangeMin[2]=-1:CutRangeMax[2]=1"...", where [2] is the third input variable
// PDE - RS method
if (Use["PDERS"])
factory->BookMethod( TMVA::Types::kPDERS, "PDERS", "!H:!V:NormTree=T:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=40:NEventsMax=60:VarTransform=None" );
// And the options strings for the MinMax and RMS methods, respectively:
// "!H:!V:VolumeRangeMode=MinMax:DeltaFrac=0.2:KernelEstimator=Gauss:GaussSigma=0.3" );
// "!H:!V:VolumeRangeMode=RMS:DeltaFrac=3:KernelEstimator=Gauss:GaussSigma=0.3" );
if (Use["PDEFoam"])
factory->BookMethod( TMVA::Types::kPDEFoam, "PDEFoam", "!H:!V:MultiTargetRegression=F:TargetSelection=Mpv:TailCut=0.001:VolFrac=0.3:nActiveCells=500:nSampl=2000:nBin=5:Compress=T:Kernel=None:Nmin=10:VarTransform=None" );
// K-Nearest Neighbour classifier (KNN)
if (Use["KNN"])
factory->BookMethod( TMVA::Types::kKNN, "KNN", "nkNN=20:ScaleFrac=0.8:SigmaFact=1.0:Kernel=Gaus:UseKernel=F:UseWeight=T:!Trim" );
// Linear discriminant
if (Use["LD"]) factory->BookMethod( TMVA::Types::kLD, "LD","!H:!V:VarTransform=G,U,D" );
// Function discrimination analysis (FDA) -- test of various fitters - the recommended one is Minuit (or GA or SA)
if (Use["FDA_MC"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_MC",
"!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=MC:SampleSize=100000:Sigma=0.1:VarTransform=D" );
if (Use["FDA_GA"]) // can also use Simulated Annealing (SA) algorithm (see Cuts_SA options) .. the formula of this example is good for parabolas
factory->BookMethod( TMVA::Types::kFDA, "FDA_GA",
"!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=GA:PopSize=100:Cycles=3:Steps=30:Trim=True:SaveBestGen=1:VarTransform=Norm" );
if (Use["FDA_MT"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_MT",
"!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100);(-10,10):FitMethod=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=2:UseImprove:UseMinos:SetBatch" );
if (Use["FDA_GAMT"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_GAMT",
"!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=GA:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:Cycles=1:PopSize=5:Steps=5:Trim" );
// Neural network (MLP)
if (Use["MLP"])
// factory->BookMethod( TMVA::Types::kMLP, "MLP", "!H:!V:VarTransform=Norm:NeuronType=tanh:NCycles=20000:HiddenLayers=N+20:TestRate=6:TrainingMethod=BFGS:Sampling=0.3:SamplingEpoch=0.8:ConvergenceImprove=1e-6:ConvergenceTests=15:!UseRegulator" );
// factory->BookMethod( TMVA::Types::kMLP, "MLP", "!H:!V:VarTransform=Norm:NeuronType=tanh:NCycles=200:HiddenLayers=N+20:TestRate=6:TrainingMethod=BFGS:Sampling=0.3:SamplingEpoch=0.8:ConvergenceImprove=1e-6:ConvergenceTests=15:!UseRegulator" );
// factory->BookMethod( TMVA::Types::kMLP, "MLP", "!H:!V:VarTransform=Norm:NeuronType=tanh:NCycles=400:HiddenLayers=N+10:TestRate=6:TrainingMethod=BFGS:Sampling=0.3:SamplingEpoch=0.8:ConvergenceImprove=1e-6:ConvergenceTests=15" );
// factory->BookMethod( TMVA::Types::kMLP, "MLP", "!H:!V:VarTransform=N:NeuronType=tanh:NCycles=200:HiddenLayers=N+10:TestRate=6:TrainingMethod=BFGS:Sampling=0.3:SamplingEpoch=0.8:ConvergenceImprove=1e-6:ConvergenceTests=15" );
// factory->BookMethod( TMVA::Types::kMLP, "MLP", "!H:!V:VarTransform=G,N:NeuronType=tanh:NCycles=200:HiddenLayers=N+5:TestRate=6:TrainingMethod=BFGS:Sampling=0.3:SamplingEpoch=0.8:ConvergenceImprove=1e-6:ConvergenceTests=15" );
factory->BookMethod( TMVA::Types::kMLP, "MLP", "!H:!V:NeuronType=tanh:NCycles=250:HiddenLayers=N+5:TrainingMethod=BFGS:Sampling=0.3:SamplingEpoch=0.8:ConvergenceImprove=1e-6:ConvergenceTests=15:TestRate=10");
// Support Vector Machine
if (Use["SVM"])
factory->BookMethod( TMVA::Types::kSVM, "SVM", "Gamma=0.25:Tol=0.001:VarTransform=N" );
// factory->BookMethod( TMVA::Types::kSVM, "SVM", "Gamma=0.25:Tol=0.001:VarTransform=N,G" );
// Boosted Decision Trees
if (Use["BDT"])
// factory->BookMethod( TMVA::Types::kBDT, "BDT","!H:!V:NTrees=100:nEventsMin=5:BoostType=AdaBoostR2:SeparationType=RegressionVariance:nCuts=20:PruneMethod=CostComplexity:PruneStrength=30" );
// factory->BookMethod( TMVA::Types::kBDT, "BDT","!H:!V:NTrees=200:nEventsMin=5:BoostType=AdaBoostR2:SeparationType=RegressionVariance:PruneMethod=CostComplexity:PruneStrength=30" );
// factory->BookMethod( TMVA::Types::kBDT, "BDT","!H:!V:NTrees=300:nEventsMin=5:BoostType=AdaBoostR2:SeparationType=RegressionVariance:PruneMethod=CostComplexity:PruneStrength=30" );
// factory->BookMethod( TMVA::Types::kBDT, "BDT","!H:!V:NTrees=100:nEventsMin=5:BoostType=AdaBoostR2:SeparationType=RegressionVariance:PruneMethod=CostComplexity:PruneStrength=30" );
factory->BookMethod( TMVA::Types::kBDT, "BDT","!H:!V:NTrees=100:nEventsMin=20:BoostType=AdaBoostR2:SeparationType=RegressionVariance:PruneMethod=CostComplexity:PruneStrength=30");
if (Use["BDTG"])
// factory->BookMethod( TMVA::Types::kBDT, "BDTG","!H:!V:NTrees=2000::BoostType=Grad:Shrinkage=0.1:UseBaggedGrad:GradBaggingFraction=0.5:nCuts=20:MaxDepth=3:NNodesMax=15" );
factory->BookMethod( TMVA::Types::kBDT, "BDTG","!H:!V:NTrees=1000::BoostType=Grad:Shrinkage=0.1:UseBaggedGrad:GradBaggingFraction=0.5:MaxDepth=5:NNodesMax=25:PruneMethod=CostComplexity:PruneStrength=30");
// --------------------------------------------------------------------------------------------------
// ---- Now you can tell the factory to train, test, and evaluate the MVAs
// Train MVAs using the set of training events
factory->TrainAllMethods();
// ---- Evaluate all MVAs using the set of test events
factory->TestAllMethods();
// ----- Evaluate and compare performance of all configured MVAs
factory->EvaluateAllMethods();
// --------------------------------------------------------------
// Save the output
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVARegression is done!" << std::endl;
delete factory;
// Launch the GUI for the root macros
// if (!gROOT->IsBatch()) TMVARegGui( outputFileName.c_str() );
return 0;
}
Example 15: BJetRegression
//......... part of the code omitted here .........
// Apply additional cuts on the signal and background samples (can be different)
TCut mycut = ""; // for example: TCut mycut = "abs(var1)<0.5 && abs(var2-0.5)<1";
// tell the factory to use all remaining events in the trees after training for testing:
factory->PrepareTrainingAndTestTree( mycut,
"nTrain_Regression=0:nTest_Regression=0:SplitMode=Random:NormMode=NumEvents:!V" );
// If no numbers of events are given, half of the events in the tree are used
// for training, and the other half for testing:
// factory->PrepareTrainingAndTestTree( mycut, "SplitMode=random:!V" );
// ---- Book MVA methods
//
// please lookup the various method configuration options in the corresponding cxx files, eg:
// src/MethoCuts.cxx, etc, or here: http://tmva.sourceforge.net/optionRef.html
// it is possible to preset ranges in the option string in which the cut optimisation should be done:
// "...:CutRangeMin[2]=-1:CutRangeMax[2]=1"...", where [2] is the third input variable
// PDE - RS method
if (Use["PDERS"])
factory->BookMethod( TMVA::Types::kPDERS, "PDERS",
"!H:!V:NormTree=T:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=40:NEventsMax=60:VarTransform=None" );
// And the options strings for the MinMax and RMS methods, respectively:
// "!H:!V:VolumeRangeMode=MinMax:DeltaFrac=0.2:KernelEstimator=Gauss:GaussSigma=0.3" );
// "!H:!V:VolumeRangeMode=RMS:DeltaFrac=3:KernelEstimator=Gauss:GaussSigma=0.3" );
if (Use["PDEFoam"])
factory->BookMethod( TMVA::Types::kPDEFoam, "PDEFoam",
"!H:!V:MultiTargetRegression=F:TargetSelection=Mpv:TailCut=0.001:VolFrac=0.0666:nActiveCells=500:nSampl=2000:nBin=5:Compress=T:Kernel=None:Nmin=10:VarTransform=None" );
// K-Nearest Neighbour classifier (KNN)
if (Use["KNN"])
factory->BookMethod( TMVA::Types::kKNN, "KNN",
"nkNN=20:ScaleFrac=0.8:SigmaFact=1.0:Kernel=Gaus:UseKernel=F:UseWeight=T:!Trim" );
// Linear discriminant
if (Use["LD"])
factory->BookMethod( TMVA::Types::kLD, "LD",
"!H:!V:VarTransform=None" );
// Function discrimination analysis (FDA) -- test of various fitters - the recommended one is Minuit (or GA or SA)
if (Use["FDA_MC"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_MC",
"!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=MC:SampleSize=100000:Sigma=0.1:VarTransform=D" );
if (Use["FDA_GA"]) // can also use Simulated Annealing (SA) algorithm (see Cuts_SA options) .. the formula of this example is good for parabolas
factory->BookMethod( TMVA::Types::kFDA, "FDA_GA",
"!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=GA:PopSize=100:Cycles=3:Steps=30:Trim=True:SaveBestGen=1:VarTransform=Norm" );
if (Use["FDA_MT"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_MT",
"!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100);(-10,10):FitMethod=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=2:UseImprove:UseMinos:SetBatch" );
if (Use["FDA_GAMT"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_GAMT",
"!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=GA:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:Cycles=1:PopSize=5:Steps=5:Trim" );
// Neural network (MLP)
if (Use["MLP"])
factory->BookMethod( TMVA::Types::kMLP, "MLP", "!H:!V:VarTransform=Norm:NeuronType=tanh:NCycles=20000:HiddenLayers=N+20:TestRate=6:TrainingMethod=BFGS:Sampling=0.3:SamplingEpoch=0.8:ConvergenceImprove=1e-6:ConvergenceTests=15:!UseRegulator" );
// Support Vector Machine
if (Use["SVM"])
factory->BookMethod( TMVA::Types::kSVM, "SVM", "Gamma=0.25:Tol=0.001:VarTransform=Norm" );
// Boosted Decision Trees
if (Use["BDT"])
factory->BookMethod( TMVA::Types::kBDT, "BDT",
"!H:!V:NTrees=100:MinNodeSize=1.0%:BoostType=AdaBoostR2:SeparationType=RegressionVariance:nCuts=20:PruneMethod=CostComplexity:PruneStrength=30" );
if (Use["BDTG"])
factory->BookMethod( TMVA::Types::kBDT, "BDTG",
"!H:!V:NTrees=2000::BoostType=Grad:Shrinkage=0.1:UseBaggedBoost:BaggedSampleFraction=0.7:nCuts=200:MaxDepth=3:NNodesMax=15" );
// --------------------------------------------------------------------------------------------------
// ---- Now you can tell the factory to train, test, and evaluate the MVAs
// Train MVAs using the set of training events
factory->TrainAllMethods();
// ---- Evaluate all MVAs using the set of test events
factory->TestAllMethods();
// ----- Evaluate and compare performance of all configured MVAs
factory->EvaluateAllMethods();
// --------------------------------------------------------------
// Save the output
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVARegression is done!" << std::endl;
delete factory;
// Launch the GUI for the root macros
if (!gROOT->IsBatch()) TMVARegGui( outfileName );
}