This article collects and summarizes typical usage examples of the C++ method tmva::Factory::BookMethod. If you are wondering what exactly Factory::BookMethod does in C++, how to use it, or where to find examples of it, the selected code examples below may help. You can also explore further usage examples of its containing class, tmva::Factory.
The following shows 15 code examples of the Factory::BookMethod method, sorted by popularity by default. You can vote up the examples you like or find useful; your feedback helps the system recommend better C++ code examples.
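Before the individual examples, here is a minimal, self-contained sketch of the pattern they all share, written against the older (pre-DataLoader) TMVA::Factory API used throughout this page. The input file input.root and the trees TreeS and TreeB are placeholders, not taken from any example below.
#include "TFile.h"
#include "TTree.h"
#include "TMVA/Factory.h"
#include "TMVA/Types.h"

void MinimalBookMethod()
{
   // Output file that will hold the training results
   TFile* outputFile = TFile::Open( "TMVA_out.root", "RECREATE" );
   TMVA::Factory* factory = new TMVA::Factory( "TMVAClassification", outputFile,
       "!V:!Silent:Color:DrawProgressBar:AnalysisType=Classification" );

   // Declare the training variables
   factory->AddVariable( "var1", 'F' );
   factory->AddVariable( "var2", 'F' );

   // Register signal and background trees (placeholder file/tree names)
   TFile* input = TFile::Open( "input.root" );
   factory->AddSignalTree    ( (TTree*)input->Get("TreeS"), 1.0 );
   factory->AddBackgroundTree( (TTree*)input->Get("TreeB"), 1.0 );
   factory->PrepareTrainingAndTestTree( "", "",
       "nTrain_Signal=0:nTrain_Background=0:SplitMode=Random:NormMode=NumEvents:!V" );

   // BookMethod( type, name, options ): register an MVA method with the factory
   factory->BookMethod( TMVA::Types::kBDT, "BDT",
       "!H:!V:NTrees=850:MaxDepth=3:BoostType=AdaBoost:AdaBoostBeta=0.5:SeparationType=GiniIndex:nCuts=20" );

   // Train, test and evaluate everything that was booked
   factory->TrainAllMethods();
   factory->TestAllMethods();
   factory->EvaluateAllMethods();

   outputFile->Close();
   delete factory;
}
Every example below is a variation on this skeleton; they differ mainly in which variables are declared, which trees are registered, and which methods are booked with which option strings.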
Example 1: main
//......... part of the code omitted .........
"!V:!Silent:Color:DrawProgressBar:Transformations=I;D;P;G,D:AnalysisType=Classification" );
// Define the input variables that shall be used for the MVA training
// note that you may also use variable expressions, such as: "3*var1/var2*abs(var3)"
// [all types of expressions that can also be parsed by TTree::Draw( "expression" )]
// factory->AddVariable("jet1_pt","jet1_pt","", 'D');
// factory->AddVariable("jet2_pt","jet2_pt","", 'D');
factory->AddVariable("dijet_M","dijet_M","", 'D');
factory->AddVariable("dijet_deta","dijet_deta","", 'D');
factory->AddVariable("metnomu_significance","metnomu_significance","",'D');
factory->AddVariable("alljetsmetnomu_mindphi","alljetsmetnomu_mindphi","",'D');
// factory->AddSpectator( "nPhot_presel", "nPhot_presel", "", 'F' );
std::vector<std::string> backgrounds;
backgrounds.push_back("WJetsToLNu_HT-100to200_Tune4C_13TeV-madgraph-tauola");
backgrounds.push_back("WJetsToLNu_HT-200to400_Tune4C_13TeV-madgraph-tauola");
backgrounds.push_back("WJetsToLNu_HT-400to600_Tune4C_13TeV-madgraph-tauola");
backgrounds.push_back("WJetsToLNu_HT-600toInf_Tune4C_13TeV-madgraph-tauola");
backgrounds.push_back("ZJetsToNuNu_HT-100to200_Tune4C_13TeV-madgraph-tauola");
backgrounds.push_back("ZJetsToNuNu_HT-200to400_Tune4C_13TeV-madgraph-tauola");
backgrounds.push_back("ZJetsToNuNu_HT-400to600_Tune4C_13TeV-madgraph-tauola");
backgrounds.push_back("ZJetsToNuNu_HT-600toInf_Tune4C_13TeV-madgraph-tauola");
std::vector<std::string> signals;
signals.push_back("VBF_HToInv_M-125_13TeV_powheg-pythia6");
double lumiData = 10000;//in pb-1
for (unsigned int i=0; i<signals.size(); i++){
float weight = getNormalisationFactor(lumiData,signals[i]);
TFile* f=TFile::Open(Form("%s/%s.root",inPath.c_str(),signals[i].c_str()));
TTree* sig=(TTree*) f->Get("lightTree/LightTree");
if (!sig)
{
std::cout << "====> ERROR: Sig tree " << signals[i] << " cannot be found" << std::endl;
continue;
}
factory->AddSignalTree ( sig, weight);
}
for (unsigned int i=0; i<backgrounds.size(); i++){
float weight = getNormalisationFactor(lumiData,backgrounds[i]);
TFile* f=TFile::Open(Form("%s/%s.root",inPath.c_str(),backgrounds[i].c_str()));
TTree* bkg=(TTree*) f->Get("lightTree/LightTree");
if (!bkg)
{
std::cout << "====> ERROR: Bkg tree " << backgrounds[i] << " cannot be found" << std::endl;
continue;
}
factory->AddBackgroundTree ( bkg, weight);
}
// Apply additional cuts on the signal and background samples (can be different)
TCut mycuts;
TCut mycutb;
//Preselection to get rid of QCD
mycuts = "passtrigger==1 && nvetomuons==0 && nvetoelectrons==0 && metnomuons>140 && abs(jet1_eta)<4.7 && abs(jet2_eta)<4.7 && dijet_M>700 && jet1_eta*jet2_eta<0 && metnomu_significance>4 && alljetsmetnomu_mindphi>2 && jet1_pt>50 && jet2_pt>40";
mycutb = "passtrigger==1 && nvetomuons==0 && nvetoelectrons==0 && metnomuons>140 && abs(jet1_eta)<4.7 && abs(jet2_eta)<4.7 && dijet_M>700 && jet1_eta*jet2_eta<0 && metnomu_significance>4 && alljetsmetnomu_mindphi>2 && jet1_pt>50 && jet2_pt>40";
factory->PrepareTrainingAndTestTree( mycuts, mycutb,
"nTrain_Signal=0:nTrain_Background=0:SplitMode=Random:NormMode=NumEvents:!V" );
// ---- Book MVA methods
// factory->BookMethod( TMVA::Types::kCuts, "Cuts",
// // "!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp[0]=FSmart:VarProp[1]=FSmart:VarProp[2]=FSmart:VarProp[3]=FSmart:VarProp[4]=FSmart:VarProp[5]=FSmart" );
factory->BookMethod( TMVA::Types::kCuts, "CutsGA",
"H:!V:FitMethod=GA:EffSel:Steps=30:Cycles=3:PopSize=400:SC_steps=10:SC_rate=5:SC_factor=0.95:VarProp[0]=FSmart:VarProp[1]=FSmart:VarProp[2]=FSmart:VarProp[3]=FSmart" );
// factory->BookMethod( TMVA::Types::kCuts, "CutsSA",
// "!H:!V:FitMethod=SA:EffSel:MaxCalls=150000:KernelTemp=IncAdaptive:InitialTemp=1e+6:MinTemp=1e-6:Eps=1e-10:UseDefaultScale" );
factory->BookMethod( TMVA::Types::kBDT, "BDT","!H:!V:NTrees=1000:MinNodeSize=2.5%:MaxDepth=3:BoostType=AdaBoost:AdaBoostBeta=0.5:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning");
// ---- Now you can tell the factory to train, test, and evaluate the MVAs
// Train MVAs using the set of training events
factory->TrainAllMethods();
// ---- Evaluate all MVAs using the set of test events
factory->TestAllMethods();
// ----- Evaluate and compare performance of all configured MVAs
factory->EvaluateAllMethods();
// --------------------------------------------------------------
// Save the output
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVAClassification is done!" << std::endl;
delete factory;
// Launch the GUI for the root macros
if (!gROOT->IsBatch()) TMVAGui( outfileName );
}
Example 2: Boost
void Boost(){
TString outfileName = "boost.root";
TFile* outputFile = TFile::Open( outfileName, "RECREATE" );
TMVA::Factory *factory = new TMVA::Factory( "TMVAClassification", outputFile,
"!V:!Silent:Color:DrawProgressBar:Transformations=I;D;P;G,D" );
factory->AddVariable( "var0", 'F' );
factory->AddVariable( "var1", 'F' );
TFile *input(0);
TString fname = "./data.root";
if (!gSystem->AccessPathName( fname )) {
// first we try to find tmva_example.root in the local directory
std::cout << "--- BOOST : Accessing " << fname << std::endl;
input = TFile::Open( fname );
}
else {
gROOT->LoadMacro( "./createData.C");
create_circ(20000);
cout << " created data.root with data and circle arranged in half circles"<<endl;
input = TFile::Open( fname );
}
if (!input) {
std::cout << "ERROR: could not open data file" << std::endl;
exit(1);
}
TTree *signal = (TTree*)input->Get("TreeS");
TTree *background = (TTree*)input->Get("TreeB");
Double_t signalWeight = 1.0;
Double_t backgroundWeight = 1.0;
gROOT->cd( outfileName+TString(":/") );
factory->AddSignalTree ( signal, signalWeight );
factory->AddBackgroundTree( background, backgroundWeight );
factory->PrepareTrainingAndTestTree( "", "",
"nTrain_Signal=0:nTrain_Background=0:SplitMode=Random:NormMode=NumEvents:!V" );
TString fisher="H:!V";
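// Book a plain Fisher discriminant and several boosted variants, to compare boost transforms and AdaBoostBeta settings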
factory->BookMethod( TMVA::Types::kFisher, "Fisher", fisher );
factory->BookMethod( TMVA::Types::kFisher, "FisherBoost", fisher+":Boost_Num=100:Boost_Type=AdaBoost" );
factory->BookMethod( TMVA::Types::kFisher, "FisherBoostLog", fisher+":Boost_Num=100:Boost_Transform=log:Boost_Type=AdaBoost:Boost_AdaBoostBeta=1.0" );
factory->BookMethod( TMVA::Types::kFisher, "FisherBoostLog2", fisher+":Boost_Num=100:Boost_Transform=log:Boost_Type=AdaBoost:Boost_AdaBoostBeta=2.0" );
factory->BookMethod( TMVA::Types::kFisher, "FisherBoostStep", fisher+":Boost_Num=100:Boost_Transform=step:Boost_Type=AdaBoost:Boost_AdaBoostBeta=1.0" );
factory->BookMethod( TMVA::Types::kFisher, "FisherBoostStep2", fisher+":Boost_Num=100:Boost_Transform=step:Boost_Type=AdaBoost:Boost_AdaBoostBeta=1.2" );
factory->BookMethod( TMVA::Types::kFisher, "FisherBoostStep3", fisher+":Boost_Num=100:Boost_Transform=step:Boost_Type=AdaBoost:Boost_AdaBoostBeta=1.5" );
// Train MVAs using the set of training events
factory->TrainAllMethods();
// ---- Evaluate all MVAs using the set of test events
factory->TestAllMethods();
// ----- Evaluate and compare performance of all configured MVAs
factory->EvaluateAllMethods();
// --------------------------------------------------------------
// Save the output
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVAClassification is done!" << std::endl;
delete factory;
// Launch the GUI for the root macros
if (!gROOT->IsBatch()) TMVAGui( outfileName );
}
Example 3: testBDT
//......... part of the code omitted .........
TString outfileName( "bdtTMVA_FCNC_tZ.root" );
TFile* outputFile = TFile::Open( outfileName, "RECREATE" );
TMVA::Factory *factory = new TMVA::Factory( "doBDT_FCNC_tZ", outputFile,
"!V:!Silent:Color:DrawProgressBar:Transformations=I;D;P;G,D:AnalysisType=Classification" );
// global event weights per tree (see below for setting event-wise weights)
//Double_t signalWeight = 0.003582;
//Double_t backgroundWeight = 0.0269;
Double_t signalWeight = 1;
Double_t backgroundWeight = 1;
TFile *input_sig = TFile::Open( "proof.root" );
TFile *input_wz = TFile::Open( "proof.root" );
TTree *signal = (TTree*)input_sig->Get("Ttree_FCNCkut");
TTree *background_WZ = (TTree*)input_wz->Get("Ttree_WZ");
/*TTree *background_ZZ = (TTree*)input_wz->Get("Ttree_ZZ");
TTree *background_WW = (TTree*)input_wz->Get("Ttree_WW");
TTree *background_TTbar = (TTree*)input_wz->Get("Ttree_TTbar");
TTree *background_Zjets = (TTree*)input_wz->Get("Ttree_Zjets");
TTree *background_Wjets = (TTree*)input_wz->Get("Ttree_Wjets");
TTree *background_TtW = (TTree*)input_wz->Get("Ttree_TtW");
TTree *background_TbartW = (TTree*)input_wz->Get("Ttree_TbartW");*/
// You can add an arbitrary number of signal or background trees
factory->AddSignalTree ( signal, signalWeight );
factory->AddBackgroundTree( background_WZ, backgroundWeight );
/*factory->AddBackgroundTree( background_ZZ, backgroundWeight );
factory->AddBackgroundTree( background_WW, backgroundWeight );
factory->AddBackgroundTree( background_TTbar, backgroundWeight );
factory->AddBackgroundTree( background_Zjets, backgroundWeight );
factory->AddBackgroundTree( background_Wjets, backgroundWeight );
factory->AddBackgroundTree( background_TtW, backgroundWeight );
factory->AddBackgroundTree( background_TbartW, backgroundWeight );*/
factory->AddVariable("tree_topMass", 'F');
factory->AddVariable("tree_deltaPhilb", 'F');
factory->AddVariable("tree_asym", 'F');
factory->AddVariable("tree_Zpt", 'F');
// to set weights. The variable must exist in the tree
// for signal : factory->SetSignalWeightExpression ("weight1*weight2");
// for background: factory->SetBackgroundWeightExpression("weight1*weight2");
// Apply additional cuts on the signal and background samples (can be different)
TCut mycuts = ""; // for example: TCut mycuts = "abs(var1)<0.5 && abs(var2-0.5)<1";
TCut mycutb = ""; // for example: TCut mycutb = "abs(var1)<0.5";
factory->PrepareTrainingAndTestTree( mycuts, mycutb,
"nTrain_Signal=0:nTrain_Background=0:SplitMode=Random:NormMode=NumEvents:!V" );
factory->BookMethod( TMVA::Types::kBDT, "BDT", "!H:!V:NTrees=100:nEventsMin=100:MaxDepth=3:BoostType=AdaBoost:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning:VarTransform=Decorrelate" );
// Train MVAs using the set of training events
factory->TrainAllMethods();
// ---- Evaluate all MVAs using the set of test events
factory->TestAllMethods();
// ----- Evaluate and compare performance of all configured MVAs
factory->EvaluateAllMethods();
// --------------------------------------------------------------
// Save the output
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVAClassification is done!" << std::endl;
delete factory;
// Launch the GUI for the root macros
if (!gROOT->IsBatch()) TMVAGui( outfileName );
}
Example 4: tmvaClassifier
//......... part of the code omitted .........
factory->AddVariable( "detajj", "#Delta#eta_{jj}", "", 'F' );
factory->AddVariable( "spt", "#Delta_{rel}", "GeV", 'F' );
}
else
{
factory->AddVariable( "mjj", "M_{jj}", "GeV", 'F' );
factory->AddVariable( "detajj", "#Delta#eta_{jj}", "", 'F' );
factory->AddVariable( "setajj", "#Sigma#eta_{j}", "", 'F' );
factory->AddVariable( "eta1", "#eta(1)", "", 'F' );
factory->AddVariable( "eta2", "#eta(2)", "", 'F' );
factory->AddVariable( "pt1", "p_{T}(1)", "GeV", 'F' );
factory->AddVariable( "pt2", "p_{T}(2)", "GeV", 'F' );
factory->AddVariable( "spt", "#Delta_{rel}", "GeV", 'F' );
if(useQG) factory->AddVariable( "qg1", "q/g(1)", "", 'F' );
if(useQG) factory->AddVariable( "qg2", "q/g(2)", "", 'F' );
}
// Apply additional cuts on the signal and background samples (can be different)
TCut mycuts = ""; // for example: TCut mycuts = "abs(var1)<0.5 && abs(var2-0.5)<1";
TCut mycutb = ""; // for example: TCut mycutb = "abs(var1)<0.5";
factory->PrepareTrainingAndTestTree( mycuts, mycutb,
"nTrain_Signal=0:nTrain_Background=0:SplitMode=Random:NormMode=NumEvents:!V" );
// ---- Book MVA methods
//
// Please lookup the various method configuration options in the corresponding cxx files, eg:
// src/MethoCuts.cxx, etc, or here: http://tmva.sourceforge.net/optionRef.html
// it is possible to preset ranges in the option string in which the cut optimisation should be done:
// "...:CutRangeMin[2]=-1:CutRangeMax[2]=1"...", where [2] is the third input variable
// Cut optimisation
if (Use["Cuts"])
factory->BookMethod( TMVA::Types::kCuts, "Cuts",
"!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp=FSmart" );
if (Use["CutsD"])
factory->BookMethod( TMVA::Types::kCuts, "CutsD",
"!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp=FSmart:VarTransform=Decorrelate" );
if (Use["CutsPCA"])
factory->BookMethod( TMVA::Types::kCuts, "CutsPCA",
"!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp=FSmart:VarTransform=PCA" );
if (Use["CutsGA"])
factory->BookMethod( TMVA::Types::kCuts, "CutsGA",
"H:!V:FitMethod=GA:CutRangeMin[0]=-10:CutRangeMax[0]=10:VarProp[1]=FMax:EffSel:Steps=30:Cycles=3:PopSize=400:SC_steps=10:SC_rate=5:SC_factor=0.95" );
if (Use["CutsSA"])
factory->BookMethod( TMVA::Types::kCuts, "CutsSA",
"!H:!V:FitMethod=SA:EffSel:MaxCalls=150000:KernelTemp=IncAdaptive:InitialTemp=1e+6:MinTemp=1e-6:Eps=1e-10:UseDefaultScale" );
// Likelihood ("naive Bayes estimator")
if (Use["Likelihood"])
factory->BookMethod( TMVA::Types::kLikelihood, "Likelihood",
"H:!V:TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmoothBkg[1]=10:NSmooth=1:NAvEvtPerBin=50" );
// Decorrelated likelihood
if (Use["LikelihoodD"])
factory->BookMethod( TMVA::Types::kLikelihood, "LikelihoodD",
"!H:!V:TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmooth=5:NAvEvtPerBin=50:VarTransform=Decorrelate" );
// PCA-transformed likelihood
if (Use["LikelihoodPCA"])
factory->BookMethod( TMVA::Types::kLikelihood, "LikelihoodPCA",
"!H:!V:!TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmooth=5:NAvEvtPerBin=50:VarTransform=PCA" );
Example 5: Reg
void Reg(){
TMVA::Tools::Instance();
std::cout << "==> Start TMVARegression" << std::endl;
ifstream myfile;
myfile.open("99per.txt");
ostringstream xcS,xcH,xcP,xcC,xcN;
double xS,xH,xC,xN,xP;
if(myfile.is_open()){
// read rows until extraction fails; the values from the last complete row are kept
while(myfile>>xS>>xH>>xC>>xN>>xP){}
}
xcS<<xS;
xcH<<xH;
xcC<<xC;
xcN<<xN;
xcP<<xP;
//Output file
TString outfileName( "Ex1out_FullW_def.root" );
TFile* outputFile = TFile::Open( outfileName, "RECREATE" );
//Declaring the factory
TMVA::Factory *factory = new TMVA::Factory( "TMVAClassification", outputFile,
"!V:!Silent:Color:DrawProgressBar" );
//Declaring Input Varibles
factory->AddVariable( "Sieie",'F');
factory->AddVariable( "ToE", 'F' );
factory->AddVariable( "isoC",'F' );
factory->AddVariable( "isoN",'F' );
factory->AddVariable( "isoP",'F' );
TString fname = "../../CutTMVATrees_Barrel.root";
TFile *input = TFile::Open( fname );
// --- Register the regression tree
TTree *signal = (TTree*)input->Get("t_S");
TTree *background = (TTree*)input->Get("t_B");
//Just Some more settings
Double_t signalWeight = 1.0;
Double_t backgroundWeight = 1.0;
// You can add an arbitrary number of regression trees
factory->AddSignalTree( signal, signalWeight );
factory->AddBackgroundTree( background , backgroundWeight );
TCut mycuts ="";
TCut mycutb ="";
// factory->PrepareTrainingAndTestTree(mycuts,mycutb,"nTrain_Signal=9000:nTrain_Background=9000:nTest_Signal=10000:nTest_Background=10000");
factory->SetBackgroundWeightExpression("weightPT*weightXS");
factory->SetSignalWeightExpression("weightPT*weightXS");
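// Build the option string for the Cuts method; the upper cut ranges come from the values read from 99per.txt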
TString methodName = "Cuts_FullsampleW_def";
TString methodOptions ="!H:!V:FitMethod=GA:EffMethod=EffSel";
methodOptions +=":VarProp[0]=FMin:VarProp[1]=FMin:VarProp[2]=FMin:VarProp[3]=FMin:VarProp[4]=FMin";
methodOptions +=":CutRangeMax[0]="+xcS.str();
methodOptions +=":CutRangeMax[1]="+xcH.str();
methodOptions +=":CutRangeMax[2]="+xcC.str();
methodOptions +=":CutRangeMax[3]="+xcN.str();
methodOptions +=":CutRangeMax[4]="+xcP.str();
//************
factory->BookMethod(TMVA::Types::kCuts,methodName,methodOptions);
factory->TrainAllMethods();
factory->TestAllMethods();
factory->EvaluateAllMethods();
// --------------------------------------------------------------
// Save the output
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVARegression is done!" << std::endl;
delete factory;
}
Example 6: TestBDT_forreal_test
int TestBDT_forreal_test(TString sig) {
// This loads the library
TMVA::Tools::Instance();
// to get access to the GUI and all tmva macros
//TString thisdir = gSystem->DirName(gInterpreter->GetCurrentMacroName());//was not commented, but does not work anymore
//gROOT->SetMacroPath(thisdir + ":" + gROOT->GetMacroPath());//was not commented, but cannot not work anymore
//gROOT->ProcessLine(".L TMVAGui.C");
TString outfileName( "rootfiles/TMVA/resultTMVA_"+sig+"VsTTbar2l.root" );
TString weightname( "weightsTMVA_"+sig+"VsTTbar2l" );
TFile* outputFile = TFile::Open( outfileName, "RECREATE" );
TMVA::Factory *factory = new TMVA::Factory( weightname, outputFile,"!V:!Silent:Color:DrawProgressBar");
vector<TString> histonames; histonames.clear();
map<string, float> value;
//histonames.push_back("MT2W");
//histonames.push_back("MT2_lb_b");
//histonames.push_back("MT2_lb_bqq");
//histonames.push_back("MT2_lb_b_mass");
//histonames.push_back("MT2_lb_bqq_mass");
histonames.push_back("Mlb_lead_bdiscr");
//histonames.push_back("Mjjj");
//histonames.push_back("topness");
//histonames.push_back("topnessMod");
histonames.push_back("pfmet");
//histonames.push_back("ak4_HT");
//histonames.push_back("MET_over_sqrtHT");
//histonames.push_back("ak4_htratiom");
histonames.push_back("dR_lep_leadb");
//histonames.push_back("hadronic_top_chi2");
//histonames.push_back("ngoodbtags");
histonames.push_back("ngoodjets");
//histonames.push_back("mindphi_met_j1_j2");
//histonames.push_back("lep1_pt");
histonames.push_back("ak4pfjets_leadMEDbjet_p4_Pt");
for(unsigned int b = 0; b<histonames.size(); ++b){
factory->AddVariable(histonames[b], 'F' );
}
TString signame = "/nfs-7/userdata/stopRun2/testMVA/"+sig+".root";
TString bkgname1 = "/nfs-7/userdata/stopRun2/testMVA/TTJets_DiLept_madgraph_25ns_1.root";
TString bkgname2 = "/nfs-7/userdata/stopRun2/testMVA/TTJets_DiLept_madgraph_25ns_2.root";
/*
TString signame = "/hadoop/cms/store/user/haweber/forBDT/"+sig+".root";
TString bkgname1 = "/hadoop/cms/store/user/haweber/forBDT/TTJets_DiLept_madgraph_25ns_1.root";
TString bkgname2 = "/hadoop/cms/store/user/haweber/forBDT/TTJets_DiLept_madgraph_25ns_2.root";
*/
cout << "signame " << signame << endl;
TFile *inputSig = TFile::Open( signame );
TFile *inputBkg1 = TFile::Open( bkgname1 );
TFile *inputBkg2 = TFile::Open( bkgname2 );
TTree *signal = (TTree*)inputSig->Get("t");
TTree *background1 = (TTree*)inputBkg1->Get("t");
TTree *background2 = (TTree*)inputBkg2->Get("t");
// global event weights per tree (see below for setting event-wise weights)
Double_t signalWeight = 1.0;
Double_t backgroundWeight = 1.0;
// You can add an arbitrary number of signal or background trees
factory->AddSignalTree ( signal, signalWeight );
factory->AddBackgroundTree( background1, backgroundWeight );
factory->AddBackgroundTree( background2, backgroundWeight );
//factory->SetBackgroundWeightExpression( "weight" );
// Apply additional cuts on the signal and background samples (can be different)
//TCut mycuts = "MT2W>200&&mindphi_met_j1_j2>0.8"; // for example: TCut mycuts = "abs(var1)<0.5 && abs(var2-0.5)<1";
//TCut mycutb = "MT2W>200&&mindphi_met_j1_j2>0.8"; // for example: TCut mycutb = "abs(var1)<0.5";
//if(sig.Contains("T2tt_425_325")||sig.Contains("T2tt_500_325")){ mycuts = "mindphi_met_j1_j2>0.8"; mycutb = "mindphi_met_j1_j2>0.8"; }
TCut mycuts = "";
TCut mycutb = "";
factory->PrepareTrainingAndTestTree( mycuts, mycutb,
"nTrain_Signal=0:nTrain_Background=0:SplitMode=Random:NormMode=NumEvents:!V" );
factory->BookMethod( TMVA::Types::kBDT, "BDT",
"!H:!V:NTrees=850:MinNodeSize=2.5%:MaxDepth=3:BoostType=AdaBoost:AdaBoostBeta=0.5:UseBaggedBoost:BaggedSampleFraction=0.5:SeparationType=GiniIndex:nCuts=20" );
// Train MVAs using the set of training events
cout << "Train methods" << endl;
factory->TrainAllMethods();
// ---- Evaluate all MVAs using the set of test events
cout << "Test methods" << endl;
factory->TestAllMethods();
// ----- Evaluate and compare performance of all configured MVAs
cout << "Evaluate methods" << endl;
factory->EvaluateAllMethods();
// --------------------------------------------------------------
// Save the output
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVAClassification is done!" << std::endl;
delete factory;
// Launch the GUI for the root macros
//......... part of the code omitted .........
Example 7: TMVATrainer
//......... part of the code omitted .........
factory->AddVariable("TagVarCSV_vertexMass","TagVarCSV_vertexMass","units",'F');
factory->AddVariable("TagVarCSV_vertexNTracks","TagVarCSV_vertexNTracks","units",'F');
factory->AddVariable("TagVarCSV_vertexEnergyRatio","TagVarCSV_vertexEnergyRatio","units",'F');
factory->AddVariable("TagVarCSV_vertexJetDeltaR","TagVarCSV_vertexJetDeltaR","units",'F');
factory->AddVariable("TagVarCSV_flightDistance2dSig","TagVarCSV_flightDistance2dSig","units",'F');
//factory->AddVariable("TagVarCSV_flightDistance3dSig","TagVarCSV_flightDistance3dSig","units",'F');
// You can add so-called "Spectator variables", which are not used in the MVA training,
// but will appear in the final "TestTree" produced by TMVA. This TestTree will contain the
// input variables, the response values of all trained MVAs, and the spectator variables
factory->AddSpectator("Jet_pt","Jet_pt","units",'F');
factory->AddSpectator("Jet_eta","Jet_eta","units",'F');
factory->AddSpectator("Jet_phi","Jet_phi","units",'F');
factory->AddSpectator("Jet_mass","Jet_mass","units",'F');
factory->AddSpectator("Jet_massGroomed","Jet_massGroomed","units",'F');
factory->AddSpectator("Jet_flavour","Jet_flavour","units",'F');
factory->AddSpectator("Jet_nbHadrons","Jet_nbHadrons","units",'F');
factory->AddSpectator("Jet_JP","Jet_JP","units",'F');
factory->AddSpectator("Jet_JBP","Jet_JBP","units",'F');
factory->AddSpectator("Jet_CSV","Jet_CSV","units",'F');
factory->AddSpectator("Jet_CSVIVF","Jet_CSVIVF","units",'F');
factory->AddSpectator("Jet_tau1","Jet_tau1","units",'F');
factory->AddSpectator("Jet_tau2","Jet_tau2","units",'F');
factory->AddSpectator("SubJet1_CSVIVF","SubJet1_CSVIVF","units",'F');
factory->AddSpectator("SubJet2_CSVIVF","SubJet2_CSVIVF","units",'F');
// Read training and test data
// (it is also possible to use ASCII format as input -> see TMVA Users Guide)
TString fnameSig = "RadionToHH_4b_M-800_TuneZ2star_8TeV-Madgraph_pythia6_JetTaggingVariables_training.root";
TString fnameBkg = "QCD_Pt-300to470_TuneZ2star_8TeV_pythia6_JetTaggingVariables_training.root";
TFile *inputSig = TFile::Open( fnameSig );
TFile *inputBkg = TFile::Open( fnameBkg );
std::cout << "--- TMVAClassification : Using input files: " << inputSig->GetName() << std::endl
<< inputBkg->GetName() << std::endl;
// --- Register the training and test trees
TTree *sigTree = (TTree*)inputSig->Get("tagVars/ttree");
TTree *bkgTree = (TTree*)inputBkg->Get("tagVars/ttree");
// // global event weights per tree (see below for setting event-wise weights)
Double_t signalWeight = 1.0;
Double_t backgroundWeight = 1.0;
// factory->SetInputTrees( tree,signalCut,backgroundCut );
factory->AddSignalTree ( sigTree, signalWeight );
factory->AddBackgroundTree( bkgTree, backgroundWeight );
// Apply additional cuts on the signal and background samples (can be different)
TCut signalCut = "Jet_massGroomed>80 && Jet_massGroomed<150";
TCut backgroundCut = "abs(Jet_flavour)==5 && Jet_nbHadrons>1 && Jet_massGroomed>80 && Jet_massGroomed<150";
// Tell the factory how to use the training and testing events
factory->PrepareTrainingAndTestTree( signalCut, backgroundCut,
"nTrain_Signal=22000:nTest_Signal=20000:nTrain_Background=22000:nTest_Background=2730:SplitMode=Random:!V" );
// Gradient Boost
factory->BookMethod( TMVA::Types::kBDT, "BDTG_T1000D3_fat_BBvsGSP",
"!H:!V:NTrees=1000:MaxDepth=3:MinNodeSize=1.5%:BoostType=Grad:Shrinkage=0.10:UseBaggedBoost:BaggedSampleFraction=0.5:SeparationType=GiniIndex:nCuts=20" );
//factory->BookMethod( TMVA::Types::kBDT, "BDTG_T1000D5_fat_BBvsGSP",
// "!H:!V:NTrees=1000:MaxDepth=5:MinNodeSize=2.5%:BoostType=Grad:Shrinkage=0.10:UseBaggedBoost:BaggedSampleFraction=0.5:nCuts=20" );
// // Adaptive Boost
// factory->BookMethod( TMVA::Types::kBDT, "BDT",
// "!H:!V:NTrees=1000:MaxDepth=5:MinNodeSize=2.5%:BoostType=AdaBoost:AdaBoostBeta=0.5:UseBaggedBoost:BaggedSampleFraction=0.5:SeparationType=GiniIndex:nCuts=20" );
// // Bagging
// factory->BookMethod( TMVA::Types::kBDT, "BDTB",
// "!H:!V:NTrees=1000:MaxDepth=5:MinNodeSize=2.5%:BoostType=Bagging:SeparationType=GiniIndex:nCuts=20" );
// // Decorrelation + Adaptive Boost
// factory->BookMethod( TMVA::Types::kBDT, "BDTD",
// "!H:!V:NTrees=1000:MaxDepth=5:MinNodeSize=2.5%:BoostType=AdaBoost:AdaBoostBeta=0.5:SeparationType=GiniIndex:nCuts=20:VarTransform=Decorrelate" );
// ---- Now you can tell the factory to train, test, and evaluate the MVAs
// Train MVAs using the set of training events
factory->TrainAllMethods();
// ---- Evaluate all MVAs using the set of test events
factory->TestAllMethods();
// ----- Evaluate and compare performance of all configured MVAs
factory->EvaluateAllMethods();
// --------------------------------------------------------------
// Save the output
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVAClassification is done!" << std::endl;
delete factory;
// Launch the GUI for the root macros
if (!gROOT->IsBatch()) TMVAGui( outfileName );
}
Example 8: TMVATraining_ch4
void TMVATraining_ch4( )
{
TFile* outputFile = TFile::Open( "TMVA_ch4.root", "RECREATE" );
TMVA::Factory *factory = new TMVA::Factory( "MVAnalysis", outputFile,"!V");
TFile *signal = TFile::Open("../production/BGx0/Prod2_iptubeK0/B0_etapr-eta-3pi2pi_KS-pi+pi-_output_signal_iptubeK0.root");
TFile *background = TFile::Open("../production/BGx0/Prod2_iptubeK0/B0_etapr-eta-3pi2pi_KS-pi+pi-_output_ccbar_iptubeK0.root");
factory->AddSignalTree ( (TTree*)signal->Get("B0"), 1.0 );
factory->AddBackgroundTree ( (TTree*)background->Get("B0"), 1.0 );
TCut sigCut = TCut("B0__isContinuumEvent==0");
TCut bgCut = TCut("B0__isContinuumEvent==1");
factory->AddVariable("B0_ThrustB",'F');
factory->AddVariable("B0_ThrustO",'F');
factory->AddVariable("B0_CosTBTO",'F');
factory->AddVariable("B0_CosTBz",'F');
factory->AddVariable("B0_R2",'F');
factory->AddVariable("B0_cc1",'F');
factory->AddVariable("B0_cc2",'F');
factory->AddVariable("B0_cc3",'F');
factory->AddVariable("B0_cc4",'F');
factory->AddVariable("B0_cc5",'F');
factory->AddVariable("B0_cc6",'F');
factory->AddVariable("B0_cc7",'F');
factory->AddVariable("B0_cc8",'F');
factory->AddVariable("B0_cc9",'F');
factory->AddVariable("B0_mm2",'F');
factory->AddVariable("B0_et",'F');
factory->AddVariable("B0_hso00",'F');
// factory->AddVariable("B0_hso01",'F');
factory->AddVariable("B0_hso02",'F');
//factory->AddVariable("B0_hso03",'F');
factory->AddVariable("B0_hso04",'F');
factory->AddVariable("B0_hso10",'F');
factory->AddVariable("B0_hso12",'F');
factory->AddVariable("B0_hso14",'F');
factory->AddVariable("B0_hso20",'F');
factory->AddVariable("B0_hso22",'F');
factory->AddVariable("B0_hso24",'F');
factory->AddVariable("B0_hoo0",'F');
factory->AddVariable("B0_hoo1",'F');
factory->AddVariable("B0_hoo2",'F');
factory->AddVariable("B0_hoo3",'F');
factory->AddVariable("B0_hoo4",'F');
factory->PrepareTrainingAndTestTree(sigCut, bgCut, "!V:nTrain_Signal=10000:nTest_Signal=10000:nTrain_Background=10000:nTest_Background=10000:SplitMode=Random:NormMode=NumEvents" );
//factory->BookMethod( TMVA::Types::kLikelihood, "Likelihood", "H:V:!TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmooth=5:NAvEvtPerBin=50:VarTransform=PCA");
//factory->BookMethod( TMVA::Types::kMLP, "MLP", "!V:NCycles=200:HiddenLayers=N+1,N:TestRate=5" );
factory->BookMethod( TMVA::Types::kMLP, "MLPBNN", "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5:TrainingMethod=BFGS:UseRegulator" );
factory->BookMethod( TMVA::Types::kBDT, "BDT", "!H:!V:NTrees=850:MinNodeSize=2.5%:MaxDepth=3:BoostType=AdaBoost:AdaBoostBeta=0.5:UseBaggedBoost:BaggedSampleFraction=0.5:SeparationType=GiniIndex:nCuts=20" );
factory->BookMethod( TMVA::Types::kSVM, "SVM", "!H:!V:Gamma=0.25:Tol=0.001:VarTransform=Norm" );
//factory->BookMethod( TMVA::Types::kBDT, "FastBDT", "!H:!V:CreateMVAPdfs:NbinsMVAPdf=40:NTrees=100:Shrinkage=0.10"); //:RandRatio=0.5:NCutLevel=8:NTreeLayers=3");
factory->TrainAllMethods();
factory->TestAllMethods();
factory->EvaluateAllMethods();
outputFile->Close();
delete factory;
// Launch the GUI for the root macros
if (!gROOT->IsBatch()) TMVA::TMVAGui( "TMVA_ch4.root" );
}
Example 9: main
int main ()
{
TFile * outputfile = TFile::Open ("outputTMVA.root","RECREATE");
TMVA::Factory * TMVAtest = new TMVA::Factory ("TMVAtest", outputfile, "S") ;
//PG get the signal and deliver it to the TMVA factory
TChain signalTree ("sample") ;
signalTree.Add ("data/sig_0.root") ;
std::cout << "READ " << signalTree.GetEntries () << " signal events\n" ;
TMVAtest->AddSignalTree (&signalTree, 1) ;
//PG get the bkg and deliver it to the TMVA factory
TChain bkgTree ("sample") ;
bkgTree.Add ("data/bkg_0.root") ;
std::cout << "READ " << bkgTree.GetEntries () << " bkg events\n" ;
TMVAtest->AddBackgroundTree (&bkgTree, 1) ;
//PG get the training and test samples and deliver them to the TMVA factory
TChain signalTrainTree ("sample") ;
signalTrainTree.Add ("data/sig_1.root") ;
std::cout << "READ " << signalTrainTree.GetEntries () << " signal train events\n" ;
TChain bkgTrainTree ("sample") ;
bkgTrainTree.Add ("data/bkg_1.root") ;
std::cout << "READ " << bkgTrainTree.GetEntries () << " bkg train events\n" ;
TMVAtest->SetInputTrees (signalTrainTree.GetTree (), bkgTrainTree.GetTree (), 1., 1.) ;
//PG variables to be used for the selection
//PG must be defined in the TTrees
TMVAtest->AddVariable ("vars.x", 'F') ;
TMVAtest->AddVariable ("vars.y" , 'F') ;
int signalNumTrain = signalTrainTree.GetEntries () * 4 / 5 ;
int bkgNumTrain = bkgTrainTree.GetEntries () * 4 / 5 ;
int signalNumTest = signalTrainTree.GetEntries () - signalNumTrain ;
int bkgNumTest = bkgTrainTree.GetEntries () - bkgNumTrain ;
char trainOptions[120] ;
sprintf (trainOptions,"NSigTrain=%d:NBkgTrain=%d:NSigTest=%d:NBkgTest=%d",
signalNumTrain, bkgNumTrain,
signalNumTest, bkgNumTest) ;
sprintf (trainOptions,"NSigTrain=%d:NBkgTrain=%d:NSigTest=%d:NBkgTest=%d",
0,0,0,0) ;
std::cout << "TRAINING CONFIGURATION : " << trainOptions << "\n" ;
TMVAtest->PrepareTrainingAndTestTree ("",trainOptions) ;
//PG prepare the classifier
//PG cut-based, default params
TMVAtest->BookMethod (TMVA::Types::kCuts, "Cuts") ;
TMVAtest->TrainAllMethods () ;
TMVAtest->TestAllMethods () ;
TMVAtest->EvaluateAllMethods () ;
delete TMVAtest ;
delete outputfile ;
}
Example 10: main
int main(int argc, char * argv[])
{
//Processing input options
int c;
std::string outFname;
outFname = std::string("QualityNaF.root");
// Open input files, get the trees
TChain *mc = InputFileReader("FileListNtuples_ext.txt","parametri_geo");
// Preparing options for the TMVA::Factory
std::string options(
"!V:"
"!Silent:"
"Color:"
"DrawProgressBar:"
"Transformations=I;D;P;G,D:"
"AnalysisType=Classification"
);
//Creating the factory
TFile * ldFile = new TFile(outFname.c_str(),"RECREATE");
TMVA::Factory * factory = new TMVA::Factory("QualityNaF", ldFile, options.c_str());
//Preparing variables
//general
/*factory->AddVariable("Chisquare", 'F');
factory->AddVariable("Layernonusati", 'I');
factory->AddVariable("NTofUsed", 'I');
factory->AddVariable("diffR", 'F');
factory->AddVariable("TOF_Up_Down", 'F');*/
//Tof
//factory->AddVariable("TOFchisq_s", 'F');
//factory->AddVariable("TOFchisq_t", 'F');
//RICH
factory->AddVariable("Richtotused", 'F');
factory->AddVariable("RichPhEl", 'F');
factory->AddVariable("RICHprob", 'F');
factory->AddVariable("RICHcollovertotal");
factory->AddVariable("RICHLipBetaConsistency");
factory->AddVariable("RICHTOFBetaConsistency");
factory->AddVariable("RICHChargeConsistency");
factory->AddVariable("RICHPmts");
factory->AddVariable("RICHgetExpected");
factory->AddVariable("tot_hyp_p_uncorr");
factory->AddVariable("Bad_ClusteringRICH");
factory->AddVariable("NSecondariesRICHrich");
//factory->AddVariable("HitHValldir");
//factory->AddVariable("HitHVallrefl");
//factory->AddVariable("HVBranchCheck:= (HitHValldir - HitHVoutdir) - (HitHVallrefl - HitHVoutrefl)");
factory->AddVariable("HitHVoutdir");
factory->AddVariable("HitHVoutrefl");
//Spectator Variables
factory->AddSpectator("R", 'F');
factory->AddSpectator("BetaRICH_new", 'F');
//Preselection cuts
std::string PreSelection = "qL1>0&&(joinCutmask&187)==187&&qL1<1.75&&R>0";
std::string ChargeCut = "qUtof>0.8&&qUtof<1.3&&qLtof>0.8&&qLtof<1.3";
std::string VelocityCut = /*"Beta<0.8";*/"((joinCutmask>>11))==1024&&BetaRICH_new>0&&BetaRICH_new<0.975";
std::string signalCut = /*"(R/Beta)*(1-Beta^2)^0.5>1.65&&GenMass>1&&GenMass<2";*/"(R/BetaRICH_new)*(1-BetaRICH_new^2)^0.5>0.5&&(R/BetaRICH_new)*(1-BetaRICH_new^2)^0.5<1.5";
std::string bkgndCut = /*"(R/Beta)*(1-Beta^2)^0.5>1.65&&GenMass>0&&GenMass<1";*/"(R/BetaRICH_new)*(1-BetaRICH_new^2)^0.5>3";
factory->AddTree(mc,"Signal" ,1,(PreSelection +"&&"+ ChargeCut + "&&" + VelocityCut + "&&"+ signalCut).c_str());
factory->AddTree(mc,"Background",1,(PreSelection +"&&"+ ChargeCut + "&&" + VelocityCut + "&&"+ bkgndCut).c_str());
// Preparing
std::string preselection = "";
std::string inputparams(
"SplitMode=Random:"
"NormMode=NumEvents:"
"!V"
);
factory->PrepareTrainingAndTestTree(preselection.c_str(),inputparams.c_str());
// Training
std::string trainparams ="!H:!V:MaxDepth=3";
factory->BookMethod(TMVA::Types::kBDT, "BDT", trainparams.c_str());
trainparams ="!H:!V";
factory->BookMethod(TMVA::Types::kLikelihood, "Likelihood", trainparams.c_str());
trainparams ="!H:!V:VarTransform=Decorrelate";
//factory->BookMethod(TMVA::Types::kLikelihood, "LikelihoodD", trainparams.c_str());
trainparams ="!H:!V";
//factory->BookMethod(TMVA::Types::kCuts, "Cuts", trainparams.c_str());
factory->TrainAllMethods();
factory->TestAllMethods();
factory->EvaluateAllMethods();
}
Example 11: TMVAClassificationCategory
void TMVAClassificationCategory()
{
//---------------------------------------------------------------
std::cout << std::endl << "==> Start TMVAClassificationCategory" << std::endl;
bool batchMode(false);
// Create a new root output file.
TString outfileName( "TMVA.root" );
TFile* outputFile = TFile::Open( outfileName, "RECREATE" );
// Create the factory object. Later you can choose the methods
// whose performance you'd like to investigate. The factory will
// then run the performance analysis for you.
//
// The first argument is the base of the name of all the
// weightfiles in the directory weight/
//
// The second argument is the output file for the training results
// All TMVA output can be suppressed by removing the "!" (not) in
// front of the "Silent" argument in the option string
std::string factoryOptions( "!V:!Silent:Transformations=I;D;P;G,D" );
if (batchMode) factoryOptions += ":!Color:!DrawProgressBar";
TMVA::Factory *factory = new TMVA::Factory( "TMVAClassificationCategory", outputFile, factoryOptions );
// If you wish to modify default settings
// (please check "src/Config.h" to see all available global options)
// (TMVA::gConfig().GetVariablePlotting()).fTimesRMS = 8.0;
// (TMVA::gConfig().GetIONames()).fWeightFileDir = "myWeightDirectory";
// Define the input variables that shall be used for the MVA training
// note that you may also use variable expressions, such as: "3*var1/var2*abs(var3)"
// [all types of expressions that can also be parsed by TTree::Draw( "expression" )]
factory->AddVariable( "var1", 'F' );
factory->AddVariable( "var2", 'F' );
factory->AddVariable( "var3", 'F' );
factory->AddVariable( "var4", 'F' );
// You can add so-called "Spectator variables", which are not used in the MVA training,
// but will appear in the final "TestTree" produced by TMVA. This TestTree will contain the
// input variables, the response values of all trained MVAs, and the spectator variables
factory->AddSpectator( "eta" );
// load the signal and background event samples from ROOT trees
TFile *input(0);
TString fname( "" );
if (UseOffsetMethod) fname = "../execs/data/toy_sigbkg_categ_offset.root";
else fname = "../execs/data/toy_sigbkg_categ_varoff.root";
if (!gSystem->AccessPathName( fname )) {
// first we try to find tmva_example.root in the local directory
std::cout << "--- TMVAClassificationCategory: Accessing " << fname << std::endl;
input = TFile::Open( fname );
}
if (!input) {
std::cout << "ERROR: could not open data file: " << fname << std::endl;
exit(1);
}
TTree *signal = (TTree*)input->Get("TreeS");
TTree *background = (TTree*)input->Get("TreeB");
/// global event weights per tree (see below for setting event-wise weights)
Double_t signalWeight = 1.0;
Double_t backgroundWeight = 1.0;
/// you can add an arbitrary number of signal or background trees
factory->AddSignalTree ( signal, signalWeight );
factory->AddBackgroundTree( background, backgroundWeight );
// Apply additional cuts on the signal and background samples (can be different)
TCut mycuts = ""; // for example: TCut mycuts = "abs(var1)<0.5 && abs(var2-0.5)<1";
TCut mycutb = ""; // for example: TCut mycutb = "abs(var1)<0.5";
// tell the factory to use all remaining events in the trees after training for testing:
factory->PrepareTrainingAndTestTree( mycuts, mycutb,
"nTrain_Signal=0:nTrain_Background=0:SplitMode=Random:NormMode=NumEvents:!V" );
// Fisher discriminant
factory->BookMethod( TMVA::Types::kFisher, "Fisher", "!H:!V:Fisher" );
// Likelihood
factory->BookMethod( TMVA::Types::kLikelihood, "Likelihood",
"!H:!V:TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmoothBkg[1]=10:NSmooth=1:NAvEvtPerBin=50" );
// Categorised classifier
TMVA::MethodCategory* mcat = 0;
// the variable sets
TString theCat1Vars = "var1:var2:var3:var4";
TString theCat2Vars = (UseOffsetMethod ? "var1:var2:var3:var4" : "var1:var2:var3");
// the Fisher
TMVA::MethodBase* fiCat = factory->BookMethod( TMVA::Types::kCategory, "FisherCat","" );
mcat = dynamic_cast<TMVA::MethodCategory*>(fiCat);
mcat->AddMethod("abs(eta)<=1.3",theCat1Vars, TMVA::Types::kFisher,"Category_Fisher_1","!H:!V:Fisher");
mcat->AddMethod("abs(eta)>1.3", theCat2Vars, TMVA::Types::kFisher,"Category_Fisher_2","!H:!V:Fisher");
//......... part of the code omitted .........
Example 12: WWTMVAClassification
//......... part of the code omitted .........
if(mH==450.) mass4bodycut = "(fit_mlvjj>332 && fit_mlvjj<518)"; // 3j450el
if(mH==500.) mass4bodycut = "(fit_mlvjj>362 && fit_mlvjj<569)"; // 3j500el
if(mH==550.) mass4bodycut = "(fit_mlvjj>398 && fit_mlvjj<616)"; // 3j550el
if(mH==600.) mass4bodycut = "(fit_mlvjj>419 && fit_mlvjj<660)"; // 3j600el
}
}
char mycutschar[1000];
sprintf(mycutschar,"ggdevt == %i &&(Mass2j_PFCor>65 && Mass2j_PFCor<95) && %s", njets, mass4bodycut);
TCut mycuts (mycutschar);
// tell the factory to use all remaining events in the trees after training for testing:
factory->PrepareTrainingAndTestTree( mycuts, mycuts,
"nTrain_Signal=0:nTrain_Background=0:SplitMode=Random:NormMode=NumEvents:!V" );
// If no numbers of events are given, half of the events in the tree are used for training, and
// the other half for testing:
// factory->PrepareTrainingAndTestTree( mycut, "SplitMode=random:!V" );
// To also specify the number of testing events, use:
// factory->PrepareTrainingAndTestTree( mycut,
// "NSigTrain=3000:NBkgTrain=3000:NSigTest=3000:NBkgTest=3000:SplitMode=Random:!V" );
// ---- Book MVA methods
//
// please lookup the various method configuration options in the corresponding cxx files, eg:
// src/MethoCuts.cxx, etc, or here: http://tmva.sourceforge.net/optionRef.html
// it is possible to preset ranges in the option string in which the cut optimisation should be done:
// "...:CutRangeMin[2]=-1:CutRangeMax[2]=1"...", where [2] is the third input variable
// Cut optimisation
if (Use["Cuts"])
factory->BookMethod( TMVA::Types::kCuts, "Cuts",
"!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp=FSmart" );
if (Use["CutsD"])
factory->BookMethod( TMVA::Types::kCuts, "CutsD",
"!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp=FSmart:VarTransform=Decorrelate" );
if (Use["CutsPCA"])
factory->BookMethod( TMVA::Types::kCuts, "CutsPCA",
"!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp=FSmart:VarTransform=PCA" );
if (Use["CutsGA"])
factory->BookMethod( TMVA::Types::kCuts, "CutsGA",
"H:!V:FitMethod=GA:CutRangeMin[0]=-10:CutRangeMax[0]=10:VarProp[1]=FMax:EffSel:Steps=30:Cycles=3:PopSize=400:SC_steps=10:SC_rate=5:SC_factor=0.95" );
if (Use["CutsSA"])
factory->BookMethod( TMVA::Types::kCuts, "CutsSA",
"!H:!V:FitMethod=SA:EffSel:MaxCalls=150000:KernelTemp=IncAdaptive:InitialTemp=1e+6:MinTemp=1e-6:Eps=1e-10:UseDefaultScale" );
// Likelihood
if (Use["Likelihood"])
factory->BookMethod( TMVA::Types::kLikelihood, "Likelihood",
"H:!V:!TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmoothBkg[1]=10:NSmooth=1:NAvEvtPerBin=50" );
// test the decorrelated likelihood
if (Use["LikelihoodD"])
factory->BookMethod( TMVA::Types::kLikelihood, "LikelihoodD",
"!H:!V:!TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmooth=5:NAvEvtPerBin=50:VarTransform=Decorrelate" );
if (Use["LikelihoodPCA"])
factory->BookMethod( TMVA::Types::kLikelihood, "LikelihoodPCA",
"!H:!V:!TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmooth=5:NAvEvtPerBin=50:VarTransform=PCA" );
Example 13: TMVAClassification
//......... part of the code omitted .........
factory->AddVariable(name, 'F');
}
// This would set individual event weights (the variables defined in the
// expression need to exist in the original TTree)
// for signal : factory->SetSignalWeightExpression("weight1*weight2");
// for background: factory->SetBackgroundWeightExpression("weight1*weight2");
// commented JB : 04/26 ??
//factory->dSetBackgroundWeightExpression("weight");
// Apply additional cuts on the signal and background samples (can be different)
TCut mycuts = "";
TCut mycutb = "";
// Tell the factory how to use the training and testing events
//
// If no numbers of events are given, half of the events in the tree are used
// for training, and the other half for testing:
// factory->PrepareTrainingAndTestTree( mycut, "SplitMode=random:!V" );
// To also specify the number of testing events, use:
//factory->PrepareTrainingAndTestTree( mycuts,mycutb,"NSigTrain=9000:NBkgTrain=50000:NSigTest=9000:NBkgTest=50000:SplitMode=Random:!V" );
factory->PrepareTrainingAndTestTree( mycuts, mycutb, "nTrain_Signal=4900:nTrain_Background=49000:nTest_Signal=4900:nTest_Background=49000:SplitMode=Random:!V"); // for KFVertex
// factory->PrepareTrainingAndTestTree( mycuts, mycutb,"nTrain_Signal=20000:nTrain_Background=40000:nTest_Signal=20000:nTest_Background=40000:SplitMode=Random:!V"); // for PPV
// ---- Book MVA methods
//
// Please lookup the various method configuration options in the corresponding cxx files, eg:
// src/MethoCuts.cxx, etc, or here: http://tmva.sourceforge.net/optionRef.html
// it is possible to preset ranges in the option string in which the cut optimisation should be done:
// "...:CutRangeMin[2]=-1:CutRangeMax[2]=1"...", where [2] is the third input variable
// Cut optimisation
if (Use["Cuts"])
factory->BookMethod( TMVA::Types::kCuts, "Cuts",
"!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp=FSmart" );
if (Use["CutsD"])
factory->BookMethod( TMVA::Types::kCuts, "CutsD",
"!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp=FSmart:VarTransform=Decorrelate" );
if (Use["CutsPCA"])
factory->BookMethod( TMVA::Types::kCuts, "CutsPCA",
"!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp=FSmart:VarTransform=PCA" );
if (Use["CutsGA"])
factory->BookMethod( TMVA::Types::kCuts, "CutsGA",
"H:!V:FitMethod=GA:CutRangeMin[0]=-10:CutRangeMax[0]=10:VarProp[1]=FMax:EffSel:Steps=30:Cycles=3:PopSize=400:SC_steps=10:SC_rate=5:SC_factor=0.95" );
if (Use["CutsSA"])
factory->BookMethod( TMVA::Types::kCuts, "CutsSA",
"!H:!V:FitMethod=SA:EffSel:MaxCalls=150000:KernelTemp=IncAdaptive:InitialTemp=1e+6:MinTemp=1e-6:Eps=1e-10:UseDefaultScale" );
// Likelihood ("naive Bayes estimator")
if (Use["Likelihood"])
factory->BookMethod( TMVA::Types::kLikelihood, "Likelihood",
"H:!V:TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmoothBkg[1]=10:NSmooth=1:NAvEvtPerBin=50" );
// Decorrelated likelihood
if (Use["LikelihoodD"])
factory->BookMethod( TMVA::Types::kLikelihood, "LikelihoodD",
"!H:!V:TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmooth=5:NAvEvtPerBin=50:VarTransform=Decorrelate" );
// PCA-transformed likelihood
if (Use["LikelihoodPCA"])
factory->BookMethod( TMVA::Types::kLikelihood, "LikelihoodPCA",
"!H:!V:!TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmooth=5:NAvEvtPerBin=50:VarTransform=PCA" );
Example 14: TMVAClassificationCategory
void TMVAClassificationCategory()
{
//---------------------------------------------------------------
// Example for usage of different event categories with classifiers
std::cout << std::endl << "==> Start TMVAClassificationCategory" << std::endl;
bool batchMode = false;
// Create a new root output file.
TString outfileName( "TMVA.root" );
TFile* outputFile = TFile::Open( outfileName, "RECREATE" );
// Create the factory object (see TMVAClassification.C for more information)
std::string factoryOptions( "!V:!Silent:Transformations=I;D;P;G,D" );
if (batchMode) factoryOptions += ":!Color:!DrawProgressBar";
TMVA::Factory *factory = new TMVA::Factory( "TMVAClassificationCategory", outputFile, factoryOptions );
// Define the input variables used for the MVA training
factory->AddVariable( "var1", 'F' );
factory->AddVariable( "var2", 'F' );
factory->AddVariable( "var3", 'F' );
factory->AddVariable( "var4", 'F' );
// You can add so-called "Spectator variables", which are not used in the MVA training,
// but will appear in the final "TestTree" produced by TMVA. This TestTree will contain the
// input variables, the response values of all trained MVAs, and the spectator variables
factory->AddSpectator( "eta" );
// Load the signal and background event samples from ROOT trees
TFile *input(0);
TString fname( "" );
if (UseOffsetMethod) fname = "data/toy_sigbkg_categ_offset.root";
else fname = "data/toy_sigbkg_categ_varoff.root";
if (!gSystem->AccessPathName( fname )) {
// first we try to find tmva_example.root in the local directory
std::cout << "--- TMVAClassificationCategory: Accessing " << fname << std::endl;
input = TFile::Open( fname );
}
if (!input) {
std::cout << "ERROR: could not open data file: " << fname << std::endl;
exit(1);
}
TTree *signal = (TTree*)input->Get("TreeS");
TTree *background = (TTree*)input->Get("TreeB");
/// Global event weights per tree (see below for setting event-wise weights)
Double_t signalWeight = 1.0;
Double_t backgroundWeight = 1.0;
/// You can add an arbitrary number of signal or background trees
factory->AddSignalTree ( signal, signalWeight );
factory->AddBackgroundTree( background, backgroundWeight );
// Apply additional cuts on the signal and background samples (can be different)
TCut mycuts = ""; // for example: TCut mycuts = "abs(var1)<0.5 && abs(var2-0.5)<1";
TCut mycutb = ""; // for example: TCut mycutb = "abs(var1)<0.5";
// Tell the factory how to use the training and testing events
factory->PrepareTrainingAndTestTree( mycuts, mycutb,
"nTrain_Signal=0:nTrain_Background=0:SplitMode=Random:NormMode=NumEvents:!V" );
// ---- Book MVA methods
// Fisher discriminant
factory->BookMethod( TMVA::Types::kFisher, "Fisher", "!H:!V:Fisher" );
// Likelihood
factory->BookMethod( TMVA::Types::kLikelihood, "Likelihood",
"!H:!V:TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmoothBkg[1]=10:NSmooth=1:NAvEvtPerBin=50" );
// --- Categorised classifier
TMVA::MethodCategory* mcat = 0;
// The variable sets
TString theCat1Vars = "var1:var2:var3:var4";
TString theCat2Vars = (UseOffsetMethod ? "var1:var2:var3:var4" : "var1:var2:var3");
// Fisher with categories
TMVA::MethodBase* fiCat = factory->BookMethod( TMVA::Types::kCategory, "FisherCat","" );
mcat = dynamic_cast<TMVA::MethodCategory*>(fiCat);
mcat->AddMethod( "abs(eta)<=1.3", theCat1Vars, TMVA::Types::kFisher, "Category_Fisher_1","!H:!V:Fisher" );
mcat->AddMethod( "abs(eta)>1.3", theCat2Vars, TMVA::Types::kFisher, "Category_Fisher_2","!H:!V:Fisher" );
// Likelihood with categories
TMVA::MethodBase* liCat = factory->BookMethod( TMVA::Types::kCategory, "LikelihoodCat","" );
mcat = dynamic_cast<TMVA::MethodCategory*>(liCat);
mcat->AddMethod( "abs(eta)<=1.3",theCat1Vars, TMVA::Types::kLikelihood,
"Category_Likelihood_1","!H:!V:TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmoothBkg[1]=10:NSmooth=1:NAvEvtPerBin=50" );
mcat->AddMethod( "abs(eta)>1.3", theCat2Vars, TMVA::Types::kLikelihood,
"Category_Likelihood_2","!H:!V:TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmoothBkg[1]=10:NSmooth=1:NAvEvtPerBin=50" );
// ---- Now you can tell the factory to train, test, and evaluate the MVAs
// Train MVAs using the set of training events
factory->TrainAllMethods();
//......... part of the code omitted .........
Example 15: TMVAClassificationHwwNtuple
//......... part of the code omitted .........
// Read training and test data
// (it is also possible to use ASCII format as input -> see TMVA Users Guide)
//TString fname = "./tmva_class_example.root";
//TString fname = "/afs/cern.ch/work/s/salee/private/HWWwidth/HWW/GGVvAnalyzer/MkNtuple/Hw1Int8TeV/MkNtuple.root";
//TString fname = "/terranova_0/HWWwidth/HWW/GGVvAnalyzer/MkNtuple/Hw1Int8TeV/MkNtuple.root";
//if (gSystem->AccessPathName( fname )) // file does not exist in local directory
// exit(-1);
//gSystem->Exec("wget http://root.cern.ch/files/tmva_class_example.root");
//TFile *input = TFile::Open( fname );
//TFile *SB_OnPeak = TFile::Open("root://eoscms.cern.ch//eos/cms/store/group/phys_higgs/cmshww/amassiro/HiggsWidth/gg2vv/latinogg2vv_Hw1_IntOnPeak_8TeV.root");
//TTree *SB_OnPeak_Tree = (TTree*)SB_OnPeak->Get("latino");
TChain *S_Chain = new TChain("latino");
TChain *C_Chain = new TChain("latino");
TChain *SCI_Chain = new TChain("latino");
TChain *qqWW_Chain = new TChain("latino");
S_Chain->Add("root://eoscms.cern.ch//eos/cms/store/group/phys_higgs/cmshww/amassiro/HiggsWidth/gg2vv/latinogg2vv_Hw1_SigOnPeak_8TeV.root");
S_Chain->Add("root://eoscms.cern.ch//eos/cms/store/group/phys_higgs/cmshww/amassiro/HiggsWidth/gg2vv/latinogg2vv_Hw1_SigShoulder_8TeV.root");
S_Chain->Add("root://eoscms.cern.ch//eos/cms/store/group/phys_higgs/cmshww/amassiro/HiggsWidth/gg2vv/latinogg2vv_Hw1_SigTail_8TeV.root");
SCI_Chain->Add("root://eoscms.cern.ch//eos/cms/store/group/phys_higgs/cmshww/amassiro/HiggsWidth/gg2vv/latinogg2vv_Hw1_IntOnPeak_8TeV.root");
SCI_Chain->Add("root://eoscms.cern.ch//eos/cms/store/group/phys_higgs/cmshww/amassiro/HiggsWidth/gg2vv/latinogg2vv_Hw1_IntShoulder_8TeV.root");
SCI_Chain->Add("root://eoscms.cern.ch//eos/cms/store/group/phys_higgs/cmshww/amassiro/HiggsWidth/gg2vv/latinogg2vv_Hw1_IntTail_8TeV.root");
C_Chain->Add("root://eoscms.cern.ch//eos/cms/store/group/phys_higgs/cmshww/amassiro/HiggsWidth/gg2vv/latinogg2vv_Hw25_CotHead_8TeV.root");
C_Chain->Add("root://eoscms.cern.ch//eos/cms/store/group/phys_higgs/cmshww/amassiro/HiggsWidth/gg2vv/latinogg2vv_Hw25_CotTail_8TeV.root");
qqWW_Chain->Add("/afs/cern.ch/user/m/maiko/work/public/Tree/tree_skim_wwmin/nominals/latino_000_WWJets2LMad.root");
// --- Register the training and test trees
// You can add an arbitrary number of signal or background trees
factory->AddSignalTree ( S_Chain );
factory->AddBackgroundTree( qqWW_Chain );
factory->AddBackgroundTree( C_Chain );
// Classification training and test data in ROOT tree format with signal and background events being located in the same tree
//factory->SetInputTrees(SCI_Chain, GenOffCut, GenOnCut);
// To give different trees for training and testing, do as follows:
// factory->AddSignalTree( signalTrainingTree, signalTrainWeight, "Training" );
// factory->AddSignalTree( signalTestTree, signalTestWeight, "Test" );
factory->SetWeightExpression ("2.1*puW*baseW*effW*triggW*19.468");
//factory->SetSignalWeightExpression ("2.1*puW*baseW*effW*triggW*19.468");
//factory->SetBackgroundWeightExpression("puW*baseW*effW*triggW*19.468");
//factory->PrepareTrainingAndTestTree( ChanCommOff,
// "nTrain_Signal=0:nTrain_Background=0:SplitMode=Random:NormMode=None:!V" );
//"nTrain_Signal=0:nTrain_Background=0:SplitMode=Random:NormMode=NumEvents:!V";
factory->PrepareTrainingAndTestTree( ChanCommOff0J,
"nTrain_Signal=0:nTrain_Background=0:SplitMode=Random:NormMode=None:!V" );
// ---- Book MVA methods
//
// Cut optimisation
if (Use["Cuts"])
factory->BookMethod( TMVA::Types::kCuts, "Cuts",
"!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp=FSmart" );
if (Use["BDT"]) // Adaptive Boost
factory->BookMethod( TMVA::Types::kBDT, "BDT",
"!H:V:NTrees=850:MaxDepth=3:BoostType=AdaBoost:AdaBoostBeta=0.5:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning" );
//"!H:!V:NTrees=850:MinNodeSize=2.5%:MaxDepth=3:BoostType=AdaBoost:AdaBoostBeta=0.5:UseBaggedBoost:BaggedSampleFraction=0.5:SeparationType=GiniIndex:nCuts=20" );
// For an example of the category classifier usage, see: TMVAClassificationCategory
// -----------------------------------------------------------------------------------------
// ---- Now you can optimize the setting (configuration) of the MVAs using the set of training events
// ---- STILL EXPERIMENTAL and only implemented for BDT's !
// factory->OptimizeAllMethods("SigEffAt001","Scan");
// factory->OptimizeAllMethods("ROCIntegral","FitGA");
// -----------------------------------------------------------------------------------------
// ---- Now you can tell the factory to train, test, and evaluate the MVAs
// Train MVAs using the set of training events
factory->TrainAllMethods();
// ---- Evaluate all MVAs using the set of test events
factory->TestAllMethods();
// ----- Evaluate and compare performance of all configured MVAs
factory->EvaluateAllMethods();
// --------------------------------------------------------------
// Save the output
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVAClassification is done!" << std::endl;
delete factory;
// Launch the GUI for the root macros
//if (!gROOT->IsBatch()) TMVAGui( outfileName );
}