This article collects and organizes typical usage examples of the C++ method tmva::Factory::TestAllMethods. If you are wondering what Factory::TestAllMethods does, how to call it, or what real-world uses look like, the curated examples below should help. You can also explore further usage examples of the containing class, tmva::Factory.
The following presents 15 code examples of Factory::TestAllMethods, ordered by popularity by default.
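Before the individual examples, here is a minimal sketch of where TestAllMethods fits in the standard TMVA workflow, using the older pre-DataLoader Factory API that all examples on this page use. File, tree, and variable names below are placeholders, not taken from any example; the call sequence (train, then test, then evaluate) is the part that matters.
void minimalWorkflow()
{
TFile* out = TFile::Open("tmva_out.root", "RECREATE"); // hypothetical output file
TMVA::Factory* factory = new TMVA::Factory("MinimalJob", out, "!V:!Silent");
TFile* in = TFile::Open("input.root"); // hypothetical input file
factory->AddSignalTree((TTree*)in->Get("sig"), 1.0); // hypothetical tree names
factory->AddBackgroundTree((TTree*)in->Get("bkg"), 1.0);
factory->AddVariable("x", 'F'); // hypothetical training variable
factory->PrepareTrainingAndTestTree("", "SplitMode=Random:!V");
factory->BookMethod(TMVA::Types::kBDT, "BDT", "");
factory->TrainAllMethods(); // fit every booked method on the training sample
factory->TestAllMethods(); // apply every trained method to the test sample
factory->EvaluateAllMethods(); // compare performance and fill the output file
out->Close();
delete factory;
}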
Example 1: test_train
void test_train(TString signalName = "WW",
TString bkgName = "DY")
{
TFile *outFile = new TFile("myAnalysisFile.root","RECREATE");
TMVA::Factory *factory = new TMVA::Factory(signalName, outFile,"");
TString directory = "../rootFiles/SF/MediumIDTighterIP/";
//signalName = directory + signalName;
//defining WW signal
TFile *MySignalFile = new TFile("../rootFiles/SF/MediumIDTighterIP/WW.root","READ");
TTree* sigTree = (TTree*)MySignalFile->Get("nt");
factory->AddSignalTree(sigTree,1);
//defining DY background
TFile *MyBkgFile = new TFile("../rootFiles/SF/MediumIDTighterIP/DY.root","READ");
TTree* bkgTree = (TTree*)MyBkgFile->Get("nt");
factory->AddBackgroundTree(bkgTree,1);
factory->SetWeightExpression("baseW");
//************************************ FACTORY
factory->AddVariable("fullpmet");
factory->AddVariable("trkpmet");
factory->AddVariable("ratioMet");
factory->AddVariable("ptll");
factory->AddVariable("mth");
factory->AddVariable("jetpt1");
factory->AddVariable("ptWW");
factory->AddVariable("dphilljet");
factory->AddVariable("dphillmet");
factory->AddVariable("dphijet1met");
factory->AddVariable("nvtx");
factory->PrepareTrainingAndTestTree("",500,500,500,500);
cout<<"I've prepared trees"<<endl;
//factory->BookMethod(TMVA::Types::kFisher, "Fisher","");
factory->BookMethod(TMVA::Types::kBDT, "BDT","");
cout<<"I've booked method"<<endl;
factory->TrainAllMethods();
factory->TestAllMethods();
cout<<"I've tested all methods"<<endl;
factory->EvaluateAllMethods();
cout<<"I've evaluated all methods"<<endl;
//close the output file to flush the TMVA results, then clean up
outFile->Close();
delete factory;
}
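Once Example 1 has run, the trained BDT would normally be applied with TMVA::Reader. The sketch below assumes TMVA's default weight-file naming, weights/<jobName>_<methodTitle>.weights.xml, which for the job name "WW" used above would be weights/WW_BDT.weights.xml; the variable names and their order must match the training exactly.
void apply_test_train()
{
TMVA::Reader* reader = new TMVA::Reader("!Color:!Silent");
Float_t fullpmet, trkpmet, ratioMet, ptll, mth, jetpt1;
Float_t ptWW, dphilljet, dphillmet, dphijet1met, nvtx;
reader->AddVariable("fullpmet", &fullpmet);
reader->AddVariable("trkpmet", &trkpmet);
reader->AddVariable("ratioMet", &ratioMet);
reader->AddVariable("ptll", &ptll);
reader->AddVariable("mth", &mth);
reader->AddVariable("jetpt1", &jetpt1);
reader->AddVariable("ptWW", &ptWW);
reader->AddVariable("dphilljet", &dphilljet);
reader->AddVariable("dphillmet", &dphillmet);
reader->AddVariable("dphijet1met", &dphijet1met);
reader->AddVariable("nvtx", &nvtx);
reader->BookMVA("BDT", "weights/WW_BDT.weights.xml"); // assumed default path
// fill the Float_t variables from an event, then:
Double_t mva = reader->EvaluateMVA("BDT");
cout << "BDT response: " << mva << endl;
}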
Example 2: trainBDT
void trainBDT(void)
{
// Open input file and get tree
TFile *infile = new TFile("l3bdt.root");
TTree *l3tree = (TTree*)infile->Get("l3tree");
if(l3tree == NULL){
cout << "Couldn't open \"l3bdt.root\"!" << endl;
return;
}
// Open output root file (for TMVA)
TFile *outfile = new TFile("l3BDT_out.root", "RECREATE");
TMVA::Factory *fac = new TMVA::Factory("L3",outfile,"");
// Specify input tree that contains both signal and background
TCut signalCut("is_good==1");
TCut backgroundCut("is_good==0");
fac->SetInputTrees(l3tree, signalCut, backgroundCut);
// Add variables
fac->AddVariable("Nstart_counter", 'I');
fac->AddVariable("Ntof", 'I');
fac->AddVariable("Nbcal_points", 'I');
fac->AddVariable("Nbcal_clusters", 'I');
fac->AddVariable("Ebcal_points", 'F');
fac->AddVariable("Ebcal_clusters", 'F');
fac->AddVariable("Nfcal_clusters", 'I');
fac->AddVariable("Efcal_clusters", 'F');
fac->AddVariable("Ntrack_candidates", 'I');
fac->AddVariable("Ptot_candidates", 'F');
TCut preSelectCut("");
fac->PrepareTrainingAndTestTree(preSelectCut,"");
fac->BookMethod(TMVA::Types::kBDT, "BDT", "");
fac->TrainAllMethods();
fac->TestAllMethods();
fac->EvaluateAllMethods();
delete fac;
outfile->Close();
delete outfile;
}
Example 3: trainBJetIdMVA
void trainBJetIdMVA(TString SELECTION)
{
// the training is done using a dedicated tree format
TFile *src = TFile::Open("bjetId_"+SELECTION+".root");
TTree *tr = (TTree*)src->Get("jets");
TFile *outf = new TFile("bjetId_"+SELECTION+"_MVA.root","RECREATE");
TCut signalCut = "abs(partonId) == 5";
TCut bkgCut = "abs(partonId) != 5";
TCut preselectionCut = "btagIdx<4 && etaIdx<4 && etaIdx>-1 && ptIdx<4";
int N = 100000;
cout<<"NUMBER OF TRAINING EVENTS = "<<N<<endl;
TMVA::Factory* factory = new TMVA::Factory("factory_"+SELECTION+"_",outf,"!V:!Silent:Color:DrawProgressBar:Transformations=I;G:AnalysisType=Classification" );
factory->SetInputTrees(tr,signalCut,bkgCut);
factory->AddVariable("btagIdx",'I');
factory->AddVariable("etaIdx" ,'I');
factory->AddVariable("btag" ,'F');
factory->AddVariable("eta" ,'F');
char name[1000];
sprintf(name,"nTrain_Signal=%d:nTrain_Background=%d:nTest_Signal=%d:nTest_Background=%d",N,N,N,N);
factory->PrepareTrainingAndTestTree(preselectionCut,name);
// specify the training methods
factory->BookMethod(TMVA::Types::kLikelihood,"Likelihood");
//factory->BookMethod(TMVA::Types::kBDT,"BDT_DEF");
//factory->BookMethod(TMVA::Types::kBDT,"BDT_ADA","NTrees=600:AdaBoostBeta=0.1:nCuts=35");
//factory->BookMethod(TMVA::Types::kBDT,"BDT_GRAD1","NTrees=600:nCuts=40:BoostType=Grad:Shrinkage=0.5");
factory->BookMethod(TMVA::Types::kBDT,"BDT_GRAD2","NTrees=600:nCuts=25:BoostType=Grad:Shrinkage=0.2");
factory->TrainAllMethods();
factory->TestAllMethods();
factory->EvaluateAllMethods();
//close the output file to flush the TMVA results
outf->Close();
delete factory;
}
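A side note on Example 3: the char name[1000] / sprintf pattern works, but the same option string can be built with ROOT's TString::Format, avoiding the fixed-size buffer. A drop-in sketch using the names from the example:
// Equivalent to the sprintf(name, ...) call in Example 3, without the char buffer.
TString opts = TString::Format(
"nTrain_Signal=%d:nTrain_Background=%d:nTest_Signal=%d:nTest_Background=%d",
N, N, N, N);
factory->PrepareTrainingAndTestTree(preselectionCut, opts);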
Example 4: TMVAtest
void TMVAtest(){
//gSystem->Load("../lib/slc5_amd64_gcc462/libTAMUWWMEPATNtuple.so");
gSystem->Load("libPhysics");
//gSystem->Load("EvtTreeForAlexx_h.so");
gSystem->Load("libTMVA.1");
gSystem->Load("AutoDict_vector_TLorentzVector__cxx.so");
TMVA::Tools::Instance();
TFile* outputFile = TFile::Open("TMVA1.root", "RECREATE");
TMVA::Factory *factory = new TMVA::Factory( "TMVAClassification",outputFile,"V=true:Color:DrawProgressBar");// ":Transformations=I;D;P;G,D" );
TFile* signal = TFile::Open("/uscms_data/d2/aperloff/Spring12ME7TeV/MEResults/microNtuples_oldStructure/microWW_EPDv01.root");
TFile* bkg = TFile::Open("/uscms_data/d2/aperloff/Spring12ME7TeV/MEResults/microNtuples_oldStructure/microWJets_EPDv01.root");
TTree* stree = (TTree*)signal->Get("METree");
TTree* btree = (TTree*)bkg->Get("METree");
factory->AddSignalTree(stree,1.0);
factory->AddBackgroundTree(btree,1.0);
factory->SetSignalWeightExpression("1.0");
factory->SetBackgroundWeightExpression("1.0");
factory->AddVariable("tEventProb[0]");
factory->AddVariable("tEventProb[1]");
factory->AddVariable("tEventProb[2]");
//factory->AddVariable("tEventProb0 := tEventProb[0]",'F');
//factory->AddVariable("tEventProb1 := tEventProb[1]",'F');
//factory->AddVariable("tEventProb2 := tEventProb[2]",'F');
TCut test("Entry$>-2 && jLV[1].Pt()>30");
TCut mycuts (test);
factory->PrepareTrainingAndTestTree(mycuts,mycuts,"nTrain_Signal=0:nTrain_Background=0:nTest_Signal=0:nTest_Background=0:SplitMode=Random:NormMode=None:V=true:VerboseLevel=DEBUG");
factory->BookMethod( TMVA::Types::kBDT, "BDT","!H:!V:NTrees=400:nEventsMin=400:MaxDepth=3:BoostType=AdaBoost:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning" );
factory->TrainAllMethods();
factory->TestAllMethods();
factory->EvaluateAllMethods();
outputFile->Close();
}
Example 5: main
//......... part of the code omitted here .........
}
// Apply additional cuts on the signal and background samples (can be different)
// // If no numbers of events are given, half of the events in the tree are used
// for training, and the other half for testing:
// factory->PrepareTrainingAndTestTree( mycut, "SplitMode=random:!V" );
// ---- Book MVA methods
//
// please lookup the various method configuration options in the corresponding cxx files, eg:
// src/MethodCuts.cxx, etc, or here: http://tmva.sourceforge.net/optionRef.html
// it is possible to preset ranges in the option string in which the cut optimisation should be done:
// "...:CutRangeMin[2]=-1:CutRangeMax[2]=1"...", where [2] is the third input variable
// PDE - RS method
if (Use["PDERS"])
factory->BookMethod( TMVA::Types::kPDERS, "PDERS", "!H:!V:NormTree=T:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=40:NEventsMax=60:VarTransform=None" );
// And the options strings for the MinMax and RMS methods, respectively:
// "!H:!V:VolumeRangeMode=MinMax:DeltaFrac=0.2:KernelEstimator=Gauss:GaussSigma=0.3" );
// "!H:!V:VolumeRangeMode=RMS:DeltaFrac=3:KernelEstimator=Gauss:GaussSigma=0.3" );
if (Use["PDEFoam"])
factory->BookMethod( TMVA::Types::kPDEFoam, "PDEFoam", "!H:!V:MultiTargetRegression=F:TargetSelection=Mpv:TailCut=0.001:VolFrac=0.3:nActiveCells=500:nSampl=2000:nBin=5:Compress=T:Kernel=None:Nmin=10:VarTransform=None" );
// K-Nearest Neighbour classifier (KNN)
if (Use["KNN"])
factory->BookMethod( TMVA::Types::kKNN, "KNN", "nkNN=20:ScaleFrac=0.8:SigmaFact=1.0:Kernel=Gaus:UseKernel=F:UseWeight=T:!Trim" );
// Linear discriminant
if (Use["LD"]) factory->BookMethod( TMVA::Types::kLD, "LD","!H:!V:VarTransform=G,U,D" );
// Function discrimination analysis (FDA) -- test of various fitters - the recommended one is Minuit (or GA or SA)
if (Use["FDA_MC"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_MC",
"!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=MC:SampleSize=100000:Sigma=0.1:VarTransform=D" );
if (Use["FDA_GA"]) // can also use Simulated Annealing (SA) algorithm (see Cuts_SA options) .. the formula of this example is good for parabolas
factory->BookMethod( TMVA::Types::kFDA, "FDA_GA",
"!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=GA:PopSize=100:Cycles=3:Steps=30:Trim=True:SaveBestGen=1:VarTransform=Norm" );
if (Use["FDA_MT"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_MT",
"!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100);(-10,10):FitMethod=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=2:UseImprove:UseMinos:SetBatch" );
if (Use["FDA_GAMT"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_GAMT",
"!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=GA:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:Cycles=1:PopSize=5:Steps=5:Trim" );
// Neural network (MLP)
if (Use["MLP"])
// factory->BookMethod( TMVA::Types::kMLP, "MLP", "!H:!V:VarTransform=Norm:NeuronType=tanh:NCycles=20000:HiddenLayers=N+20:TestRate=6:TrainingMethod=BFGS:Sampling=0.3:SamplingEpoch=0.8:ConvergenceImprove=1e-6:ConvergenceTests=15:!UseRegulator" );
// factory->BookMethod( TMVA::Types::kMLP, "MLP", "!H:!V:VarTransform=Norm:NeuronType=tanh:NCycles=200:HiddenLayers=N+20:TestRate=6:TrainingMethod=BFGS:Sampling=0.3:SamplingEpoch=0.8:ConvergenceImprove=1e-6:ConvergenceTests=15:!UseRegulator" );
// factory->BookMethod( TMVA::Types::kMLP, "MLP", "!H:!V:VarTransform=Norm:NeuronType=tanh:NCycles=400:HiddenLayers=N+10:TestRate=6:TrainingMethod=BFGS:Sampling=0.3:SamplingEpoch=0.8:ConvergenceImprove=1e-6:ConvergenceTests=15" );
// factory->BookMethod( TMVA::Types::kMLP, "MLP", "!H:!V:VarTransform=N:NeuronType=tanh:NCycles=200:HiddenLayers=N+10:TestRate=6:TrainingMethod=BFGS:Sampling=0.3:SamplingEpoch=0.8:ConvergenceImprove=1e-6:ConvergenceTests=15" );
// factory->BookMethod( TMVA::Types::kMLP, "MLP", "!H:!V:VarTransform=G,N:NeuronType=tanh:NCycles=200:HiddenLayers=N+5:TestRate=6:TrainingMethod=BFGS:Sampling=0.3:SamplingEpoch=0.8:ConvergenceImprove=1e-6:ConvergenceTests=15" );
factory->BookMethod( TMVA::Types::kMLP, "MLP", "!H:!V:NeuronType=tanh:NCycles=250:HiddenLayers=N+5:TrainingMethod=BFGS:Sampling=0.3:SamplingEpoch=0.8:ConvergenceImprove=1e-6:ConvergenceTests=15:TestRate=10");
// Support Vector Machine
if (Use["SVM"])
factory->BookMethod( TMVA::Types::kSVM, "SVM", "Gamma=0.25:Tol=0.001:VarTransform=N" );
// factory->BookMethod( TMVA::Types::kSVM, "SVM", "Gamma=0.25:Tol=0.001:VarTransform=N,G" );
// Boosted Decision Trees
if (Use["BDT"])
// factory->BookMethod( TMVA::Types::kBDT, "BDT","!H:!V:NTrees=100:nEventsMin=5:BoostType=AdaBoostR2:SeparationType=RegressionVariance:nCuts=20:PruneMethod=CostComplexity:PruneStrength=30" );
// factory->BookMethod( TMVA::Types::kBDT, "BDT","!H:!V:NTrees=200:nEventsMin=5:BoostType=AdaBoostR2:SeparationType=RegressionVariance:PruneMethod=CostComplexity:PruneStrength=30" );
// factory->BookMethod( TMVA::Types::kBDT, "BDT","!H:!V:NTrees=300:nEventsMin=5:BoostType=AdaBoostR2:SeparationType=RegressionVariance:PruneMethod=CostComplexity:PruneStrength=30" );
// factory->BookMethod( TMVA::Types::kBDT, "BDT","!H:!V:NTrees=100:nEventsMin=5:BoostType=AdaBoostR2:SeparationType=RegressionVariance:PruneMethod=CostComplexity:PruneStrength=30" );
factory->BookMethod( TMVA::Types::kBDT, "BDT","!H:!V:NTrees=100:nEventsMin=20:BoostType=AdaBoostR2:SeparationType=RegressionVariance:PruneMethod=CostComplexity:PruneStrength=30");
if (Use["BDTG"])
// factory->BookMethod( TMVA::Types::kBDT, "BDTG","!H:!V:NTrees=2000::BoostType=Grad:Shrinkage=0.1:UseBaggedGrad:GradBaggingFraction=0.5:nCuts=20:MaxDepth=3:NNodesMax=15" );
factory->BookMethod( TMVA::Types::kBDT, "BDTG","!H:!V:NTrees=1000::BoostType=Grad:Shrinkage=0.1:UseBaggedGrad:GradBaggingFraction=0.5:MaxDepth=5:NNodesMax=25:PruneMethod=CostComplexity:PruneStrength=30");
// --------------------------------------------------------------------------------------------------
// ---- Now you can tell the factory to train, test, and evaluate the MVAs
// Train MVAs using the set of training events
factory->TrainAllMethods();
// ---- Evaluate all MVAs using the set of test events
factory->TestAllMethods();
// ----- Evaluate and compare performance of all configured MVAs
factory->EvaluateAllMethods();
// --------------------------------------------------------------
// Save the output
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVARegression is done!" << std::endl;
delete factory;
// Launch the GUI for the root macros
// if (!gROOT->IsBatch()) TMVARegGui( outputFileName.c_str() );
return 0;
}
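The setup elided at the top of Example 5 is not shown, but a TMVA regression job of this kind is normally fed with AddVariable, at least one AddTarget, and AddRegressionTree before PrepareTrainingAndTestTree. A hedged sketch with placeholder names (not the example's actual variables):
// Hypothetical setup of the kind omitted at the top of Example 5.
factory->AddVariable("var1", 'F');
factory->AddVariable("var2", 'F');
factory->AddTarget("target"); // regression needs at least one target
factory->AddRegressionTree(regTree, 1.0); // regTree: hypothetical TTree*, global weight 1.0
factory->PrepareTrainingAndTestTree("", "nTrain_Regression=0:nTest_Regression=0:SplitMode=Random:!V");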
Example 6: BJetRegression
//......... part of the code omitted here .........
// Apply additional cuts on the signal and background samples (can be different)
TCut mycut = ""; // for example: TCut mycut = "abs(var1)<0.5 && abs(var2-0.5)<1";
// tell the factory to use all remaining events in the trees after training for testing:
factory->PrepareTrainingAndTestTree( mycut,
"nTrain_Regression=0:nTest_Regression=0:SplitMode=Random:NormMode=NumEvents:!V" );
// If no numbers of events are given, half of the events in the tree are used
// for training, and the other half for testing:
// factory->PrepareTrainingAndTestTree( mycut, "SplitMode=random:!V" );
// ---- Book MVA methods
//
// please lookup the various method configuration options in the corresponding cxx files, eg:
// src/MethodCuts.cxx, etc, or here: http://tmva.sourceforge.net/optionRef.html
// it is possible to preset ranges in the option string in which the cut optimisation should be done:
// "...:CutRangeMin[2]=-1:CutRangeMax[2]=1"...", where [2] is the third input variable
// PDE - RS method
if (Use["PDERS"])
factory->BookMethod( TMVA::Types::kPDERS, "PDERS",
"!H:!V:NormTree=T:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=40:NEventsMax=60:VarTransform=None" );
// And the options strings for the MinMax and RMS methods, respectively:
// "!H:!V:VolumeRangeMode=MinMax:DeltaFrac=0.2:KernelEstimator=Gauss:GaussSigma=0.3" );
// "!H:!V:VolumeRangeMode=RMS:DeltaFrac=3:KernelEstimator=Gauss:GaussSigma=0.3" );
if (Use["PDEFoam"])
factory->BookMethod( TMVA::Types::kPDEFoam, "PDEFoam",
"!H:!V:MultiTargetRegression=F:TargetSelection=Mpv:TailCut=0.001:VolFrac=0.0666:nActiveCells=500:nSampl=2000:nBin=5:Compress=T:Kernel=None:Nmin=10:VarTransform=None" );
// K-Nearest Neighbour classifier (KNN)
if (Use["KNN"])
factory->BookMethod( TMVA::Types::kKNN, "KNN",
"nkNN=20:ScaleFrac=0.8:SigmaFact=1.0:Kernel=Gaus:UseKernel=F:UseWeight=T:!Trim" );
// Linear discriminant
if (Use["LD"])
factory->BookMethod( TMVA::Types::kLD, "LD",
"!H:!V:VarTransform=None" );
// Function discrimination analysis (FDA) -- test of various fitters - the recommended one is Minuit (or GA or SA)
if (Use["FDA_MC"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_MC",
"!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=MC:SampleSize=100000:Sigma=0.1:VarTransform=D" );
if (Use["FDA_GA"]) // can also use Simulated Annealing (SA) algorithm (see Cuts_SA options) .. the formula of this example is good for parabolas
factory->BookMethod( TMVA::Types::kFDA, "FDA_GA",
"!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=GA:PopSize=100:Cycles=3:Steps=30:Trim=True:SaveBestGen=1:VarTransform=Norm" );
if (Use["FDA_MT"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_MT",
"!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100);(-10,10):FitMethod=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=2:UseImprove:UseMinos:SetBatch" );
if (Use["FDA_GAMT"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_GAMT",
"!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=GA:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:Cycles=1:PopSize=5:Steps=5:Trim" );
// Neural network (MLP)
if (Use["MLP"])
factory->BookMethod( TMVA::Types::kMLP, "MLP", "!H:!V:VarTransform=Norm:NeuronType=tanh:NCycles=20000:HiddenLayers=N+20:TestRate=6:TrainingMethod=BFGS:Sampling=0.3:SamplingEpoch=0.8:ConvergenceImprove=1e-6:ConvergenceTests=15:!UseRegulator" );
// Support Vector Machine
if (Use["SVM"])
factory->BookMethod( TMVA::Types::kSVM, "SVM", "Gamma=0.25:Tol=0.001:VarTransform=Norm" );
// Boosted Decision Trees
if (Use["BDT"])
factory->BookMethod( TMVA::Types::kBDT, "BDT",
"!H:!V:NTrees=100:MinNodeSize=1.0%:BoostType=AdaBoostR2:SeparationType=RegressionVariance:nCuts=20:PruneMethod=CostComplexity:PruneStrength=30" );
if (Use["BDTG"])
factory->BookMethod( TMVA::Types::kBDT, "BDTG",
"!H:!V:NTrees=2000::BoostType=Grad:Shrinkage=0.1:UseBaggedBoost:BaggedSampleFraction=0.7:nCuts=200:MaxDepth=3:NNodesMax=15" );
// --------------------------------------------------------------------------------------------------
// ---- Now you can tell the factory to train, test, and evaluate the MVAs
// Train MVAs using the set of training events
factory->TrainAllMethods();
// ---- Evaluate all MVAs using the set of test events
factory->TestAllMethods();
// ----- Evaluate and compare performance of all configured MVAs
factory->EvaluateAllMethods();
// --------------------------------------------------------------
// Save the output
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVARegression is done!" << std::endl;
delete factory;
// Launch the GUI for the root macros
if (!gROOT->IsBatch()) TMVARegGui( outfileName );
}
Example 7: TMVAClassification
//......... part of the code omitted here .........
if (Use["FDA_MT"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_MT",
"H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=2:UseImprove:UseMinos:SetBatch" );
if (Use["FDA_GAMT"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_GAMT",
"H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=GA:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:Cycles=1:PopSize=5:Steps=5:Trim" );
if (Use["FDA_MCMT"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_MCMT",
"H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=MC:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:SampleSize=20" );
// TMVA ANN: MLP (recommended ANN) -- all ANNs in TMVA are Multilayer Perceptrons
if (Use["MLP"])
factory->BookMethod( TMVA::Types::kMLP, "MLP", "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5:!UseRegulator" );
if (Use["MLPBFGS"])
factory->BookMethod( TMVA::Types::kMLP, "MLPBFGS", "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5:TrainingMethod=BFGS:!UseRegulator" );
if (Use["MLPBNN"])
factory->BookMethod( TMVA::Types::kMLP, "MLPBNN", "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5:TrainingMethod=BFGS:UseRegulator" ); // BFGS training with bayesian regulators
// CF(Clermont-Ferrand)ANN
if (Use["CFMlpANN"])
factory->BookMethod( TMVA::Types::kCFMlpANN, "CFMlpANN", "!H:!V:NCycles=2000:HiddenLayers=N+1,N" ); // n_cycles:#nodes:#nodes:...
// Tmlp(Root)ANN
if (Use["TMlpANN"])
factory->BookMethod( TMVA::Types::kTMlpANN, "TMlpANN", "!H:!V:NCycles=200:HiddenLayers=N+1,N:LearningMethod=BFGS:ValidationFraction=0.3" ); // n_cycles:#nodes:#nodes:...
// Support Vector Machine
if (Use["SVM"])
factory->BookMethod( TMVA::Types::kSVM, "SVM", "Gamma=0.25:Tol=0.001:VarTransform=Norm" );
// Boosted Decision Trees
if (Use["BDTG"]) // Gradient Boost
factory->BookMethod( TMVA::Types::kBDT, "BDTG",
"!H:!V:NTrees=1000:MinNodeSize=2.5%:BoostType=Grad:Shrinkage=0.10:UseBaggedBoost:BaggedSampleFraction=0.5:nCuts=20:MaxDepth=2" );
if (Use["BDT"]) // Adaptive Boost
factory->BookMethod( TMVA::Types::kBDT, "BDT",
"!H:!V:NTrees=850:MinNodeSize=2.5%:MaxDepth=3:BoostType=AdaBoost:AdaBoostBeta=0.5:UseBaggedBoost:BaggedSampleFraction=0.5:SeparationType=GiniIndex:nCuts=20" );
if (Use["BDTB"]) // Bagging
factory->BookMethod( TMVA::Types::kBDT, "BDTB",
"!H:!V:NTrees=400:BoostType=Bagging:SeparationType=GiniIndex:nCuts=20" );
if (Use["BDTD"]) // Decorrelation + Adaptive Boost
factory->BookMethod( TMVA::Types::kBDT, "BDTD",
"!H:!V:NTrees=400:MinNodeSize=5%:MaxDepth=3:BoostType=AdaBoost:SeparationType=GiniIndex:nCuts=20:VarTransform=Decorrelate" );
if (Use["BDTF"]) // Allow Using Fisher discriminant in node splitting for (strong) linearly correlated variables
factory->BookMethod( TMVA::Types::kBDT, "BDTMitFisher",
"!H:!V:NTrees=50:MinNodeSize=2.5%:UseFisherCuts:MaxDepth=3:BoostType=AdaBoost:AdaBoostBeta=0.5:SeparationType=GiniIndex:nCuts=20" );
// RuleFit -- TMVA implementation of Friedman's method
if (Use["RuleFit"])
factory->BookMethod( TMVA::Types::kRuleFit, "RuleFit",
"H:!V:RuleFitModule=RFTMVA:Model=ModRuleLinear:MinImp=0.001:RuleMinDist=0.001:NTrees=20:fEventsMin=0.01:fEventsMax=0.5:GDTau=-1.0:GDTauPrec=0.01:GDStep=0.01:GDNSteps=10000:GDErrScale=1.02" );
// For an example of the category classifier usage, see: TMVAClassificationCategory
// --------------------------------------------------------------------------------------------------
// ---- Now you can optimize the setting (configuration) of the MVAs using the set of training events
// ---- STILL EXPERIMENTAL and only implemented for BDT's !
// factory->OptimizeAllMethods("SigEffAt001","Scan");
// factory->OptimizeAllMethods("ROCIntegral","FitGA");
// --------------------------------------------------------------------------------------------------
// ---- Now you can tell the factory to train, test, and evaluate the MVAs
// Train MVAs using the set of training events
factory->TrainAllMethods();
// ---- Evaluate all MVAs using the set of test events
factory->TestAllMethods();
// ----- Evaluate and compare performance of all configured MVAs
factory->EvaluateAllMethods();
// --------------------------------------------------------------
// Save the output
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVAClassification is done!" << std::endl;
delete factory;
// Launch the GUI for the root macros
//if (!gROOT->IsBatch())
// gROOT->ProcessLine(TString::Format("TMVAGui(\"%s\")", outfileName.Data()));
// efficiencies( TString fin = "TMVA.root", Int_t type = 2, Bool_t useTMVAStyle = kTRUE );
}
Example 8: TMVAClassificationCategory
//......... part of the code omitted here .........
// Define the input variables that shall be used for the MVA training
// note that you may also use variable expressions, such as: "3*var1/var2*abs(var3)"
// [all types of expressions that can also be parsed by TTree::Draw( "expression" )]
factory->AddVariable( "var1", 'F' );
factory->AddVariable( "var2", 'F' );
factory->AddVariable( "var3", 'F' );
factory->AddVariable( "var4", 'F' );
// You can add so-called "Spectator variables", which are not used in the MVA training,
// but will appear in the final "TestTree" produced by TMVA. This TestTree will contain the
// input variables, the response values of all trained MVAs, and the spectator variables
factory->AddSpectator( "eta" );
// load the signal and background event samples from ROOT trees
TFile *input(0);
TString fname( "" );
if (UseOffsetMethod) fname = "../execs/data/toy_sigbkg_categ_offset.root";
else fname = "../execs/data/toy_sigbkg_categ_varoff.root";
if (!gSystem->AccessPathName( fname )) {
// first we try to find tmva_example.root in the local directory
std::cout << "--- TMVAClassificationCategory: Accessing " << fname << std::endl;
input = TFile::Open( fname );
}
if (!input) {
std::cout << "ERROR: could not open data file: " << fname << std::endl;
exit(1);
}
TTree *signal = (TTree*)input->Get("TreeS");
TTree *background = (TTree*)input->Get("TreeB");
/// global event weights per tree (see below for setting event-wise weights)
Double_t signalWeight = 1.0;
Double_t backgroundWeight = 1.0;
/// you can add an arbitrary number of signal or background trees
factory->AddSignalTree ( signal, signalWeight );
factory->AddBackgroundTree( background, backgroundWeight );
// Apply additional cuts on the signal and background samples (can be different)
TCut mycuts = ""; // for example: TCut mycuts = "abs(var1)<0.5 && abs(var2-0.5)<1";
TCut mycutb = ""; // for example: TCut mycutb = "abs(var1)<0.5";
// tell the factory to use all remaining events in the trees after training for testing:
factory->PrepareTrainingAndTestTree( mycuts, mycutb,
"nTrain_Signal=0:nTrain_Background=0:SplitMode=Random:NormMode=NumEvents:!V" );
// Fisher discriminant
factory->BookMethod( TMVA::Types::kFisher, "Fisher", "!H:!V:Fisher" );
// Likelihood
factory->BookMethod( TMVA::Types::kLikelihood, "Likelihood",
"!H:!V:TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmoothBkg[1]=10:NSmooth=1:NAvEvtPerBin=50" );
// Categorised classifier
TMVA::MethodCategory* mcat = 0;
// the variable sets
TString theCat1Vars = "var1:var2:var3:var4";
TString theCat2Vars = (UseOffsetMethod ? "var1:var2:var3:var4" : "var1:var2:var3");
// the Fisher
TMVA::MethodBase* fiCat = factory->BookMethod( TMVA::Types::kCategory, "FisherCat","" );
mcat = dynamic_cast<TMVA::MethodCategory*>(fiCat);
mcat->AddMethod("abs(eta)<=1.3",theCat1Vars, TMVA::Types::kFisher,"Category_Fisher_1","!H:!V:Fisher");
mcat->AddMethod("abs(eta)>1.3", theCat2Vars, TMVA::Types::kFisher,"Category_Fisher_2","!H:!V:Fisher");
// the Likelihood
TMVA::MethodBase* liCat = factory->BookMethod( TMVA::Types::kCategory, "LikelihoodCat","" );
mcat = dynamic_cast<TMVA::MethodCategory*>(liCat);
mcat->AddMethod("abs(eta)<=1.3",theCat1Vars, TMVA::Types::kLikelihood,"Category_Likelihood_1","!H:!V:TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmoothBkg[1]=10:NSmooth=1:NAvEvtPerBin=50");
mcat->AddMethod("abs(eta)>1.3", theCat2Vars, TMVA::Types::kLikelihood,"Category_Likelihood_2","!H:!V:TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmoothBkg[1]=10:NSmooth=1:NAvEvtPerBin=50");
// ---- Now you can tell the factory to train, test, and evaluate the MVAs
// Train MVAs using the set of training events
factory->TrainAllMethods();
// ---- Evaluate all MVAs using the set of test events
factory->TestAllMethods();
// ----- Evaluate and compare performance of all configured MVAs
factory->EvaluateAllMethods();
// --------------------------------------------------------------
// Save the output
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVAClassificationCategory is done!" << std::endl;
// Clean up
delete factory;
// Launch the GUI for the root macros
if (!gROOT->IsBatch()) TMVAGui( outfileName );
}
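One practical detail of Example 8: the category cuts are expressed in terms of the spectator eta, so when the trained category method is later applied with TMVA::Reader, the spectator has to be registered on the Reader as well, not just the four training variables. A minimal sketch (the weight-file path follows TMVA's default naming and is an assumption here):
TMVA::Reader* reader = new TMVA::Reader("!Color:!Silent");
Float_t var1, var2, var3, var4, eta;
reader->AddVariable("var1", &var1);
reader->AddVariable("var2", &var2);
reader->AddVariable("var3", &var3);
reader->AddVariable("var4", &var4);
reader->AddSpectator("eta", &eta); // spectators used in category cuts are required here
reader->BookMVA("FisherCat", "weights/TMVAClassificationCategory_FisherCat.weights.xml"); // assumed path
Double_t response = reader->EvaluateMVA("FisherCat");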
Example 9: main
//......... part of the code omitted here .........
// for (int iSample=0; iSample<numberOfSamples; iSample++){
// int numEnt = treeJetLepVect[iSample]->GetEntries(Cut.c_str());
// std::cout << " Sample = " << nameSample[iSample] << " ~ " << nameHumanReadable[iSample] << " --> " << numEnt << std::endl;
// if (numEnt != 0) {
// bool isSig = false;
// for (std::vector<std::string>::const_iterator itSig = SignalName.begin(); itSig != SignalName.end(); itSig++){
// if (nameHumanReadable[iSample] == *itSig) isSig = true;
// }
// if (isSig) {
// factory->AddTree( treeJetLepVect[iSample], TString("Signal"), Normalization[iSample] ); //---> ci deve essere uno chiamato Signal!
// }
// else {
// factory->AddTree( treeJetLepVect[iSample], TString(nameHumanReadable[iSample]), Normalization[iSample] );
// }
// }
// }
//
// for (int iSample=0; iSample<numberOfSamples; iSample++){
// int numEnt = treeJetLepVect[iSample]->GetEntries(Cut.c_str());
// std::cout << " Sample = " << nameSample[iSample] << " ~ " << nameHumanReadable[iSample] << " --> " << numEnt << std::endl;
// if (numEnt != 0) {
// bool isSig = false;
// for (std::vector<std::string>::const_iterator itSig = SignalName.begin(); itSig != SignalName.end(); itSig++){
// if (nameHumanReadable[iSample] == *itSig) isSig = true;
// }
// if (isSig) {
// // factory->AddTree( treeJetLepVect[iSample], TString("Signal"), Normalization[iSample] ); //---> there must be one tree called "Signal"!
// }
// else {
// factory->AddTree( treeJetLepVect[iSample], TString(nameHumanReadable[iSample]), Normalization[iSample] );
// }
// }
// }
std::cerr << " AAAAAAAAAAAAAAAAAAAAAAAAAAAAA " << std::endl;
TCut mycuts = Cut.c_str();
// factory->SetWeightExpression( nameWeight.c_str() );
// factory->SetBackgroundWeightExpression( nameWeight.c_str() );
// factory->SetSignalWeightExpression ( nameWeight.c_str() );
std::cerr << " BBBBBBBBBBBBBBBBBBBBBBBBBBBBB " << std::endl;
factory->PrepareTrainingAndTestTree( mycuts ,"SplitMode=Random:NormMode=None:!V");
// factory->PrepareTrainingAndTestTree( "" ,"SplitMode=Random:NormMode=None:!V");
std::cerr << " CCCCCCCCCCCCCCCCCCCCCCCCCCCCC " << std::endl;
// gradient boosted decision trees
// if (Use["BDTG"]) factory->BookMethod( TMVA::Types::kBDT, "BDTG", "!H:!V:NTrees=1000:BoostType=Grad:Shrinkage=0.10:UseBaggedGrad:GradBaggingFraction=0.50:nCuts=20:NNodesMax=8");
if (Use["BDTG"]) factory->BookMethod( TMVA::Types::kBDT, "BDTG", "!H:!V:NTrees=600:BoostType=Grad:Shrinkage=0.10:UseBaggedGrad:GradBaggingFraction=0.50:nCuts=20:NNodesMax=8");
// neural network
if (Use["MLP"]) factory->BookMethod( TMVA::Types::kMLP, "MLP", "!H:!V:NeuronType=tanh:NCycles=1000:HiddenLayers=N+5,5:TestRate=5:EstimatorType=MSE");
// functional discriminant with GA minimizer
if (Use["FDA_GA"]) factory->BookMethod( TMVA::Types::kFDA, "FDA_GA", "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=GA:PopSize=300:Cycles=3:Steps=20:Trim=True:SaveBestGen=1" );
// PDE-Foam approach
if (Use["PDEFoam"]) factory->BookMethod( TMVA::Types::kPDEFoam, "PDEFoam", "!H:!V:TailCut=0.001:VolFrac=0.0666:nActiveCells=500:nSampl=2000:nBin=5:Nmin=100:Kernel=None:Compress=T" );
//==== Optimize parameters in MVA methods
// factory->OptimizeAllMethods();
// factory->OptimizeAllMethods("ROCIntegral","Scan");
//==== Train MVAs using the set of training events ====
factory->TrainAllMethods();
//==== Evaluate all MVAs using the set of test events ====
factory->TestAllMethods();
//==== Evaluate and compare performance of all configured MVAs ====
factory->EvaluateAllMethods();
// --------------------------------------------------------------
// Save the output
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVAnalysis is done!" << std::endl;
delete factory;
//==== change position of weights file
std::string toDo;
toDo = "rm -r Weights-MVA-MultiClass/weights_" + HiggsMass + "_testVariables";
std::cerr << "toDo = " << toDo << std::endl;
system (toDo.c_str());
toDo = "mv weights Weights-MVA-MultiClass/weights_" + HiggsMass + "_testVariables";
std::cerr << "toDo = " << toDo << std::endl;
system (toDo.c_str());
// Launch the GUI for the root macros
// if (!gROOT->IsBatch()) TMVAGui( outfileName );
}
Example 10: TMVAClassification
//......... part of the code omitted here .........
if (Use["FDA_MT"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_MT",
"H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=2:UseImprove:UseMinos:SetBatch" );
if (Use["FDA_GAMT"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_GAMT",
"H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=GA:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:Cycles=1:PopSize=5:Steps=5:Trim" );
if (Use["FDA_MCMT"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_MCMT",
"H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=MC:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:SampleSize=20" );
// TMVA ANN: MLP (recommended ANN) -- all ANNs in TMVA are Multilayer Perceptrons
if (Use["MLP"])
// factory->BookMethod( TMVA::Types::kMLP, "MLP", "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5:!UseRegulator" );
factory->BookMethod( TMVA::Types::kMLP, "MLP", "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+8:TestRate=5:!UseRegulator" );
if (Use["MLPBFGS"])
factory->BookMethod( TMVA::Types::kMLP, "MLPBFGS", "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5:TrainingMethod=BFGS:!UseRegulator" );
if (Use["MLPBNN"])
factory->BookMethod( TMVA::Types::kMLP, "MLPBNN", "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5:TrainingMethod=BFGS:UseRegulator" ); // BFGS training with bayesian regulators
// CF(Clermont-Ferrand)ANN
if (Use["CFMlpANN"])
factory->BookMethod( TMVA::Types::kCFMlpANN, "CFMlpANN", "!H:!V:NCycles=2000:HiddenLayers=N+1,N" ); // n_cycles:#nodes:#nodes:...
// Tmlp(Root)ANN
if (Use["TMlpANN"])
factory->BookMethod( TMVA::Types::kTMlpANN, "TMlpANN", "!H:!V:NCycles=200:HiddenLayers=N+1,N:LearningMethod=BFGS:ValidationFraction=0.3" ); // n_cycles:#nodes:#nodes:...
// Support Vector Machine
if (Use["SVM"])
factory->BookMethod( TMVA::Types::kSVM, "SVM", "Gamma=0.25:Tol=0.001:VarTransform=Norm" );
// Boosted Decision Trees
if (Use["BDTG"]) // Gradient Boost
factory->BookMethod( TMVA::Types::kBDT, "BDTG",
"!H:!V:NTrees=1000:BoostType=Grad:Shrinkage=0.10:UseBaggedGrad:GradBaggingFraction=0.5:nCuts=20:NNodesMax=5" );
if (Use["BDT"]) // Adaptive Boost
factory->BookMethod( TMVA::Types::kBDT, "BDT",
"!H:!V:NTrees=800:nEventsMin=50:MaxDepth=2:BoostType=AdaBoost:AdaBoostBeta=1:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning:NNodesMax=5" );
if (Use["BDTB"]) // Bagging
factory->BookMethod( TMVA::Types::kBDT, "BDTB",
"!H:!V:NTrees=400:BoostType=Bagging:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning" );
if (Use["BDTD"]) // Decorrelation + Adaptive Boost
factory->BookMethod( TMVA::Types::kBDT, "BDTD",
"!H:!V:NTrees=400:nEventsMin=400:MaxDepth=3:BoostType=AdaBoost:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning:VarTransform=Decorrelate" );
if (Use["BDTF"]) // Allow Using Fisher discriminant in node splitting for (strong) linearly correlated variables
factory->BookMethod( TMVA::Types::kBDT, "BDTMitFisher",
"!H:!V:NTrees=50:nEventsMin=150:UseFisherCuts:MaxDepth=3:BoostType=AdaBoost:AdaBoostBeta=0.5:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning" );
// RuleFit -- TMVA implementation of Friedman's method
if (Use["RuleFit"])
factory->BookMethod( TMVA::Types::kRuleFit, "RuleFit",
"H:!V:RuleFitModule=RFTMVA:Model=ModRuleLinear:MinImp=0.001:RuleMinDist=0.001:NTrees=20:fEventsMin=0.01:fEventsMax=0.5:GDTau=-1.0:GDTauPrec=0.01:GDStep=0.01:GDNSteps=10000:GDErrScale=1.02" );
// For an example of the category classifier usage, see: TMVAClassificationCategory
// --------------------------------------------------------------------------------------------------
// ---- Now you can optimize the setting (configuration) of the MVAs using the set of training events
// factory->OptimizeAllMethods("SigEffAt001","Scan");
// factory->OptimizeAllMethods("ROCIntegral","GA");
// --------------------------------------------------------------------------------------------------
// ---- Now you can tell the factory to train, test, and evaluate the MVAs
// Train MVAs using the set of training events
std::cout << "Training all methods" << std::endl;
factory->TrainAllMethods();
// ---- Evaluate all MVAs using the set of test events
std::cout << "Testing all methods" << std::endl;
factory->TestAllMethods();
// ----- Evaluate and compare performance of all configured MVAs
std::cout << "Evaluating all methods" << std::endl;
factory->EvaluateAllMethods();
// --------------------------------------------------------------
// Save the output
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVAClassification is done!" << std::endl;
delete factory;
// Launch the GUI for the root macros
if (!gROOT->IsBatch()) TMVAGui( outfileName );
}
Example 11: TrainRegressionFJ
//......... part of the code omitted here .........
// Please lookup the various method configuration options in the corresponding cxx files, eg:
// src/MethodCuts.cxx, etc, or here: http://tmva.sourceforge.net/optionRef.html
// it is possible to preset ranges in the option string in which the cut optimisation should be done:
// "...:CutRangeMin[2]=-1:CutRangeMax[2]=1"...", where [2] is the third input variable
// PDE - RS method
if (Use["PDERS"])
factory->BookMethod( TMVA::Types::kPDERS, "PDERS",
"!H:!V:NormTree=T:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=40:NEventsMax=60:VarTransform=None" );
// And the options strings for the MinMax and RMS methods, respectively:
// "!H:!V:VolumeRangeMode=MinMax:DeltaFrac=0.2:KernelEstimator=Gauss:GaussSigma=0.3" );
// "!H:!V:VolumeRangeMode=RMS:DeltaFrac=3:KernelEstimator=Gauss:GaussSigma=0.3" );
if (Use["PDEFoam"])
factory->BookMethod( TMVA::Types::kPDEFoam, "PDEFoam",
"!H:!V:MultiTargetRegression=F:TargetSelection=Mpv:TailCut=0.001:VolFrac=0.0666:nActiveCells=500:nSampl=2000:nBin=5:Compress=T:Kernel=None:Nmin=10:VarTransform=None" );
// K-Nearest Neighbour classifier (KNN)
if (Use["KNN"])
factory->BookMethod( TMVA::Types::kKNN, "KNN",
"nkNN=20:ScaleFrac=0.8:SigmaFact=1.0:Kernel=Gaus:UseKernel=F:UseWeight=T:!Trim" );
// Linear discriminant
if (Use["LD"])
factory->BookMethod( TMVA::Types::kLD, "LD",
"!H:!V:VarTransform=None" );
// Function discrimination analysis (FDA) -- test of various fitters - the recommended one is Minuit (or GA or SA)
if (Use["FDA_MC"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_MC",
"!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=MC:SampleSize=100000:Sigma=0.1:VarTransform=D" );
if (Use["FDA_GA"]) // can also use Simulated Annealing (SA) algorithm (see Cuts_SA options) .. the formula of this example is good for parabolas
factory->BookMethod( TMVA::Types::kFDA, "FDA_GA",
"!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=GA:PopSize=100:Cycles=3:Steps=30:Trim=True:SaveBestGen=1:VarTransform=Norm" );
if (Use["FDA_MT"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_MT",
"!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100);(-10,10):FitMethod=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=2:UseImprove:UseMinos:SetBatch" );
if (Use["FDA_GAMT"])
factory->BookMethod( TMVA::Types::kFDA, "FDA_GAMT",
"!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=GA:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:Cycles=1:PopSize=5:Steps=5:Trim" );
// Neural network (MLP)
if (Use["MLP"])
factory->BookMethod( TMVA::Types::kMLP, "MLP",
"!H:!V:VarTransform=Norm:NeuronType=tanh:NCycles=20000:HiddenLayers=N+20:TestRate=6:TrainingMethod=BFGS:Sampling=0.3:SamplingEpoch=0.8:ConvergenceImprove=1e-6:ConvergenceTests=15:!UseRegulator" );
// Support Vector Machine
if (Use["SVM"])
factory->BookMethod( TMVA::Types::kSVM, "SVM", "Gamma=0.25:Tol=0.001:VarTransform=Norm" );
// Boosted Decision Trees
if (Use["BDT"])
factory->BookMethod( TMVA::Types::kBDT, "BDT",
"!H:V:NTrees=100:nEventsMin=30:NodePurityLimit=0.5:BoostType=AdaBoostR2:SeparationType=RegressionVariance:nCuts=20:PruneMethod=CostComplexity:PruneStrength=30" );
//"!H:V:NTrees=60:nEventsMin=20:NodePurityLimit=0.5:BoostType=AdaBoostR2:SeparationType=RegressionVariance:nCuts=20:PruneMethod=CostComplexity:PruneStrength=30:DoBoostMonitor" );
if (Use["BDT1"])
factory->BookMethod( TMVA::Types::kBDT, "BDT1",
"!H:V:NTrees=100:nEventsMin=5:BoostType=AdaBoostR2:SeparationType=RegressionVariance:nCuts=20:PruneMethod=CostComplexity:PruneStrength=30" );
if (Use["BDTG"])
factory->BookMethod( TMVA::Types::kBDT, "BDTG",
"!H:V:NTrees=2000:BoostType=Grad:Shrinkage=0.10:UseBaggedGrad:GradBaggingFraction=0.7:nCuts=200:MaxDepth=3:NNodesMax=15" );
if (Use["BDTG1"])
factory->BookMethod( TMVA::Types::kBDT, "BDTG1",
"!H:V:NTrees=1000:BoostType=Grad:Shrinkage=0.10:UseBaggedGrad:GradBaggingFraction=0.5:nCuts=20:MaxDepth=3:NNodesMax=15" );
//--------------------------------------------------------------------------
// Train MVAs using the set of training events
factory->TrainAllMethods();
// --- Evaluate all MVAs using the set of test events
factory->TestAllMethods();
// --- Evaluate and compare performance of all configured MVAs
factory->EvaluateAllMethods();
//--------------------------------------------------------------------------
// Save the output
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVARegression is done!" << std::endl;
for (UInt_t i=0; i<files.size(); i++)
files.at(i)->Close();
delete outputFile;
delete factory;
// Launch the GUI for the root macros
//gROOT->SetMacroPath( "$ROOTSYS/tmva/macros/" );
//gROOT->Macro( "$ROOTSYS/tmva/macros/TMVAlogon.C" );
//gROOT->LoadMacro( "$ROOTSYS/tmva/macros/TMVAGui.C" );
//if (!gROOT->IsBatch()) TMVARegGui( outfileName );
}
Example 12: testBDT
//......... part of the code omitted here .........
TString outfileName( "bdtTMVA_FCNC_tZ.root" );
TFile* outputFile = TFile::Open( outfileName, "RECREATE" );
TMVA::Factory *factory = new TMVA::Factory( "doBDT_FCNC_tZ", outputFile,
"!V:!Silent:Color:DrawProgressBar:Transformations=I;D;P;G,D:AnalysisType=Classification" );
// global event weights per tree (see below for setting event-wise weights)
//Double_t signalWeight = 0.003582;
//Double_t backgroundWeight = 0.0269;
Double_t signalWeight = 1;
Double_t backgroundWeight = 1;
TFile *input_sig = TFile::Open( "proof.root" );
TFile *input_wz = TFile::Open( "proof.root" );
TTree *signal = (TTree*)input_sig->Get("Ttree_FCNCkut");
TTree *background_WZ = (TTree*)input_wz->Get("Ttree_WZ");
/*TTree *background_ZZ = (TTree*)input_wz->Get("Ttree_ZZ");
TTree *background_WW = (TTree*)input_wz->Get("Ttree_WW");
TTree *background_TTbar = (TTree*)input_wz->Get("Ttree_TTbar");
TTree *background_Zjets = (TTree*)input_wz->Get("Ttree_Zjets");
TTree *background_Wjets = (TTree*)input_wz->Get("Ttree_Wjets");
TTree *background_TtW = (TTree*)input_wz->Get("Ttree_TtW");
TTree *background_TbartW = (TTree*)input_wz->Get("Ttree_TbartW");*/
// You can add an arbitrary number of signal or background trees
factory->AddSignalTree ( signal, signalWeight );
factory->AddBackgroundTree( background_WZ, backgroundWeight );
/*factory->AddBackgroundTree( background_ZZ, backgroundWeight );
factory->AddBackgroundTree( background_WW, backgroundWeight );
factory->AddBackgroundTree( background_TTbar, backgroundWeight );
factory->AddBackgroundTree( background_Zjets, backgroundWeight );
factory->AddBackgroundTree( background_Wjets, backgroundWeight );
factory->AddBackgroundTree( background_TtW, backgroundWeight );
factory->AddBackgroundTree( background_TbartW, backgroundWeight );*/
factory->AddVariable("tree_topMass", 'F');
factory->AddVariable("tree_deltaPhilb", 'F');
factory->AddVariable("tree_asym", 'F');
factory->AddVariable("tree_Zpt", 'F');
// to set weights. The variable must exist in the tree
// for signal : factory->SetSignalWeightExpression ("weight1*weight2");
// for background: factory->SetBackgroundWeightExpression("weight1*weight2");
// Apply additional cuts on the signal and background samples (can be different)
TCut mycuts = ""; // for example: TCut mycuts = "abs(var1)<0.5 && abs(var2-0.5)<1";
TCut mycutb = ""; // for example: TCut mycutb = "abs(var1)<0.5";
factory->PrepareTrainingAndTestTree( mycuts, mycutb,
"nTrain_Signal=0:nTrain_Background=0:SplitMode=Random:NormMode=NumEvents:!V" );
factory->BookMethod( TMVA::Types::kBDT, "BDT", "!H:!V:NTrees=100:nEventsMin=100:MaxDepth=3:BoostType=AdaBoost:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning:VarTransform=Decorrelate" );
// Train MVAs using the set of training events
factory->TrainAllMethods();
// ---- Evaluate all MVAs using the set of test events
factory->TestAllMethods();
// ----- Evaluate and compare performance of all configured MVAs
factory->EvaluateAllMethods();
// --------------------------------------------------------------
// Save the output
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVAClassification is done!" << std::endl;
delete factory;
// Launch the GUI for the root macros
if (!gROOT->IsBatch()) TMVAGui( outfileName );
}
Example 13: trainMVACat
//......... part of the code omitted here .........
//int N = TMath::Min((float)N_SIG,N_BKG_EFF);
//cout<<N_SIG<<" "<<N_BKG_EFF<<endl;
const int NVAR = 21;
TString VAR[NVAR] = {
"nJets",
//"nBJets",
"ht",
"jetPt[0]","jetPt[1]","jetPt[2]","jetPt[3]","jetPt[4]","jetPt[5]",
"mbbMin","dRbbMin",
//"dRbbAve","mbbAve",
//"btagAve","btagMax","btagMin",
//"qglAve","qglMin","qglMedian",
"sphericity","aplanarity","foxWolfram[0]","foxWolfram[1]","foxWolfram[2]","foxWolfram[3]",
"mTop[0]","ptTTbar","mTTbar","dRbbTop","chi2"
};
char TYPE[NVAR] = {
'I',
//'I',
'F',
'F','F','F','F','F','F',
'F','F',
//'F','F',
//'F','F','F',
//'F','F','F',
'F','F','F','F','F','F',
'F','F','F','F','F'
};
for(int i=0;i<NVAR;i++) {
factory->AddVariable(VAR[i],TYPE[i]);
}
factory->AddSpectator("status",'I');
factory->AddSpectator("nBJets",'I');
sprintf(name,"nTrain_Signal=%d:nTrain_Background=%d:nTest_Signal=%d:nTest_Background=%d",-1,-1,-1,-1);
factory->PrepareTrainingAndTestTree(preselectionCut,name);
TMVA::IMethod* BDT_Category = factory->BookMethod( TMVA::Types::kCategory,"BDT_Category");
TMVA::MethodCategory* mcategory_BDT = dynamic_cast<TMVA::MethodCategory*>(BDT_Category);
mcategory_BDT->AddMethod("status == 0 && nBJets == 2",
"nJets:ht:jetPt[0]:jetPt[1]:jetPt[2]:jetPt[3]:jetPt[4]:jetPt[5]:mbbMin:dRbbMin:sphericity:aplanarity:foxWolfram[0]:foxWolfram[1]:foxWolfram[2]:foxWolfram[3]:mTop[0]:ptTTbar:mTTbar:dRbbTop:chi2:",
TMVA::Types::kBDT,
"BDT_Cat1",
"NTrees=2000:BoostType=Grad:Shrinkage=0.1");
mcategory_BDT->AddMethod("status == 0 && nBJets > 2",
"nJets:ht:jetPt[0]:jetPt[1]:jetPt[2]:jetPt[3]:jetPt[4]:jetPt[5]:mbbMin:dRbbMin:sphericity:aplanarity:foxWolfram[0]:foxWolfram[1]:foxWolfram[2]:foxWolfram[3]:mTop[0]:ptTTbar:mTTbar:dRbbTop:chi2:",
TMVA::Types::kBDT,
"BDT_Cat2",
"NTrees=2000:BoostType=Grad:Shrinkage=0.1");
mcategory_BDT->AddMethod("status < 0 && nBJets == 2",
"nJets:ht:jetPt[0]:jetPt[1]:jetPt[2]:jetPt[3]:jetPt[4]:jetPt[5]:mbbMin:dRbbMin:sphericity:aplanarity:foxWolfram[0]:foxWolfram[1]:foxWolfram[2]:foxWolfram[3]:",
TMVA::Types::kBDT,
"BDT_Cat3",
"NTrees=2000:BoostType=Grad:Shrinkage=0.1");
mcategory_BDT->AddMethod("status < 0 && nBJets > 2",
"nJets:ht:jetPt[0]:jetPt[1]:jetPt[2]:jetPt[3]:jetPt[4]:jetPt[5]:mbbMin:dRbbMin:sphericity:aplanarity:foxWolfram[0]:foxWolfram[1]:foxWolfram[2]:foxWolfram[3]:",
TMVA::Types::kBDT,
"BDT_Cat4",
"NTrees=2000:BoostType=Grad:Shrinkage=0.1");
TMVA::IMethod* Fisher_Category = factory->BookMethod( TMVA::Types::kCategory,"Fisher_Category");
TMVA::MethodCategory* mcategory_Fisher = dynamic_cast<TMVA::MethodCategory*>(Fisher_Category);
mcategory_Fisher->AddMethod("status == 0 && nBJets == 2",
"nJets:ht:jetPt[0]:jetPt[1]:jetPt[2]:jetPt[3]:jetPt[4]:jetPt[5]:mbbMin:dRbbMin:sphericity:aplanarity:foxWolfram[0]:foxWolfram[1]:foxWolfram[2]:foxWolfram[3]:mTop[0]:ptTTbar:mTTbar:dRbbTop:chi2:",
TMVA::Types::kFisher,
"Fisher_Cat1","H:!V:Fisher");
mcategory_Fisher->AddMethod("status == 0 && nBJets > 2",
"nJets:ht:jetPt[0]:jetPt[1]:jetPt[2]:jetPt[3]:jetPt[4]:jetPt[5]:mbbMin:dRbbMin:sphericity:aplanarity:foxWolfram[0]:foxWolfram[1]:foxWolfram[2]:foxWolfram[3]:mTop[0]:ptTTbar:mTTbar:dRbbTop:chi2:",
TMVA::Types::kFisher,
"Fisher_Cat2","H:!V:Fisher");
mcategory_Fisher->AddMethod("status < 0 && nBJets == 2",
"nJets:ht:jetPt[0]:jetPt[1]:jetPt[2]:jetPt[3]:jetPt[4]:jetPt[5]:mbbMin:dRbbMin:sphericity:aplanarity:foxWolfram[0]:foxWolfram[1]:foxWolfram[2]:foxWolfram[3]:",
TMVA::Types::kFisher,
"Fisher_Cat3","H:!V:Fisher");
mcategory_Fisher->AddMethod("status < 0 && nBJets > 2",
"nJets:ht:jetPt[0]:jetPt[1]:jetPt[2]:jetPt[3]:jetPt[4]:jetPt[5]:mbbMin:dRbbMin:sphericity:aplanarity:foxWolfram[0]:foxWolfram[1]:foxWolfram[2]:foxWolfram[3]:",
TMVA::Types::kFisher,
"Fisher_Cat4","H:!V:Fisher");
// specify the training methods
//factory->BookMethod(TMVA::Types::kFisher,"Fisher");
//factory->BookMethod(TMVA::Types::kBDT,"BDT_GRAD_2000","NTrees=2000:BoostType=Grad:Shrinkage=0.1");
factory->TrainAllMethods();
factory->TestAllMethods();
factory->EvaluateAllMethods();
outf->Close();
}
Example 14: Boost
void Boost(){
TString outfileName = "boost.root";
TFile* outputFile = TFile::Open( outfileName, "RECREATE" );
TMVA::Factory *factory = new TMVA::Factory( "TMVAClassification", outputFile,
"!V:!Silent:Color:DrawProgressBar:Transformations=I;D;P;G,D" );
factory->AddVariable( "var0", 'F' );
factory->AddVariable( "var1", 'F' );
TFile *input(0);
TString fname = "./data.root";
if (!gSystem->AccessPathName( fname )) {
// first we try to find tmva_example.root in the local directory
std::cout << "--- BOOST : Accessing " << fname << std::endl;
input = TFile::Open( fname );
}
else {
gROOT->LoadMacro( "./createData.C");
create_circ(20000);
cout << " created data.root with data and circle arranged in half circles"<<endl;
input = TFile::Open( fname );
}
if (!input) {
std::cout << "ERROR: could not open data file" << std::endl;
exit(1);
}
TTree *signal = (TTree*)input->Get("TreeS");
TTree *background = (TTree*)input->Get("TreeB");
Double_t signalWeight = 1.0;
Double_t backgroundWeight = 1.0;
gROOT->cd( outfileName+TString(":/") );
factory->AddSignalTree ( signal, signalWeight );
factory->AddBackgroundTree( background, backgroundWeight );
factory->PrepareTrainingAndTestTree( "", "",
"nTrain_Signal=0:nTrain_Background=0:SplitMode=Random:NormMode=NumEvents:!V" );
TString fisher="H:!V";
factory->BookMethod( TMVA::Types::kFisher, "Fisher", fisher );
factory->BookMethod( TMVA::Types::kFisher, "FisherBoost", fisher+":Boost_Num=100:Boost_Type=AdaBoost" );
factory->BookMethod( TMVA::Types::kFisher, "FisherBoostLog", fisher+":Boost_Num=100:Boost_Transform=log:Boost_Type=AdaBoost:Boost_AdaBoostBeta=1.0" );
factory->BookMethod( TMVA::Types::kFisher, "FisherBoostLog2", fisher+":Boost_Num=100:Boost_Transform=log:Boost_Type=AdaBoost:Boost_AdaBoostBeta=2.0" );
factory->BookMethod( TMVA::Types::kFisher, "FisherBoostStep", fisher+":Boost_Num=100:Boost_Transform=step:Boost_Type=AdaBoost:Boost_AdaBoostBeta=1.0" );
factory->BookMethod( TMVA::Types::kFisher, "FisherBoostStep2", fisher+":Boost_Num=100:Boost_Transform=step:Boost_Type=AdaBoost:Boost_AdaBoostBeta=1.2" );
factory->BookMethod( TMVA::Types::kFisher, "FisherBoostStep3", fisher+":Boost_Num=100:Boost_Transform=step:Boost_Type=AdaBoost:Boost_AdaBoostBeta=1.5" );
// Train MVAs using the set of training events
factory->TrainAllMethods();
// ---- Evaluate all MVAs using the set of test events
factory->TestAllMethods();
// ----- Evaluate and compare performance of all configured MVAs
factory->EvaluateAllMethods();
// --------------------------------------------------------------
// Save the output
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVAClassification is done!" << std::endl;
delete factory;
// Launch the GUI for the root macros
if (!gROOT->IsBatch()) TMVAGui( outfileName );
}
Example 15: Reg
void Reg(){
TMVA::Tools::Instance();
std::cout << "==> Start TMVARegression" << std::endl;
ifstream myfile;
myfile.open("99per.txt");
ostringstream xcS,xcH,xcP,xcC,xcN;
double xS,xH,xC,xN,xP;
if(myfile.is_open()){
// read to end of file; the values from the last line are the ones kept
while(myfile >> xS >> xH >> xC >> xN >> xP) {}
}
xcS<<xS;
xcH<<xH;
xcC<<xC;
xcN<<xN;
xcP<<xP;
//Output file
TString outfileName( "Ex1out_FullW_def.root" );
TFile* outputFile = TFile::Open( outfileName, "RECREATE" );
//Declaring the factory
TMVA::Factory *factory = new TMVA::Factory( "TMVAClassification", outputFile,
"!V:!Silent:Color:DrawProgressBar" );
//Declaring Input Varibles
factory->AddVariable( "Sieie",'F');
factory->AddVariable( "ToE", 'F' );
factory->AddVariable( "isoC",'F' );
factory->AddVariable( "isoN",'F' );
factory->AddVariable( "isoP",'F' );
TString fname = "../../CutTMVATrees_Barrel.root";
TFile *input = TFile::Open( fname );
// --- Register the regression tree
TTree *signal = (TTree*)input->Get("t_S");
TTree *background = (TTree*)input->Get("t_B");
//Just Some more settings
Double_t signalWeight = 1.0;
Double_t backgroundWeight = 1.0;
// You can add an arbitrary number of regression trees
factory->AddSignalTree( signal, signalWeight );
factory->AddBackgroundTree( background , backgroundWeight );
TCut mycuts ="";
TCut mycutb ="";
// factory->PrepareTrainingAndTestTree(mycuts,mycutb,"nTrain_Signal=9000:nTrain_Background=9000:nTest_Signal=10000:nTest_Background=10000");
factory->SetBackgroundWeightExpression("weightPT*weightXS");
factory->SetSignalWeightExpression("weightPT*weightXS");
TString methodName = "Cuts_FullsampleW_def";
TString methodOptions ="!H:!V:FitMethod=GA:EffMethod=EffSEl";
methodOptions +=":VarProp[0]=FMin:VarProp[1]=FMin:VarProp[2]=FMin:VarProp[3]=FMin:VarProp[4]=FMin";
methodOptions +=":CutRangeMax[0]="+xcS.str();
methodOptions +=":CutRangeMax[1]="+xcH.str();
methodOptions +=":CutRangeMax[2]="+xcC.str();
methodOptions +=":CutRangeMax[3]="+xcN.str();
methodOptions +=":CutRangeMax[4]="+xcP.str();
//************
factory->BookMethod(TMVA::Types::kCuts,methodName,methodOptions);
factory->TrainAllMethods();
factory->TestAllMethods();
factory->EvaluateAllMethods();
// --------------------------------------------------------------
// Save the output
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVARegression is done!" << std::endl;
delete factory;
}
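For Example 15, the booked method is a rectangular-cut optimisation, and the optimised cut values themselves are usually what one wants back. A hedged sketch, assuming the method is reloaded with a TMVA::Reader and that the weight-file path follows TMVA's default naming for the job name "TMVAClassification" used above; MethodCuts then exposes the cut window per variable at a chosen signal efficiency:
// Retrieve the optimised rectangular cuts at 90% signal efficiency.
TMVA::Reader* reader = new TMVA::Reader("!Color:!Silent");
Float_t Sieie, ToE, isoC, isoN, isoP;
reader->AddVariable("Sieie", &Sieie);
reader->AddVariable("ToE", &ToE);
reader->AddVariable("isoC", &isoC);
reader->AddVariable("isoN", &isoN);
reader->AddVariable("isoP", &isoP);
reader->BookMVA("Cuts_FullsampleW_def", "weights/TMVAClassification_Cuts_FullsampleW_def.weights.xml"); // assumed path
TMVA::MethodCuts* mcuts = reader->FindCutsMVA("Cuts_FullsampleW_def");
std::vector<Double_t> cutsMin, cutsMax;
mcuts->GetCuts(0.90, cutsMin, cutsMax); // cut window per input variable at effS = 0.90
for (UInt_t i = 0; i < cutsMin.size(); i++)
std::cout << cutsMin[i] << " < var" << i << " <= " << cutsMax[i] << std::endl;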