This page collects typical usage examples of the C++ method tmva::Reader::EvaluateRegression. If you are unsure what Reader::EvaluateRegression does, how to call it, or where to find examples, the hand-picked code samples below may help. You can also read more about the enclosing class tmva::Reader.
Four code examples of Reader::EvaluateRegression are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better C++ code samples.
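Before the project-specific examples, here is a minimal, self-contained sketch of the typical call pattern: declare the input variables, book a weight file, and read the first element of the vector returned by EvaluateRegression. The weight-file path, the "BDTG method" tag, and the variable names are placeholders, not taken from the examples below.
#include <iostream>
#include "TMVA/Reader.h"

// Minimal usage sketch; path, method tag, and variable names are illustrative placeholders.
void EvaluateRegressionSketch() {
   Float_t var1 = 0.f, var2 = 0.f;                      // addresses must stay valid while the reader is used
   TMVA::Reader reader("!Color:!Silent");
   reader.AddVariable("var1", &var1);                   // names and types must match the training
   reader.AddVariable("var2", &var2);
   reader.BookMVA("BDTG method", "dataset/weights/TMVARegression_BDTG.weights.xml");

   var1 = 1.2f; var2 = 3.4f;                            // values of the current event
   // EvaluateRegression returns one entry per regression target; [0] is the first target
   Float_t val = reader.EvaluateRegression("BDTG method")[0];
   std::cout << "regression output: " << val << std::endl;
}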
Example 1: TMVARegressionApplication
//......... part of the code is omitted here .........
Int_t* AK8Puppijet_nSV=data.GetPtrInt("AK8Puppijet_nSV");
vector<float> *AK8Puppijet_SVMass = data.GetPtrVectorFloat("AK8Puppijet_SVMass");
int nEle= data.GetInt("nEle");
int nMu=data.GetInt("nMu");
Float_t* AK8PuppijetEleEF = data.GetPtrFloat("AK8PuppijetEleEF");
Float_t* AK8PuppijetMuoEF = data.GetPtrFloat("AK8PuppijetMuoEF"); // needed below for muenfr_AK8MatchedToHbb
Int_t* AK8PuppijetCMulti=data.GetPtrInt("AK8PuppijetCMulti");
Int_t* AK8PuppijetEleMulti=data.GetPtrInt("AK8PuppijetEleMulti");
Int_t* AK8PuppijetMuoMulti=data.GetPtrInt("AK8PuppijetMuoMulti");
for(int i=0; i<2; i++){
    TLorentzVector* thisAK8Jet;
    if(i==1) thisAK8Jet = thatJet;
    else     thisAK8Jet = thisJet;
    // Fill the regression input (and spectator) variables for this jet
    pt_AK8MatchedToHbb      = thisAK8Jet->Pt();
    eta_AK8MatchedToHbb     = thisAK8Jet->Eta();
    nsv_AK8MatchedToHbb     = AK8Puppijet_nSV[i];
    sv0mass_AK8MatchedToHbb = AK8Puppijet_SVMass[i][0];
    sv1mass_AK8MatchedToHbb = AK8Puppijet_SVMass[i][1];
    nmu_AK8MatchedToHbb     = AK8PuppijetMuoMulti[i];
    nel_AK8MatchedToHbb     = AK8PuppijetEleMulti[i];
    muenfr_AK8MatchedToHbb  = AK8PuppijetMuoEF[i];
    nch_AK8MatchedToHbb     = AK8PuppijetCMulti[i];
    emenfr_AK8MatchedToHbb  = AK8PuppijetEleEF[i];
    spec1 = nVtx;
    spec2 = AK8PuppijetSDmass[i];
    // Evaluate the booked regression method(s); the value of the last booked method is kept
    Float_t val = 0;
    for (Int_t ih=0; ih<nhists; ih++) {
        TString title = hists[ih]->GetTitle();
        val = (reader->EvaluateRegression( title ))[0];
    }
    varTemp[i] = val;
}
double PUPPIweight[2]={0};
PUPPIweight[0]=getPUPPIweight(thisJet->Pt(),thisJet->Eta());
PUPPIweight[1]=getPUPPIweight(thatJet->Pt(),thatJet->Eta());
double PUPPIweightThea[2]={0};
PUPPIweightThea[0]=getPUPPIweight_o(thisJet->Pt(),thisJet->Eta());
PUPPIweightThea[1]=getPUPPIweight_o(thatJet->Pt(),thatJet->Eta());
double Mjja= ((*thisJet)+(*thatJet)).M()+250
-((*thisJet)).M()-((*thatJet)).M();
TLorentzVector thisJetReg, thatJetReg;
thisJetReg=(*thisJet)*varTemp[0];
thatJetReg=(*thatJet)*varTemp[1];
double Mjjb= (thisJetReg+thatJetReg).M()+250
-(thisJetReg).M()-(thatJetReg).M();
double PUPPIweightOnRegressed[2]={0};
PUPPIweightOnRegressed[0]=getPUPPIweightOnRegressed(thisJetReg.Pt(),thisJetReg.Eta());
PUPPIweightOnRegressed[1]=getPUPPIweightOnRegressed(thatJetReg.Pt(),thatJetReg.Eta());
vector<float> *subjetSDPx = data.GetPtrVectorFloat("AK8PuppisubjetSDPx");
vector<float> *subjetSDPy = data.GetPtrVectorFloat("AK8PuppisubjetSDPy");
vector<float> *subjetSDPz = data.GetPtrVectorFloat("AK8PuppisubjetSDPz");
vector<float> *subjetSDE = data.GetPtrVectorFloat("AK8PuppisubjetSDE");
vector<float> *AK8PuppisubjetSDRawFactor = data.GetPtrVectorFloat("AK8PuppisubjetSDRawFactor");
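The reader setup for this example lives in the elided portion above. Judging from the floats filled inside the jet loop (and the spec1/spec2 spectators, which mirror Example 2 below), the booking presumably looks roughly like the sketch that follows; the variable list, method tag, and weight-file path are assumptions for illustration, not the original source.
#include "TMVA/Reader.h"

// Hypothetical sketch of the booking that the elided part of Example 1 presumably contains.
// These floats stand in for the file-scope variables the example fills in its jet loop.
static Float_t pt_AK8MatchedToHbb, eta_AK8MatchedToHbb, nsv_AK8MatchedToHbb,
               sv0mass_AK8MatchedToHbb, sv1mass_AK8MatchedToHbb,
               nmu_AK8MatchedToHbb, nel_AK8MatchedToHbb, muenfr_AK8MatchedToHbb,
               nch_AK8MatchedToHbb, emenfr_AK8MatchedToHbb, spec1, spec2;

static TMVA::Reader* bookHbbRegressionReader() {
   TMVA::Reader* reader = new TMVA::Reader("!Color:!Silent");
   reader->AddVariable("pt_AK8MatchedToHbb",      &pt_AK8MatchedToHbb);
   reader->AddVariable("eta_AK8MatchedToHbb",     &eta_AK8MatchedToHbb);
   reader->AddVariable("nsv_AK8MatchedToHbb",     &nsv_AK8MatchedToHbb);
   reader->AddVariable("sv0mass_AK8MatchedToHbb", &sv0mass_AK8MatchedToHbb);
   reader->AddVariable("sv1mass_AK8MatchedToHbb", &sv1mass_AK8MatchedToHbb);
   reader->AddVariable("nmu_AK8MatchedToHbb",     &nmu_AK8MatchedToHbb);
   reader->AddVariable("nel_AK8MatchedToHbb",     &nel_AK8MatchedToHbb);
   reader->AddVariable("muenfr_AK8MatchedToHbb",  &muenfr_AK8MatchedToHbb);
   reader->AddVariable("nch_AK8MatchedToHbb",     &nch_AK8MatchedToHbb);
   reader->AddVariable("emenfr_AK8MatchedToHbb",  &emenfr_AK8MatchedToHbb);
   reader->AddSpectator("spec1", &spec1);
   reader->AddSpectator("spec2", &spec2);
   reader->BookMVA("BDTG method", "weights/TMVARegression_BDTG.weights.xml");  // tag and path assumed
   return reader;
}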
Example 2: TMVARegressionApplication
//......... part of the code is omitted here .........
// - the variable names MUST correspond in name and type to those given in the weight file(s) used
Float_t var1, var2;
reader->AddVariable( "var1", &var1 );
reader->AddVariable( "var2", &var2 );
// Spectator variables declared in the training have to be added to the reader, too
Float_t spec1,spec2;
reader->AddSpectator( "spec1:=var1*2", &spec1 );
reader->AddSpectator( "spec2:=var1*3", &spec2 );
// --- Book the MVA methods
TString dir = "dataset/weights/";
TString prefix = "TMVARegression";
// Book method(s)
for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
    if (it->second) {
        TString methodName = it->first + " method";
        TString weightfile = dir + prefix + "_" + TString(it->first) + ".weights.xml";
        reader->BookMVA( methodName, weightfile );
    }
}
// Book output histograms
TH1* hists[100];
Int_t nhists = -1;
for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
    TH1* h = new TH1F( it->first.c_str(), TString(it->first) + " method", 100, -100, 600 );
    if (it->second) hists[++nhists] = h;
}
nhists++;
// Prepare input tree (this must be replaced by your data source)
// in this example, there is a toy tree with signal and one with background events
// we'll later on use only the "signal" events for the test in this example.
//
TFile *input(0);
TString fname = "./tmva_reg_example.root";
if (!gSystem->AccessPathName( fname )) {
    input = TFile::Open( fname ); // check if file in local directory exists
}
else {
    TFile::SetCacheFileDir(".");
    input = TFile::Open("http://root.cern.ch/files/tmva_reg_example.root", "CACHEREAD"); // if not: download from ROOT server
}
if (!input) {
    std::cout << "ERROR: could not open data file" << std::endl;
    exit(1);
}
std::cout << "--- TMVARegressionApp : Using input file: " << input->GetName() << std::endl;
// --- Event loop
// Prepare the tree
// - here the variable names have to correspond to your tree
// - you can use the same variables as above which is slightly faster,
// but of course you can use different ones and copy the values inside the event loop
//
TTree* theTree = (TTree*)input->Get("TreeR");
std::cout << "--- Select signal sample" << std::endl;
theTree->SetBranchAddress( "var1", &var1 );
theTree->SetBranchAddress( "var2", &var2 );
std::cout << "--- Processing: " << theTree->GetEntries() << " events" << std::endl;
TStopwatch sw;
sw.Start();
for (Long64_t ievt=0; ievt<theTree->GetEntries(); ievt++) {
    if (ievt%1000 == 0) {
        std::cout << "--- ... Processing event: " << ievt << std::endl;
    }
    theTree->GetEntry(ievt);
    // Retrieve the MVA target values (regression outputs) and fill into histograms
    // NOTE: EvaluateRegression(..) returns a vector for multi-target regression
    for (Int_t ih=0; ih<nhists; ih++) {
        TString title = hists[ih]->GetTitle();
        Float_t val = (reader->EvaluateRegression( title ))[0];
        hists[ih]->Fill( val );
    }
}
sw.Stop();
std::cout << "--- End of event loop: "; sw.Print();
// --- Write histograms
TFile *target = new TFile( "TMVARegApp.root","RECREATE" );
for (Int_t ih=0; ih<nhists; ih++) hists[ih]->Write();
target->Close();
std::cout << "--- Created root file: \"" << target->GetName()
<< "\" containing the MVA output histograms" << std::endl;
delete reader;
std::cout << "==> TMVARegressionApplication is done!" << std::endl << std::endl;
}
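As the NOTE in the event loop above says, EvaluateRegression returns one value per regression target. If a booked method had been trained with more than one target, the result could be unpacked as in the sketch below; the "MLP method" tag and the multi-target training are assumptions, not part of the tutorial above.
#include <iostream>
#include <vector>
#include "TMVA/Reader.h"

// Sketch: unpack a multi-target regression result. Assumes a reader with a booked
// method tagged "MLP method" that was trained with more than one regression target.
void printAllTargets(TMVA::Reader& reader) {
   const std::vector<Float_t>& targets = reader.EvaluateRegression("MLP method");
   for (size_t itgt = 0; itgt < targets.size(); ++itgt) {
      std::cout << "target " << itgt << " = " << targets[itgt] << std::endl;
   }
}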
Example 3: GrowTree
//......... part of the code is omitted here .........
ttf_lheweight->UpdateFormulaLeaves();
}
/// These need to be called when arrays of variable size are used in TTree.
for (formIt=inputFormulasReg0.begin(), formItEnd=inputFormulasReg0.end(); formIt!=formItEnd; formIt++)
    (*formIt)->GetNdata();
for (formIt=inputFormulasReg1.begin(), formItEnd=inputFormulasReg1.end(); formIt!=formItEnd; formIt++)
    (*formIt)->GetNdata();
for (formIt=inputFormulasFJReg0.begin(), formItEnd=inputFormulasFJReg0.end(); formIt!=formItEnd; formIt++)
    (*formIt)->GetNdata();
for (formIt=inputFormulasFJReg1.begin(), formItEnd=inputFormulasFJReg1.end(); formIt!=formItEnd; formIt++)
    (*formIt)->GetNdata();
for (formIt=inputFormulasFJReg2.begin(), formItEnd=inputFormulasFJReg2.end(); formIt!=formItEnd; formIt++)
    (*formIt)->GetNdata();
ttf_lheweight->GetNdata();
/// Fill branches
EVENT_run = EVENT.run;
EVENT_event = EVENT.event;
#ifdef STITCH
efflumi = ttf_lheweight->EvalInstance();
// efflumi_UEPS_up = efflumi * hcount->GetBinContent(2) / hcount->GetBinContent(3);
//efflumi_UEPS_down = efflumi * hcount->GetBinContent(2) / hcount->GetBinContent(4);
#endif
bool verbose = false;
for (Int_t ihj = 0; ihj < 2; ihj++) {
    /// Evaluate TMVA regression output
    for (UInt_t iexpr = 0; iexpr < nvars; iexpr++) {
        if (ihj==0) {
            readerVars[iexpr] = inputFormulasReg0.at(iexpr)->EvalInstance();
        } else if (ihj==1) {
            readerVars[iexpr] = inputFormulasReg1.at(iexpr)->EvalInstance();
        }
    }
    hJet_ptReg[ihj] = (reader->EvaluateRegression(regMethod + " method"))[0];
    if (verbose) std::cout << readerVars[idx_pt] << " " << readerVars[idx_rawpt] << " " << hJet_pt[ihj] << " " << hJet_ptReg[ihj] << " " << hJet_genPt[ihj] << std::endl;
    const TLorentzVector p4Zero = TLorentzVector(0., 0., 0., 0.);
    // int idx = hJCidx[0];
    // std::cout << "the regressed pt for jet 0 is " << hJet_ptReg[0] << "; the hJCidx is " << hJCidx[0] << ", hence the original pt is " << hJet_pt[idx] << std::endl;
    const TLorentzVector& hJet_p4Norm_0 = makePtEtaPhiM(hJet_pt[hJCidx[0]], hJet_pt[hJCidx[0]], hJet_eta[hJCidx[0]], hJet_phi[hJCidx[0]], hJet_m[hJCidx[0]]);
    const TLorentzVector& hJet_p4Norm_1 = makePtEtaPhiM(hJet_pt[hJCidx[1]], hJet_pt[hJCidx[1]], hJet_eta[hJCidx[1]], hJet_phi[hJCidx[1]], hJet_m[hJCidx[1]]);
    const TLorentzVector& hJet_p4Gen_0  = hJet_genPt[hJCidx[0]] > 0 ?
        makePtEtaPhiM(hJet_genPt[hJCidx[0]], hJet_pt[hJCidx[0]], hJet_eta[hJCidx[0]], hJet_phi[hJCidx[0]], hJet_m[hJCidx[0]]) : p4Zero;
    const TLorentzVector& hJet_p4Gen_1  = hJet_genPt[hJCidx[1]] > 0 ?
        makePtEtaPhiM(hJet_genPt[hJCidx[1]], hJet_pt[hJCidx[1]], hJet_eta[hJCidx[1]], hJet_phi[hJCidx[1]], hJet_m[hJCidx[1]]) : p4Zero;
    const TLorentzVector& hJet_p4Reg_0  = makePtEtaPhiM(hJet_ptReg[0], hJet_pt[hJCidx[0]], hJet_eta[hJCidx[0]], hJet_phi[hJCidx[0]], hJet_m[hJCidx[0]]);
    const TLorentzVector& hJet_p4Reg_1  = makePtEtaPhiM(hJet_ptReg[1], hJet_pt[hJCidx[1]], hJet_eta[hJCidx[1]], hJet_phi[hJCidx[1]], hJet_m[hJCidx[1]]);
    HptNorm   = (hJet_p4Norm_0 + hJet_p4Norm_1).Pt();
    HptGen    = (hJet_p4Gen_0  + hJet_p4Gen_1 ).Pt();
    HptReg    = (hJet_p4Reg_0  + hJet_p4Reg_1 ).Pt();
    HmassNorm = (hJet_p4Norm_0 + hJet_p4Norm_1).M();
    HmassGen  = (hJet_p4Gen_0  + hJet_p4Gen_1 ).M();
    HmassReg  = (hJet_p4Reg_0  + hJet_p4Reg_1 ).M();
    // std::cout << "HmassReg is " << HmassReg << std::endl;
}
outTree->Fill(); // fill it!
} // end loop over TTree entries
/// Get elapsed time
sw.Stop();
std::cout << "--- End of event loop: ";
sw.Print();
output->cd();
outTree->Write();
output->Close();
input->Close();
delete input;
delete output;
for (formIt=inputFormulasReg0.begin(), formItEnd=inputFormulasReg0.end(); formIt!=formItEnd; formIt++)
    delete *formIt;
for (formIt=inputFormulasReg1.begin(), formItEnd=inputFormulasReg1.end(); formIt!=formItEnd; formIt++)
    delete *formIt;
for (formIt=inputFormulasFJReg0.begin(), formItEnd=inputFormulasFJReg0.end(); formIt!=formItEnd; formIt++)
    delete *formIt;
for (formIt=inputFormulasFJReg1.begin(), formItEnd=inputFormulasFJReg1.end(); formIt!=formItEnd; formIt++)
    delete *formIt;
for (formIt=inputFormulasFJReg2.begin(), formItEnd=inputFormulasFJReg2.end(); formIt!=formItEnd; formIt++)
    delete *formIt;
delete ttf_lheweight;
std::cout << "==> GrowTree is done!" << std::endl << std::endl;
return;
}
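The helper makePtEtaPhiM used above is defined in the elided part of GrowTree, so its body is not shown here. A plausible sketch, under the assumption that it builds a four-vector with the new (possibly regressed) pT while keeping the original direction and rescaling the mass by the same pT ratio, is given below; treat it as an illustration of the call signature, not the author's actual implementation.
#include "TLorentzVector.h"

// Hypothetical reconstruction of the helper used above (assumption, not the original code):
// build a jet four-vector with a new pT, keep eta/phi, and scale the mass by newPt/oldPt.
// Whether the original helper scales or keeps the mass is not known from the shown snippet.
TLorentzVector makePtEtaPhiM(double newPt, double oldPt, double eta, double phi, double m)
{
   TLorentzVector p4;
   double scaledM = (oldPt > 0.) ? m * newPt / oldPt : m;  // avoid dividing by zero
   p4.SetPtEtaPhiM(newPt, eta, phi, scaledM);
   return p4;
}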
Example 4: useAutoencoder
//......... part of the code is omitted here .........
TString dir = "weights/";
TString prefix = "TMVAAutoencoder";
TString weightfile = dir + prefix + TString("_") + method_name + TString(".weights.xml");
TString outPrefix = "transformed";
TString outfilename = pathToData + outPrefix + TString("_") + method_name + TString(".root");
reader->BookMVA( method_name, weightfile );
TFile* outFile = new TFile (outfilename.Data (), "RECREATE");
std::vector<std::string> inputNames = {"training"};
std::map<std::string,std::vector<std::string>> varsForInput;
varsForInput["training"].emplace_back ("id");
varsForInput["training"].emplace_back ("signal");
for (auto inputName : inputNames)
{
    std::stringstream outfilename;
    outfilename << inputName << "_transformed__" << method_name.Data () << ".root";
    std::cout << outfilename.str () << std::endl;
    /* return; */
    std::stringstream infilename;
    infilename << pathToData.Data () << inputName << ".root";
    TTree* outTree = new TTree("transformed","transformed");
    std::vector<Float_t> outVariables (localVariableNames.size ());
    itVar = begin (variables);
    auto itOutVar = begin (outVariables);
    for (auto varName : localVariableNames)
    {
        Float_t* pOutVar = &(*itOutVar);
        outTree->Branch (varName.c_str (), pOutVar, "F");
        (*itOutVar) = 0.0;
        ++itOutVar;
        Float_t* pVar = &(*itVar);
        std::stringstream svar;
        svar << varName << "_in";
        outTree->Branch (svar.str ().c_str (), pVar, "F");
        (*itVar) = 0.0;
        ++itVar;
    }
    Float_t signal_original = 0.0;
    outTree->Branch ("signal_original", &signal_original, "F");
    TFile *input(0);
    std::cout << "infilename = " << infilename.str ().c_str () << std::endl;
    input = TFile::Open (infilename.str ().c_str ());
    TTree* tree = (TTree*)input->Get("data");
    Int_t ids;
    // id field if needed
    if (std::find (varsForInput[inputName].begin (), varsForInput[inputName].end (), "id") != varsForInput[inputName].end ())
        tree->SetBranchAddress("id", &ids);
    // variables for prediction (note: this loop variable shadows the outer inputName)
    itVar = begin (variables);
    for (auto inputName : localVariableNames)
    {
        Float_t* pVar = &(*itVar);
        tree->SetBranchAddress (inputName.c_str(), pVar);
        ++itVar;
    }
    for (Long64_t ievt=0; ievt < tree->GetEntries(); ievt++)
    {
        tree->GetEntry(ievt);
        // predict
        signal_original = variables.at (idxSignal);
        for (int forcedSignal = 0; forcedSignal <= 1; ++forcedSignal)
        {
            variables.at (idxSignal) = forcedSignal;
            std::vector<Float_t> regressionValues = reader->EvaluateRegression (method_name);
            size_t idx = 0;
            for (auto it = std::begin (regressionValues), itEnd = std::end (regressionValues); it != itEnd; ++it)
            {
                outVariables.at (idx) = *it;
                ++idx;
            }
            outTree->Fill ();
        }
    }
    outFile->Write ();
    input->Close();
}
delete reader;
return outfilename;
}
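For reference, the transformed tree written above can be read back with plain ROOT I/O. The sketch below follows the patterns visible in the example (output file named outPrefix + "_" + method_name + ".root" under pathToData, tree "transformed", one branch per variable plus a "_in" copy and signal_original); the method name "autoencoder", the variable name "x1", and the file location are made-up placeholders.
#include <iostream>
#include "TFile.h"
#include "TTree.h"

// Sketch: read back one transformed variable and the stored original signal flag.
// Note: the example fills two entries per input event (signal forced to 0 and to 1).
void readTransformed() {
   TFile* f = TFile::Open("transformed_autoencoder.root");  // file name pattern from the example; method tag assumed
   TTree* t = (TTree*)f->Get("transformed");
   Float_t x1 = 0.f, x1_in = 0.f, signal_original = 0.f;
   t->SetBranchAddress("x1",              &x1);              // autoencoder output for this variable
   t->SetBranchAddress("x1_in",           &x1_in);           // the corresponding input value
   t->SetBranchAddress("signal_original", &signal_original); // signal flag before it was forced to 0/1
   for (Long64_t i = 0; i < t->GetEntries(); ++i) {
      t->GetEntry(i);
      std::cout << x1_in << " -> " << x1 << "  (signal_original = " << signal_original << ")" << std::endl;
   }
   f->Close();
}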