This page collects typical usage examples of the C++ method TMVA::Factory::AddRegressionTree. If you are wondering what Factory::AddRegressionTree does, how to call it, and what it looks like in real code, the examples selected below should help. You can also explore further the usage of the enclosing class, TMVA::Factory.
Seven code examples of Factory::AddRegressionTree are shown below, sorted by popularity by default.
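Before the full examples, here is a minimal self-contained sketch of the call sequence they all share: declare the inputs and the target, register a tree with AddRegressionTree, split it into training and test samples, book a method, and train. The file, tree and branch names are placeholders rather than values taken from any example below, and the sketch uses the older Factory-only data interface that all seven examples rely on (in recent ROOT releases these data calls live on TMVA::DataLoader instead).

#include "TFile.h"
#include "TTree.h"
#include "TCut.h"
#include "TMVA/Tools.h"
#include "TMVA/Factory.h"
#include "TMVA/Types.h"

void minimalRegression() {
   TMVA::Tools::Instance();
   TFile* outputFile = TFile::Open( "TMVAReg_minimal.root", "RECREATE" );
   TMVA::Factory* factory = new TMVA::Factory( "TMVARegression", outputFile,
      "!V:!Silent:Color:DrawProgressBar" );
   factory->AddVariable( "x1", 'F' );                 // regression inputs (placeholder branches)
   factory->AddVariable( "x2", 'F' );
   factory->AddTarget( "y" );                         // regression target (placeholder branch)
   TFile* input = TFile::Open( "input.root" );        // placeholder input file
   TTree* tree  = (TTree*)input->Get( "tree" );       // placeholder tree name
   factory->AddRegressionTree( tree, 1.0 );           // the tree plus a global per-tree weight
   TCut mycut = "";                                   // no event selection
   factory->PrepareTrainingAndTestTree( mycut,
      "nTrain_Regression=0:nTest_Regression=0:SplitMode=Random:NormMode=NumEvents:!V" );
   factory->BookMethod( TMVA::Types::kBDT, "BDT",
      "!H:!V:NTrees=100:BoostType=AdaBoostR2:SeparationType=RegressionVariance:nCuts=20" );
   factory->TrainAllMethods();
   factory->TestAllMethods();
   factory->EvaluateAllMethods();
   outputFile->Close();
   delete factory;
}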
Example 1: regressphi
void regressphi() {
TMVA::Tools::Instance();
std::cout << "==> Start TMVAClassification" << std::endl;
// Create a ROOT output file where TMVA will store ntuples, histograms, etc.
TString outfileName( "TMVA.root" );
TFile* outputFile = TFile::Open( outfileName, "RECREATE" );
TMVA::Factory *factory = new TMVA::Factory( "mva", outputFile,
"!V:!Silent:Color:DrawProgressBar" );
factory->AddVariable( "npv" , 'F' );
factory->AddVariable( "u" , 'F' );
factory->AddVariable( "uphi" , 'F' );
factory->AddVariable( "chsumet/sumet" , 'F' );
factory->AddVariable( "tku" , 'F' );
factory->AddVariable( "tkuphi" , 'F' );
factory->AddVariable( "nopusumet/sumet" , 'F' );
factory->AddVariable( "nopuu" , 'F' );
factory->AddVariable( "nopuuphi" , 'F' );
factory->AddVariable( "pusumet/sumet" , 'F' );
factory->AddVariable( "pumet" , 'F' );
factory->AddVariable( "pumetphi" , 'F' );
factory->AddVariable( "pucsumet/sumet" , 'F' );
factory->AddVariable( "pucu" , 'F' );
factory->AddVariable( "pucuphi" , 'F' );
factory->AddVariable( "jspt_1" , 'F' );
factory->AddVariable( "jseta_1" , 'F' );
factory->AddVariable( "jsphi_1" , 'F' );
factory->AddVariable( "jspt_2" , 'F' );
factory->AddVariable( "jseta_2" , 'F' );
factory->AddVariable( "jsphi_2" , 'F' );
factory->AddVariable( "nalljet" , 'I' );
factory->AddVariable( "njet" , 'I' );
factory->AddTarget( "rphi_z-uphi+ 2.*TMath::Pi()*(rphi_z-uphi < -TMath::Pi()) - 2.*TMath::Pi()*(rphi_z-uphi > TMath::Pi()) " );
TString lName = "../Jets/r11-dimu_nochs_v2.root";
TFile *lInput = TFile::Open(lName);
TTree *lRegress = (TTree*)lInput ->Get("Flat");
Double_t lRWeight = 1.0;
factory->AddRegressionTree( lRegress , lRWeight );
TCut lCut = "nbtag == 0"; //Cut to remove real MET
// Stronger alternative cut to remove real MET: (rpt_z < 40 || (rpt_z > 40 && rpt_z+u1 < 40)) && nbtag == 0
factory->PrepareTrainingAndTestTree( lCut,
"nTrain_Regression=0:nTest_Regression=0:SplitMode=Block:NormMode=NumEvents:!V" );
// Boosted Decision Trees
factory->BookMethod( TMVA::Types::kBDT, "RecoilPhiRegress_data_clean2_njet",
"!H:!V:VarTransform=None:nEventsMin=200:NTrees=100:BoostType=Grad:Shrinkage=0.1:MaxDepth=100:NNodesMax=100000:UseYesNoLeaf=F:nCuts=2000");//MaxDepth=100
factory->TrainAllMethods();
factory->TestAllMethods();
factory->EvaluateAllMethods();
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVAClassification is done!" << std::endl;
delete factory;
//if (!gROOT->IsBatch()) TMVAGui( outfileName );
}
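Example 1 only trains and writes the weight file; applying the regression afterwards is done with TMVA::Reader, which none of the examples on this page show. The sketch below is therefore only an illustration under assumptions: the weight-file path follows the default TMVA convention weights/<jobname>_<method>.weights.xml for the job name "mva" used above, and only the first three of the 23 input variables are spelled out.

#include "TMVA/Reader.h"

void applyRecoilPhiRegression() {
   Float_t npv, u, uphi;   // one Float_t per input, added in the same order as in the factory
   TMVA::Reader reader( "!Color:!Silent" );
   reader.AddVariable( "npv" , &npv );
   reader.AddVariable( "u"   , &u );
   reader.AddVariable( "uphi", &uphi );
   // ... AddVariable(...) for each of the remaining inputs booked in example 1 ...
   reader.BookMVA( "RecoilPhiRegress_data_clean2_njet",
                   "weights/mva_RecoilPhiRegress_data_clean2_njet.weights.xml" ); // assumed default path
   // inside the event loop, after filling the Float_t variables from the tree:
   Float_t dPhi = reader.EvaluateRegression( 0, "RecoilPhiRegress_data_clean2_njet" );
   (void)dPhi;             // e.g. corrected recoil phi = uphi + dPhi
}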
Example 2: Loop
void REG::Loop() {
if (fChain == 0) return;
Long64_t nentries = fChain->GetEntriesFast();
Long64_t nbytes = 0, nb = 0;
TMVA::Tools::Instance();
std::cout << std::endl;
std::cout << "==> Start TMVARegression" << std::endl;
TString outfileName( "TMVAReg.root" );
TFile* outputFile = TFile::Open( outfileName, "RECREATE" );
TMVA::Factory *factory = new TMVA::Factory( "TMVARegression", outputFile,
"!V:!Silent:Color:DrawProgressBar" );
factory->AddVariable( "l1Pt", "Variable 1", "units", 'F' );
factory->AddVariable( "l1Eta", "Variable 2", "units", 'F' );
factory->AddVariable( "l1Phi", "Variable 1", "units", 'F' );
// factory->AddVariable( "RhoL1", "Variable 2", "units", 'F' );
factory->AddTarget( "ratio" );
factory->AddRegressionTree(fChain, 1.0 );
TCut mycut = "";
factory->PrepareTrainingAndTestTree( mycut,
"nTrain_Regression=10000:nTest_Regression=0:SplitMode=Block:NormMode=NumEvents:!V" );
factory->BookMethod( TMVA::Types::kBDT, "BDT",
"!H:!V:NTrees=100:nEventsMin=5:BoostType=AdaBoostR2:SeparationType=RegressionVariance:nCuts=20:PruneMethod=CostComplexity:PruneStrength=30" );
factory->TrainAllMethods();
// ---- Evaluate all MVAs using the set of test events
// factory->TestAllMethods();
// ----- Evaluate and compare performance of all configured MVAs
// factory->EvaluateAllMethods();
// --------------------------------------------------------------
// Save the output
outputFile->Close();
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVARegression is done!" << std::endl;
delete factory;
}
Example 3: main
//......... part of the code is omitted here .........
factory->AddVariable( "ele1_eRegrInput_bEMax_Over_bCE" , 'F');
factory->AddVariable( "ele1_dxy_PV" , 'F');
factory->AddVariable( "ele1_dz_PV" , 'F');
factory->AddVariable( "ele1_sigmaP/ele1_tkP" , 'F');
factory->AddVariable( "ele1_eRegrInput_bCELow_Over_sCE", 'F');
factory->AddVariable( "ele1_eRegrInput_e3x3_Over_bCELow" , 'F');
factory->AddVariable( "ele1_eRegrInput_Deta_bCLow_sC" , 'F');
factory->AddVariable( "ele1_eRegrInput_Dphi_bCLow_sC" , 'F');
factory->AddVariable( "ele1_eRegrInput_seedbC_etacry" , 'F');
factory->AddVariable( "ele1_eRegrInput_seedbC_phicry" , 'F');
// You can add so-called "Spectator variables", which are not used in the MVA training,
// but will appear in the final "TestTree" produced by TMVA. This TestTree will contain the
// input variables, the response values of all trained MVAs, and the spectator variables
// factory->AddSpectator( "spec1:=var1*2", "Spectator 1", "units", 'F' );
// factory->AddSpectator( "spec2:=var1*3", "Spectator 2", "units", 'F' );
// Add the variable carrying the regression target
// factory->AddTarget("ele1_scE/ele1_E_true" );
factory->AddTarget("ele1_tkP/ele1_E_true" );
// It is also possible to declare additional targets for multi-dimensional regression, ie:
// -- factory->AddTarget( "fvalue2" );
// BUT: this is currently ONLY implemented for MLP
// global event weights per tree (see below for setting event-wise weights)
Double_t regWeight = 1.0;
// You can add an arbitrary number of regression trees
factory->AddRegressionTree( treeDATA, regWeight );
// This would set individual event weights (the variables defined in the
// expression need to exist in the original TTree)
// factory->SetWeightExpression( "var1", "Regression" );
// TCut mycut = "ele1_isEB==1 && ele1_sigmaP/ele1_tkP<0.4 && ele1_fbrem>0 && abs(ele1_dxy_PV)<0.05 && abs(ele1_dz_PV)<0.05 && ele1_eRegrInput_etaW > 0.006 && ele1_eRegrInput_phiW<0.08 && ele1_eRegrInput_sigietaieta_bC1>0.006 && ele1_eRegrInput_sigiphiiphi_bC1>0.008 && abs(ele1_eRegrInput_Deta_bC_sC)<0.004 && abs(ele1_eRegrInput_Dphi_bC_sC)<0.04 && abs(ele1_eRegrInput_seedbC_etacry)<0.6 && abs(ele1_eRegrInput_seedbC_phicry)<0.6 && ele1_scE/ele1_scERaw<1.2 && (ele1_scE/ele1_E_true)<1.4 && (ele1_scE/ele1_E_true)>0.3"; // for example: TCut mycut = "abs(var1)<0.5 && abs(var2-0.5)<1";
TCut mycut = "ele1_isEB==1 && ele1_sigmaP/ele1_tkP<0.4 && ele1_fbrem>0 && abs(ele1_dxy_PV)<0.05 && abs(ele1_dz_PV)<0.05 && ele1_eRegrInput_etaW > 0.006 && ele1_eRegrInput_phiW<0.08 && ele1_eRegrInput_sigietaieta_bC1>0.006 && ele1_eRegrInput_sigiphiiphi_bC1>0.008 && abs(ele1_eRegrInput_Deta_bC_sC)<0.004 && abs(ele1_eRegrInput_Dphi_bC_sC)<0.04 && abs(ele1_eRegrInput_seedbC_etacry)<0.6 && abs(ele1_eRegrInput_seedbC_phicry)<0.6 && ele1_scE/ele1_scERaw<1.2 && ele1_tkP/ele1_E_true<1.8 && ele1_tkP/ele1_E_true>0.2"; // for example: TCut mycut = "abs(var1)<0.5 && abs(var2-0.5)<1";
// tell the factory to use all remaining events in the trees after training for testing:
factory->PrepareTrainingAndTestTree( mycut,
"nTrain_Regression=2500000:nTest_Regression=2500000:SplitMode=Random:NormMode=NumEvents:!V" );
TString Name = Form("weight_%s_%s_P_W",RegionOfTraining.c_str(),UseMethodFlag.c_str());
(TMVA::gConfig().GetIONames()).fWeightFileDir = Name;
}
if(RegionOfTraining=="EE"){
factory->AddVariable( "ele1_scE/ele1_scERaw" , 'F');
factory->AddVariable( "ele1_eRegrInput_nPV",'F');
factory->AddVariable( "ele1_eRegrInput_r9",'F');
factory->AddVariable( "ele1_fbrem",'F');
factory->AddVariable( "ele1_eta",'F');
factory->AddVariable( "ele1_DphiIn",'F');
factory->AddVariable( "ele1_DetaIn",'F');
factory->AddVariable( "ele1_sigmaIetaIeta",'F');
factory->AddVariable( "ele1_eRegrInput_etaW",'F');
factory->AddVariable( "ele1_eRegrInput_phiW",'F');
Example 4: BJetRegression
//......... part of the code is omitted here .........
// factory->AddSpectator( "spec2:=var1*3", "Spectator 2", "units", 'F' );
// Add the variable carrying the regression target
// factory->AddTarget( "matchGenJet1Pt" );
factory->AddTarget( "matchGenJetPt" );
// It is also possible to declare additional targets for multi-dimensional regression, ie:
// -- factory->AddTarget( "fvalue2" );
// BUT: this is currently ONLY implemented for MLP
// Read training and test data (see TMVAClassification for reading ASCII files)
// load the signal and background event samples from ROOT trees
TFile *input(0);
TString fname = "/scratch/zmao/regression/allSample_both_isobTag.root";
if (!gSystem->AccessPathName( fname ))
input = TFile::Open( fname ); // check if file in local directory exists
else
input = TFile::Open( "http://root.cern.ch/files/tmva_reg_example.root" ); // if not: download from ROOT server
if (!input) {
std::cout << "ERROR: could not open data file" << std::endl;
exit(1);
}
std::cout << "--- TMVARegression : Using input file: " << input->GetName() << std::endl;
// --- Register the regression tree
TTree *regTree = (TTree*)input->Get("eventTree");
// global event weights per tree (see below for setting event-wise weights)
Double_t regWeight = 1.0;
// You can add an arbitrary number of regression trees
factory->AddRegressionTree( regTree, regWeight );
// This would set individual event weights (the variables defined in the
// expression need to exist in the original TTree)
// factory->SetWeightExpression( "triggerEff", "Regression" );
// Apply additional cuts on the signal and background samples (can be different)
TCut mycut = ""; // for example: TCut mycut = "abs(var1)<0.5 && abs(var2-0.5)<1";
// tell the factory to use all remaining events in the trees after training for testing:
factory->PrepareTrainingAndTestTree( mycut,
"nTrain_Regression=0:nTest_Regression=0:SplitMode=Random:NormMode=NumEvents:!V" );
// If no numbers of events are given, half of the events in the tree are used
// for training, and the other half for testing:
// factory->PrepareTrainingAndTestTree( mycut, "SplitMode=random:!V" );
// ---- Book MVA methods
//
// please lookup the various method configuration options in the corresponding cxx files, eg:
// src/MethodCuts.cxx, etc, or here: http://tmva.sourceforge.net/optionRef.html
// it is possible to preset ranges in the option string in which the cut optimisation should be done:
// "...:CutRangeMin[2]=-1:CutRangeMax[2]=1"...", where [2] is the third input variable
// PDE - RS method
if (Use["PDERS"])
factory->BookMethod( TMVA::Types::kPDERS, "PDERS",
"!H:!V:NormTree=T:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=40:NEventsMax=60:VarTransform=None" );
// And the options strings for the MinMax and RMS methods, respectively:
// "!H:!V:VolumeRangeMode=MinMax:DeltaFrac=0.2:KernelEstimator=Gauss:GaussSigma=0.3" );
// "!H:!V:VolumeRangeMode=RMS:DeltaFrac=3:KernelEstimator=Gauss:GaussSigma=0.3" );
if (Use["PDEFoam"])
Example 5: TrainRegressionFJ
//......... part of the code is omitted here .........
processes.push_back("ZnnH145");
processes.push_back("ZnnH150");
#ifdef USE_WH
processes.push_back("WlnH110");
processes.push_back("WlnH115");
processes.push_back("WlnH120");
processes.push_back("WlnH125");
processes.push_back("WlnH130");
processes.push_back("WlnH135");
processes.push_back("WlnH140");
processes.push_back("WlnH145");
processes.push_back("WlnH150");
#endif
std::vector<TFile *> files;
for (UInt_t i=0; i<processes.size(); i++){
std::string process = processes.at(i);
input = (TFile*) TFile::Open(dirname + prefix + process + suffix, "READ");
if (!input) {
std::cout << "ERROR: Could not open input file." << std::endl;
exit(1);
}
std::cout << "--- TMVARegression : Using input file: " << input->GetName() << std::endl;
files.push_back(input);
// --- Register the regression tree
regTrainTree = (TTree*) input->Get("tree_train");
regTestTree = (TTree*) input->Get("tree_test");
// Global event weights per tree (see below for setting event-wise weights)
Double_t regWeight = 1.0;
// You can add an arbitrary number of regression trees
factory->AddRegressionTree(regTrainTree, regWeight, TMVA::Types::kTraining);
factory->AddRegressionTree(regTestTree , regWeight, TMVA::Types::kTesting );
}
// Set individual event weights (the variables must exist in the original TTree)
//factory->SetWeightExpression( "var1", "Regression" );
// Apply additional cuts on the signal and background samples (can be different)
TCut mycut = "fathFilterJets_genPt>10 && fathFilterJets_pt>15 && abs(fathFilterJets_eta)<2.5"; // this is to avoid 3rd filter jet without gen match
//TCut mycut = "hJet_genPt[0] > 0. && hJet_genPt[1] > 0. && hJet_csv[0] > 0. && hJet_csv[1] > 0. && hJet_pt[0] > 20. && hJet_pt[1] > 20. && abs(hJet_eta[0])<2.5 && abs(hJet_eta[1])<2.5";
// Tell the factory to use all remaining events in the trees after training for testing:
factory->PrepareTrainingAndTestTree( mycut, "V:nTrain_Regression=0:nTest_Regression=0:SplitMode=Random:NormMode=NumEvents" );
// If no numbers of events are given, half of the events in the tree are used
// for training, and the other half for testing:
// factory->PrepareTrainingAndTestTree( mycut, "SplitMode=Random:!V" );
// --- Book MVA methods
//
// Please lookup the various method configuration options in the corresponding cxx files, eg:
// src/MethodCuts.cxx, etc, or here: http://tmva.sourceforge.net/optionRef.html
// it is possible to preset ranges in the option string in which the cut optimisation should be done:
// "...:CutRangeMin[2]=-1:CutRangeMax[2]=1"...", where [2] is the third input variable
// PDE - RS method
if (Use["PDERS"])
factory->BookMethod( TMVA::Types::kPDERS, "PDERS",
"!H:!V:NormTree=T:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=40:NEventsMax=60:VarTransform=None" );
// And the options strings for the MinMax and RMS methods, respectively:
// "!H:!V:VolumeRangeMode=MinMax:DeltaFrac=0.2:KernelEstimator=Gauss:GaussSigma=0.3" );
// "!H:!V:VolumeRangeMode=RMS:DeltaFrac=3:KernelEstimator=Gauss:GaussSigma=0.3" );
Example 6: TMVARegression
//......... part of the code is omitted here .........
// It is also possible to declare additional targets for multi-dimensional regression, ie:
// -- factory->AddTarget( "fvalue2" );
// BUT: this is currently ONLY implemented for MLP
// read training and test data (see TMVAClassification for reading ASCII files)
// load the signal and background event samples from ROOT trees
// TFile *input(0);
// TString fname = "./stabTreeFed630.root";
// if (!gSystem->AccessPathName( fname )) {
// input = TFile::Open( fname ); // check if file in local directory exists
// }
// if (!input) {
// std::cout << "ERROR: could not open data file" << std::endl;
// exit(1);
// }
// std::cout << "--- TMVARegression : Using input file: " << input->GetName() << std::endl;
TChain * ntu = new TChain("x");
ntu->Add("/data2/EcalLaserMonitoringData/ntuples_2011_158851_178888/ntu_data_0015*.root");
ntu->Add("/data2/EcalLaserMonitoringData/ntuples_2011_158851_178888/ntu_data_0016*.root");
//TTree *regTree = (TTree*)input->Get("ntu");
// global event weights per tree (see below for setting event-wise weights)
Double_t regWeight = 1.0;
// ====== register trees ====================================================
//
// the following method is the prefered one:
// you can add an arbitrary number of regression trees
//factory->AddRegressionTree( regTree, regWeight );
factory->AddRegressionTree( ntu, regWeight );
// Alternative call:
// -- factory->AddRegressionTree( regTree, regWeight );
// This would set individual event weights (the variables defined in the
// expression need to exist in the original TTree)
// factory->SetWeightExpression( "fwhm-26", "Regression" );
// Apply additional cuts on the signal and background samples (can be different)
// TCut mycut = "abs(0.5*(apd0+apd1)-1)<0.015 && timeStamp<1299.8e6"; // for example: TCut mycut = "abs(var1)<0.5 && abs(var2-0.5)<1";
cout << "Defining cuts..." << endl;
TCut mycut = "fed[0] == 605 && harness == 6 && field > 0.1 && run < 161200 && apdpnAB[0] > 0";
// Count selected events and use them all
TCanvas cdummy("cdummy","cdummy");
cdummy.cd();
Int_t selectedEvts = ntu->Draw("l_fwhm[0]",mycut,"N");
cdummy.Delete();
cout << selectedEvts << endl;
char trainPara[132];
sprintf(trainPara,"nTrain_Regression=%d:nTest_Regression=0:SplitMode=Random:NormMode=NumEvents:!V",selectedEvts-10);
// tell the factory to use all remaining events in the trees after training for testing:
factory->PrepareTrainingAndTestTree( mycut, trainPara );
// If no numbers of events are given, half of the events in the tree are used for training, and
// the other half for testing:
// factory->PrepareTrainingAndTestTree( mycut, "SplitMode=random:!V" );
// To also specify the number of testing events, use:
// factory->PrepareTrainingAndTestTree( mycut,
// "NSigTrain=3000:NBkgTrain=3000:NSigTest=3000:NBkgTest=3000:SplitMode=Random:!V" );
// ---- Book MVA methods
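Example 6 counts the events passing the cut by drawing into a dummy canvas. A lighter alternative, shown in the two-line sketch below using the same ntu chain and mycut as above, is the "goff" (graphics off) draw option: Draw then returns the number of selected entries without ever creating a canvas.

// No canvas needed: "goff" suppresses all graphics, the return value is the selected-entry count.
Long64_t selectedEvts = ntu->Draw( "l_fwhm[0]", mycut, "goff" );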
Example 7: autoencoder
TString autoencoder (std::string inputFileName)
{
std::string tmstr (now ());
TString tmstmp (tmstr.c_str ());
std::cout << "==> Start Autoencoder " << std::endl;
std::cout << "-------------------- open input file ---------------- " << std::endl;
TString fname = pathToData + TString (inputFileName.c_str ()) + TString (".root");
TFile *input = TFile::Open( fname );
std::cout << "-------------------- get tree ---------------- " << std::endl;
TTree *tree = (TTree*)input->Get("data");
TString outfileName( "TMVAAutoEnc__" );
outfileName += TString (inputFileName.c_str ()) + TString ("__") + tmstmp + TString (".root");
std::cout << "-------------------- open output file ---------------- " << std::endl;
TFile* outputFile = TFile::Open( outfileName, "RECREATE" );
std::cout << "-------------------- prepare factory ---------------- " << std::endl;
TMVA::Factory *factory = new TMVA::Factory( "TMVAAutoencoder", outputFile,
"AnalysisType=Regression:Color:DrawProgressBar" );
std::cout << "-------------------- add variables ---------------- " << std::endl;
for (auto varname : variableNames+additionalVariableNames)
{
factory->AddVariable (varname.c_str (), 'F');
factory->AddTarget (varname.c_str (), 'F');
}
std::cout << "-------------------- add tree ---------------- " << std::endl;
// global event weights per tree (see below for setting event-wise weights)
Double_t regWeight = 1.0;
factory->AddRegressionTree (tree, regWeight);
std::cout << "-------------------- prepare ---------------- " << std::endl;
TCut mycut = ""; // for example: TCut mycut = "abs(var1)<0.5 && abs(var2-0.5)<1";
factory->PrepareTrainingAndTestTree( mycut,
"nTrain_Regression=0:nTest_Regression=0:SplitMode=Random:NormMode=NumEvents:!V" );
/* // This would set individual event weights (the variables defined in the */
/* // expression need to exist in the original TTree) */
/* factory->SetWeightExpression( "var1", "Regression" ); */
if (true)
{
TString layoutString ("Layout=TANH|100,TANH|20,TANH|40,LINEAR");
TString training0 ("LearningRate=1e-5,Momentum=0.5,Repetitions=1,ConvergenceSteps=500,BatchSize=50,TestRepetitions=7,WeightDecay=0.01,Regularization=NONE,DropConfig=0.5+0.5+0.5+0.5,DropRepetitions=2");
TString training1 ("LearningRate=1e-5,Momentum=0.9,Repetitions=1,ConvergenceSteps=500,BatchSize=30,TestRepetitions=7,WeightDecay=0.01,Regularization=L2,DropConfig=0.1+0.1+0.1,DropRepetitions=1");
TString training2 ("LearningRate=1e-4,Momentum=0.3,Repetitions=1,ConvergenceSteps=10,BatchSize=40,TestRepetitions=7,WeightDecay=0.1,Regularization=L2");
TString training3 ("LearningRate=1e-5,Momentum=0.1,Repetitions=1,ConvergenceSteps=10,BatchSize=10,TestRepetitions=7,WeightDecay=0.001,Regularization=NONE");
TString trainingStrategyString ("TrainingStrategy=");
trainingStrategyString += training0 + "|" + training1 + "|" + training2 ; //+ "|" + training3;
// TString trainingStrategyString ("TrainingStrategy=LearningRate=1e-1,Momentum=0.3,Repetitions=3,ConvergenceSteps=20,BatchSize=30,TestRepetitions=7,WeightDecay=0.0,L1=false,DropFraction=0.0,DropRepetitions=5");
TString nnOptions ("!H:V:ErrorStrategy=SUMOFSQUARES:VarTransform=N:WeightInitialization=XAVIERUNIFORM");
// TString nnOptions ("!H:V:VarTransform=Normalize:ErrorStrategy=CHECKGRADIENTS");
nnOptions.Append (":"); nnOptions.Append (layoutString);
nnOptions.Append (":"); nnOptions.Append (trainingStrategyString);
factory->BookMethod( TMVA::Types::kNN, TString("NN_")+tmstmp, nnOptions ); // NN
}
// --------------------------------------------------------------------------------------------------
factory->TrainAllMethods();
factory->TestAllMethods();
factory->EvaluateAllMethods();
outputFile->Close();
// TMVA::TMVARegGui (outfileName);
delete factory;
return TString("NN_")+tmstmp;
}