本文整理汇总了C++中Prediction类的典型用法代码示例。如果您正苦于以下问题:C++ Prediction类的具体用法?C++ Prediction怎么用?C++ Prediction使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了Prediction类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: while
/** Drives the selector over every test datum in the suite.
 *
 *  For each case: feeds the update string into the context stream,
 *  runs the selector on the input prediction, and checks that the
 *  selected tokens match the expected output prediction word by word.
 */
void SelectorTest::testSelect(TestDataSuite* tds)
{
    for (; tds->hasMoreTestData(); tds->nextTestData()) {
        std::cerr << "Updating strstream: " << strstream->str() << '|' << std::endl
                  << " with: " << tds->getUpdateString() << '|' << std::endl;
        *strstream << tds->getUpdateString();

        std::vector<std::string> actualTokens =
            selector->select(tds->getInputPrediction());
        Prediction expected = tds->getOutputPrediction();

        // sizes must agree before element-wise comparison
        CPPUNIT_ASSERT_EQUAL((size_t)expected.size(), actualTokens.size());

        int i = 0;
        for (std::vector<std::string>::const_iterator tok = actualTokens.begin();
             tok != actualTokens.end(); ++tok, ++i) {
            std::cerr << "[expected] " << expected.getSuggestion(i).getWord()
                      << " [actual] " << *tok << std::endl;
            CPPUNIT_ASSERT_EQUAL(expected.getSuggestion(i).getWord(), *tok);
        }

        contextTracker->update();
    }
}
示例2: predict
// Returns a fixed, canned prediction.
//
// A real plugin would query its resources to retrieve the most
// probable completion of the prefix based on the current history,
// but this is just a dummy plugin that returns a hard-coded set of
// suggestions (the max size and filter arguments are ignored).
//
Prediction DummyPlugin::predict(const size_t max_partial_predictions_size, const char** filter) const
{
    // Canned word/probability pairs, kept in descending probability
    // order; the exact double literals are significant because unit
    // tests compare suggestions for equality.
    struct CannedEntry {
        const char* word;
        double probability;
    };
    static const CannedEntry canned[] = {
        { "foo1",    0.99 }, { "foo2",    0.98 }, { "foo3",    0.97 },
        { "foo4",    0.96 }, { "foo5",    0.95 }, { "foo6",    0.94 },
        { "bar1",    0.89 }, { "bar2",    0.88 }, { "bar3",    0.87 },
        { "bar4",    0.86 }, { "bar5",    0.85 }, { "bar6",    0.84 },
        { "foobar1", 0.79 }, { "foobar2", 0.78 }, { "foobar3", 0.77 },
        { "foobar4", 0.76 }, { "foobar5", 0.75 }, { "foobar6", 0.74 }
    };

    Prediction result;
    const size_t canned_count = sizeof(canned) / sizeof(canned[0]);
    for (size_t i = 0; i < canned_count; ++i) {
        result.addSuggestion(Suggestion(canned[i].word, canned[i].probability));
    }
    return result;
}
示例3: buildPrediction
/** SQLite callback function

    Builds prediction from query results.

    Each result row is expected to carry exactly two columns, "word"
    and "count": the suggestion word is taken from the first column
    and its probability from the second.

    @param callbackDataPtr pointer to a CallbackData carrying the
                           target Prediction and the size cap
    @param argc            number of columns in this row
    @param argv            column values for this row
    @param column          column names for this row
    @return 0 to keep fetching rows, non-zero to make sqlite3_exec
            abort the query once the prediction is full
*/
int buildPrediction( void* callbackDataPtr,
                     int argc,
                     char** argv,
                     char** column )
{
    // cast pointer to void back to pointer to CallbackData object
    CallbackData* dataPtr = static_cast<CallbackData*>(callbackDataPtr);

    Prediction* predictionPtr = dataPtr->predPtr;
    size_t maxPredictionSize = dataPtr->predSize;

    // Stop as soon as the prediction is full.  Using >= (instead of
    // the previous >) caps the prediction at exactly
    // maxPredictionSize suggestions rather than maxPredictionSize + 1.
    if (predictionPtr->size() >= maxPredictionSize) {
        return 1;
    }

    if( argc == 2 &&
        strcmp( "word", column[ 0 ] ) == 0 &&
        strcmp( "count", column[ 1 ] ) == 0 ) {
        predictionPtr->addSuggestion(
            Suggestion( argv[ argc - 2 ],
                        atof( argv[ argc - 1 ] )
                )
            );
    } else {
        // REVISIT: exit() is heavy-handed for a library callback, but
        // kept to preserve existing behavior on malformed queries.
        std::cerr << "Invalid invocation of buildPrediction method!"
                  << std::endl;
        exit( 1 );
    }

    return 0;
}
示例4: predict
// Scans the dictionary file and collects every entry beginning with
// the current prefix, up to max_partial_predictions_size suggestions,
// each carrying the plugin's fixed probability.
Prediction DictionaryPlugin::predict(const size_t max_partial_predictions_size, const char** filter) const
{
    Prediction result;

    const std::string prefix = contextTracker->getPrefix();

    std::ifstream dictionary_file;
    dictionary_file.open(dictionary_path.c_str());
    if (!dictionary_file) {
        logger << ERROR << "Error opening dictionary: " << dictionary_path << endl;
    }
    assert(dictionary_file); // REVISIT: handle with exceptions

    // scan file entries until we have gathered enough suggestions
    unsigned int accepted = 0;
    std::string candidate;
    while (dictionary_file >> candidate && accepted < max_partial_predictions_size) {
        // keep the entry only if it starts with the prefix
        if (candidate.compare(0, prefix.size(), prefix) == 0) {
            result.addSuggestion(Suggestion(candidate, probability));
            ++accepted;
            logger << NOTICE << "Found valid token: " << candidate << endl;
        } else {
            logger << INFO << "Discarding invalid token: " << candidate << endl;
        }
    }
    dictionary_file.close();

    return result;
}
示例5: switch
/**
 * Evaluates the trained kernel RLS model on the input matrix X.
 *
 * @param X input data matrix
 * @return pointer to the predicted output matrix (caller takes
 *         ownership), or a null pointer if the configured kernel
 *         type is not supported
 */
gMat2D<T>* KernelRLSWrapper<T>::eval(const gMat2D<T> &X)
{
    // Initialize to null so an unexpected kernel type cannot leave
    // `pred` dangling (the previous code dereferenced an
    // uninitialized pointer in that case — undefined behavior).
    Prediction<T>* pred = 0;
    PredKernelTrainTest<T> predkTrainTest;
    gMat2D<T> empty;

    switch (this->kType)
    {
    case KernelWrapper<T>::LINEAR:
        // primal formulation for the linear kernel
        pred = new PredPrimal<T>();
        break;
    case KernelWrapper<T>::RBF:
        // dual formulation; also precompute the train/test kernel
        pred = new PredDual<T>();
        this->opt->addOpt("predkernel", predkTrainTest.execute(X, empty, *(this->opt)));
        break;
    }

    // Unsupported kernel type: fail safely instead of crashing.
    if (pred == 0) {
        return 0;
    }

    OptMatrix<gMat2D<T> >* result = OptMatrix<gMat2D<T> >::dynacast(pred->execute(X, empty, *(this->opt)));
    result->detachValue();
    delete pred;

    gMat2D<T>* ret = &(result->getValue());
    delete result;
    return ret;
}
示例6: predict
/** Builds a prediction from recently seen tokens, scored by an
 *  exponentially decaying function of how far back each token
 *  appeared in the context history.
 */
Prediction RecencyPredictor::predict (const size_t max, const char** filter) const
{
    Prediction result;

    const std::string prefix = contextTracker->getPrefix();
    logger << INFO << "prefix: " << prefix << endl;

    // Only build a recency prediction for a non-empty prefix: with an
    // empty prefix every previously seen token would be a candidate,
    // which would just replay the most recent tokens in reverse order.
    if (prefix.empty()) {
        return result;
    }

    Suggestion suggestion;
    for (size_t index = 1;
         index <= cutoff_threshold   // look back only as far as cutoff
             && result.size() < max; // need only max suggestions
         ++index) {
        const std::string token = contextTracker->getToken(index);
        if (token.empty()) {
            break;                   // context history exhausted
        }
        logger << INFO << "token: " << token << endl;

        if (token.find(prefix) == 0  // token starts with prefix
            && token_satisfies_filter (token, prefix, filter)) {
            // probability follows an exponential decay in the token's
            // distance from the end of the history
            const double prob = n_0 * exp(-(lambda * (index - 1)));
            logger << INFO << "probability: " << prob << endl;

            suggestion.setWord(token);
            suggestion.setProbability(prob);
            result.addSuggestion(suggestion);
        }
    }

    return result;
}
示例7: while
/** Equality: two predictions are equal when they hold the same
 *  number of suggestions and each pair of suggestions at the same
 *  position compares equal.
 */
bool Prediction::operator== (const Prediction& right) const
{
    // an instance is trivially equal to itself
    if (this == &right) {
        return true;
    }

    // different sizes can never be equal
    if (size() != right.size()) {
        return false;
    }

    // element-wise comparison of suggestions
    for (size_t i = 0; i < size(); ++i) {
        if (getSuggestion(i) != right.getSuggestion(i)) {
            return false;
        }
    }
    return true;
}
示例8: while
/** Iterates over all registered predictors and verifies that the
 *  last one (the dummy predictor) returns the expected canned
 *  prediction.
 */
void PredictorRegistryTest::testNext()
{
    // the tracker is never dereferenced by this test, so a poison
    // pointer is sufficient
    ContextTracker* pointer = static_cast<ContextTracker*>((void*)0xdeadbeef);
    registry->setContextTracker(pointer);

    PredictorRegistry::Iterator it = registry->iterator();
    Predictor* predictor = 0;
    while (it.hasNext()) {
        predictor = it.next();
    }

    // since we've iterated till the end of the predictors list, predictor
    // is now pointing to the DummyPredictor, so let's test we got the
    // dummy prediction back.
    // Assert non-null BEFORE dereferencing: the previous code called
    // predict() first, which would crash instead of failing the test
    // when the registry is empty.
    CPPUNIT_ASSERT(predictor != 0);

    Prediction prediction = predictor->predict(20, 0);
    size_t expected_size = 18;
    CPPUNIT_ASSERT_EQUAL(expected_size, prediction.size());
    CPPUNIT_ASSERT_EQUAL(Suggestion("foo1", 0.99), prediction.getSuggestion(0));
    CPPUNIT_ASSERT_EQUAL(Suggestion("foobar6", 0.74), prediction.getSuggestion(17));
}
示例9: CPPUNIT_ASSERT_EQUAL
/** Tests that the n-gram plugin learns tokens from the context
 *  stream and that subsequent predictions reflect the learned text.
 *
 *  The scenario types "foo bar foobar " one fragment at a time and
 *  checks the prediction after each context update.  The statement
 *  order is significant: each ct->update() commits the stream
 *  contents written since the previous update.
 */
void NewSmoothedNgramPluginTest::testLearning()
{
// get pointer to plugin
Plugin* plugin = pluginRegistry->iterator().next();
{
// partial token "f": nothing learned yet, so no completions
*stream << "f";
ct->update();
Prediction actual = plugin->predict(SIZE, 0);
CPPUNIT_ASSERT_EQUAL(static_cast<size_t>(0), actual.size());
}
{
// still a partial token ("fo"): still nothing to suggest
*stream << "o";
ct->update();
Prediction actual = plugin->predict(SIZE, 0);
CPPUNIT_ASSERT_EQUAL(static_cast<size_t>(0), actual.size());
}
{
// trailing space completes the token "foo"; it is learned and
// becomes the sole prediction
*stream << "o ";
ct->update();
Prediction actual = plugin->predict(SIZE, 0);
CPPUNIT_ASSERT_EQUAL(static_cast<size_t>(1), actual.size());
CPPUNIT_ASSERT_EQUAL(std::string("foo"), actual.getSuggestion(0).getWord());
ct->update();
}
{
// partial token "bar" typed; prediction result intentionally unused,
// this block only feeds the stream
*stream << "bar";
ct->update();
Prediction actual = plugin->predict(SIZE, 0);
}
{
// space completes "bar"; both learned tokens are now suggested,
// most frequent/recent ordering first
*stream << " ";
ct->update();
Prediction actual = plugin->predict(SIZE, 0);
CPPUNIT_ASSERT_EQUAL(static_cast<size_t>(2), actual.size());
CPPUNIT_ASSERT_EQUAL(std::string("foo"), actual.getSuggestion(0).getWord());
CPPUNIT_ASSERT_EQUAL(std::string("bar"), actual.getSuggestion(1).getWord());
}
{
// a third completed token "foobar" joins the prediction
*stream << "foobar ";
ct->update();
Prediction actual = plugin->predict(SIZE, 0);
CPPUNIT_ASSERT_EQUAL(static_cast<size_t>(3), actual.size());
CPPUNIT_ASSERT_EQUAL(std::string("foobar"), actual.getSuggestion(0).getWord());
CPPUNIT_ASSERT_EQUAL(std::string("foo"), actual.getSuggestion(1).getWord());
CPPUNIT_ASSERT_EQUAL(std::string("bar"), actual.getSuggestion(2).getWord());
}
{
// prefix "f" filters the candidates down to the two f-words
*stream << "f";
ct->update();
Prediction actual = plugin->predict(SIZE, 0);
CPPUNIT_ASSERT_EQUAL(static_cast<size_t>(2), actual.size());
CPPUNIT_ASSERT_EQUAL(std::string("foobar"), actual.getSuggestion(0).getWord());
CPPUNIT_ASSERT_EQUAL(std::string("foo"), actual.getSuggestion(1).getWord());
}
}
示例10: predict
/** Predicts the most probable completions of the current prefix
 *  using an ARPA n-gram language model with back-off.
 *
 *  Depending on how many of the two preceding context tokens are
 *  found in the vocabulary, trigram, bigram or unigram (log)
 *  probabilities are used to score every vocabulary word that
 *  matches the prefix and the filter.
 *
 *  Fixes over the previous revision: removed the unused locals
 *  `cardinality` and `tokens`, and the final loop now uses the
 *  iterator type of the comparator-qualified multimap (iterating a
 *  std::multimap<float,std::string,cmp> with a
 *  std::multimap<float,std::string>::const_iterator only compiled by
 *  accident of the standard library implementation).
 *
 * @param max_partial_prediction_size maximum number of suggestions
 * @param filter optional null-terminated array of allowed completions
 * @return prediction ordered by the multimap's cmp comparator
 */
Prediction ARPAPlugin::predict(const size_t max_partial_prediction_size, const char** filter) const
{
    logger << DEBUG << "predict()" << endl;
    Prediction prediction;

    // current (partial) token plus the two preceding context tokens
    std::string prefix = strtolower(contextTracker->getToken(0));
    std::string wd2Str = strtolower(contextTracker->getToken(1));
    std::string wd1Str = strtolower(contextTracker->getToken(2));

    // candidate completions keyed by log probability, ordered by cmp
    std::multimap<float,std::string,cmp> result;

    logger << DEBUG << "["<<wd1Str<<"]"<<" ["<<wd2Str<<"] "<<"["<<prefix<<"]"<<endl;

    //search for the past tokens in the vocabulary
    std::map<std::string,int>::const_iterator wd1It,wd2It;
    wd1It = vocabCode.find(wd1Str);
    wd2It = vocabCode.find(wd2Str);

    /**
     * note if we have not tokens to compute 3-gram probabilities we compute 2-gram or 1-gram probabilities.
     * the following code might be repetitive but more efficient than having the main loop outside.
     */

    //we have two valid past tokens available
    if(wd1It!=vocabCode.end() && wd2It!=vocabCode.end())
    {
        //iterate over all vocab words
        for(std::map<int,std::string>::const_iterator it = vocabDecode.begin(); it!=vocabDecode.end(); it++)
        {
            //if wd3 matches prefix and filter -> compute its backoff probability and add to the result set
            if(matchesPrefixAndFilter(it->second,prefix,filter))
            {
                std::pair<float,std::string> p;
                p.first = computeTrigramBackoff(wd1It->second,wd2It->second,it->first);
                p.second = it->second;
                result.insert(p);
            }
        }
    }
    //we have one valid past token available
    else if(wd2It!=vocabCode.end())
    {
        //iterate over all vocab words
        for(std::map<int,std::string>::const_iterator it = vocabDecode.begin(); it!=vocabDecode.end(); it++)
        {
            //if wd3 matches prefix and filter -> compute its backoff probability and add to the result set
            if(matchesPrefixAndFilter(it->second,prefix,filter))
            {
                std::pair<float,std::string> p;
                p.first = computeBigramBackoff(wd2It->second,it->first);
                p.second = it->second;
                result.insert(p);
            }
        }
    }
    //we have no valid past token available
    else
    {
        //iterate over all vocab words
        for(std::map<int,std::string>::const_iterator it = vocabDecode.begin(); it!=vocabDecode.end(); it++)
        {
            //if wd3 matches prefix and filter -> compute its backoff probability and add to the result set
            if(matchesPrefixAndFilter(it->second,prefix,filter))
            {
                std::pair<float,std::string> p;
                p.first = unigramMap.find(it->first)->second.logProb;
                p.second = it->second;
                result.insert(p);
            }
        }
    }

    // emit the top max_partial_prediction_size candidates, converting
    // log probabilities back to linear space
    size_t numSuggestions = 0;
    for(std::multimap<float,std::string,cmp>::const_iterator it = result.begin(); it != result.end() && numSuggestions < max_partial_prediction_size; ++it)
    {
        prediction.addSuggestion(Suggestion(it->second,exp(it->first)));
        numSuggestions++;
    }

    return prediction;
}
示例11:
/** Initializes the test data suite for selector S6_NR_T0.
 *
 *  Builds three test cases that share one input prediction; each
 *  case expects the next six-suggestion slice of the selector output
 *  as the user types "f", "o", "o".
 *
 *  Fix: the shared input prediction `ip` was previously leaked — it
 *  is now deleted once all test cases have copied it.
 */
void SelectorTest::TestDataSuite_S6_NR_T0::init()
{
    TestData* td;
    Prediction* ip;
    Prediction* op;

    // shared input prediction, copied into every test case below
    td = new TestData;
    ip = new Prediction;
    op = new Prediction;
    ip->addSuggestion(Suggestion("foo", 0.9));
    ip->addSuggestion(Suggestion("foo1", 0.8));
    ip->addSuggestion(Suggestion("foo2", 0.7));
    ip->addSuggestion(Suggestion("foo3", 0.6));
    ip->addSuggestion(Suggestion("foo4", 0.5));
    ip->addSuggestion(Suggestion("foo5", 0.4));
    ip->addSuggestion(Suggestion("foo6", 0.3));
    ip->addSuggestion(Suggestion("foobar", 0.2));
    ip->addSuggestion(Suggestion("foobar1", 0.1));
    ip->addSuggestion(Suggestion("foobar2", 0.09));
    ip->addSuggestion(Suggestion("foobar3", 0.08));
    ip->addSuggestion(Suggestion("foobar4", 0.07));
    ip->addSuggestion(Suggestion("foobar5", 0.06));
    ip->addSuggestion(Suggestion("foobar6", 0.05));
    ip->addSuggestion(Suggestion("foobar7", 0.04));
    ip->addSuggestion(Suggestion("foobar8", 0.03));
    ip->addSuggestion(Suggestion("foobar9", 0.02));
    ip->addSuggestion(Suggestion("foobarfoo", 0.01));
    ip->addSuggestion(Suggestion("foobarfoo1", 0.009));
    ip->addSuggestion(Suggestion("foobarfoo2", 0.008));
    ip->addSuggestion(Suggestion("foobarfoo3", 0.007));

    // case 1: after typing "f", expect the six top "foo*" suggestions
    op->addSuggestion(Suggestion("foo", 0.9));
    op->addSuggestion(Suggestion("foo1", 0.8));
    op->addSuggestion(Suggestion("foo2", 0.7));
    op->addSuggestion(Suggestion("foo3", 0.6));
    op->addSuggestion(Suggestion("foo4", 0.5));
    op->addSuggestion(Suggestion("foo5", 0.4));
    td->updateString = "f";
    td->inputPrediction = *ip;
    td->outputPrediction = *op;
    testData.push_back(*td);
    delete td;
    delete op;

    // case 2: after typing "o", expect the next slice of six
    td = new TestData;
    op = new Prediction;
    op->addSuggestion(Suggestion("foo6", 0.3));
    op->addSuggestion(Suggestion("foobar", 0.2));
    op->addSuggestion(Suggestion("foobar1", 0.1));
    op->addSuggestion(Suggestion("foobar2", 0.09));
    op->addSuggestion(Suggestion("foobar3", 0.08));
    op->addSuggestion(Suggestion("foobar4", 0.07));
    td->updateString = "o";
    td->inputPrediction = *ip;
    td->outputPrediction = *op;
    testData.push_back(*td);
    delete td;
    delete op;

    // case 3: after typing "o" again, expect the following slice
    td = new TestData;
    op = new Prediction;
    op->addSuggestion(Suggestion("foobar5", 0.06));
    op->addSuggestion(Suggestion("foobar6", 0.05));
    op->addSuggestion(Suggestion("foobar7", 0.04));
    op->addSuggestion(Suggestion("foobar8", 0.03));
    op->addSuggestion(Suggestion("foobar9", 0.02));
    op->addSuggestion(Suggestion("foobarfoo", 0.01));
    td->updateString = "o";
    td->inputPrediction = *ip;
    td->outputPrediction = *op;
    testData.push_back(*td);
    delete td;
    delete op;
    delete ip;  // fix: was leaked in the previous revision

    iter = testData.begin();
}
示例12: tokens
Prediction SmoothedNgramPredictor::predict(const size_t max_partial_prediction_size, const char** filter) const
{
logger << DEBUG << "predict()" << endl;
// Result prediction
Prediction prediction;
// Cache all the needed tokens.
// tokens[k] corresponds to w_{i-k} in the generalized smoothed
// n-gram probability formula
//
std::vector<std::string> tokens(cardinality);
for (int i = 0; i < cardinality; i++) {
tokens[cardinality - 1 - i] = contextTracker->getToken(i);
logger << DEBUG << "Cached tokens[" << cardinality - 1 - i << "] = " << tokens[cardinality - 1 - i] << endl;
}
// Generate list of prefix completition candidates.
//
// The prefix completion candidates used to be obtained from the
// _1_gram table because in a well-constructed ngram database the
// _1_gram table (which contains all known tokens). However, this
// introduced a skew, since the unigram counts will take
// precedence over the higher-order counts.
//
// The current solution retrieves candidates from the highest
// n-gram table, falling back on lower order n-gram tables if
// initial completion set is smaller than required.
//
std::vector<std::string> prefixCompletionCandidates;
for (size_t k = cardinality; (k > 0 && prefixCompletionCandidates.size() < max_partial_prediction_size); k--) {
logger << DEBUG << "Building partial prefix completion table of cardinality: " << k << endl;
// create n-gram used to retrieve initial prefix completion table
Ngram prefix_ngram(k);
copy(tokens.end() - k, tokens.end(), prefix_ngram.begin());
if (logger.shouldLog()) {
logger << DEBUG << "prefix_ngram: ";
for (size_t r = 0; r < prefix_ngram.size(); r++) {
logger << DEBUG << prefix_ngram[r] << ' ';
}
logger << DEBUG << endl;
}
// obtain initial prefix completion candidates
db->beginTransaction();
NgramTable partial;
if (filter == 0) {
partial = db->getNgramLikeTable(prefix_ngram,max_partial_prediction_size - prefixCompletionCandidates.size());
} else {
partial = db->getNgramLikeTableFiltered(prefix_ngram,filter, max_partial_prediction_size - prefixCompletionCandidates.size());
}
db->endTransaction();
if (logger.shouldLog()) {
logger << DEBUG << "partial prefixCompletionCandidates" << endl
<< DEBUG << "----------------------------------" << endl;
for (size_t j = 0; j < partial.size(); j++) {
for (size_t k = 0; k < partial[j].size(); k++) {
logger << DEBUG << partial[j][k] << " ";
}
logger << endl;
}
}
logger << DEBUG << "Partial prefix completion table contains " << partial.size() << " potential completions." << endl;
// append newly discovered potential completions to prefix
// completion candidates array to fill it up to
// max_partial_prediction_size
//
std::vector<Ngram>::const_iterator it = partial.begin();
while (it != partial.end() && prefixCompletionCandidates.size() < max_partial_prediction_size) {
// only add new candidates, iterator it points to Ngram,
// it->end() - 2 points to the token candidate
//
std::string candidate = *(it->end() - 2);
if (find(prefixCompletionCandidates.begin(),
prefixCompletionCandidates.end(),
candidate) == prefixCompletionCandidates.end()) {
prefixCompletionCandidates.push_back(candidate);
}
it++;
}
}
if (logger.shouldLog()) {
logger << DEBUG << "prefixCompletionCandidates" << endl
<< DEBUG << "--------------------------" << endl;
for (size_t j = 0; j < prefixCompletionCandidates.size(); j++) {
logger << DEBUG << prefixCompletionCandidates[j] << endl;
}
}
// compute smoothed probabilities for all candidates
//
db->beginTransaction();
//.........这里部分代码省略.........
示例13: run_DataGammaJetsZllToZnunu
void run_DataGammaJetsZllToZnunu(){
// defs ---------------------------------------------------------
gSystem->CompileMacro("../MT2Code/src/MT2Shapes.cc", "k");
// logStream
fLogStream = new std::ostringstream();
// create dir
if(!fOutDir.EndsWith("/")) fOutDir += "/";
char cmd[500];
sprintf(cmd,"mkdir -p %s", fOutDir.Data());
system(cmd);
DefineCutStreams(fHTmin, fHTmax, fMT2min, fMT2max);
TString filename=fOutDir+"/"+fRootFile;
fOutFile = new TFile(filename.Data(), "RECREATE");
fDir = (TDirectory*) fOutFile;
// fix output dir
// if(fHTmax <10000) fOutDir= TString::Format("%s_%d_HT_%d", fOutDir.Data(), abs(fHTmin), abs(fHTmax));
// else fOutDir= TString::Format("%s_%d_HT_%s", fOutDir.Data(), abs(fHTmin), "Inf");
// if(fMT2max<10000) fOutDir= TString::Format("%s_%d_MT2_%d", fOutDir.Data(), abs(fMT2min), abs(fMT2max));
// else fOutDir= TString::Format("%s_%d_MT2_%s", fOutDir.Data(), abs(fMT2min), "Inf");
// log MT2 and HT cuts
*fLogStream << "------------------------------------------------------------------------------------------------" << endl;
*fLogStream << "+++ new Znunu with Gamma+jets prediction +++" << endl;
*fLogStream << "+++ outputdir: " << fOutDir << "+++" << endl;
*fLogStream << "------------------------------------------------------------------------------------------------" << endl;
// new prediction ------------------------------------------------------
Prediction* prediction = new Prediction();
prediction->fVerbose=fVerbose;
prediction->fSave =fSaveResults;
// Photon Pt
if(fDoPhotonPtShape){
const int gNMT2bins = 11;
double gMT2bins[gNMT2bins+1] = {150, 160, 170, 180, 190, 200, 225, 250, 300, 400, 550, 800};
prediction->ChPhotonPt = new Channel("PhotonPt", "photon[0].lv.Pt()", cutStream_PhotonPt.str().c_str(), fTriggerStream.str().c_str(),fSamplesPhotonPt);
prediction->ChPhotonPt->fVerbose =prediction->fVerbose;
// prediction->ChPhotonPt->GetShapes("PhotonPt", "#gamma Pt", 2, 300, 800);
prediction->ChPhotonPt->GetShapes("PhotonPt", "#gamma Pt", 100, 150, 800);
// prediction->ChPhotonPt->GetShapes("PhotonPt", "#gamma Pt", gNMT2bins, gMT2bins);
}
// Zll Pt
if(fDoZllPtShape){
const int gNMT2bins = 11;
double gMT2bins[gNMT2bins+1] = {150, 160, 170, 180, 190, 200, 225, 250, 300, 400, 550, 800};
prediction->ChZllPt = new Channel("ZllPt", "RecoOSDiLeptPt(20,2.4,71,111)", cutStreamZll.str().c_str(), fTriggerStream.str().c_str(),fSamplesZllPt);
prediction->ChZllPt->fVerbose =prediction->fVerbose;
// prediction->ChZllPt->GetShapes("ZllPt", "Zll Pt", 2, 300, 800);
prediction->ChZllPt->GetShapes("ZllPt", "Zll Pt", 100, 150, 800);
// prediction->ChZllPt->GetShapes("ZllPt", "Zll Pt", gNMT2bins, gMT2bins);
}
// compute Zll/gamma pt ratio
if(fDoPhotonPtShape && fDoZllPtShape){
TH1D* hPhotonToZllPtRatio = prediction->GetRatio(fDoDataZllToPhotonRatio? prediction->ChZllPt->hData : prediction->ChZllPt->hZJetsToLL,
fDoDataZllToPhotonRatio? prediction->ChPhotonPt->hData : prediction->ChPhotonPt->hPhotons, 4);
DrawHisto(hPhotonToZllPtRatio,hPhotonToZllPtRatio->GetName(),"EX0");
TString rationame=hPhotonToZllPtRatio->GetName();
rationame +="_fittedRatio";
TF1 *f_lin = new TF1(rationame,"pol0(0)", fZllToGammaFitMin , fZllToGammaFitMax); f_lin->SetLineColor(8);
if(fDoFits){
hPhotonToZllPtRatio->Fit(rationame,"0L","", fZllToGammaFitMin, fZllToGammaFitMax); // set al weights to 1
fZllToPhotonRatio = f_lin->GetParameter(0);
} else{
fZllToPhotonRatio = prediction->GetLimitedRatio(fDoDataZllToPhotonRatio? prediction->ChZllPt->hData: prediction->ChZllPt->hZJetsToLL,
fDoDataZllToPhotonRatio? prediction->ChPhotonPt->hData : prediction->ChPhotonPt->hPhotons,
fZllToGammaFitMin, fZllToGammaFitMax, false, fZllToPhotonRatioRelErr);
f_lin->SetParameter(0,fZllToPhotonRatio);
}
const int nBins= 3;
const double Bins[nBins+1] = {150, fZllToGammaFitMin>150?fZllToGammaFitMin:150.0001, fZllToGammaFitMax<800? fZllToGammaFitMax:799.99, 800};
TH1D* hErrorbar = new TH1D(rationame.Data(), "", nBins, Bins);
hErrorbar->SetBinContent(2,fZllToPhotonRatio);
hErrorbar->SetBinError(2,fZllToPhotonRatioRelErr*fZllToPhotonRatio);
hErrorbar->SetBinContent(1,-10);
hErrorbar->SetBinError( 1,0);
hErrorbar->SetFillColor(5);
hErrorbar->SetFillStyle(3001);
hErrorbar->Draw("e2same");
f_lin->Draw("same");
hPhotonToZllPtRatio->Draw("EX0same");
}
// GenLevel Zll Pt, no acceptance cuts
if(fDoGenZllShape){
prediction->ChGenZllPt = new Channel("GenZllPt", "GenDiLeptPt(0,10,0,1000,true)", cutStreamGenZll.str().c_str(), fTriggerStream.str().c_str(),fSamplesZllPtMConly);
prediction->ChGenZllPt->fVerbose =prediction->fVerbose;
prediction->ChGenZllPt->GetShapes("GenZllPt", "GenZll Pt", 8, 0, 800);
}
// GenLevel Zll Pt, within acceptance
if(fDoGenAccZllShape){
prediction->ChGenZllPtAcc = new Channel("GenZllPtAcc", "GenDiLeptPt(20,2.4,71,111,true)", cutStreamGenZllAcc.str().c_str(), fTriggerStream.str().c_str(),fSamplesZllPtMConly);
//.........这里部分代码省略.........
示例14: GetBinPrediction
int GetBinPrediction()
{
cout << "Loading Ra2bBin.C" << endl;
gROOT->ProcessLine(".L RA2bBin.C+");
cout << "Loading Prediction.C" << endl;
gROOT->ProcessLine(".L Prediction.C+");
cout << "Loading Expectation.C" << endl;
gROOT->ProcessLine(".L Expectation.C+");
// ------------------------------------------------------------------- //
Expectation* expec = new Expectation();
TChain* expectation = new TChain("PredictionTree");
Prediction* predic = new Prediction();
TChain* prediction = new TChain("PredictionTree");
// ------------------------------------------------------------------- //
ifstream myfile1 ("filelist.txt");
string root_file;
if (myfile1.is_open()) {
while( myfile1.good() ) {
getline (myfile1,root_file);
cout << root_file << endl;
if (root_file.length() > 0) {
TString path = root_file;
expectation->Add(path);
prediction->Add(path);
}
}
myfile1.close();
}
// ------------------------------------------------------------------- //
expectation->Process(expec);
prediction->Process(predic);
// ------------------------------------------------------------------- //
gROOT->SetStyle("Plain");
gStyle->SetPalette(51, 0);
// For the canvas:
gStyle->SetCanvasColor(0);
gStyle->SetCanvasBorderMode(0);
// For the Pad:
gStyle->SetPadColor(0);
gStyle->SetPadTickX(1);
gStyle->SetPadTickY(1);
gStyle->SetPadBorderSize(2);
gStyle->SetPadBorderMode(0);
// For the frame:
gStyle->SetFrameBorderMode(0);
// For the histo:
gStyle->SetMarkerSize(0.7);
gStyle->SetMarkerStyle(20);
gStyle->SetMarkerColor(1);
// For the statistics box:
gStyle->SetOptStat(0);
gStyle->SetOptFit(1011);
// Margins:
gStyle->SetPadBottomMargin(0.25);
gStyle->SetPadTopMargin(0.15);
gStyle->SetPadLeftMargin(0.15);
gStyle->SetPadRightMargin(0.1);
// For the Global title:
gStyle->SetOptTitle(0);
gStyle->SetTitleColor(1);
gStyle->SetTitleFillColor(10);
gStyle->SetTitleTextColor(1);
gStyle->SetTitleFont(42);
gStyle->SetTitleFontSize(0.05);
gStyle->SetTitleBorderSize(0);
// For the axis
gStyle->SetNdivisions(510, "X");
gStyle->SetNdivisions(510, "Y");
gStyle->SetTickLength(0.03);
// For the axis titles:
gStyle->SetTitleOffset(1.4, "X");
gStyle->SetTitleOffset(1.25, "Y");
gStyle->SetTitleOffset(1.2, "Y");
gStyle->SetTitleOffset(0.5, "Z");
gStyle->SetTitleSize(0.05, "XYZ");
gStyle->SetTitleSize(0.061, "XYZ");
gStyle->SetTitleFont(42, "XYZ");
gStyle->SetTitleX(0.15);
gStyle->SetTitleY(0.99);
//.........这里部分代码省略.........
示例15: CPPUNIT_ASSERT_EQUAL
void DejavuPredictorTest::testPredict()
{
*stream << "polly wants a cracker ";
ct->update();
// get pointer to dejavu predictor
Predictor* predictor = predictorRegistry->iterator().next();
{
*stream << "polly ";
Prediction expected;
CPPUNIT_ASSERT_EQUAL(expected, predictor->predict(SIZE, 0));
ct->update();
}
{
*stream << "wants ";
Prediction expected;
CPPUNIT_ASSERT_EQUAL(expected, predictor->predict(SIZE, 0));
ct->update();
}
{
*stream << "a ";
Prediction expected;
expected.addSuggestion(Suggestion("cracker", 1.0));
CPPUNIT_ASSERT_EQUAL(expected, predictor->predict(SIZE, 0));
ct->update();
}
*stream << "soda ";
ct->update();
{
*stream << "polly ";
Prediction expected;
CPPUNIT_ASSERT_EQUAL(expected, predictor->predict(SIZE, 0));
ct->update();
}
{
*stream << "wants ";
Prediction expected;
CPPUNIT_ASSERT_EQUAL(expected, predictor->predict(SIZE, 0));
ct->update();
}
{
*stream << "a ";
Prediction expected;
expected.addSuggestion(Suggestion("cracker", 1.0));
expected.addSuggestion(Suggestion("soda", 1.0));
CPPUNIT_ASSERT_EQUAL(expected, predictor->predict(SIZE, 0));
ct->update();
}
*stream << "cake ";
ct->update();
{
*stream << "polly ";
Prediction expected;
CPPUNIT_ASSERT_EQUAL(expected, predictor->predict(SIZE, 0));
ct->update();
}
{
*stream << "wants ";
Prediction expected;
CPPUNIT_ASSERT_EQUAL(expected, predictor->predict(SIZE, 0));
ct->update();
}
{
*stream << "a ";
Prediction expected;
expected.addSuggestion(Suggestion("cake", 1.0));
expected.addSuggestion(Suggestion("cracker", 1.0));
expected.addSuggestion(Suggestion("soda", 1.0));
CPPUNIT_ASSERT_EQUAL(expected, predictor->predict(SIZE, 0));
ct->update();
}
*stream << "crumble ";
ct->update();
{
// test filter
const char* filter[] = { "cra", "so", 0 };
*stream << "polly wants a ";
Prediction expected;
expected.addSuggestion(Suggestion("cracker", 1.0));
expected.addSuggestion(Suggestion("soda", 1.0));
CPPUNIT_ASSERT_EQUAL(expected, predictor->predict(SIZE, filter));
ct->update();
}
*stream << "break ";
ct->update();
//.........这里部分代码省略.........