This page collects typical usage examples of the C++ method Model::Alpha. If you are wondering what Model::Alpha does, how to call it, or what real uses of it look like, the hand-picked method examples here may help. You can also explore further usage examples of the containing class, Model.
Two code examples of the Model::Alpha method are shown below, sorted by popularity by default.
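Before the full examples, here is a minimal, self-contained sketch of the calling pattern both of them use: Model::Alpha() reads the current gamma shape parameter, and that value is handed to a bounded-parameter optimizer together with the matching setter, &Model::SetAlpha. The simplified Model class and the OptimizeBounded helper below are assumptions made purely for illustration; they are not the real declarations from the project these examples come from (there the optimizer is a Tree::OptimizeBoundedParameter member).

#include <algorithm>
#include <cstdio>

// Simplified stand-in for the real Model class: Alpha() is the getter for the
// gamma shape parameter and SetAlpha() the matching setter (assumed shape,
// for illustration only).
class Model{
    double alpha;
public:
    Model() : alpha(0.5) {}
    double Alpha() const { return alpha; }
    void SetAlpha(double a){ alpha = a; }
};

// Hypothetical stand-in for a bounded-parameter optimizer like the
// OptimizeBoundedParameter calls in the examples: it clamps a proposed value
// into [lo, hi], applies it through the member-function pointer, and returns
// the size of the change.
typedef void (Model::*AlphaSetter)(double);
double OptimizeBounded(Model &mod, double cur, double lo, double hi, AlphaSetter set){
    double proposed = std::min(hi, std::max(lo, cur * 1.1));
    (mod.*set)(proposed);
    return proposed - cur;
}

int main(){
    Model mod;
    // Read the current value with Alpha() and pass the setter as a member pointer,
    // mirroring calls like
    //   OptimizeBoundedParameter(prec, mod->Alpha(), 0, 0.05, 999.9, modnum, &Model::SetAlpha);
    // in the examples below.
    double change = OptimizeBounded(mod, mod.Alpha(), 0.05, 999.9, &Model::SetAlpha);
    std::printf("alpha is now %f (changed by %f)\n", mod.Alpha(), change);
    return 0;
}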
Example 1: RefineStartingConditions
void Individual::RefineStartingConditions(bool optModel, FLOAT_TYPE branchPrec){
    bool optOmega, optAlpha, optFlex, optPinv, optFreqs, optRelRates, optSubsetRates;
    optOmega = optAlpha = optFlex = optPinv = optFreqs = optRelRates = optSubsetRates = false;
    if(optModel){
        for(int modnum = 0;modnum < modPart.NumModels();modnum++){
            Model *mod = modPart.GetModel(modnum);
            const ModelSpecification *modSpec = mod->GetCorrespondingSpec();
            if(modSpec->numRateCats > 1 && modSpec->IsNonsynonymousRateHet() == false && modSpec->IsFlexRateHet() == false) optAlpha = true;
            if(modSpec->IsFlexRateHet()) optFlex = true;
            if(modSpec->includeInvariantSites && modSpec->fixInvariantSites == false) optPinv = true;
            if(modSpec->IsCodon()) optOmega = true;
#ifdef MORE_DETERM_OPT
            if(modSpec->IsCodon() == false && modSpec->fixStateFreqs == false && modSpec->IsEqualStateFrequencies() == false && modSpec->IsEmpiricalStateFrequencies() == false)
                optFreqs = true;
            if(modSpec->fixRelativeRates == false && (modSpec->Nst() > 1))
                optRelRates = true;
#endif
        }
        if(modSpecSet.InferSubsetRates() && modSpecSet.NumSpecs() > 1)
            optSubsetRates = true;
    }
    outman.UserMessageNoCR("optimizing: starting branch lengths");
    if(optAlpha) outman.UserMessageNoCR(", alpha shape");
    if(optPinv) outman.UserMessageNoCR(", prop. invar");
#ifdef MORE_DETERM_OPT
    if(optRelRates) outman.UserMessageNoCR(", rel rates");
    if(optFreqs) outman.UserMessageNoCR(", eq freqs");
#endif
    if(optOmega) outman.UserMessageNoCR(", dN/dS (aka omega) parameters");
    if(optSubsetRates) outman.UserMessageNoCR(", subset rates");
    outman.UserMessage("...");
    FLOAT_TYPE improve=(FLOAT_TYPE)999.9;
    CalcFitness(0);
    for(int i=1;improve > branchPrec;i++){
        FLOAT_TYPE alphaOptImprove=0.0, pinvOptImprove = 0.0, omegaOptImprove = 0.0, flexOptImprove = 0.0, optImprove=0.0, scaleOptImprove=0.0, subsetRateImprove=0.0, rateOptImprove=0.0;
        FLOAT_TYPE freqOptImprove=0.0;
        CalcFitness(0);
        FLOAT_TYPE passStart=Fitness();
        optImprove=treeStruct->OptimizeAllBranches(branchPrec);
        CalcFitness(0);
        FLOAT_TYPE trueImprove= Fitness() - passStart;
        assert(trueImprove >= -1.0);
        if(trueImprove < ZERO_POINT_ZERO) trueImprove = ZERO_POINT_ZERO;
        scaleOptImprove=treeStruct->OptimizeTreeScale(branchPrec);
        //if some of the branch lengths are at the minimum or maximum boundaries the scale optimization
        //can actually worsen the score. This isn't particularly important during initial refinement,
        //so just hide it to keep the user from thinking that there is something terribly wrong
        if(scaleOptImprove < ZERO_POINT_ZERO) scaleOptImprove = ZERO_POINT_ZERO;
        CalcFitness(0);
        if(optModel){
            for(int modnum = 0;modnum < modPart.NumModels();modnum++){
                Model *mod = modPart.GetModel(modnum);
                const ModelSpecification *modSpec = mod->GetCorrespondingSpec();
                if(modSpec->IsCodon())//optimize omega even if there is only 1
                    omegaOptImprove += treeStruct->OptimizeOmegaParameters(branchPrec, modnum);
                else if(mod->NRateCats() > 1){
                    if(modSpec->IsFlexRateHet()){//Flex rates
                        //no longer doing alpha first, it was too hard to know if the flex rates had been partially optimized
                        //already during making of a stepwise tree
                        //if(i == 1) rateOptImprove = treeStruct->OptimizeAlpha(branchPrec);
                        //if(i == 1 && modSpec.gotFlexFromFile==false) rateOptImprove = treeStruct->OptimizeBoundedParameter(branchPrec, mod->Alpha(), 0, 1.0e-8, 999.9, &Model::SetAlpha);
                        flexOptImprove += treeStruct->OptimizeFlexRates(branchPrec, modnum);
                    }
                    else if(modSpec->fixAlpha == false){//normal gamma
                        //rateOptImprove = treeStruct->OptimizeAlpha(branchPrec);
                        //do NOT let alpha go too low here - on bad or random starting trees the branch lengths get crazy long
                        //rateOptImprove = treeStruct->OptimizeBoundedParameter(branchPrec, mod->Alpha(), 0, 1.0e-8, 999.9, &Model::SetAlpha);
                        alphaOptImprove += treeStruct->OptimizeBoundedParameter(branchPrec, mod->Alpha(), 0, 0.05, 999.9, modnum, &Model::SetAlpha);
                    }
                }
                if(modSpec->includeInvariantSites && !modSpec->fixInvariantSites)
                    pinvOptImprove += treeStruct->OptimizeBoundedParameter(branchPrec, mod->PropInvar(), 0, 1.0e-8, mod->maxPropInvar, modnum, &Model::SetPinv);
#ifdef MORE_DETERM_OPT
                if(modSpec->IsCodon() == false && modSpec->fixStateFreqs == false && modSpec->IsEqualStateFrequencies() == false && modSpec->IsEmpiricalStateFrequencies() == false)
                    freqOptImprove += treeStruct->OptimizeEquilibriumFreqs(branchPrec, modnum);
                if(modSpec->fixRelativeRates == false && (modSpec->Nst() > 1))
                    rateOptImprove += treeStruct->OptimizeRelativeNucRates(branchPrec, modnum);
#endif
            }
            if(optSubsetRates){
                subsetRateImprove += treeStruct->OptimizeSubsetRates(branchPrec);
            }
        }
        improve=scaleOptImprove + trueImprove + alphaOptImprove + pinvOptImprove + flexOptImprove + omegaOptImprove + subsetRateImprove;
        outman.precision(8);
        outman.UserMessageNoCR("pass%2d:+%9.3f (branch=%7.2f scale=%6.2f", i, improve, trueImprove, scaleOptImprove);
        if(optOmega) outman.UserMessageNoCR(" omega=%6.2f", omegaOptImprove);
        if(optAlpha) outman.UserMessageNoCR(" alpha=%6.2f", alphaOptImprove);
//......... the rest of this example is omitted here .........
Example 2: MakeStepwiseTree
//......... the beginning of this example is omitted here .........
        scratchT->OptimizeBranchesWithinRadius(added->anc, optPrecision, 0, NULL);
        //backup what we have now
        CopySecByRearrangingNodesOfFirst(treeStruct, &scratchI, true);
        FLOAT_TYPE bestScore = scratchT->lnL;
        //collect reconnection points - this will automatically filter for constraints
        scratchT->GatherValidReconnectionNodes(scratchT->NTax()*2, added, NULL, &mask);
        // stepout << i << "\t" << k << "\t" << bestScore << "\t";
        //start swappin
        int num=0;
        //for(list<ReconNode>::iterator b = scratchT->sprRang.begin();b != scratchT->sprRang.end();b++){
        ReconList attempted;
        while(num < attachesPerTaxon && scratchT->sprRang.size() > 0){
            int connectNum = rnd.random_int(scratchT->sprRang.size());
            listIt broken = scratchT->sprRang.NthElement(connectNum);
            //try a reattachment point
            scratchT->SPRMutate(added->nodeNum, &(*broken), optPrecision, 0);
            //record the score
            broken->chooseProb = scratchT->lnL;
            attempted.AddNode(*broken);
            scratchT->sprRang.RemoveNthElement(connectNum);
            // stepout << scratchT->lnL << "\t";
            //restore the tree
            scratchI.CopySecByRearrangingNodesOfFirst(scratchT, this, true);
            num++;
        }
        //now find the best score
        ReconNode *best = NULL;
        //For debugging, add to random place, to check correct filtering of attachment points for constraints
        /*
        if(attempted.size() != 0)
            best = attempted.RandomReconNode();
        */
        for(list<ReconNode>::iterator b = attempted.begin();b != attempted.end();b++){
            if((*b).chooseProb > bestScore){
                best = &(*b);
                bestScore = (*b).chooseProb;
            }
        }
        //if we didn't find anything better than the initial random attachment we don't need to do anything
        if(best != NULL){
            scratchT->SPRMutate(added->nodeNum, best, optPrecision, 0);
        }
        else scratchT->Score();
        scratchI.CalcFitness(0);
        // stepout << scratchT->lnL << endl;
        CopySecByRearrangingNodesOfFirst(treeStruct, &scratchI, true);
        //outman.UserMessage(" %d %f", i+1, scratchT->lnL);
        outman.UserMessageNoCR(" %d ", i+1);
        outman.flush();
        //when we've added half the taxa optimize alpha, flex or omega
        if(i == (n/2)){
            FLOAT_TYPE improve = 0.0;
            for(int modnum = 0;modnum < modPart.NumModels();modnum++){
                Model *mod = scratchI.modPart.GetModel(modnum);
                const ModelSpecification *modSpec = mod->GetCorrespondingSpec();
                if(modSpec->IsCodon())//optimize omega even if there is only 1
                    improve += scratchT->OptimizeOmegaParameters(optPrecision, modnum);
                else if(mod->NRateCats() > 1){
                    if(modSpec->IsFlexRateHet()){//Flex rates
                        //no longer doing alpha first, it was too hard to know if the flex rates had been partially optimized
                        //already during making of a stepwise tree
                        improve += scratchT->OptimizeFlexRates(optPrecision, modnum);
                    }
                    else if(modSpec->fixAlpha == false){//normal gamma
                        //do NOT let alpha go too low here - on bad or random starting trees the branch lengths get crazy long
                        improve += scratchT->OptimizeBoundedParameter(optPrecision, mod->Alpha(), 0, 0.05, 999.9, modnum, &Model::SetAlpha);
                    }
                }
                if(modSpec->includeInvariantSites && !modSpec->fixInvariantSites)
                    improve += scratchT->OptimizeBoundedParameter(optPrecision, mod->PropInvar(), 0, 1.0e-8, mod->maxPropInvar, modnum, &Model::SetPinv);
            }
            if(modSpecSet.InferSubsetRates()){
                improve += scratchT->OptimizeSubsetRates(optPrecision);
            }
            if(!FloatingPointEquals(improve, 0.0, 1e-8)) outman.UserMessage("\n Optimizing parameters... improved %8.3f lnL", improve);
            // this used to depend on param improvement - not sure why
            // if(rateOptImprove > 0.0){
            scratchT->Score();
            FLOAT_TYPE start=scratchT->lnL;
            scratchT->OptimizeAllBranches(optPrecision);
            FLOAT_TYPE bimprove = scratchT->lnL - start;
            outman.UserMessage("\nOptimizing branchlengths... improved %f lnL", bimprove);
            // }
        }
    }
    // stepout.close();
    outman.UserMessage("");
    scratchI.treeStruct->RemoveTreeFromAllClas();
    delete scratchI.treeStruct;
    scratchI.treeStruct=NULL;
}