This page collects typical usage examples of the C++ method ParameterList::getParameterNames. If you are unsure what ParameterList::getParameterNames does or how to use it, the curated code examples below may help. You can also browse further usage examples of the enclosing class, ParameterList.
Three code examples of the ParameterList::getParameterNames method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better C++ code examples.
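These examples come from the Bio++ (bpp) phylogenetics library, where a ParameterList is a named collection of numeric parameters. As a warm-up, here is a minimal, self-contained sketch of what getParameterNames returns; the parameter names "kappa" and "alpha" are purely illustrative, and the sketch assumes the bpp-core headers are installed:

#include <iostream>
#include <Bpp/Numeric/Parameter.h>
#include <Bpp/Numeric/ParameterList.h>

int main()
{
  // Build a small parameter list (illustrative names and values).
  bpp::ParameterList pl;
  pl.addParameter(bpp::Parameter("kappa", 2.0));
  pl.addParameter(bpp::Parameter("alpha", 0.5));

  // getParameterNames() returns the names as a std::vector<std::string>,
  // in the order the parameters were added.
  std::vector<std::string> names = pl.getParameterNames();
  for (size_t i = 0; i < names.size(); ++i)
    std::cout << names[i] << std::endl; // prints "kappa" then "alpha"
  return 0;
}

The examples that follow use exactly this call to hand parameter names to optimizers and numerical-derivative wrappers.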
Example 1: if
unsigned int OptimizationTools::optimizeNumericalParametersWithGlobalClock2(
DiscreteRatesAcrossSitesClockTreeLikelihood* cl,
const ParameterList& parameters,
OptimizationListener* listener,
double tolerance,
unsigned int tlEvalMax,
OutputStream* messageHandler,
OutputStream* profiler,
unsigned int verbose,
const std::string& optMethodDeriv)
throw (Exception)
{
AbstractNumericalDerivative* fun = 0;
// Build optimizer:
Optimizer* optimizer = 0;
if (optMethodDeriv == OPTIMIZATION_GRADIENT)
{
fun = new TwoPointsNumericalDerivative(cl);
fun->setInterval(0.0000001);
optimizer = new ConjugateGradientMultiDimensions(fun);
}
else if (optMethodDeriv == OPTIMIZATION_NEWTON)
{
fun = new ThreePointsNumericalDerivative(cl);
fun->setInterval(0.0001);
optimizer = new PseudoNewtonOptimizer(fun);
}
else
throw Exception("OptimizationTools::optimizeBranchLengthsParameters. Unknown optimization method: " + optMethodDeriv);
// Numerical derivatives:
ParameterList tmp = parameters.getCommonParametersWith(cl->getParameters());
fun->setParametersToDerivate(tmp.getParameterNames());
optimizer->setVerbose(verbose);
optimizer->setProfiler(profiler);
optimizer->setMessageHandler(messageHandler);
optimizer->setMaximumNumberOfEvaluations(tlEvalMax);
optimizer->getStopCondition()->setTolerance(tolerance);
// Optimize TreeLikelihood function:
optimizer->setConstraintPolicy(AutoParameter::CONSTRAINTS_AUTO);
if (listener)
optimizer->addOptimizationListener(listener);
optimizer->init(parameters);
optimizer->optimize();
if (verbose > 0)
ApplicationTools::displayMessage("\n");
unsigned int n = optimizer->getNumberOfEvaluations();
delete optimizer;
// We're done.
return n;
}
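A hypothetical call site for this function might look as follows (a sketch only: `cl` is assumed to point to a fully initialized DiscreteRatesAcrossSitesClockTreeLikelihood, with tree, data, and model set up elsewhere, and the numeric settings are illustrative):

// Sketch: cl is an initialized DiscreteRatesAcrossSitesClockTreeLikelihood*.
ParameterList params = cl->getParameters();
unsigned int n = OptimizationTools::optimizeNumericalParametersWithGlobalClock2(
  cl,
  params,    // parameters to optimize
  0,         // listener: none
  0.000001,  // tolerance on the stop condition
  1000000,   // maximum number of likelihood evaluations
  0,         // message handler: silent
  0,         // profiler: none
  1,         // verbose
  OptimizationTools::OPTIMIZATION_NEWTON);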
Example 2: optimizeNumericalParametersWithGlobalClock
unsigned int OptimizationTools::optimizeNumericalParametersWithGlobalClock(
DiscreteRatesAcrossSitesClockTreeLikelihood* cl,
const ParameterList& parameters,
OptimizationListener* listener,
unsigned int nstep,
double tolerance,
unsigned int tlEvalMax,
OutputStream* messageHandler,
OutputStream* profiler,
unsigned int verbose,
const std::string& optMethodDeriv)
throw (Exception)
{
AbstractNumericalDerivative* fun = 0;
// Build optimizer:
MetaOptimizerInfos* desc = new MetaOptimizerInfos();
if (optMethodDeriv == OPTIMIZATION_GRADIENT)
{
fun = new TwoPointsNumericalDerivative(cl);
fun->setInterval(0.0000001);
desc->addOptimizer("Branch length parameters", new ConjugateGradientMultiDimensions(fun), cl->getBranchLengthsParameters().getParameterNames(), 2, MetaOptimizerInfos::IT_TYPE_FULL);
}
else if (optMethodDeriv == OPTIMIZATION_NEWTON)
{
fun = new ThreePointsNumericalDerivative(cl);
fun->setInterval(0.0001);
desc->addOptimizer("Branch length parameters", new PseudoNewtonOptimizer(fun), cl->getBranchLengthsParameters().getParameterNames(), 2, MetaOptimizerInfos::IT_TYPE_FULL);
}
else
throw Exception("OptimizationTools::optimizeNumericalParametersWithGlobalClock. Unknown optimization method: " + optMethodDeriv);
// Numerical derivatives:
ParameterList tmp = parameters.getCommonParametersWith(cl->getBranchLengthsParameters());
fun->setParametersToDerivate(tmp.getParameterNames());
ParameterList plsm = parameters.getCommonParametersWith(cl->getSubstitutionModelParameters());
if (plsm.size() < 10)
desc->addOptimizer("Substitution model parameter", new SimpleMultiDimensions(cl), plsm.getParameterNames(), 0, MetaOptimizerInfos::IT_TYPE_STEP);
else
desc->addOptimizer("Substitution model parameters", new DownhillSimplexMethod(cl), plsm.getParameterNames(), 0, MetaOptimizerInfos::IT_TYPE_FULL);
ParameterList plrd = parameters.getCommonParametersWith(cl->getRateDistributionParameters());
if (plrd.size() < 10)
desc->addOptimizer("Rate distribution parameter", new SimpleMultiDimensions(cl), plrd.getParameterNames(), 0, MetaOptimizerInfos::IT_TYPE_STEP);
else
desc->addOptimizer("Rate dsitribution parameters", new DownhillSimplexMethod(cl), plrd.getParameterNames(), 0, MetaOptimizerInfos::IT_TYPE_FULL);
MetaOptimizer optimizer(fun, desc, nstep);
optimizer.setVerbose(verbose);
optimizer.setProfiler(profiler);
optimizer.setMessageHandler(messageHandler);
optimizer.setMaximumNumberOfEvaluations(tlEvalMax);
optimizer.getStopCondition()->setTolerance(tolerance);
// Optimize TreeLikelihood function:
optimizer.setConstraintPolicy(AutoParameter::CONSTRAINTS_AUTO);
if (listener)
optimizer.addOptimizationListener(listener);
optimizer.init(parameters);
optimizer.optimize();
if (verbose > 0)
ApplicationTools::displayMessage("\n");
// We're done.
return optimizer.getNumberOfEvaluations();
}
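The recurring idiom in these examples is to intersect the user-supplied parameter list with a subset of the likelihood function's parameters, then register the surviving names with an optimizer or numerical-derivative wrapper. In isolation (a sketch reusing the variable names from the examples above):

// Filtering idiom: keep only the branch-length parameters the user asked for.
// "parameters" is the user-supplied list; "cl" and "fun" are as in Example 2.
ParameterList bl = parameters.getCommonParametersWith(cl->getBranchLengthsParameters());
fun->setParametersToDerivate(bl.getParameterNames()); // derivatives only for these

getCommonParametersWith performs the set intersection, so parameters the caller did not ask to optimize are dropped before their names are handed on.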
Example 3: optimizeNumericalParameters
unsigned int OptimizationTools::optimizeNumericalParameters(
DiscreteRatesAcrossSitesTreeLikelihood* tl,
const ParameterList& parameters,
OptimizationListener* listener,
unsigned int nstep,
double tolerance,
unsigned int tlEvalMax,
OutputStream* messageHandler,
OutputStream* profiler,
bool reparametrization,
unsigned int verbose,
const std::string& optMethodDeriv,
const std::string& optMethodModel)
throw (Exception)
{
DerivableSecondOrder* f = tl;
ParameterList pl = parameters;
// Shall we reparametrize the function to remove constraints?
auto_ptr<DerivableSecondOrder> frep;
if (reparametrization)
{
frep.reset(new ReparametrizationDerivableSecondOrderWrapper(f, parameters));
f = frep.get();
// Reset parameters to remove constraints:
pl = f->getParameters().subList(parameters.getParameterNames());
}
// ///////////////
// Build optimizer:
// Branch lengths
MetaOptimizerInfos* desc = new MetaOptimizerInfos();
MetaOptimizer* poptimizer = 0;
AbstractNumericalDerivative* fnum = new ThreePointsNumericalDerivative(f);
if (optMethodDeriv == OPTIMIZATION_GRADIENT)
desc->addOptimizer("Branch length parameters", new ConjugateGradientMultiDimensions(f), tl->getBranchLengthsParameters().getParameterNames(), 2, MetaOptimizerInfos::IT_TYPE_FULL);
else if (optMethodDeriv == OPTIMIZATION_NEWTON)
desc->addOptimizer("Branch length parameters", new PseudoNewtonOptimizer(f), tl->getBranchLengthsParameters().getParameterNames(), 2, MetaOptimizerInfos::IT_TYPE_FULL);
else if (optMethodDeriv == OPTIMIZATION_BFGS)
desc->addOptimizer("Branch length parameters", new BfgsMultiDimensions(f), tl->getBranchLengthsParameters().getParameterNames(), 2, MetaOptimizerInfos::IT_TYPE_FULL);
else
throw Exception("OptimizationTools::optimizeNumericalParameters. Unknown optimization method: " + optMethodDeriv);
// Other parameters
if (optMethodModel == OPTIMIZATION_BRENT)
{
ParameterList plsm = parameters.getCommonParametersWith(tl->getSubstitutionModelParameters());
desc->addOptimizer("Substitution model parameter", new SimpleMultiDimensions(f), plsm.getParameterNames(), 0, MetaOptimizerInfos::IT_TYPE_STEP);
ParameterList plrd = parameters.getCommonParametersWith(tl->getRateDistributionParameters());
desc->addOptimizer("Rate distribution parameter", new SimpleMultiDimensions(f), plrd.getParameterNames(), 0, MetaOptimizerInfos::IT_TYPE_STEP);
poptimizer = new MetaOptimizer(f, desc, nstep);
}
else if (optMethodModel == OPTIMIZATION_BFGS)
{
vector<string> vNameDer;
ParameterList plsm = parameters.getCommonParametersWith(tl->getSubstitutionModelParameters());
vNameDer = plsm.getParameterNames();
ParameterList plrd = parameters.getCommonParametersWith(tl->getRateDistributionParameters());
vector<string> vNameDer2 = plrd.getParameterNames();
vNameDer.insert(vNameDer.begin(), vNameDer2.begin(), vNameDer2.end());
fnum->setParametersToDerivate(vNameDer);
desc->addOptimizer("Rate & model distribution parameters", new BfgsMultiDimensions(fnum), vNameDer, 1, MetaOptimizerInfos::IT_TYPE_FULL);
poptimizer = new MetaOptimizer(fnum, desc, nstep);
}
else
throw Exception("OptimizationTools::optimizeNumericalParameters. Unknown optimization method: " + optMethodModel);
poptimizer->setVerbose(verbose);
poptimizer->setProfiler(profiler);
poptimizer->setMessageHandler(messageHandler);
poptimizer->setMaximumNumberOfEvaluations(tlEvalMax);
poptimizer->getStopCondition()->setTolerance(tolerance);
// Optimize TreeLikelihood function:
poptimizer->setConstraintPolicy(AutoParameter::CONSTRAINTS_AUTO);
NaNListener* nanListener = new NaNListener(poptimizer, tl);
poptimizer->addOptimizationListener(nanListener);
if (listener)
poptimizer->addOptimizationListener(listener);
poptimizer->init(pl);
poptimizer->optimize();
if (verbose > 0)
ApplicationTools::displayMessage("\n");
// We're done.
unsigned int nb = poptimizer->getNumberOfEvaluations();
delete poptimizer;
//......... (rest of the code omitted here) .........