This article collects typical usage examples of the Java method org.apache.commons.math3.special.Gamma.logGamma. If you are unsure what Gamma.logGamma does, how to call it, or want to see it used in real code, the hand-picked examples below should help. You can also explore further usage examples of its enclosing class, org.apache.commons.math3.special.Gamma.
The following shows 15 code examples of the Gamma.logGamma method, sorted by popularity by default.
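As a quick orientation before the examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of what Gamma.logGamma computes: the natural logarithm of the Gamma function, which equals log((n-1)!) at positive integers and stays finite where Gamma.gamma itself would overflow.

import org.apache.commons.math3.special.Gamma;

public class LogGammaDemo {
    public static void main(String[] args) {
        // Gamma(5) = 4! = 24, so logGamma(5.0) equals log(24)
        System.out.println(Gamma.logGamma(5.0)); // ~3.1781
        System.out.println(Math.log(24.0));      // ~3.1781
        // logGamma stays finite for arguments where Gamma itself overflows a double
        System.out.println(Gamma.logGamma(200.0)); // ~857.93, while Gamma.gamma(200.0) overflows
    }
}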
Example 1: TDistribution
import org.apache.commons.math3.special.Gamma; // import the package/class the method depends on
/**
 * Creates a t distribution.
 *
 * @param rng Random number generator.
 * @param degreesOfFreedom Degrees of freedom.
 * @param inverseCumAccuracy the maximum absolute error in inverse
 * cumulative probability estimates
 * (defaults to {@link #DEFAULT_INVERSE_ABSOLUTE_ACCURACY}).
 * @throws NotStrictlyPositiveException if {@code degreesOfFreedom <= 0}
 * @since 3.1
 */
public TDistribution(RandomGenerator rng,
                     double degreesOfFreedom,
                     double inverseCumAccuracy)
    throws NotStrictlyPositiveException {
    super(rng);
    if (degreesOfFreedom <= 0) {
        throw new NotStrictlyPositiveException(LocalizedFormats.DEGREES_OF_FREEDOM,
                                               degreesOfFreedom);
    }
    this.degreesOfFreedom = degreesOfFreedom;
    solverAbsoluteAccuracy = inverseCumAccuracy;
    final double n = degreesOfFreedom;
    final double nPlus1Over2 = (n + 1) / 2;
    factor = Gamma.logGamma(nPlus1Over2) -
             0.5 * (FastMath.log(FastMath.PI) + FastMath.log(n)) -
             Gamma.logGamma(n / 2);
}
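The precomputed factor is the log of the normalizing constant of the Student t density, Gamma((n+1)/2) / (sqrt(n*pi) * Gamma(n/2)). A minimal sketch of how a log density could follow from it (illustrative only, not necessarily the library's exact implementation):

// log f(x) = factor - ((n + 1) / 2) * log(1 + x^2 / n)
double logDensity(double x) {
    final double n = degreesOfFreedom;
    final double nPlus1Over2 = (n + 1) / 2;
    return factor - nPlus1Over2 * FastMath.log(1 + x * x / n);
}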
Example 2: computeWordLLH
import org.apache.commons.math3.special.Gamma; // import the package/class the method depends on
/**
 * Computes log likelihood for word-topic vectors (P(w|z)) according to Eq. [2] in the reference.
 * <ul>
 *   <li>T: {@code numTopics}</li>
 *   <li>W: {@code numVocabs}</li>
 *   <li>n(j, w): <i>j</i>th topic's number of assignments to <i>w</i>th vocabulary</li>
 * </ul>
 * @return a portion of log likelihood computed from the given word-topic vectors
 */
double computeWordLLH(final Collection<int[]> wordTopicCounts, final int[] wordTopicCountsSummary) {
  double result = numTopics * (Gamma.logGamma(numVocabs * beta) - numVocabs * Gamma.logGamma(beta));
  for (final int[] wordTopicCount : wordTopicCounts) {
    // For computing log-likelihood, we need only the values. Please refer to SparseArrayCodec.
    for (int j = 1; j < wordTopicCount.length; j += 2) {
      result += Gamma.logGamma(wordTopicCount[j] + beta);
    }
    // handle the case of zero values separately
    result += logGammaBeta * (numTopics - wordTopicCount.length / 2);
  }
  for (int j = 1; j < wordTopicCountsSummary.length; j += 2) {
    result -= Gamma.logGamma(wordTopicCountsSummary[j] + numVocabs * beta);
  }
  // handle the case of zero values separately
  result -= Gamma.logGamma(numVocabs * beta) * (numTopics - wordTopicCountsSummary.length / 2);
  return result;
}
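The count arrays here are sparse (index, value) pairs, which is why the loops start at j = 1 and step by 2, and why topics that never appear contribute the cached logGammaBeta term instead. For reference, a dense-form sketch of the same Eq. [2] computation (illustrative only, assuming wordTopicCounts[w][j] holds the count of word w assigned to topic j):

double computeWordLLHDense(final int[][] wordTopicCounts, final int numTopics,
                           final int numVocabs, final double beta) {
  double result = numTopics * (Gamma.logGamma(numVocabs * beta) - numVocabs * Gamma.logGamma(beta));
  final long[] topicTotals = new long[numTopics];
  for (int w = 0; w < numVocabs; w++) {
    for (int j = 0; j < numTopics; j++) {
      result += Gamma.logGamma(wordTopicCounts[w][j] + beta);   // n(j, w) + beta
      topicTotals[j] += wordTopicCounts[w][j];
    }
  }
  for (int j = 0; j < numTopics; j++) {
    result -= Gamma.logGamma(topicTotals[j] + numVocabs * beta); // n(j, .) + W * beta
  }
  return result;
}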
Example 3: testHetLogLikelihoodMinorFractionNearZero
import org.apache.commons.math3.special.Gamma; // import the package/class the method depends on
@Test
public void testHetLogLikelihoodMinorFractionNearZero() {
    final double pi = 0.01; // pi is just a prefactor so we don't need to test it thoroughly here
    for (final double f : Arrays.asList(1e-6, 1e-7, 1e-8)) {
        for (final double mean : Arrays.asList(0.9, 1.0, 1.1)) {
            for (final double variance : Arrays.asList(0.01, 0.005, 0.001)) {
                final double alpha = mean * mean / variance;
                final double beta = mean / variance;
                final AlleleFractionGlobalParameters parameters = new AlleleFractionGlobalParameters(mean, variance, pi);
                for (final int a : Arrays.asList(1, 2, 3)) {  // alt count
                    for (final int r : Arrays.asList(50, 100, 200)) {  // ref count
                        final AllelicCount count = new AllelicCount(DUMMY, r, a);
                        final double actual = AlleleFractionLikelihoods.hetLogLikelihood(parameters, f, count, AlleleFractionIndicator.ALT_MINOR);
                        final double expected = a * log(beta) + Gamma.logGamma(alpha - a) - Gamma.logGamma(alpha)
                                + log((1 - pi) / 2) + a * log(f / (1 - f));
                        Assert.assertEquals(actual, expected, 1e-3);
                    }
                }
            }
        }
    }
}
Example 4: testHetLogLikelihoodMinorFractionNearOne
import org.apache.commons.math3.special.Gamma; // import the package/class the method depends on
@Test
public void testHetLogLikelihoodMinorFractionNearOne() {
    final double pi = 0.01; // pi is just a prefactor so we don't need to test it thoroughly here
    for (final double f : Arrays.asList(1 - 1e-6, 1 - 1e-7, 1 - 1e-8)) {
        for (final double mean : Arrays.asList(0.9, 1.0, 1.1)) {
            for (final double variance : Arrays.asList(0.01, 0.005, 0.001)) {
                final double alpha = mean * mean / variance;
                final double beta = mean / variance;
                final AlleleFractionGlobalParameters parameters = new AlleleFractionGlobalParameters(mean, variance, pi);
                for (final int a : Arrays.asList(1, 10, 20)) {  // alt count
                    for (final int r : Arrays.asList(1, 10, 20)) {  // ref count
                        final AllelicCount count = new AllelicCount(DUMMY, r, a);
                        final double actual = AlleleFractionLikelihoods.hetLogLikelihood(parameters, f, count, AlleleFractionIndicator.ALT_MINOR);
                        final double expected = -r * log(beta) + Gamma.logGamma(alpha + r) - Gamma.logGamma(alpha)
                                + log((1 - pi) / 2) - r * log(f / (1 - f));
                        Assert.assertEquals(actual, expected, 1e-4);
                    }
                }
            }
        }
    }
}
Example 5: loggamma
import org.apache.commons.math3.special.Gamma; // import the package/class the method depends on
public ReflexValue loggamma(List<ReflexValue> params) {
    if (params.size() != 1) {
        throw new ReflexException(-1, "loggamma needs one number parameter");
    }
    if (!params.get(0).isNumber()) {
        throw new ReflexException(-1, "loggamma needs one number parameter");
    }
    double value = params.get(0).asDouble();
    return new ReflexValue(Gamma.logGamma(value));
}
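A hypothetical call of the wrapper above, assuming ReflexValue can wrap a plain double as in the return statement:

List<ReflexValue> params = Collections.singletonList(new ReflexValue(10.0));
ReflexValue result = loggamma(params);
System.out.println(result.asDouble()); // logGamma(10) = log(9!) ~ 12.8018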
Example 6: betabinPMFG
import org.apache.commons.math3.special.Gamma; // import the package/class the method depends on
private static double betabinPMFG(int n, int k, double a, double b) {
    double b1 = Gamma.logGamma(n + 1);
    double b2 = Gamma.logGamma(k + 1) + Gamma.logGamma(n - k + 1);
    double b3 = Gamma.logGamma(k + a) + Gamma.logGamma(n - k + b);
    double b4 = Gamma.logGamma(n + a + b);
    double b5 = Gamma.logGamma(a + b);
    double b6 = Gamma.logGamma(a) + Gamma.logGamma(b);
    double v = b1 - b2 + b3 - b4 + b5 - b6;
    return Math.exp(v);
}
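The six log-gamma terms assemble the beta-binomial PMF, P(K = k) = C(n, k) * B(k + a, n - k + b) / B(a, b), entirely in log space before the final exp. A quick sanity check one could run against the method above (illustrative, assuming it is accessible from the calling scope):

double total = 0.0;
final int n = 20;
final double a = 2.0, b = 3.0;
for (int k = 0; k <= n; k++) {
    total += betabinPMFG(n, k, a, b);
}
System.out.println(total); // ~1.0, since the PMF sums to one over k = 0..n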
Example 7: LDAStatCalculator
import org.apache.commons.math3.special.Gamma; // import the package/class the method depends on
@Inject
private LDAStatCalculator(@Parameter(Alpha.class) final double alpha,
                          @Parameter(Beta.class) final double beta,
                          @Parameter(NumTopics.class) final int numTopics,
                          @Parameter(NumVocabs.class) final int numVocabs) {
  this.alpha = alpha;
  this.beta = beta;
  this.numTopics = numTopics;
  this.numVocabs = numVocabs;
  // cache logGamma of the hyper-parameters; reused for the zero-count terms in the LLH computations
  this.logGammaAlpha = Gamma.logGamma(alpha);
  this.logGammaBeta = Gamma.logGamma(beta);
}
Example 8: computeDocLLH
import org.apache.commons.math3.special.Gamma; // import the package/class the method depends on
/**
 * Computes log likelihood for documents (P(z)) according to Eq. [3] in the reference.
 * <ul>
 *   <li>T: {@code numTopics}</li>
 *   <li>D: Total number of documents</li>
 *   <li>n(j, d): <i>j</i>th topic's number of assignments to <i>d</i>th document</li>
 * </ul>
 * @param workload a collection of documents assigned to this trainer
 * @return a portion of log likelihood computed from the given workload
 */
double computeDocLLH(final Collection<Document> workload) {
  double result = workload.size() * (Gamma.logGamma(numTopics * alpha) - numTopics * Gamma.logGamma(alpha));
  for (final Document doc : workload) {
    for (int j = 0; j < numTopics; j++) {
      final int topicCount = doc.getTopicCount(j);
      if (topicCount < 0) {
        doc.setTopicCount(j, 0);
      }
      result += topicCount <= 0 ? logGammaAlpha : Gamma.logGamma(topicCount + alpha);
    }
    result -= Gamma.logGamma(doc.size() + numTopics * alpha);
  }
  return result;
}
Example 9: logFactorial
import org.apache.commons.math3.special.Gamma; // import the package/class the method depends on
/**
 * Return the log of the factorial for the given real number, using the gamma function
 *
 * @param k the (real-valued) argument
 * @return the log factorial
 */
public static double logFactorial(double k)
{
    if (k <= 1)
        return 0;
    return Gamma.logGamma(k + 1);
}
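Because Gamma(k + 1) = k! for integer k, the call agrees with the ordinary log factorial there, and the k <= 1 guard pins logFactorial(0) and logFactorial(1) to exactly 0. A small illustrative check:

System.out.println(logFactorial(5));   // ~4.7875
System.out.println(Math.log(120.0));   // ~4.7875, since 5! = 120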
Example 10: NonCentralChiSquaredDistribution
import org.apache.commons.math3.special.Gamma; // import the package/class the method depends on
/**
 * Creates an instance.
 *
 * @param degrees The number of degrees of freedom, must be positive
 * @param nonCentrality The non-centrality parameter, must not be negative
 */
public NonCentralChiSquaredDistribution(double degrees, double nonCentrality) {
    ArgChecker.isTrue(degrees > 0, "degrees of freedom must be > 0, have " + degrees);
    ArgChecker.isTrue(nonCentrality >= 0, "non-centrality must be >= 0, have " + nonCentrality);
    _dofOverTwo = degrees / 2.0;
    _lambdaOverTwo = nonCentrality / 2.0;
    _k = (int) Math.round(_lambdaOverTwo);
    if (_lambdaOverTwo == 0) {
        _pStart = 0.0;
    } else {
        double logP = -_lambdaOverTwo + _k * Math.log(_lambdaOverTwo) - Gamma.logGamma(_k + 1);
        _pStart = Math.exp(logP);
    }
}
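Here _pStart is the Poisson probability mass at k = round(lambda / 2) with mean lambda / 2, i.e. the starting weight of the Poisson mixture of central chi-squared distributions that defines the non-central chi-squared. An illustrative cross-check against the library's own Poisson distribution (not part of the original class):

import org.apache.commons.math3.distribution.PoissonDistribution;
import org.apache.commons.math3.special.Gamma;

double lambdaOverTwo = 3.7;
int k = (int) Math.round(lambdaOverTwo);
double pStart = Math.exp(-lambdaOverTwo + k * Math.log(lambdaOverTwo) - Gamma.logGamma(k + 1));
double reference = new PoissonDistribution(lambdaOverTwo).probability(k);
System.out.println(pStart + " vs " + reference); // agree up to floating-point rounding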
Example 11: recomputeZ
import org.apache.commons.math3.special.Gamma; // import the package/class the method depends on
/** Recompute the normalization factor. */
private void recomputeZ() {
    if (Double.isNaN(z)) {
        z = Gamma.logGamma(alpha) + Gamma.logGamma(beta) - Gamma.logGamma(alpha + beta);
    }
}
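The expression logGamma(alpha) + logGamma(beta) - logGamma(alpha + beta) is exactly the log of the Beta function B(alpha, beta), which Commons Math also exposes directly; an illustrative equivalence check:

import org.apache.commons.math3.special.Beta;
import org.apache.commons.math3.special.Gamma;

double alpha = 2.5, beta = 4.0;
double viaLogGamma = Gamma.logGamma(alpha) + Gamma.logGamma(beta) - Gamma.logGamma(alpha + beta);
double viaLogBeta = Beta.logBeta(alpha, beta);
System.out.println(viaLogGamma + " vs " + viaLogBeta); // both ~-3.586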
Example 12: log10DirichletNormalization
import org.apache.commons.math3.special.Gamma; // import the package/class the method depends on
public static double log10DirichletNormalization(final double[] dirichletParams) {
    final double logNumerator = Gamma.logGamma(MathUtils.sum(dirichletParams));
    final double logDenominator = MathUtils.sum(MathUtils.applyToArray(dirichletParams, Gamma::logGamma));
    return MathUtils.logToLog10(logNumerator - logDenominator);
}
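Gamma.logGamma builds the log of the Dirichlet normalization constant Gamma(sum alpha_i) / prod Gamma(alpha_i), which MathUtils.logToLog10 then converts from natural log to base 10. A worked check without the project-specific MathUtils helpers (illustrative): for parameters {1, 1, 1} the constant is Gamma(3) / Gamma(1)^3 = 2, so the method returns log10(2) ≈ 0.3010.

double[] dirichletParams = {1.0, 1.0, 1.0};
double sum = 0.0;
double logDenominator = 0.0;
for (final double p : dirichletParams) {
    sum += p;
    logDenominator += Gamma.logGamma(p);
}
final double logNumerator = Gamma.logGamma(sum);
System.out.println((logNumerator - logDenominator) / Math.log(10)); // ~0.30103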
Example 13: optimizeBetaMinka
import org.apache.commons.math3.special.Gamma; // import the package/class the method depends on
/**
 * Optimize beta using Minka's Fixed Point Iteration
 */
public void optimizeBetaMinka() {
    // Optimize beta
    final int nTopics = Topic.nTopics;
    final int nBackTopics = Topic.nBackTopics;
    final double[] residual = new double[nTopics];
    final double W = (double) nTokensCorpus;
    do {
        // Get necessary sums
        final double[] topSum = new double[nTopics];
        final double[] bottomSum = new double[nTopics];
        // minus parts
        for (int k = 0; k < nTopics; k++) {
            topSum[k] -= W * digamma(beta[k]);
            bottomSum[k] -= digamma(W * beta[k]);
        }
        // Get summed topic counts
        @SuppressWarnings("unchecked")
        final HashMultiset<Integer>[] sums = (HashMultiset<Integer>[]) new HashMultiset[nTopics];
        for (int k = 0; k < nTopics; k++)
            sums[k] = HashMultiset.create();
        for (int b = 0; b < nBackTopics; b++)
            sums[Topic.BACKGROUND[b]].addAll(btopic[b].getTokenMultiSet());
        for (int ci = 0; ci < nclusters; ci++) {
            sums[Topic.CONTENT].addAll(ctopic[ci].getTokenMultiSet());
            for (int di = 0; di < corpus.getCluster(ci).ndocs(); di++) {
                sums[Topic.DOCUMENT].addAll(dtopic[ci].get(di)
                        .getTokenMultiSet());
                // for (int si = 0; si < corpus.getCluster(ci).getDoc(di)
                //         .nsents(); si++)
                //     sums[Topic.SENTENCE].addAll(stopic[ci].get(di)[si]
                //             .getTokenMultiSet());
            }
        }
        // topics top sum and total summed topic counts
        final double[] sumTotals = new double[nTopics];
        for (int k = 0; k < nTopics; k++) {
            for (final Integer ti : sums[k].elementSet()) {
                topSum[k] += Gamma.logGamma(sums[k].count(ti) + beta[k]);
                sumTotals[k] += sums[k].count(ti);
            }
        }
        // topics bottom sum
        for (int k = 0; k < nTopics; k++)
            bottomSum[k] += Gamma.logGamma(sumTotals[k] + W * beta[k]);
        // Estimate beta_k
        for (int k = 0; k < nTopics; k++) {
            residual[k] = beta[k];
            beta[k] = (beta[k] * topSum[k]) / (W * bottomSum[k]);
            residual[k] -= beta[k];
        }
    } while (StatsUtil.norm(residual) > HYPER_BETA_TOL);
}
Example 14: logLikelihood
import org.apache.commons.math3.special.Gamma; // import the package/class the method depends on
/**
 * Returns the log likelihood of (c | t, alpha, beta)
 * <p/>
 * <c>\mathcal L = \left(\frac{\beta^\alpha}{\Gamma(\alpha)}\right)^L \prod_\ell \frac{t_\ell^{C_\ell}}{\Gamma(C_\ell+1)} \frac{\Gamma(C_\ell + \alpha)}{(t_\ell + \beta)^{C_\ell + \alpha}}</c>
 *
 * @param alpha Alpha parameter of gamma distribution
 * @param beta Beta parameter of gamma distribution
 * @param c Counts
 * @param t Branch lengths
 * @return log-likelihood
 */
@VisibleForTesting
static double logLikelihood(final double alpha, final double beta,
                            final double[] c, final double[] t) {
    Preconditions.checkNotNull(c);
    Preconditions.checkNotNull(t);
    Preconditions.checkArgument(c.length == t.length,
            "Non-matching array lengths: %s vs %s", c.length, t.length);
    double result = 0.0;
    final int n = c.length;
    // \mathcal L = \left(\frac{\beta^\alpha}{\Gamma(\alpha)}\right)^L \prod_\ell \frac{t_\ell^{C_\ell}}{\Gamma(C_\ell+1)} \frac{\Gamma(C_\ell + \alpha)}{(t_\ell + \beta)^{C_\ell + \alpha}}
    // Over sites
    for (int i = 0; i < n; i++) {
        final double ci = c[i], ti = t[i];
        // Special case for zero branch length (no coverage)
        if (ti <= 1e-8) {
            continue;
        }
        // Per Wolfram Alpha,
        // log(b^a×(t^c/(Gamma(c+1)))/(Gamma(a))×(Gamma(c+a))/(t+b)^(c+a)) simplifies to
        // (-a-c) log(b+t)+a log(b)+log(Gamma(a+c))-log(Gamma(a))+c log(t)-log(Gamma(c+1))
        double x = (-alpha - ci) * Math.log(beta + ti) +
                alpha * Math.log(beta) +
                Gamma.logGamma(alpha + ci) -
                Gamma.logGamma(alpha) +
                ci * Math.log(ti) -
                Gamma.logGamma(ci + 1);
        result += x;
    }
    logger.log(Level.FINE, "alpha={0} beta={1} LL={2}",
            new Object[]{alpha, beta, result});
    return result;
}
Example 15: recomputeZ
import org.apache.commons.math3.special.Gamma; // import the package/class the method depends on
/** Recompute the normalization factor, the log of the Beta function B(alpha, beta). */
private void recomputeZ() {
    if (Double.isNaN(z)) {
        z = Gamma.logGamma(alpha) + Gamma.logGamma(beta) - Gamma.logGamma(alpha + beta);
    }
}