当前位置: 首页>>代码示例>>Java>>正文


Java Vector.norm1方法代码示例

本文整理汇总了Java中gov.sandia.cognition.math.matrix.Vector.norm1方法的典型用法代码示例。如果您正苦于以下问题:Java Vector.norm1方法的具体用法?Java Vector.norm1怎么用?Java Vector.norm1使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在gov.sandia.cognition.math.matrix.Vector的用法示例。


在下文中一共展示了Vector.norm1方法的8个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: evaluatePerformance

import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
@Override
public Double evaluatePerformance(
    Collection<? extends TargetEstimatePair<? extends Vector, ? extends Vector>> data )
{
    // Single pass: accumulate the weighted L1 errors and the total weight.
    double totalWeight = 0.0;
    double weightedErrorSum = 0.0;
    for (TargetEstimatePair<? extends Vector, ? extends Vector> pair : data)
    {
        final double weight = DatasetUtil.getWeight(pair);
        final Vector difference = pair.getTarget().minus(pair.getEstimate());
        weightedErrorSum += weight * difference.norm1();
        totalWeight += weight;
    }

    // Guard against a zero total weight (e.g. empty data), which would
    // otherwise divide by zero; report zero error in that case.
    return (totalWeight != 0.0) ? (weightedErrorSum / totalWeight) : 0.0;
}
 
开发者ID:algorithmfoundry,项目名称:Foundry,代码行数:27,代码来源:MeanL1CostFunction.java

示例2: computeForwardProbabilities

import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Computes one step of the recursive solution to the forward
 * probabilities (alphas) of the HMM.
 * @param alpha
 * Previous alpha value.
 * @param b
 * Current observation-emission likelihood.
 * @param normalize
 * True to normalize the alphas, false to leave them unnormalized.
 * @return
 * Next alpha with the associated weighting (1.0 if unnormalized).
 */
protected WeightedValue<Vector> computeForwardProbabilities(
    Vector alpha,
    Vector b,
    boolean normalize )
{
    // alpha' = (A * alpha) .* b : the unnormalized forward recursion step.
    final Vector nextAlpha = this.getTransitionProbability().times( alpha );
    nextAlpha.dotTimesEquals(b);

    double scale = 1.0;
    if( normalize )
    {
        // Rescale so the alphas sum to one; the scale factor is returned
        // as the weight so the caller can recover the raw magnitude.
        scale = 1.0 / nextAlpha.norm1();
        nextAlpha.scaleEquals(scale);
    }

    return new DefaultWeightedValue<Vector>( nextAlpha, scale );
}
 
开发者ID:algorithmfoundry,项目名称:Foundry,代码行数:35,代码来源:HiddenMarkovModel.java

示例3: computeLocalWeights

import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
@Override
public Vector computeLocalWeights(
    final Vector counts)
{
    // Start from the base (log) local weighting computed by the parent.
    final Vector result = super.computeLocalWeights(counts);

    final int dimensionality = result.getDimensionality();
    if (dimensionality != 0)
    {
        // Normalize by log(1 + average count). Since term counts are
        // non-negative, norm1 is their sum.
        final double average = counts.norm1() / dimensionality;
        final double divisor = Math.log(1.0 + average);

        // Guard against an all-zero counts vector, where the average is
        // 0 and the divisor is log(1.0) == 0.0; scaling by 1.0/0.0 would
        // fill the result with Infinity/NaN instead of leaving the
        // (zero) base weighting intact.
        if (divisor != 0.0)
        {
            result.scaleEquals(1.0 / divisor);
        }
    }

    return result;
}
 
开发者ID:algorithmfoundry,项目名称:Foundry,代码行数:18,代码来源:NormalizedLogLocalTermWeighter.java

示例4: computeObservationLogLikelihood

import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Computes the log-likelihood of the observation sequence, given the
 * current HMM's parameterization.  This is the answer to Rabiner's
 * "Three Basic Problems for HMMs, Problem 1: Probability Evaluation".
 * @param observations
 * Observations to consider.
 * @return
 * Log-likelihood of the given observation sequence.
 */
public double computeObservationLogLikelihood(
    Collection<? extends ObservationType> observations )
{

    final int k = this.getNumStates();
    // b is reused each step to hold per-state emission likelihoods.
    Vector b = VectorFactory.getDefault().createVector(k);
    // alpha starts at the initial state distribution (cloned so the
    // model's own vector is not mutated by dotTimesEquals/scaleEquals).
    Vector alpha = this.getInitialProbability().clone();
    Matrix A = this.getTransitionProbability();
    int index = 0;
    double logLikelihood = 0.0;
    for( ObservationType observation : observations )
    {
        // Propagate alpha through the transition matrix on every step
        // after the first (the initial distribution is used as-is).
        if( index > 0 )
        {
            alpha = A.times( alpha );
        }
        this.computeObservationLikelihoods(observation, b);
        // Element-wise multiply by the emission likelihoods (forward
        // algorithm update).
        alpha.dotTimesEquals(b);
        // Normalize alpha each step and accumulate the log of the
        // normalizer; summing log-weights avoids numerical underflow of
        // the raw sequence likelihood on long sequences.
        final double weight = alpha.norm1();
        alpha.scaleEquals(1.0/weight);
        logLikelihood += Math.log(weight);
        index++;
    }

    return logLikelihood;

}
 
开发者ID:algorithmfoundry,项目名称:Foundry,代码行数:37,代码来源:HiddenMarkovModel.java

示例5: computeLocalWeights

import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
public Vector computeLocalWeights(
    final Vector counts)
{
    // Term-frequency weighting: divide each count by the total count.
    // Counts are non-negative, so their sum equals their 1-norm.
    final Vector weights = this.vectorFactory.copyVector(counts);
    final double total = counts.norm1();

    if (total != 0.0)
    {
        weights.scaleEquals(1.0 / total);
    }

    return weights;
}
 
开发者ID:algorithmfoundry,项目名称:Foundry,代码行数:15,代码来源:TermFrequencyLocalTermWeighter.java

示例6: step

import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
@Override
protected boolean step()
{
    // Build one sampling task per document. Each task records the
    // document's index and its starting occurrence offset: the running
    // sum of word counts (1-norms) of all preceding documents, used as a
    // workspace offset so the sampler need not recompute it.
    final ArrayList<DocumentSampleTask> tasks =
        new ArrayList<DocumentSampleTask>(this.documentCount);
    int documentIndex = 0;
    int occurrenceOffset = 0;
    for (Vectorizable document : this.data)
    {
        final Vector counts = document.convertToVector();
        tasks.add(new DocumentSampleTask(counts, documentIndex, occurrenceOffset));
        documentIndex++;
        // Counts are integer-valued; cast matches the original implicit
        // narrowing of the compound assignment.
        occurrenceOffset += (int) counts.norm1();
    }

    try
    {
        ParallelUtil.executeInParallel(tasks, this.getThreadPool());
    }
    catch (Exception ex)
    {
        throw new RuntimeException(ex);
    }

    // After burn-in, read out the model parameters once every
    // iterationsPerSample iterations.
    if (this.iteration >= this.burnInIterations
        && (this.iteration - this.burnInIterations)
            % this.iterationsPerSample == 0)
    {
        this.readParameters();
    }

    return true;
}
 
开发者ID:algorithmfoundry,项目名称:Foundry,代码行数:37,代码来源:ParallelLatentDirichletAllocationVectorGibbsSampler.java

示例7: logEvaluate

import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
@Override
public double logEvaluate(
    final Vector input)
{

    final int N = this.getInputDimensionality();
    input.assertDimensionalityEquals( N );

    // Normalize the parameter vector into probabilities via its 1-norm.
    final Vector parameters = this.getParameters();
    final double parameterSum = parameters.norm1();

    // log multinomial pmf = log(n! / prod(xi!)) + sum(xi * log(pi))
    double logCoefficient = MathUtil.logFactorial( this.getNumTrials() );
    double logProbability = 0.0;
    int observedTotal = 0;
    for (int i = 0; i < N; i++)
    {
        final int xi = (int) input.getElement(i);
        observedTotal += xi;
        final double pi = parameters.getElement(i) / parameterSum;
        if (pi < 0.0)
        {
            throw new IllegalArgumentException( "pi < 0.0" + parameters );
        }
        else if (pi == 0.0)
        {
            // Zero probability with a nonzero count is impossible: the
            // overall probability is 0.0, so return log(0) = -Infinity.
            if (xi != 0)
            {
                return Math.log(0.0);
            }
        }
        else if (xi != 0)
        {
            logCoefficient -= MathUtil.logFactorial(xi);
            logProbability += xi * Math.log(pi);
        }
    }

    if (observedTotal != this.getNumTrials())
    {
        throw new IllegalArgumentException(
            "Integer input sum != num trials" );
    }

    return logCoefficient + logProbability;
}
 
开发者ID:algorithmfoundry,项目名称:Foundry,代码行数:50,代码来源:MultinomialDistribution.java

示例8: computeEquivalentSampleSize

import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Computes the equivalent sample size of the given Dirichlet belief:
 * the sum of its parameters divided by the number of trials.
 */
public double computeEquivalentSampleSize(
    DirichletDistribution belief)
{
    final Vector parameters = belief.getParameters();
    final double parameterSum = parameters.norm1();
    return parameterSum / this.getNumTrials();
}
 
开发者ID:algorithmfoundry,项目名称:Foundry,代码行数:7,代码来源:MultinomialBayesianEstimator.java


注:本文中的gov.sandia.cognition.math.matrix.Vector.norm1方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。