本文整理汇总了Java中gov.sandia.cognition.math.matrix.Vector.dotTimesEquals方法的典型用法代码示例。如果您正苦于以下问题:Java Vector.dotTimesEquals方法的具体用法?Java Vector.dotTimesEquals怎么用?Java Vector.dotTimesEquals使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类gov.sandia.cognition.math.matrix.Vector的用法示例。
在下文中一共展示了Vector.dotTimesEquals方法的7个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: evaluate
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Evaluates the weighted Euclidean distance between two vectors:
 * sqrt(sum_i w_i * (x_i - y_i)^2).
 *
 * @param first
 *      The first vector.
 * @param second
 *      The second vector.
 * @return
 *      The weighted Euclidean distance between the two vectors.
 */
@Override
public double evaluate(
    final Vectorizable first,
    final Vectorizable second)
{
    final Vector x = first.convertToVector();
    final Vector y = second.convertToVector();

    // Element-wise difference, squared in place via the Hadamard product.
    final Vector squaredDifference = x.minus(y);
    squaredDifference.dotTimesEquals(squaredDifference);

    // Weighted sum of squared differences, then the square root.
    return Math.sqrt(this.weights.dotProduct(squaredDifference));
}
示例2: computeForwardProbabilities
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Computes one step of the recursive solution to the forward (alpha)
 * probabilities of the HMM: alpha' = b .* (A * alpha).
 *
 * @param alpha
 *      Previous alpha value.
 * @param b
 *      Current observation-emission likelihood.
 * @param normalize
 *      True to normalize the alphas, false to leave them unnormalized.
 * @return
 *      Next alpha with its normalization weight (1.0 when unnormalized).
 */
protected WeightedValue<Vector> computeForwardProbabilities(
    Vector alpha,
    Vector b,
    boolean normalize )
{
    // Propagate through the transition matrix, then fold in the
    // observation-emission likelihoods element-wise.
    final Vector nextAlpha = this.getTransitionProbability().times(alpha);
    nextAlpha.dotTimesEquals(b);

    double weight = 1.0;
    if (normalize)
    {
        // Scale so the alphas sum to one; the weight is the inverse mass.
        weight = 1.0 / nextAlpha.norm1();
        nextAlpha.scaleEquals(weight);
    }
    return new DefaultWeightedValue<Vector>(nextAlpha, weight);
}
示例3: computeViterbiRecursion
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Computes one Viterbi recursion step for a given "delta" and "b".
 *
 * @param delta
 *      Previous value of the Viterbi recursion.
 * @param bn
 *      Current observation likelihood.
 * @return
 *      Updated "delta" and state backpointers.
 */
protected Pair<Vector,int[]> computeViterbiRecursion(
    Vector delta,
    Vector bn )
{
    final int numStates = delta.getDimensionality();
    final Vector nextDelta =
        VectorFactory.getDefault().createVector(numStates);
    final int[] backPointers = new int[numStates];

    // For each destination state, record the most likely predecessor
    // and the corresponding path probability.
    for (int state = 0; state < numStates; state++)
    {
        final WeightedValue<Integer> best =
            this.findMostLikelyState(state, delta);
        backPointers[state] = best.getValue();
        nextDelta.setElement(state, best.getWeight());
    }

    // Fold in the current observation likelihoods, then renormalize
    // to keep the recursion numerically stable.
    nextDelta.dotTimesEquals(bn);
    nextDelta.scaleEquals(1.0 / nextDelta.norm1());
    return DefaultPair.create(nextDelta, backPointers);
}
示例4: computeObservationLogLikelihood
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Computes the log-likelihood of the observation sequence, given the
 * current HMM's parameterization. This is the answer to Rabiner's
 * "Three Basic Problems for HMMs, Problem 1: Probability Evaluation".
 *
 * @param observations
 *      Observations to consider.
 * @return
 *      Log-likelihood of the given observation sequence.
 */
public double computeObservationLogLikelihood(
    Collection<? extends ObservationType> observations )
{
    final int numStates = this.getNumStates();
    final Vector b = VectorFactory.getDefault().createVector(numStates);
    final Matrix A = this.getTransitionProbability();

    Vector alpha = this.getInitialProbability().clone();
    double logLikelihood = 0.0;
    boolean firstObservation = true;
    for (ObservationType observation : observations)
    {
        // The initial probabilities already cover the first step;
        // afterwards propagate alpha through the transition matrix.
        if (!firstObservation)
        {
            alpha = A.times(alpha);
        }
        firstObservation = false;

        // Fold in the emission likelihoods, renormalize for stability,
        // and accumulate the log of the normalization mass.
        this.computeObservationLikelihoods(observation, b);
        alpha.dotTimesEquals(b);
        final double mass = alpha.norm1();
        alpha.scaleEquals(1.0 / mass);
        logLikelihood += Math.log(mass);
    }
    return logLikelihood;
}
示例5: dotTimes
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Element-wise (Hadamard) product of this vector and {@code v}.
 *
 * @param v
 *      The vector to multiply element-wise with this one.
 * @return
 *      A new vector holding the element-wise product.
 */
@Override
final public Vector dotTimes(
    final Vector v)
{
    // Dispatch on v rather than this: cloning v keeps a sparse result
    // when v is sparse (sparse .* dense stays sparse), whereas the
    // Foundry's original this.clone().dotTimesEquals(v) returned a
    // dense vector type storing sparse data.
    final Vector product = v.clone();
    product.dotTimesEquals(this);
    return product;
}
示例6: evaluate
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Computes the weight vector for a document by applying, in order, the
 * local weighting, the global weighting, and the normalization — each
 * only when the corresponding component is configured.
 *
 * @param document
 *      The document's term vector.
 * @return
 *      The weighted (and possibly normalized) term vector.
 */
public Vector evaluate(
    final Vector document)
{
    // Start from the local term weights, or a plain copy of the
    // document when no local weighting scheme is configured.
    final Vector weights = (this.localWeighter == null)
        ? document.clone()
        : this.localWeighter.computeLocalWeights(document);

    // Apply global term weights element-wise, when available.
    Vector globalWeights = null;
    if (this.globalWeighter != null)
    {
        globalWeights = this.globalWeighter.getGlobalWeights();
        if (globalWeights != null)
        {
            weights.dotTimesEquals(globalWeights);
        }
    }
    // else - Don't apply global weights.

    if (this.normalizer != null)
    {
        this.normalizer.normalizeWeights(weights, document, globalWeights);
    }
    // else - Don't apply normalization.
    return weights;
}
示例7: computeViterbiRecursion
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Parallelized Viterbi recursion: fans the per-state "most likely
 * predecessor" searches out to a thread pool, then combines the
 * results exactly as the serial version does.
 */
@Override
protected Pair<Vector, int[]> computeViterbiRecursion(
    Vector delta,
    Vector bn )
{
    final int k = this.getNumStates();
    // Lazily create and then reuse the task list across calls so each
    // recursion does not reallocate one ViterbiTask per state.
    if( this.viterbiTasks == null )
    {
        this.viterbiTasks = new ArrayList<ViterbiTask>( k );
    }
    this.viterbiTasks.ensureCapacity(k);
    // Shrink or grow the cached task list to exactly k entries.
    while( this.viterbiTasks.size() > k )
    {
        this.viterbiTasks.remove(
            this.viterbiTasks.size()-1 );
    }
    while( this.viterbiTasks.size() < k )
    {
        this.viterbiTasks.add( new ViterbiTask() );
    }
    // Point every task at the current delta and its destination state.
    // NOTE(review): all tasks share the same delta reference; this
    // presumes the tasks only read it — confirm ViterbiTask's contract.
    for( int i = 0; i < k; i++ )
    {
        final ViterbiTask task = this.viterbiTasks.get(i);
        task.destinationState = i;
        task.delta = delta;
    }
    ArrayList<WeightedValue<Integer>> results;
    try
    {
        results = ParallelUtil.executeInParallel(
            this.viterbiTasks, this.getThreadPool() );
    }
    catch (Exception e)
    {
        // Wrap checked exceptions from the pool; callers don't expect any.
        throw new RuntimeException( e );
    }
    // Collect the per-state backpointers and path probabilities; results
    // are indexed by destination state, matching the task order above.
    int[] psis = new int[ k ];
    Vector nextDelta = VectorFactory.getDefault().createVector(k);
    for( int i = 0; i < k; i++ )
    {
        WeightedValue<Integer> value = results.get(i);
        psis[i] = value.getValue();
        nextDelta.setElement(i, value.getWeight() );
    }
    // Fold in the observation likelihoods and renormalize, exactly as
    // in the serial recursion.
    nextDelta.dotTimesEquals(bn);
    nextDelta.scaleEquals( 1.0/nextDelta.norm1() );
    return DefaultPair.create( nextDelta, psis );
}