本文整理汇总了Java中gov.sandia.cognition.math.matrix.Vector.plus方法的典型用法代码示例。如果您正苦于以下问题:Java Vector.plus方法的具体用法?Java Vector.plus怎么用?Java Vector.plus使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类gov.sandia.cognition.math.matrix.Vector的用法示例。
在下文中一共展示了Vector.plus方法的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: iterate
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
@Override
final protected double iterate()
{
// One iteration of preconditioned conjugate gradient (PCG) for A x = rhs.
// Reads/updates instance state: A (preconditioned operator), d (search
// direction), x (current solution), rhs, residual, delta, iterationCounter.
Vector q = A.evaluate(d);
// Step length along the current search direction.
double alpha = delta / (d.dotProduct(q));
x.plusEquals(d.scale(alpha));
// Every 50th iteration recompute the residual from scratch to limit
// accumulated floating-point drift; otherwise update it incrementally.
if (((iterationCounter + 1) % 50) == 0)
{
residual = rhs.minus(A.evaluate(x));
}
else
{
residual = residual.minus(q.scale(alpha));
}
// Apply the preconditioner to the new residual.
Vector s = A.precondition(residual);
double delta_old = delta;
delta = residual.dotProduct(s);
// beta = delta_new / delta_old: standard PCG update that keeps the new
// search direction conjugate to the previous ones.
double beta = delta / delta_old;
d = s.plus(d.scale(beta));
return delta;
}
开发者ID:algorithmfoundry,项目名称:Foundry,代码行数:23,代码来源:ConjugateGradientWithPreconditionerMatrixSolver.java
示例2: testDifferentiate
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Verifies that the Jacobian returned by differentiate() is consistent
 * with a first-order (linear) approximation of the function near x1.
 *
 * @param function differentiable function under test
 * @param x1 point at which the derivative is validated
 * @param random randomness source for the small perturbation
 */
public static void testDifferentiate(
    DifferentiableVectorFunction function,
    Vector x1,
    Random random)
{
    System.out.println("testDifferentiate:" + function.getClass() );

    // Perturb x1 by a small uniform-random offset.
    final double epsilon = 0.1;
    final Vector perturbation = VectorFactory.getDefault().createUniformRandom(
        x1.getDimensionality(), -epsilon, epsilon, random );
    final Vector x2 = x1.plus( perturbation );

    final Vector y1 = function.evaluate( x1 );
    final Vector y2 = function.evaluate( x2 );

    // The Jacobian must map input dimensions to output dimensions.
    final Matrix d1 = function.differentiate( x1 );
    assertEquals( y1.getDimensionality(), d1.getNumRows() );
    assertEquals( x1.getDimensionality(), d1.getNumColumns() );

    // Linearized prediction: y2 ≈ y1 + J * (x2 - x1), within tolerance.
    final Vector y2hat = y1.plus( d1.times( x2.minus( x1 ) ) );
    System.out.println( "Error norm2: " + y2.minus( y2hat ).norm2() );
    assertTrue( y2.equals( y2hat, epsilon ) );
}
示例3: evaluate
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Overrides the default implementation so that L_tilde can be raised to a
 * power and the diagonal weights can be added implicitly, which is much
 * faster and more memory-efficient than forming the matrix explicitly.
 *
 * @param input The vector to multiply by the implicit representation of
 * the matrix
 * @return The result of the function.
 */
@Override
public Vector evaluate(Vector input)
{
    // Apply m repeatedly: equivalent to multiplying by m^power.
    Vector accumulated = input;
    for (int exponent = 0; exponent < power; ++exponent)
    {
        accumulated = m.times(accumulated);
    }
    // Add the implicit additional (diagonal-weight) contribution, which
    // is computed directly from the original input.
    return accumulated.plus(additional.times(input));
}
示例4: update
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Updates the Dirichlet belief in place by adding the observed value
 * vector to its current parameter vector.
 *
 * @param belief Dirichlet distribution whose parameters are updated
 * @param value observation vector added element-wise to the parameters
 */
public void update(
    DirichletDistribution belief,
    Vector value)
{
    final Vector updatedParameters = belief.getParameters().plus(value);
    belief.setParameters(updatedParameters);
}
示例5: initializeClusters
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Creates the requested number of Gaussian clusters, centering each one
 * on a randomly chosen data point plus uniform random noise and retrying
 * until the new mean is strictly separated from every existing cluster
 * mean. Each cluster gets an isotropic covariance with
 * getDefaultCovariance() on the diagonal.
 *
 * @param numClusters number of clusters to create (must be non-negative)
 * @param elements data points used to seed the cluster means
 * @return list of initialized clusters; empty when numClusters is 0 or
 *         there is no data
 * @throws IllegalArgumentException if numClusters is negative
 * @throws NullPointerException if elements is null
 */
public ArrayList<GaussianCluster> initializeClusters(
    int numClusters,
    Collection<? extends Vector> elements)
{
    if (numClusters < 0)
    {
        throw new IllegalArgumentException(
            "The number of clusters cannot be negative.");
    }
    else if (elements == null)
    {
        throw new NullPointerException("The elements cannot be null.");
    }
    else if (numClusters == 0 || elements.size() == 0)
    {
        // Nothing to initialize.
        return new ArrayList<GaussianCluster>();
    }

    final ArrayList<Vector> data = new ArrayList<Vector>(elements);
    final ArrayList<GaussianCluster> clusters =
        new ArrayList<GaussianCluster>();

    for (int k = 0; k < numClusters; k++)
    {
        // Draw random seed points until this mean does not coincide with
        // any previously created cluster mean.
        Vector mean = null;
        double separation = 0.0;
        while (separation <= 0.0)
        {
            // Pick a random data point and jitter it, so duplicate data
            // points can still produce distinct means.
            final int index = this.random.nextInt( elements.size() );
            final Vector point = data.get(index);
            final Vector noise = VectorFactory.getDefault().createUniformRandom(
                point.getDimensionality(),
                -this.getRandomRange(), this.getRandomRange(), this.random );
            mean = point.plus(noise);

            // Track the distance to the nearest existing mean; for k == 0
            // there is nothing to compare against, so the loop exits.
            separation = Double.POSITIVE_INFINITY;
            for (int i = 0; i < k; i++)
            {
                final Vector otherMean =
                    clusters.get(i).getGaussian().getMean();
                separation = Math.min(
                    separation, mean.euclideanDistance(otherMean));
            }
        }

        // Diagonal covariance: defaultCovariance on the diagonal,
        // zeros elsewhere.
        final int dimensionality = mean.getDimensionality();
        final Matrix covariance = MatrixFactory.getDefault()
            .createIdentity(dimensionality, dimensionality)
            .scale(this.getDefaultCovariance());
        clusters.add(new GaussianCluster(
            null, new MultivariateGaussian.PDF(mean, covariance)));
    }
    return clusters;
}
示例6: computeLocalWeights
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Computes the local weights for a document vector: each entry is the
 * document value added to itself (element-wise doubling).
 *
 * @param document document term vector
 * @return the element-wise sum of the document with itself
 */
public Vector computeLocalWeights(
    final Vector document)
{
    final Vector doubled = document.plus(document);
    return doubled;
}