本文整理汇总了Java中gov.sandia.cognition.math.matrix.Vector.plusEquals方法的典型用法代码示例。如果您正苦于以下问题:Java Vector.plusEquals方法的具体用法?Java Vector.plusEquals怎么用?Java Vector.plusEquals使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类gov.sandia.cognition.math.matrix.Vector
的用法示例。
在下文中一共展示了Vector.plusEquals方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: testNorm2Squared
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Test of norm2Squared method, of class KernelUtil.
 */
@Test
public void testNorm2Squared()
{
    final int dimensionality = this.random.nextInt(20) + 1;
    final VectorFactory<?> factory = VectorFactory.getDenseDefault();
    final Vector weightSum = factory.createVector(dimensionality);
    final DefaultKernelBinaryCategorizer<Vectorizable> categorizer =
        new DefaultKernelBinaryCategorizer<Vectorizable>(new LinearKernel());
    categorizer.setBias(this.random.nextGaussian());

    final int exampleCount = this.random.nextInt(5) + 1;
    for (int i = 0; i < exampleCount; i++)
    {
        // Accumulate a randomly weighted example in both representations:
        // the explicit weight vector and the kernel categorizer.
        final Vector example = factory.createUniformRandom(dimensionality, -10, +10, random);
        final double weight = random.nextGaussian();
        weightSum.plusEquals(example.scale(weight));
        categorizer.add(example, weight);

        // For a linear kernel, norm2Squared of a single vector must equal
        // the vector's own squared 2-norm.
        assertEquals(example.norm2Squared(),
            KernelUtil.norm2Squared(example, new LinearKernel()), 1e-10);
    }

    // The kernel-based norm of the categorizer must match the norm of the
    // explicitly accumulated weight vector.
    assertEquals(weightSum.norm2Squared(), KernelUtil.norm2Squared(categorizer), 1e-10);
}
示例2: updateCluster
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Updates the clusters for all the given points.
 *
 * @param dataPoints
 * The examples to update.
 */
public void updateCluster(Collection<? extends Vector> dataPoints)
{
    final int previousUpdateCount = numUpdates;
    this.numUpdates += dataPoints.size();
    final double stepSize = 1 / (double) numUpdates;

    // Accumulate the whole batch into a single shift vector.
    final Vector batchSum = DenseVectorFactoryMTJ.INSTANCE.createVector(
        centroid.getDimensionality());
    for (Vector point : dataPoints)
    {
        batchSum.plusEquals(point);
    }

    // Blend the old centroid with the batch, in place:
    // centroid = (previousUpdateCount * centroid + batchSum) / numUpdates
    centroid.scaleEquals(previousUpdateCount * stepSize);
    centroid.scaledPlusEquals(stepSize, batchSum);
}
示例3: step
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * {@inheritDoc}
 * @return {@inheritDoc}
 */
protected boolean step()
{
    final Vector estimate = this.result.getInput();

    // Take a step against the gradient, scaled by the learning rate.
    final Vector gradient = this.data.differentiate(estimate);
    final Vector delta = gradient.scale(-this.learningRate);

    // Add a momentum term when one is configured and a previous step exists.
    if (this.previousDelta != null && this.momentum != 0.0)
    {
        delta.plusEquals(this.previousDelta.scale(this.momentum));
    }
    this.previousDelta = delta;

    // The current estimate is updated in place.
    estimate.plusEquals(delta);

    // Keep iterating until the stopping criterion reports convergence.
    return !MinimizationStoppingCriterion.convergence(
        estimate, null, gradient, delta, this.getTolerance());
}
示例4: update
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
@Override
public void update(
MultivariateGaussian target,
Iterable<? extends Vector> data)
{
// Conjugate (Bayesian) update of a Gaussian mean with known covariance:
// the posterior precision is the prior precision plus N times the known
// precision, and the posterior mean is the precision-weighted blend of
// the prior mean and the sample mean.
int N = CollectionUtil.size(data);
Matrix Ci0 = target.getCovarianceInverse();
Matrix CiN = this.getKnownCovarianceInverse().clone();
if( N > 1 )
{
// Scaling by 1 would be a no-op, so the N == 1 case skips the scale.
CiN.scaleEquals(N);
}
Vector sampleMean = MultivariateStatisticsUtil.computeMean(data);
// t0 = Ci0 * priorMean + (N * knownPrecision) * sampleMean
Vector t0 = Ci0.times( target.getMean() );
t0.plusEquals( CiN.times( sampleMean ) );
// Saving another Matrix creation here... just make sure the
// "t0" stuff gets completed first
// (CiN is mutated in place on the next line, so any use of CiN for t0
// must already be done).
CiN.plusEquals(Ci0);
Matrix updatedCovariance = CiN.inverse();
Vector updatedMean = updatedCovariance.times( t0 );
target.setMean(updatedMean);
target.setCovariance(updatedCovariance);
}
示例5: testEvaluate
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Test of evaluate method, of class gov.sandia.cognition.learning.util.function.LinearCombinationVectorFunction.
 */
public void testEvaluate()
{
    System.out.println("evaluate");
    for (int trial = 0; trial < 100; trial++)
    {
        final int dimensionality = RANDOM.nextInt(10) + 1;
        final Vector input = VectorFactory.getDefault().createUniformRandom(
            dimensionality, -1, 1, RANDOM);
        final LinearCombinationVectorFunction instance = this.createInstance();

        // Compute the expected output by hand: the coefficient-weighted
        // sum of each basis function applied to the input.
        final Vector expected = VectorFactory.getDefault().createVector(dimensionality);
        for (int n = 0; n < instance.getBasisFunctions().size(); n++)
        {
            expected.plusEquals(instance.getBasisFunctions().get(n).evaluate(input).scale(
                instance.getCoefficients().getElement(n)));
        }

        // Only fail via assertEquals when outside the numeric tolerance.
        final Vector actual = instance.evaluate(input);
        if (!expected.equals(actual, 1e-5))
        {
            assertEquals(expected, actual);
        }
    }
}
示例6: evaluate
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
@Override
public Vector evaluate(
    Vector input)
{
    // Start from the superclass discriminant, then shift it in place by
    // the bias term.
    final Vector result = super.evaluate(input);
    result.plusEquals(this.bias);
    return result;
}
示例7: evaluateOutputFromSquashedHiddenLayerActivation
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Evaluates the output from the squashed hidden-layer activation.
 * @param squashedHiddenActivation
 * Squashed hidden-layer activation.
 * @return
 * Output of the neural net.
 */
protected Vector evaluateOutputFromSquashedHiddenLayerActivation(
    Vector squashedHiddenActivation )
{
    // Affine output layer: weights * activation + bias.
    final Vector activation = this.hiddenToOutputWeights.times(
        squashedHiddenActivation);
    activation.plusEquals(this.hiddenToOutputBiasWeights);
    return activation;
}
示例8: createCluster
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
@Override
public CentroidCluster<Vector> createCluster(
    final Collection<? extends Vector> members)
{
    if (members.isEmpty())
    {
        // No members, so there is no centroid to compute.
        return new CentroidCluster<Vector>(null, members);
    }

    // The centroid is the mean of the members: sum them all, then divide
    // by the count. The first member seeds the accumulator with a clone
    // so that no member vector is mutated.
    Vector sum = null;
    for (Vector member : members)
    {
        if (sum == null)
        {
            sum = member.clone();
        }
        else
        {
            sum.plusEquals(member);
        }
    }
    sum.scaleEquals(1.0 / (double) members.size());
    return new CentroidCluster<Vector>(sum, members);
}
示例9: plus
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
@Override
final public Vector plus(
    final Vector v)
{
    // Clone the argument (not this) so that a dense input yields a dense
    // result and a sparse input yields a sparse result.
    final Vector sum = v.clone();
    sum.plusEquals(this);
    return sum;
}
示例10: testAdd
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Test of add method, of class AbstractEntropyBasedGlobalTermWeighter.
 */
@Test
public void testAdd()
{
    final AbstractEntropyBasedGlobalTermWeighter instance =
        new DummyEntropyBasedGlobalTermWeighter();
    final Vector expectedTermEntropiesSum = new Vector3();

    // First document: only the count-3 term changes the entropy sum
    // (the expected contribution is (3*log(3), 0, 0)).
    instance.add(new Vector3(3.0, 0.0, 1.0));
    assertEquals(1, instance.getDocumentCount());
    assertEquals(new Vector3(1.0, 0.0, 1.0), instance.getTermDocumentFrequencies());
    assertEquals(new Vector3(3.0, 0.0, 1.0), instance.getTermGlobalFrequencies());
    expectedTermEntropiesSum.plusEquals(new Vector3(3.0 * Math.log(3.0), 0.0, 0.0));
    assertEquals(expectedTermEntropiesSum, instance.getTermEntropiesSum());

    // Second document is empty: only the document count should change.
    instance.add(new Vector3());
    assertEquals(2, instance.getDocumentCount());
    assertEquals(new Vector3(1.0, 0.0, 1.0), instance.getTermDocumentFrequencies());
    assertEquals(new Vector3(3.0, 0.0, 1.0), instance.getTermGlobalFrequencies());
    expectedTermEntropiesSum.plusEquals(new Vector3());
    assertEquals(expectedTermEntropiesSum, instance.getTermEntropiesSum());

    // Third document: frequencies update, entropy sum stays unchanged.
    instance.add(new Vector3(0.0, 1.0, 1.0));
    assertEquals(3, instance.getDocumentCount());
    assertEquals(new Vector3(1.0, 1.0, 2.0), instance.getTermDocumentFrequencies());
    assertEquals(new Vector3(3.0, 1.0, 2.0), instance.getTermGlobalFrequencies());
    expectedTermEntropiesSum.plusEquals(new Vector3());
    assertEquals(expectedTermEntropiesSum, instance.getTermEntropiesSum());
}
示例11: testRemove
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Test of remove method, of class AbstractEntropyBasedGlobalTermWeighter.
 */
@Test
public void testRemove()
{
    final AbstractEntropyBasedGlobalTermWeighter instance =
        new DummyEntropyBasedGlobalTermWeighter();
    final Vector expectedTermEntropiesSum = new Vector3();

    // Build up three documents; the expected entropy sum contribution is
    // (3*log(3), 0, 0) from the first document.
    instance.add(new Vector3(3.0, 0.0, 1.0));
    instance.add(new Vector3());
    instance.add(new Vector3(0.0, 1.0, 1.0));
    expectedTermEntropiesSum.plusEquals(new Vector3(3.0 * Math.log(3.0), 0.0, 0.0));
    assertEquals(3, instance.getDocumentCount());
    assertEquals(new Vector3(1.0, 1.0, 2.0), instance.getTermDocumentFrequencies());
    assertEquals(new Vector3(3.0, 1.0, 2.0), instance.getTermGlobalFrequencies());
    assertEquals(expectedTermEntropiesSum, instance.getTermEntropiesSum());

    // Removing the empty document should only decrement the count.
    instance.remove(new Vector3());
    assertEquals(2, instance.getDocumentCount());
    assertEquals(new Vector3(1.0, 1.0, 2.0), instance.getTermDocumentFrequencies());
    assertEquals(new Vector3(3.0, 1.0, 2.0), instance.getTermGlobalFrequencies());
    expectedTermEntropiesSum.minusEquals(new Vector3());
    assertEquals(expectedTermEntropiesSum, instance.getTermEntropiesSum());

    // Removing the first document takes its entropy contribution back out.
    instance.remove(new Vector3(3.0, 0.0, 1.0));
    assertEquals(1, instance.getDocumentCount());
    assertEquals(new Vector3(0.0, 1.0, 1.0), instance.getTermDocumentFrequencies());
    assertEquals(new Vector3(0.0, 1.0, 1.0), instance.getTermGlobalFrequencies());
    expectedTermEntropiesSum.minusEquals(new Vector3(3.0 * Math.log(3.0), 0.0, 0.0));
    assertEquals(expectedTermEntropiesSum, instance.getTermEntropiesSum());

    // Removing the last document returns everything to the empty state.
    instance.remove(new Vector3(0.0, 1.0, 1.0));
    assertEquals(0, instance.getDocumentCount());
    assertEquals(new Vector3(), instance.getTermDocumentFrequencies());
    assertEquals(new Vector3(), instance.getTermGlobalFrequencies());
    expectedTermEntropiesSum.minusEquals(new Vector3());
    assertEquals(expectedTermEntropiesSum, instance.getTermEntropiesSum());
}
示例12: update
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
@Override
public void update(
NormalInverseWishartDistribution prior,
Iterable<? extends Vector> data)
{
// Conjugate update of a Normal-Inverse-Wishart prior from a batch of
// multivariate samples, written back into the prior's hyperparameters.
final int n = CollectionUtil.size(data);
Pair<Vector,Matrix> pair =
MultivariateStatisticsUtil.computeMeanAndCovariance(data);
Vector sampleMean = pair.getFirst();
Matrix sampleCovariance = pair.getSecond();
// Current hyperparameters of the prior.
Vector lambda = prior.getGaussian().getMean();
double nu = prior.getCovarianceDivisor();
int alpha = prior.getInverseWishart().getDegreesOfFreedom();
Matrix beta = prior.getInverseWishart().getInverseScale();
// Posterior degrees of freedom and covariance divisor.
int alphahat = alpha + n;
double nuhat = nu+n;
// lambdahat = (nu*lambda + n*sampleMean) / nuhat, computed in place to
// avoid an extra allocation. NOTE(review): this divides by n, so it
// assumes data is non-empty -- confirm callers guarantee n > 0.
Vector lambdahat = lambda.scale(nu/n);
lambdahat.plusEquals( sampleMean );
lambdahat.scaleEquals( n/nuhat );
// delta deliberately aliases (and mutates) sampleMean; this is safe
// because sampleMean is not read again below.
Vector delta = sampleMean;
delta.minusEquals(lambda);
// betahat likewise aliases (and mutates) sampleCovariance.
Matrix betahat = sampleCovariance;
if( n > 1 )
{
// Scaling by 1 would be a no-op, so the n == 1 case is skipped.
betahat.scaleEquals(n);
}
betahat.plusEquals(beta);
betahat.plusEquals( delta.outerProduct(delta.scale((n*nu)/nuhat)) );
// Write the posterior hyperparameters back into the prior object.
prior.getGaussian().setMean(lambdahat);
prior.setCovarianceDivisor(nuhat);
prior.getInverseWishart().setDegreesOfFreedom(alphahat);
prior.getInverseWishart().setInverseScale(betahat);
}
开发者ID:algorithmfoundry,项目名称:Foundry,代码行数:41,代码来源:MultivariateGaussianMeanCovarianceBayesianEstimator.java
示例13: WineDataset
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
* Loads the wine dataset from wine.data
*
* @param normalise
* whether to mean center the dataset
* @param clusters
* valid clusters, if empty all clusters are chosen
*/
public WineDataset(boolean normalise, Integer... clusters) {
final BufferedReader br = new BufferedReader(
new InputStreamReader(WineDataset.class.getResourceAsStream("wine.data")));
String line = null;
Vector mean = null;
Set<Integer> clusterSet = null;
if (clusters.length != 0) {
clusterSet = new HashSet<Integer>();
clusterSet.addAll(Arrays.asList(clusters));
}
try {
while ((line = br.readLine()) != null) {
final String[] parts = line.split(",");
final int cluster = Integer.parseInt(parts[0].trim());
if (clusterSet != null && !clusterSet.contains(cluster))
continue;
final double[] data = new double[parts.length - 1];
for (int i = 0; i < data.length; i++) {
data[i] = Double.parseDouble(parts[i + 1]);
}
ListDataset<double[]> ds = this.get(cluster);
if (ds == null)
this.put(cluster, ds = new ListBackedDataset<double[]>());
ds.add(data);
final Vector copyArray = VectorFactory.getDefault().copyArray(data);
if (mean == null) {
mean = copyArray.clone();
}
else {
mean.plusEquals(copyArray);
}
}
mean.scaleEquals(1. / this.numInstances());
if (normalise) {
normalise(mean);
}
} catch (final Exception e) {
logger.error("Wine dataset failed to load", e);
}
}
示例14: update
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
@Override
public void update(
    final LinearBinaryCategorizer target,
    final Vector input,
    final boolean label)
{
    Vector weights = target.getWeights();
    if (weights == null)
    {
        // First example seen: lazily initialize a zero weight vector of
        // the input's dimensionality.
        weights = this.getVectorFactory().createVector(
            input.getDimensionality());
        target.setWeights(weights);
    }
    // else - Use the existing weights.

    // The margin is positive exactly when the prediction sign agrees with
    // the label (negative predictions mean false, positive mean true).
    final double prediction = target.evaluateAsDouble(input);
    final double actual = label ? +1.0 : -1.0;
    final double margin = prediction * actual;

    boolean applyUpdate = false;
    if (margin <= 0.0)
    {
        // An actual mistake: use the standard perceptron update rule.
        applyUpdate = true;
    }
    else
    {
        final double weightNorm = weights.norm2();
        if (margin / weightNorm <= this.getRadius())
        {
            // Margin mistake: shrink the weights before the perceptron step.
            // This is one way to implement this. However, it is not as
            // efficient as the following way with sparse vectors, which
            // is based on the derivation:
            //    final Vector change = weights.scale(
            //        -actual * this.getRadius() / weightNorm);
            //    change.plusEquals(input);
            //    change.scaleEquals(actual);
            //    weights.plusEquals(change);
            final double scale = 1.0 - this.getRadius() / weightNorm;
            weights.scaleEquals(scale);
            applyUpdate = true;
        }
        // else - No margin mistake, leave the weights alone.
    }

    if (applyUpdate)
    {
        // Perceptron step: move the weights toward the input for positive
        // labels and away from it for negative labels.
        if (label)
        {
            weights.plusEquals(input);
        }
        else
        {
            weights.minusEquals(input);
        }
    }
}
示例15: createCluster
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
@Override
public NormalizedCentroidCluster<Vectorizable> createCluster(
    final Collection<? extends Vectorizable> members)
{
    if (members.isEmpty())
    {
        // No members to create the cluster from.
        return new NormalizedCentroidCluster<>(null, null, members);
    }

    // Accumulate both the raw mean and the mean of the unit-normalized
    // member vectors, then convert the sums back into Vectorizable form.
    Vectorizable centroid = null;
    Vector data = null;
    Vectorizable normalizedCentroid = null;
    Vector normalizedData = null;
    for (Vectorizable member : members)
    {
        final Vector memberVector = member.convertToVector();
        // Hoisted: the original recomputed norm2() up to twice per member.
        final double norm = memberVector.norm2();
        if (data == null)
        {
            centroid = member.clone();
            data = memberVector.clone();
            normalizedCentroid = member.clone();
            // BUG FIX: when the first member had zero norm, the original
            // aliased memberVector directly, so the later plusEquals /
            // scaleEquals calls mutated that member's own vector. Always
            // accumulate into a fresh vector.
            normalizedData = norm != 0.0
                ? memberVector.scale(1.0 / norm)
                : memberVector.clone();
        }
        else
        {
            data.plusEquals(memberVector);
            // Zero vectors have no direction and contribute nothing to the
            // normalized sum.
            if (norm != 0.0)
            {
                normalizedData.plusEquals(memberVector.scale(1.0 / norm));
            }
        }
    }
    data.scaleEquals(1.0 / (double) members.size());
    normalizedData.scaleEquals(1.0 / (double) members.size());
    centroid.convertFromVector(data);
    normalizedCentroid.convertFromVector(normalizedData);
    return new NormalizedCentroidCluster<>(centroid,
        normalizedCentroid, members);
}