本文整理汇总了Java中gov.sandia.cognition.math.matrix.Vector.clone方法的典型用法代码示例。如果您正苦于以下问题:Java Vector.clone方法的具体用法?Java Vector.clone怎么用?Java Vector.clone使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类gov.sandia.cognition.math.matrix.Vector
的用法示例。
在下文中一共展示了Vector.clone方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: solve
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
@Override
final public Vector solve(
    final Vector b)
{
    // Solve the diagonal system Ax = b element-wise: x_i = b_i / d_i.
    checkSolveDimensions(b);
    final int n = diagonal.length;
    final Vector result = b.clone();
    for (int i = 0; i < n; ++i)
    {
        final double d = diagonal[i];
        if (d != 0)
        {
            result.setElement(i, result.get(i) / d);
        }
        else if (result.get(i) != 0)
        {
            // A zero pivot is only consistent with a zero entry in b.
            throw new UnsupportedOperationException("Unable to solve "
                + "Ax=b because b spans different space than A");
        }
    }
    return result;
}
示例2: testNormalizeWeights
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Test of normalizeWeights method, of class UnitTermWeightNormalizer.
 */
@Test
public void testNormalizeWeights()
{
    final Random random = new Random();
    final UnitTermWeightNormalizer instance =
        new UnitTermWeightNormalizer();
    final int dimensionality = 10;

    // A random positive vector normalizes to its unit vector.
    final Vector weights = VectorFactory.getDefault().createUniformRandom(
        dimensionality, 0.0, 1.0, random);
    Vector expected = weights.unitVector();
    instance.normalizeWeights(weights, null, null);
    assertEquals(expected, weights);

    // The zero vector must pass through unchanged.
    weights.zero();
    expected = weights.clone();
    instance.normalizeWeights(weights, null, null);
    assertEquals(expected, weights);

    // A vector that is already unit length must be left untouched.
    weights.setElement(0, 1.0);
    expected = weights.clone();
    instance.normalizeWeights(weights, null, null);
    assertEquals(expected, weights);
}
示例3: testKnownValues
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
@Override
public void testKnownValues()
{
    System.out.println( "Known Values" );

    // Uniform Dirichlet prior (1,1,1) over a 3-category multinomial.
    final int numTrials = 4;
    final Vector alpha = VectorFactory.getDefault().copyValues( 1.0, 1.0, 1.0 );
    final MultinomialBayesianEstimator instance = new MultinomialBayesianEstimator(
        new DirichletDistribution(alpha.clone()), numTrials );
    final DirichletDistribution belief = instance.getInitialBelief();

    // Each observed count vector simply adds onto the Dirichlet parameters.
    final Vector x0 = VectorFactory.getDefault().copyValues( 2.0, 2.0, 0.0 );
    instance.update( belief, x0 );
    assertEquals( alpha.plus(x0), belief.getParameters() );

    final Vector x1 = VectorFactory.getDefault().copyValues( 1.0, 2.0, 1.0 );
    instance.update( belief, x1 );
    assertEquals( alpha.plus(x0).plus(x1), belief.getParameters() );
}
示例4: testConvertFromVector
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Test of convertFromVector method, of class gov.sandia.cognition.learning.util.function.PolynomialFunction.
 */
public void testConvertFromVector()
{
    System.out.println( "convertFromVector" );

    // A polynomial's single parameter is its exponent.
    final double exponent = random.nextDouble();
    final PolynomialFunction instance = new PolynomialFunction( exponent );
    assertEquals( exponent, instance.getExponent() );

    final Vector params = instance.convertToVector();
    assertEquals( 1, params.getDimensionality() );
    assertEquals( exponent, params.getElement( 0 ) );

    // A clone must be an equal but distinct object.
    final Vector modified = params.clone();
    assertNotSame( modified, params );
    assertEquals( modified, params );

    // Pushing a changed parameter vector back in updates the exponent.
    final double newExponent = exponent + 1;
    modified.setElement( 0, newExponent );
    instance.convertFromVector( modified );
    assertEquals( newExponent, instance.getExponent() );
}
示例5: testCrossover
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Test of crossover method, of class
 * gov.sandia.isrc.learning.reinforcement.VectorizableCrossoverFunction.
 */
public void testCrossover() {
    System.out.println("crossover");

    // Two independent random parents.
    final Vector rawParent1 = VectorFactory.getDefault().createUniformRandom(10, -10, 10, random);
    final Vector parent1 = rawParent1.clone();
    final Vector rawParent2 = VectorFactory.getDefault().createUniformRandom(10, -10, 10, random);
    final Vector parent2 = rawParent2.clone();

    final double probabilityCrossover = 0.5;
    final VectorizableCrossoverFunction vcf =
        new VectorizableCrossoverFunction( probabilityCrossover );

    // Each crossover call must yield a valid child ...
    final Vectorizable child1 = vcf.crossover(parent1, parent2);
    this.verifyCrossover(parent1, parent2, child1);
    final Vectorizable child2 = vcf.crossover(parent1, parent2);
    this.verifyCrossover(parent1, parent2, child2);

    // ... and the operator is stochastic, so two children should differ.
    assertTrue(
        "Crossover produced the same result twice",
        !child1.convertToVector().equals(child2.convertToVector()));
}
示例6: testComputeLocalWeights
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Test of computeLocalWeights method, of class BinaryLocalTermWeighter.
 */
@Test
public void testComputeLocalWeights()
{
    final BinaryLocalTermWeighter instance = new BinaryLocalTermWeighter();

    // Positive counts collapse to 1.0; zeros stay 0.0.
    Vector input = new Vector3(3.0, 0.0, 1.0);
    Vector snapshot = input.clone();
    assertEquals(new Vector3(1.0, 0.0, 1.0),
        instance.computeLocalWeights(input));
    assertEquals(snapshot, input); // the input must not be mutated

    // All-zero input maps to all-zero output.
    input = new Vector3();
    snapshot = input.clone();
    assertEquals(new Vector3(),
        instance.computeLocalWeights(input));
    assertEquals(snapshot, input);

    // Boundary case: a negative entry also maps to 1.0 here.
    input = new Vector3(0.0, -1.0, 2.3);
    snapshot = input.clone();
    assertEquals(new Vector3(0.0, 1.0, 1.0),
        instance.computeLocalWeights(input));
    assertEquals(snapshot, input);
}
示例7: dotTimes
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
@Override
final public Vector dotTimes(
    final Vector v)
{
    // Clone the argument rather than this so the result keeps v's
    // storage format: sparse dotted with dense stays sparse, and dense
    // with dense stays dense. The Foundry's original form,
    // this.clone().dotTimesEquals(v), returned a dense vector type
    // holding sparse data whenever v was sparse.
    final Vector product = v.clone();
    product.dotTimesEquals(this);
    return product;
}
示例8: testComputeLocalWeights
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Test of computeLocalWeights method, of class TermFrequencyLocalTermWeighter.
 */
@Test
public void testComputeLocalWeights()
{
    final TermFrequencyLocalTermWeighter instance = new TermFrequencyLocalTermWeighter();

    // Entries are divided by the total count (4.0 for this input).
    Vector input = new Vector3(3.0, 0.0, 1.0);
    Vector snapshot = input.clone();
    assertEquals(new Vector3(3.0 / 4.0, 0.0, 1.0 / 4.0),
        instance.computeLocalWeights(input));
    assertEquals(snapshot, input); // the input must not be mutated

    // The zero vector passes through as zeros.
    input = new Vector3();
    snapshot = input.clone();
    assertEquals(new Vector3(),
        instance.computeLocalWeights(input));
    assertEquals(snapshot, input);

    // Boundary case: a negative entry; the normalizer here is 3.3.
    input = new Vector3(0.0, -1.0, 2.3);
    snapshot = input.clone();
    assertEquals(new Vector3(0.0, -1.0 / 3.3, 2.3 / 3.3),
        instance.computeLocalWeights(input));
    assertEquals(snapshot, input);
}
示例9: IterativeMatrixSolver
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Initializes a solver with all user-definable parameters.
 *
 * Defensive copies of both vectors are stored so later mutation by the
 * caller cannot affect the solver's state.
 *
 * @param x0 The initial guess for x
 * @param rhs The "b" to solve
 * @param tolerance The minimum acceptable error
 * @param maxIterations The maximum number of iterations
 */
protected IterativeMatrixSolver(Vector x0,
Vector rhs,
double tolerance,
int maxIterations)
{
this.x0 = x0.clone();
this.rhs = rhs.clone();
setTolerance(tolerance);
setMaxIterations(maxIterations);
listeners = new HashSet<IterativeAlgorithmListener>();
iterationCounter = -1; // sentinel: presumably means "not yet started" -- confirm against the solver loop
shouldStop = false;
result = null; // no solution computed until the solver runs
}
示例10: testConvertToVector
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Test of convertToVector method, of class FactorizationMachine.
 */
@Test
public void testConvertToVector()
{
    // A default machine has an all-zero parameter vector.
    FactorizationMachine instance = new FactorizationMachine();
    Vector params = instance.convertToVector();
    assertEquals(instance.getParameterCount(), params.getDimensionality());
    assertTrue(params.isZero());

    // Same for a machine created with explicit sizes but no values.
    final int d = 7;
    final int k = 4;
    instance = new FactorizationMachine(d, k);
    params = instance.convertToVector();
    assertEquals(instance.getParameterCount(), params.getDimensionality());
    assertTrue(params.isZero());

    // With bias, weights, and factors set, the parameter vector stacks
    // [bias | weights | factors transposed-then-vectorized].
    final double bias = this.random.nextGaussian();
    final Vector weights = VectorFactory.getDefault().createUniformRandom(d, -1, 1, random);
    final Matrix factors = MatrixFactory.getDefault().createUniformRandom(k, d, -1, 1, random);
    instance = new FactorizationMachine(bias, weights.clone(), factors.clone());
    params = instance.convertToVector();
    assertEquals(instance.getParameterCount(), params.getDimensionality());
    assertTrue(params.equals(new Vector1(bias).stack(weights).stack(factors.transpose().convertToVector())));

    // Try with weights disabled.
    instance.setWeights(null);
    params = instance.convertToVector();
    assertEquals(instance.getParameterCount(), params.getDimensionality());
    assertTrue(params.equals(new Vector1(bias).stack(factors.transpose().convertToVector())));

    // Try with factors disabled.
    instance.setWeights(weights.clone());
    instance.setFactors(null);
    params = instance.convertToVector();
    assertEquals(instance.getParameterCount(), params.getDimensionality());
    assertTrue(params.equals(new Vector1(bias).stack(weights)));
}
示例11: testComputeLocalWeights
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Test of computeLocalWeights method, of class NormalizedLogLocalTermWeighter.
 */
@Test
public void testComputeLocalWeights()
{
    final double EPSILON = 1.0e-5;
    final NormalizedLogLocalTermWeighter instance = new NormalizedLogLocalTermWeighter();

    // Expected values follow log(1 + x_i) divided by a shared normalizer.
    Vector input = new Vector3(3.0, 0.0, 1.0);
    Vector snapshot = input.clone();
    double normalizer = Math.log(1.0 + 4.0 / 3.0);
    Vector expected = new Vector3(Math.log(4.0) / normalizer, 0.0, Math.log(2.0) / normalizer);
    assertTrue(expected.equals(instance.computeLocalWeights(input), EPSILON));
    assertEquals(snapshot, input); // the input must not be mutated

    // All-zero input maps to all-zero output.
    input = new Vector3();
    snapshot = input.clone();
    assertEquals(new Vector3(), instance.computeLocalWeights(input));
    assertEquals(snapshot, input);

    // Boundary case: the negative entry yields 0.0 here.
    input = new Vector3(0.0, -1.0, 2.3);
    snapshot = input.clone();
    normalizer = Math.log(1.0 + 3.3 / 3.0);
    expected = new Vector3(0.0, 0.0, Math.log(3.3) / normalizer);
    assertTrue(expected.equals(instance.computeLocalWeights(input), EPSILON));
    assertEquals(snapshot, input);
}
示例12: plus
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
@Override
final public Vector plus(
    final Vector v)
{
    // Clone the argument rather than this so the result inherits the
    // argument's storage format: a dense input yields a dense result,
    // a sparse input yields a sparse one.
    final Vector sum = v.clone();
    sum.plusEquals(this);
    return sum;
}
示例13: createPopulation
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
/**
 * Creates a random initial population of vectors.
 *
 * @param size Number of members to create.
 * @param range Each component is drawn uniformly from [-range, range].
 * @return The newly created population of {@code size} random vectors.
 */
protected ArrayList<Vectorizable> createPopulation( int size, double range )
{
    ArrayList<Vectorizable> population = new ArrayList<Vectorizable>(size);
    for (int i = 0; i < size; i++)
    {
        // The freshly created vector is already unshared, so the extra
        // clone() the original made before adding was a redundant copy
        // and has been removed.
        population.add(VectorFactory.getDefault().createUniformRandom(
            this.getVectorSize(), -range, range, random));
    }
    return population;
}
示例14: update
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
@Override
public void update(
InputOutputPair<? extends Vectorizable, Double> value)
{
// Folds one (input, output) pair into the running sufficient
// statistics: beta * x x^T is accumulated into covarianceInverse and
// beta * y * x into z. Statement order matters: x2 is mutated in
// place and may be scaled twice (by beta, then by y).
this.count++;
Vector v = value.getInput().convertToVector();
Vector x1 = v; // alias of the input vector: left factor of the outer product
Vector x2 = v.clone(); // private copy that gets scaled in place below
final double y = value.getOutput();
// beta combines the pair's weight with the output variance;
// presumably DatasetUtil.getWeight defaults to 1.0 -- TODO confirm.
final double beta = DatasetUtil.getWeight(value) / outputVariance;
if( beta != 1.0 )
{
x2.scaleEquals(beta); // fold the weight in before the outer product
}
if( this.covarianceInverse == null )
{
// First observation: initialize the accumulator.
this.covarianceInverse = x1.outerProduct(x2);
}
else
{
this.covarianceInverse.plusEquals( x1.outerProduct(x2) );
}
if( y != 1.0 )
{
// Reuse x2 (already scaled by beta) to form beta * y * x.
x2.scaleEquals( y );
}
if( this.z == null )
{
this.z = x2; // safe to retain: x2 is a private clone, not the caller's vector
}
else
{
this.z.plusEquals( x2 );
}
}
示例15: minus
import gov.sandia.cognition.math.matrix.Vector; //导入方法依赖的package包/类
@Override
final public Vector minus(
    final Vector v)
{
    // Clone the argument rather than this so the result inherits the
    // argument's storage format (dense stays dense, sparse stays
    // sparse); the subtraction is computed as (-v) + this.
    final Vector difference = v.clone();
    difference.negativeEquals();
    difference.plusEquals(this);
    return difference;
}