

Java DoubleMatrix Class Code Examples

This article collects typical usage examples of the Java class org.jblas.DoubleMatrix. If you are struggling with questions such as: What exactly is the Java DoubleMatrix class for? How do I use DoubleMatrix? Where can I find examples of DoubleMatrix in use? Then the curated class code examples below may help.


The DoubleMatrix class belongs to the org.jblas package. A total of 15 code examples of the DoubleMatrix class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
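Before turning to the project examples, here is a minimal standalone sketch (not taken from any of the projects below) of the core DoubleMatrix operations those examples rely on: construction, element access, matrix multiplication, and element-wise functions.

import org.jblas.DoubleMatrix;
import org.jblas.MatrixFunctions;

public class DoubleMatrixBasics {
    public static void main(String[] args) {
        // 2x3 matrix built from a row-by-row double[][]
        DoubleMatrix a = new DoubleMatrix(new double[][]{
            {1.0, 2.0, 3.0},
            {4.0, 5.0, 6.0}});

        // 3x2 matrix of zeros, then set individual entries
        DoubleMatrix b = DoubleMatrix.zeros(3, 2);
        b.put(0, 0, 1.0);  // row 0, column 0
        b.put(2, 1, 2.0);  // row 2, column 1

        DoubleMatrix c = a.mmul(b);               // matrix product, 2x2
        DoubleMatrix h = MatrixFunctions.tanh(c); // element-wise tanh

        System.out.println("c = " + c);
        System.out.println("c(0,0) = " + c.get(0, 0));
        System.out.println("size = " + c.rows + " x " + c.columns);
        System.out.println("tanh(c) = " + h);
    }
}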

Example 1: testXor_batchGradientDescent

import org.jblas.DoubleMatrix; // import the required package/class
@Test
public void testXor_batchGradientDescent() {
    DoubleMatrix actual;
    DoubleMatrix[] input = new DoubleMatrix[4];
    DoubleMatrix batchInput = new DoubleMatrix(2,4,0,0,0,1,1,0,1,1);
    input[0] = new DoubleMatrix(2,1,0,0);
    input[1] = new DoubleMatrix(2,1,0,1);
    input[2] = new DoubleMatrix(2,1,1,0);
    input[3] = new DoubleMatrix(2,1,1,1);
    DoubleMatrix[] expected = new DoubleMatrix[4];
    expected[0] = new DoubleMatrix(1,4,0,1,1,0);
    for(int i=0 ; i<79000; i++) {
        actual = network.forwardPropagate(batchInput);
        network.backPropagate(expected[0], actual);
    }
    assertEquals(0, network.forwardPropagate(input[0]).get(0), 0.03);
    assertEquals(1, network.forwardPropagate(input[1]).get(0), 0.03);
    assertEquals(1, network.forwardPropagate(input[2]).get(0), 0.03);
    assertEquals(0, network.forwardPropagate(input[3]).get(0), 0.03); // the (1,1) case
}
 
Developer: nordsam, Project: tic-tac-toe, Lines of code: 21, Source file: XorTest.java
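A note on the batchInput line above: the DoubleMatrix(rows, columns, data...) constructor fills the matrix in column-major order (jblas stores matrices column by column), so new DoubleMatrix(2, 4, 0,0, 0,1, 1,0, 1,1) yields one XOR input pair per column, and new DoubleMatrix(2, 1, ...) builds a single column vector. A small sketch illustrating this, assuming only the jblas API (the project's network class is not needed):

import org.jblas.DoubleMatrix;

public class ColumnMajorDemo {
    public static void main(String[] args) {
        // The varargs data is consumed column by column
        DoubleMatrix batchInput = new DoubleMatrix(2, 4, 0, 0, 0, 1, 1, 0, 1, 1);
        for (int c = 0; c < batchInput.columns; c++) {
            // Columns are (0,0), (0,1), (1,0), (1,1) -- the four XOR inputs
            System.out.println("column " + c + ": " + batchInput.getColumn(c));
        }
    }
}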

Example 2: forwardPropagate

import org.jblas.DoubleMatrix; // import the required package/class
/**
 * Forward propagates the input through the network.
 * @param input DoubleMatrix input to the neural network
 * @return DoubleMatrix output of the forward propagation, or null if the input row count does not match the first layer
 */
public DoubleMatrix forwardPropagate(DoubleMatrix input) {
    if(input.rows != layers[0].rows) return null;
    layers[0] = input;
    for (int i = 0; i < weights.length; i++) {
        layers[i + 1] = weights[i].mmul(layers[i]).addColumnVector(biasWeights[i]); //Z
        if (i < weights.length - 1)
            layers[i + 1] = MatrixFunctions.tanh(layers[i+1]); //Activation function
    }
    
    if (timestep % 2001 == 0 && timestep != 0){
      //  logger.info(Integer.toString(timestep / 2000));
        logger.log(Level.INFO, "Input: {0}", input.getColumn(0));
        logger.log(Level.INFO, "Q-values: {0}", layers[layers.length-1].getColumn(0));
        logger.log(Level.INFO, "Weights: {0}", weights[0]);
        logger.log(Level.INFO, "Weights: {1}", weights[1]);
        logger.log(Level.INFO, "Weights: {2}", weights[2]);
    }
    
    return layers[layers.length - 1];
}
 
Developer: nordsam, Project: tic-tac-toe, Lines of code: 26, Source file: NeuralNetwork.java
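The loop above works unchanged whether layers[0] is a single 2x1 input or the 2x4 batch from Example 1: mmul accepts any conforming number of columns, and addColumnVector adds the bias column to every column of the product. A minimal standalone sketch of one such layer (the layer sizes and random values are illustrative, not taken from the project):

import org.jblas.DoubleMatrix;
import org.jblas.MatrixFunctions;

public class LayerDemo {
    public static void main(String[] args) {
        DoubleMatrix weights = DoubleMatrix.rand(3, 2); // 3 hidden units, 2 inputs
        DoubleMatrix bias = DoubleMatrix.rand(3);       // 3x1 bias column
        DoubleMatrix batch = new DoubleMatrix(2, 4, 0, 0, 0, 1, 1, 0, 1, 1); // one input per column

        // Z = W * X + b (bias broadcast to every column), A = tanh(Z)
        DoubleMatrix z = weights.mmul(batch).addColumnVector(bias);
        DoubleMatrix a = MatrixFunctions.tanh(z);
        System.out.println("activations: " + a.rows + " x " + a.columns); // 3 x 4
    }
}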

Example 3: JBLASExample

import org.jblas.DoubleMatrix; // import the required package/class
public void JBLASExample() {
    DoubleMatrix A = new DoubleMatrix(new double[][]{
        {0.1950, 0.0311},
        {0.3588, 0.2203},
        {0.1716, 0.5931},
        {0.2105, 0.3242}});

    DoubleMatrix B = new DoubleMatrix(new double[][]{
        {0.0502, 0.9823, 0.9472},
        {0.5732, 0.2694, 0.916}});
    DoubleMatrix C;

    C = A.mmul(B);

    for (int i = 0; i < C.getRows(); i++) {
        out.println(C.getRow(i));
    }
}
 
Developer: PacktPublishing, Project: Machine-Learning-End-to-Endguide-for-Java-developers, Lines of code: 19, Source file: MathExamples.java
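For reference, A is 4x2 and B is 2x3, so the product C is 4x3; mmul requires the inner dimensions to match (A.getColumns() == B.getRows()) and otherwise reports a size mismatch. A one-line check along those lines (out is assumed to be System.out, as in the example above):

// After C = A.mmul(B):
out.println(C.getRows() + " x " + C.getColumns()); // prints 4 x 3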

Example 4: initAndValidate

import org.jblas.DoubleMatrix; // import the required package/class
@Override
public void initAndValidate() {
    treeIntervalsInput.get().calculateIntervals();
    stateProbabilities = new DoubleMatrix[treeIntervalsInput.get().getSampleCount()];
    nrSamples = treeIntervalsInput.get().getSampleCount() + 1;
    states = dynamicsInput.get().getDimension();

    int intCount = treeIntervalsInput.get().getIntervalCount();

    // initialize storing arrays and ArrayLists
    coalLinProbs = new double[intCount][];
    coalLogP = new double[intCount];
    coalRatesInterval = new int[intCount];
    coalActiveLineages = new ArrayList<>();
    ArrayList<Integer> emptyList = new ArrayList<>();
    for (int i = 0; i <= intCount; i++) coalActiveLineages.add(emptyList);
}
 
Developer: nicfel, Project: Mascot, Lines of code: 19, Source file: Mascot.java

Example 5: resetFilter

import org.jblas.DoubleMatrix; // import the required package/class
/** Called when the filter is reset.
 *
 */
@Override
public void resetFilter() {
    erbm = new ERBM(vis_size, h_size);
    slow_recon = new DoubleMatrix(vis_size);
    slow_orig = new DoubleMatrix(vis_size);

    erbm.weights = DoubleMatrix.rand(vis_size, h_size).muli(0.1f);
    //float char_tau = 0.01f;

    setSTDPWin(getSTDPWin());
    setLearnRate(getLearnRate());
    setThrLearnRate(getThrLearnRate());
    setTRefrac(getTRefrac());
    setTau(getTau());
    setReconTau(getReconTau());
}
 
Developer: SensorsINI, Project: jaer, Lines of code: 20, Source file: ERBMLearnFilter.java

Example 6: computeQ

import org.jblas.DoubleMatrix; // import the required package/class
protected DoubleMatrix computeQ(DoubleMatrix Y, DoubleMatrix F, DoubleMatrix G, DoubleMatrix Coef) {
    DoubleMatrix Q = DoubleMatrix.zeros(numStates, numStates);

    double rateOutByBirth, rateOutByMigration, totalRate;
    for (int k = 0; k < numStates; k++) { /* k row */
        double colSum = 0.0, diag;
        if (Y.get(k) > 0) {
            for (int l = 0; l < numStates; l++) {
                rateOutByBirth = F.get(l, k) * Coef.get(l) / Y.get(k);
                //rateOutByBirth = F.get(l,k) / Y.get(k);
                rateOutByMigration = G.get(l, k) / (Y.get(k));
                totalRate = rateOutByBirth + rateOutByMigration;
                Q.put(k, l, totalRate);
                colSum += totalRate;
            }
            diag = Q.get(k, k);
            Q.put(k, k, diag - colSum);
        }
        // else skip and leave row k with zero values
        // Q.put(k, l, 0.0);
    }
    return Q;
}
 
Developer: mrc-ide, Project: PhyDyn, Lines of code: 27, Source file: STreeLikelihoodExp.java
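As constructed above, each off-diagonal entry Q(k, l) combines a birth term F(l,k) * Coef(l) / Y(k) and a migration term G(l,k) / Y(k), and the diagonal is then set so that every row sums to zero, as in a continuous-time rate matrix (rows with Y(k) <= 0 stay all-zero, which also sums to zero). A small sketch of how that invariant could be checked with jblas (a hypothetical check, not part of the project code):

// Every row of the returned Q should sum to (approximately) zero
DoubleMatrix rowSums = Q.rowSums();
for (int k = 0; k < rowSums.length; k++) {
    if (Math.abs(rowSums.get(k)) > 1e-9) {
        throw new IllegalStateException("row " + k + " of Q does not sum to zero");
    }
}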

Example 7: init

import org.jblas.DoubleMatrix; // import the required package/class
@Override
public void init(int numNodes, int numStates) {
	// default first
	this.numNodes = numNodes;
	this.numStates= numStates;
	extantLineages = new int[numNodes];
	extantIndex = new int[numNodes];
	extantProbs = new DoubleMatrix[numNodes];
	//if (this.numNodes != numNodes) {
	//	this.numNodes = numNodes;
	//	extantLineages = new int[numNodes];
	//	extantIndex = new int[numNodes];
	//	extantProbs = new DoubleMatrix[numNodes];
	//}
	initExtantLineages();
}
 
Developer: mrc-ide, Project: PhyDyn, Lines of code: 17, Source file: StateProbabilitiesArray.java

Example 8: getCoalescentVectors

import org.jblas.DoubleMatrix; // import the required package/class
public List<DoubleMatrix> getCoalescentVectors(STreeIntervals intervals, int interval) {
	List<Node> coalLines = intervals.getLineagesRemoved(interval);
	//if (coalLines.size() > 2) return null;
 	
   	if (coalLines.size() > 2) {
		throw new RuntimeException("Unsupported coalescent at non-binary node");
	}
	int childIdx1 = extantIndex[coalLines.get(0).getNr()];
	int childIdx2 = extantIndex[coalLines.get(1).getNr()];
	
	// if swap was already performed, this check is not needed
	if (childIdx1 == -1 || childIdx2 == -1){
    	intervals.swap();
    	coalLines = intervals.getLineagesRemoved(interval);
    	childIdx1 = extantIndex[coalLines.get(0).getNr()];
    	childIdx2 = extantIndex[coalLines.get(1).getNr()];
	}
	List<DoubleMatrix> result = new ArrayList<DoubleMatrix>();
	DoubleMatrix pvec1, pvec2;		
	pvec1 = this.getStateProbsFromIndex(childIdx1);
	pvec2 = this.getStateProbsFromIndex(childIdx2);
			
	result.add(pvec1);  result.add(pvec2);
	
	return result;		
}
 
Developer: mrc-ide, Project: PhyDyn, Lines of code: 27, Source file: StateProbabilitiesArray.java

Example 9: computeDerivatives

import org.jblas.DoubleMatrix; // import the required package/class
@Override
public void computeDerivatives(double t, double[] y, double[] yDot)
		throws MaxCountExceededException, DimensionMismatchException {
	int i,j;
	
	DoubleMatrix demeYdot;
	
	updateMatrices(t,y);
	demeYdot = births.columnSums().transpose();
	demeYdot.addi(migrations.columnSums().transpose());
	demeYdot.subi(migrations.rowSums());
	demeYdot.subi(deaths);
	
	for(i=0; i<numDemes; i++) { yDot[i] = demeYdot.get(i,0); }
	for(j=0; i<numDemes+numNonDemes; i++,j++) { yDot[i] = nondemeYdot[j]; }
	
	/* -- previous code optimisation
	if (this.fixedStepSize) {
		timeseries.addFG(t, births, migrations);
	}
	*/
	
}
 
Developer: mrc-ide, Project: PhyDyn, Lines of code: 24, Source file: PopModelODE.java
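Reading the deme block directly off the jblas calls above: columnSums() of births and migrations (transposed into column vectors) gives the total inflow into each deme, rowSums() of migrations gives the outflow, and deaths is subtracted last, so for deme k

    dY[k]/dt = sum_l births[l][k] + sum_l migrations[l][k] - sum_l migrations[k][l] - deaths[k]

The remaining entries of yDot are then filled from nondemeYdot for the non-deme variables.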

Example 10: log

import org.jblas.DoubleMatrix; // import the required package/class
/**
 * Log this sample for the current state to the PrintStream,
 * e.g. the value of a parameter, a list of parameters, or a Newick tree.
 *
 * @param nSample chain sample number
 * @param out     log stream
 */
public void log(int nSample, PrintStream out) {
	out.print(nSample+"\t");
	Tree tree = (Tree)treeInput.get().getCurrent();
	Node root = tree.getRoot();
	StateProbabilities sp = densityInput.get().getStateProbabilities();
	DoubleMatrix probs = sp.getStateProbs(root.getNr());
	if (probs != null) {
		for(int i=0; i < numStates; i++)
			out.print(probs.get(i)+"\t");
	} else {
		for(int i=0; i < numStates; i++)
			out.print(0.00+"\t");
	}
	// out.println("");
}
 
Developer: mrc-ide, Project: PhyDyn, Lines of code: 23, Source file: STreeRootLogger.java

Example 11: trainOn

import org.jblas.DoubleMatrix; // import the required package/class
/**
 * Adjusts the network parameters based on the gradient of the cross-entropy loss function
 * for probabilities assigned by the network (based on the specified active variables) and
 * the target correct label.
 *  
 * The gradient is back-propagated through the network, and the parameters at each layer
 * are adjusted using a step scaled by the learning rate.
 * @return 1.0 if the prediction, thresholded at 0.5, matches the correct label; 0.0 otherwise
 */
public double trainOn(MultitaggerTrainingSentence sentence, int i, 
		double correctLabel, double learningRate) {
	DoubleMatrix lookupOutput = this.myLookup.getOutput(sentence, i);
	DoubleMatrix linearOutput = this.firstLinearLayer.output(lookupOutput);
	double prediction = this.outputLayer.output(linearOutput);
	double loss = this.outputLayer.calculateGradientOfCostFunction(prediction, correctLabel);
	for(int j=0; j<this.costGrad.length; j++) {
		this.costGrad.put(j, loss*learningRate);
	}
	// Update gradient w.r.t. linear layer
	//linearOutput.muli(loss, this.costGrad);		
	DoubleMatrix linGrad = 
			this.firstLinearLayer.calculateGradientWithRespectToInput(lookupOutput, this.costGrad);
	this.firstLinearLayer.updateParameters(this.costGrad, 1.0);
	this.myLookup.updateParameters(linGrad, 1.0);
	//this.firstLinearLayer.updateParameters(this.costGrad, learningRate);
	//this.myLookup.updateParameters(linGrad, learningRate); // (learning rate already added to cost grad above)
	double pred = (prediction > 0.5) ? 1.0 : 0.0;
	return pred == correctLabel ? 1.0 : 0.0;
}
 
Developer: ramusa2, Project: CandCNFPerceptronParser, Lines of code: 30, Source file: FirstMultitaggerClassifier.java

Example 12: fillOutputVector

import org.jblas.DoubleMatrix; // import the required package/class
@Override
protected void fillOutputVector(DoubleMatrix output,
		MultitaggerTrainingSentence sentence, int position) {
	String cat = sentence.getItems().get(position).getCat(0);
	Integer ind = this.index.get(cat);
	if(ind == null) {
		ind = this.index.get(MISSING);
		cat = MISSING;
	}
	for(int i=0; i<this.getOutputDimension(); i++) {
		output.put(i, 0);
	}
	DoubleMatrix vec = this.getFeatureVector(cat);
	for(int i=this.dimension*ind; i<this.dimension*(ind+1); i++) {
		int j = i - this.dimension*ind;
		this.outputVector.put(i, vec.get(j));
	}
	this.inputFeatureIndex = ind;
}
 
Developer: ramusa2, Project: CandCNFPerceptronParser, Lines of code: 20, Source file: SingleFeatureLookupLayer.java

Example 13: testXor_batchGradientDescentWithAdam

import org.jblas.DoubleMatrix; // import the required package/class
@Test
public void testXor_batchGradientDescentWithAdam() throws CloneNotSupportedException {
    DoubleMatrix actual;
    DoubleMatrix[] input = new DoubleMatrix[4];
    DoubleMatrix batchInput = new DoubleMatrix(2,4,0,0,0,1,1,0,1,1);
    input[0] = new DoubleMatrix(2,1,0,0);
    input[1] = new DoubleMatrix(2,1,0,1);
    input[2] = new DoubleMatrix(2,1,1,0);
    input[3] = new DoubleMatrix(2,1,1,1);
    DoubleMatrix[] expected = new DoubleMatrix[4];
    expected[0] = new DoubleMatrix(1,4,0,1,1,0);
    for(int i=0 ; i<20000; i++) {
        actual = network.forwardPropagate(batchInput);
        network.backPropagateWithAdam(expected[0], actual);
    }
    assertEquals(0, network.forwardPropagate(input[0]).get(0), 0.03);
    assertEquals(1, network.forwardPropagate(input[1]).get(0), 0.03);
    assertEquals(1, network.forwardPropagate(input[2]).get(0), 0.03);
    assertEquals(0, network.forwardPropagate(input[3]).get(0), 0.03); // the (1,1) case
}
 
Developer: nordsam, Project: tic-tac-toe, Lines of code: 21, Source file: XorTest.java

Example 14: testXor_stochasticGradientDescent

import org.jblas.DoubleMatrix; // import the required package/class
@Test
public void testXor_stochasticGradientDescent() {
    DoubleMatrix actual;
    DoubleMatrix[] input = new DoubleMatrix[4];
    input[0] = new DoubleMatrix(2,1,0,0);
    input[1] = new DoubleMatrix(2,1,0,1);
    input[2] = new DoubleMatrix(2,1,1,0);
    input[3] = new DoubleMatrix(2,1,1,1);
    DoubleMatrix[] expected = new DoubleMatrix[4];
    expected[0] = new DoubleMatrix(1,1,0);
    expected[1] = new DoubleMatrix(1,1,1);
    expected[2] = new DoubleMatrix(1,1,1);
    expected[3] = new DoubleMatrix(1,1,0);
    for(int i=0 ; i<81000; i++) {
        actual = network.forwardPropagate(input[i%4]);
        network.backPropagate(expected[i%4], actual);
    }
    assertEquals(0, network.forwardPropagate(input[0]).get(0), 0.03);
    assertEquals(1, network.forwardPropagate(input[1]).get(0), 0.03);
    assertEquals(1, network.forwardPropagate(input[2]).get(0), 0.03);
    assertEquals(0, network.forwardPropagate(input[3]).get(0), 0.03); // the (1,1) case
}
 
Developer: nordsam, Project: tic-tac-toe, Lines of code: 23, Source file: XorTest.java

Example 15: computeHighDimDistances

import org.jblas.DoubleMatrix; // import the required package/class
private DoubleMatrix computeHighDimDistances(DoubleMatrix dataHighDim, BiFunction<DoubleMatrix, DoubleMatrix, Double> distHighDim) {
    DoubleMatrix dists = new DoubleMatrix((dataHighDim.rows * (dataHighDim.rows - 1)) / 2);
    for (int i = 0; i < dataHighDim.rows - 1; i++) {
        DoubleMatrix x1 = dataHighDim.get(new PointRange(i), new AllRange());
        for (int j = i + 1; j < dataHighDim.rows; j++) {
            DoubleMatrix x2 = dataHighDim.get(new PointRange(j), new AllRange());
            double dist = distHighDim.apply(x1, x2);
            int idx = this.compactIndex(i, j);
            dists.put(idx, dist);
        }
    }
    /* Normalize the distances s.t. the largest is 1. */
    this.scaling = 1 / dists.max();
    dists.muli(this.scaling);

    return dists;
}
 
Developer: vmware, Project: hillview, Lines of code: 18, Source file: MetricMDS.java
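The pairwise distances are packed into a single column vector of length n(n-1)/2 rather than an n x n matrix, so compactIndex(i, j) must map each unordered pair with i < j to a unique flat position. The project's compactIndex implementation is not shown here; one common convention for this kind of condensed indexing (an assumption about the layout, not necessarily what hillview uses) is sketched below:

/**
 * Flat index of the pair (i, j), i < j, when the strict upper triangle of an
 * n x n distance matrix is stored row by row in a single vector.
 * This is one common convention; the actual compactIndex in the project may differ.
 */
static int compactIndex(int i, int j, int n) {
    // rows 0..i-1 contribute (n-1) + (n-2) + ... + (n-i) entries before row i
    return i * n - (i * (i + 1)) / 2 + (j - i - 1);
}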


Note: The org.jblas.DoubleMatrix class examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The code snippets were selected from open-source projects contributed by various developers; copyright of the source code belongs to the original authors. Please refer to the corresponding project's License for distribution and use; do not repost without permission.