

Java DoubleMatrix.muli Method Code Examples

This article collects typical usage examples of the Java method org.jblas.DoubleMatrix.muli. If you are wondering what DoubleMatrix.muli does, how to call it, or what real-world uses look like, the curated examples below may help. You can also browse further usage examples of the enclosing class, org.jblas.DoubleMatrix.


The 11 code examples of DoubleMatrix.muli shown below are ordered by popularity by default.
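Before turning to the project examples, here is a minimal standalone sketch (not taken from any of the projects below; the class name MuliDemo and the sample values are placeholders) of the muli overloads that recur throughout: muli(double) and muli(DoubleMatrix) multiply the receiver in place, while the variants that accept a result matrix, such as muli(scale, result), write into that matrix instead.

import org.jblas.DoubleMatrix;

public class MuliDemo {
    public static void main(String[] args) {
        DoubleMatrix a = new DoubleMatrix(new double[][]{{1.0, 2.0}, {3.0, 4.0}});
        DoubleMatrix b = DoubleMatrix.ones(2, 2);

        // In-place scalar multiply: every entry of a is doubled, and a itself is returned.
        a.muli(2.0);

        // In-place element-wise multiply (not a matrix product): a becomes a .* b.
        a.muli(b);

        // Result-matrix form: write a * 0.5 into result, leaving a unchanged.
        DoubleMatrix result = new DoubleMatrix(2, 2);
        a.muli(0.5, result);

        System.out.println(a);
        System.out.println(result);
    }
}

Unlike mul, which allocates and returns a new matrix, muli mutates its receiver (or the supplied result matrix), which is why the examples below use it for gradient scaling and normalization steps.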

Example 1: computeHighDimDistances

import org.jblas.DoubleMatrix; // import the package/class this method depends on
private DoubleMatrix computeHighDimDistances(DoubleMatrix dataHighDim, BiFunction<DoubleMatrix, DoubleMatrix, Double> distHighDim) {
    DoubleMatrix dists = new DoubleMatrix((dataHighDim.rows * (dataHighDim.rows - 1)) / 2);
    for (int i = 0; i < dataHighDim.rows - 1; i++) {
        DoubleMatrix x1 = dataHighDim.get(new PointRange(i), new AllRange());
        for (int j = i + 1; j < dataHighDim.rows; j++) {
            DoubleMatrix x2 = dataHighDim.get(new PointRange(j), new AllRange());
            double dist = distHighDim.apply(x1, x2);
            int idx = this.compactIndex(i, j);
            dists.put(idx, dist);
        }
    }
    /* Normalize the distances s.t. the largest is 1. */
    this.scaling = 1 / dists.max();
    dists.muli(this.scaling);

    return dists;
}
 
Developer ID: vmware, Project: hillview, Lines of code: 18, Source: MetricMDS.java

Example 2: updateParameters

import org.jblas.DoubleMatrix; // import the package/class this method depends on
public void updateParameters(DoubleMatrix input, DoubleMatrix outputGradient, 
		double learningRate, Regularization regType, double regWeight) {
	// Scale gradient by learning rate
	outputGradient.muli(learningRate);
	// Update bias (bias unaffected by regularization)
	this.bias.addi(outputGradient);
	// Apply update from regularization, if applicable
	if(regType == Regularization.L1) {
		double p_ij;
		double delta;
		double step = regWeight*learningRate;
		for(int i=0; i<this.parameters.rows; i++) {
			for(int j=0; j<this.parameters.columns; j++) {
				p_ij = this.parameters.get(i, j);
				delta = Math.signum(p_ij) * Math.min(step, Math.abs(p_ij));
				this.parameters.put(i, j, p_ij - delta);
			}
		}
	}
	else if(regType == Regularization.L2) {
		this.parameters.mmuli(1.0 - learningRate*regWeight);
	}
	// Apply update from cost function
	outputGradient.mmuli(input.transpose(), this.updateIntermediate);
	this.parameters.addi(this.updateIntermediate);
}
 
Developer ID: ramusa2, Project: CandCNFPerceptronParser, Lines of code: 27, Source: LinearLayer.java

Example 3: testBigCorrelation

import org.jblas.DoubleMatrix; // import the package/class this method depends on
@Test
public void testBigCorrelation() {
    Random.seed(43);
    DoubleMatrix mat = DoubleMatrix.rand(20000, 5);
    mat.muli(4.3);
    ITable bigTable = BlasConversions.toTable(mat);
    String[] colNames = bigTable.getSchema().getColumnNames();
    IDataSet<ITable> dataset = TestTables.makeParallel(bigTable, 10);

    PCACorrelationSketch fcs = new PCACorrelationSketch(colNames);
    CorrMatrix cm = dataset.blockingSketch(fcs);

    // Construct the correlation matrix that we compare against by using pure JBLAS.
    DoubleMatrix cmCheck = new DoubleMatrix(colNames.length, colNames.length);
    DoubleMatrix means = mat.columnMeans();
    DoubleMatrix sigmas = MatrixFunctions.sqrt(
            mat.subRowVector(means).mul(mat.subRowVector(means)).columnMeans()
    );
    for (int i = 0; i < cmCheck.columns; i++) {
        DoubleMatrix c1 = mat.get(new AllRange(), i);
        for (int j = 0; j < cmCheck.rows; j++) {
            DoubleMatrix c2 = mat.get(new AllRange(), j);
            double corr = c1.dot(c2) / mat.rows;
            corr -= means.get(i) * means.get(j);
            corr /= sigmas.get(i) * sigmas.get(j);
            cmCheck.put(i, j, corr);
            cmCheck.put(j, i, corr);
        }
    }
    for (int i = 0; i < cm.getCorrelationMatrix().length; i++) {
        double[] row = cm.getCorrelationMatrix()[i];
        for (int j = 0; j < row.length; j++) {
            double actual = cm.getCorrelationMatrix()[i][j];
            double expected = cmCheck.get(i, j);
            Assert.assertEquals(expected, actual, 1e-5);
        }
    }
}
 
Developer ID: vmware, Project: hillview, Lines of code: 39, Source: CorrelationTest.java

Example 4: updateParameters

import org.jblas.DoubleMatrix; // import the package/class this method depends on
/**
 * Update the feature weights for the cached active variables based on
 * the back-propagated gradient.
 */
public void updateParameters(DoubleMatrix outputGradient, double learningRate) {
	// Scale gradient by the learning rate
	outputGradient.muli(learningRate);
	// Update weights
	int offset = 0;
	for(LSVariableEntry entry : this.cachedTrainingContext) {
		// Update word embeddings
		double[] embeddings = this.getWordEmbeddingWeights(entry.getEmbeddingIndex());
		for(int w=0; w<embeddings.length; w++) {
			embeddings[w] += outputGradient.get(offset);
			offset++;
		}
		// Update capitalization feature weights
		double[] caps = this.getCapitalizationWeights(entry.getCapitalizationIndex());
		for(int c=0; c<caps.length; c++) {
			caps[c] += outputGradient.get(offset);
			offset++;
		}			
		// Update suffix feature weights
		double[] suff = this.getSuffixWeights(entry.getSuffixIndex());
		for(int s=0; s<suff.length; s++) {
			suff[s] += outputGradient.get(offset);
			offset++;
		}		
	}
}
 
Developer ID: ramusa2, Project: CandCNFPerceptronParser, Lines of code: 31, Source: LSLookupTableLayer.java

Example 5: backward

import org.jblas.DoubleMatrix; // import the package/class this method depends on
@Override
protected DoubleMatrix backward(DoubleMatrix inputVector,
		DoubleMatrix outputGradient) {
	double out = outputGradient.get(0);
	// muli(scalar, result): write inputVector * out*(1.0-out) into this.inputGradient,
	// leaving inputVector itself unchanged.
	inputVector.muli(out*(1.0-out), this.inputGradient);
	return this.inputGradient;
}
 
Developer ID: ramusa2, Project: CandCNFPerceptronParser, Lines of code: 8, Source: Logistic.java

Example 6: updateSegment

import org.jblas.DoubleMatrix; // import the package/class this method depends on
private void updateSegment(int segment, DoubleMatrix outputGradient,
		double learningRate) {
	DoubleMatrix input = this.inputVectors.get(segment);
	// Gradient of the loss with respect to this segment's input vector.
	DoubleMatrix otherGradient = this.mappingFunctions.get(segment).calculateGradientWithRespectToInput(input, outputGradient);
	// Update the mapping function's parameters, then take a scaled gradient step on the input vector itself.
	this.mappingFunctions.get(segment).updateParameters(
			this.inputVectors.get(segment), outputGradient, learningRate);
	otherGradient.muli(learningRate);
	this.inputVectors.get(segment).addi(otherGradient);
}
 
Developer ID: ramusa2, Project: CandCNFPerceptronParser, Lines of code: 10, Source: ContextSensitiveLaterClassifierLookupLayer.java

Example 7: updateParameters

import org.jblas.DoubleMatrix; // import the package/class this method depends on
@Override
public void updateParameters(DoubleMatrix outputGradient,
		double learningRate) {
	// Scale the gradient by the learning rate in place.
	outputGradient.muli(learningRate);
	// Add this feature's slice of the gradient to its embedding vector.
	DoubleMatrix vec = this.getFeatureVector(this.inputFeature);
	int offset = this.dimension*this.inputFeatureIndex;
	for(int i=0; i<this.dimension; i++) {
		vec.put(i, vec.get(i) + outputGradient.get(i+offset));
	}
}
 
Developer ID: ramusa2, Project: CandCNFPerceptronParser, Lines of code: 11, Source: SingleFeatureLookupLayer.java

Example 8: computeDerivatives

import org.jblas.DoubleMatrix; // import the package/class this method depends on
@Override
public void computeDerivatives(double h, double[] ql, double[] dql)
		throws MaxCountExceededException, DimensionMismatchException {
	
	int tsPointCurrent = ts.getTimePoint(tsTimes0-h, tsPointLast);
	
	tsPointLast = tsPointCurrent;
	DoubleMatrix Y = ts.getYs()[tsPointCurrent];
	DoubleMatrix F = ts.getFs()[tsPointCurrent];
	DoubleMatrix G = ts.getGs()[tsPointCurrent];
	
	if (forgiveY) Y.maxi(1.0); else Y.maxi(MIN_Y);
			
	int i;
	for(i=0; i < numStatesSQ; i++) qdata[i] = ql[i];
	DoubleMatrix Q =  new DoubleMatrix(numStates,numStates,qdata);		
	DoubleMatrix Qnorm = Q.dup();
	Qnorm.diviRowVector(Q.columnSums());
	
	DoubleMatrix A = Qnorm.mmul(A0);
	A.divi(A.sum());  // normalised
	A.muli(sumA0);    // sum of A = sum(A0)
	//A = A.mul(sumA0).div(A.sum());
	DoubleMatrix a = A.div(Y);  // column vector 

	// DoubleMatrix dQ = new DoubleMatrix(numStates,numStates,dql);
	double dL = 0;
	DoubleMatrix FG = F.add(G);
	double accum;
	int k,l,z;
	i=0;
    for (z = 0; z < numStates; z++){
    	for (k = 0; k < numStates; k++){
        	accum = 0;
        	for(l=0; l < numStates; l++) {
        		if (k != l) {
        			if (Q.get(l,z) > 0) {
        				accum += FG.get(k,l) *  Q.get(l,z)/ Math.max(Q.get(l,z), Y.get(l));
        			}
        			if (Q.get(k,z) > 0) {
        				accum -= FG.get(l,k) *  Q.get(k,z)/  Math.max(Q.get(k,z), Y.get(k));
        			}
        		}
        		if (Q.get(k,z) > 0) {
        			accum -= F.get(k,l) * a.get(l) * Q.get(k,z)/  Math.max(Q.get(k,z), Y.get(k));
        		}
        	}
        	//dQ.put(k,z,accum);
        	dql[i++] = accum;
        }
    }	
    for (k= 0; k < numStates; k++){
    	for (l =0 ; l < numStates; l++){			
    		if (k == l && A.get(k) >= 1. ){
    			dL += (A.get(k) / Y.get(k)) * ((A.get(k)-1.) / Y.get(k)) * F.get(k,l) ; 
      		} else {
      			dL += a.get(k) * a.get(l) * F.get(k,l);
      		}
    	}
    }
    dL = Math.max(dL, 0.);   
    dql[numStatesSQ] = dL;					

}
 
Developer ID: mrc-ide, Project: PhyDyn, Lines of code: 65, Source: SolverQL.java

Example 9: regularize

import org.jblas.DoubleMatrix; // import the package/class this method depends on
@Override
public DoubleMatrix regularize(DoubleMatrix mat, double learningRate) {
	// L2 weight decay: shrink every entry of mat in place; muli(scale, mat) writes the result back into mat.
	double scale = 1.0 - learningRate;
	return mat.muli(scale, mat);
}
 
Developer ID: ramusa2, Project: CandCNFPerceptronParser, Lines of code: 6, Source: L2Regularizer.java

Example 10: updateParameters

import org.jblas.DoubleMatrix; // import the package/class this method depends on
@Override
	public void updateParameters(DoubleMatrix outputGradient,
			double learningRate) {
		// Scale the whole gradient in place, then hand each segment its own slice via updateSegment.
		outputGradient.muli(learningRate, outputGradient);
		for(int segment=0; segment<this.mappingFunctions.size(); segment++) {
			this.updateSegment(segment, outputGradient.get(this.segmentIndices.get(segment)), learningRate);
		}
		/*
		
		int index = 0;
		ArrayList<MultitaggerTrainingItem> items =  this.inputSentence.getItems();
		
		// update word embedding
		DoubleMatrix wordEmbedding = this.getWordEmbedding(this.inputSentence.sentence().getTokens()[this.inputPosition].getWord());
		for(int i=0; i<wordEmbedding.length; i++) {
			wordEmbedding.put(i, wordEmbedding.get(i)+outputGradient.get(index++));
		}
		
		// Add cat embeddings and prob for this token to input
		MultitaggerTrainingItem thisItem = items.get(this.inputPosition);
		for(int k=0; k<this.NUM_CATS_IN_LIST; k++) {
			index++; // don't update prob
			DoubleMatrix vec = this.getCategoryEmbedding(thisItem.getCat(k));
			for(int j=0; j<vec.length; j++) {
				vec.put(j, vec.get(j)+outputGradient.get(index++));
			}
		}
		// Add surrounding category context to list
		for(int c=-this.CXT_WINDOW_SIZE; c<=this.CXT_WINDOW_SIZE; c++) {
			if(c==0) {
				continue;
			}
			int j = this.inputPosition+c;
			DoubleMatrix cxtVec;
			if(j < 0){
				cxtVec = this.getCategoryEmbedding(CAT_START);
				for(int v=0; v<cxtVec.length; v++) {
					cxtVec.put(v, cxtVec.get(v)+outputGradient.get(index++));
				}
			}
			else if(j >= this.inputSentence.sentence().length()) {
				cxtVec = this.getCategoryEmbedding(CAT_END);
				for(int v=0; v<cxtVec.length; v++) {
					cxtVec.put(v, cxtVec.get(v)+outputGradient.get(index++));
				}
			}
			else {
				
				//DoubleMatrix itemVec = this.getWordEmbedding(this.currentSentence.sentence().get(j).getWord());
				//for(int v=0; v<itemVec.length; v++) {
				//	itemVec.put(v, itemVec.get(v)+outputGradient.get(index+v));
				//}
				//index += this.WORD_DIM;
				
				MultitaggerTrainingItem item = items.get(j);
				for(int k=0; k<this.CXT_DEPTH; k++) {
					double itemProb = item.getProb(k);
					DoubleMatrix itemVec = this.getCategoryEmbedding(item.getCat(k));
					for(int v=0; v<itemVec.length; v++) {
						itemVec.put(v, itemVec.get(v)+outputGradient.get(index+v)*itemProb);
					}
				}
				index += this.CAT_EMBEDDING_DIM;
				
			}
		}	
*/
	}
 
Developer ID: ramusa2, Project: CandCNFPerceptronParser, Lines of code: 69, Source: ContextSensitiveLaterClassifierLookupLayer.java

Example 11: updateParameters

import org.jblas.DoubleMatrix; // import the package/class this method depends on
@Override
public void updateParameters(DoubleMatrix outputGradient,
		double learningRate) {
	outputGradient.muli(learningRate, outputGradient);
	for(int segment=0; segment<this.mappingFunctions.size(); segment++) {
		this.updateSegment(segment, outputGradient.get(this.segmentIndices.get(segment)), learningRate);
	}
	/*

	int index = 0;
	ArrayList<MultitaggerTrainingItem> items =  this.inputSentence.getItems();

	// update word embedding
	DoubleMatrix wordEmbedding = this.getWordEmbedding(this.inputSentence.sentence().getTokens()[this.inputPosition].getWord());
	for(int i=0; i<wordEmbedding.length; i++) {
		wordEmbedding.put(i, wordEmbedding.get(i)+outputGradient.get(index++));
	}

	// Add cat embeddings and prob for this token to input
	MultitaggerTrainingItem thisItem = items.get(this.inputPosition);
	for(int k=0; k<this.NUM_CATS_IN_LIST; k++) {
		index++; // don't update prob
		DoubleMatrix vec = this.getCategoryEmbedding(thisItem.getCat(k));
		for(int j=0; j<vec.length; j++) {
			vec.put(j, vec.get(j)+outputGradient.get(index++));
		}
	}
	// Add surrounding category context to list
	for(int c=-this.CXT_WINDOW_SIZE; c<=this.CXT_WINDOW_SIZE; c++) {
		if(c==0) {
			continue;
		}
		int j = this.inputPosition+c;
		DoubleMatrix cxtVec;
		if(j < 0){
			cxtVec = this.getCategoryEmbedding(CAT_START);
			for(int v=0; v<cxtVec.length; v++) {
				cxtVec.put(v, cxtVec.get(v)+outputGradient.get(index++));
			}
		}
		else if(j >= this.inputSentence.sentence().length()) {
			cxtVec = this.getCategoryEmbedding(CAT_END);
			for(int v=0; v<cxtVec.length; v++) {
				cxtVec.put(v, cxtVec.get(v)+outputGradient.get(index++));
			}
		}
		else {

			//DoubleMatrix itemVec = this.getWordEmbedding(this.currentSentence.sentence().get(j).getWord());
			//for(int v=0; v<itemVec.length; v++) {
			//	itemVec.put(v, itemVec.get(v)+outputGradient.get(index+v));
			//}
			//index += this.WORD_DIM;

			MultitaggerTrainingItem item = items.get(j);
			for(int k=0; k<this.CXT_DEPTH; k++) {
				double itemProb = item.getProb(k);
				DoubleMatrix itemVec = this.getCategoryEmbedding(item.getCat(k));
				for(int v=0; v<itemVec.length; v++) {
					itemVec.put(v, itemVec.get(v)+outputGradient.get(index+v)*itemProb);
				}
			}
			index += this.CAT_EMBEDDING_DIM;

		}
	}	
	 */
}
 
Developer ID: ramusa2, Project: CandCNFPerceptronParser, Lines of code: 69, Source: MappingFunctionInputLayer.java


Note: The org.jblas.DoubleMatrix.muli examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from open-source projects contributed by their respective developers; copyright remains with the original authors, and any distribution or reuse should follow each project's License. Do not republish without permission.