This article collects typical usage examples of the INDArray.putRow method from the Java class org.nd4j.linalg.api.ndarray.INDArray, for anyone wondering what INDArray.putRow does, how to call it, and what real code that uses it looks like. You can also browse further usage examples of the containing class, org.nd4j.linalg.api.ndarray.INDArray.
Three code examples of INDArray.putRow are shown below, ordered by popularity.
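Before the full examples, here is a minimal, self-contained sketch (my own illustration, not taken from the examples below; the class name PutRowSketch is hypothetical) of what putRow does: it copies a row vector into the given row of a matrix, so the vector's length must equal the matrix's column count.

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class PutRowSketch {
    public static void main(String[] args) {
        INDArray matrix = Nd4j.zeros(3, 2);                  // 3x2 matrix of zeros
        INDArray row = Nd4j.create(new double[] {1.0, 2.0}); // length-2 row vector
        matrix.putRow(1, row);                               // copy the vector into row 1 (rows are 0-indexed)
        System.out.println(matrix);                          // the middle row is now [1.0, 2.0]
    }
}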
Example 1: getPar2HierVector
import org.nd4j.linalg.api.ndarray.INDArray; // import the class the method belongs to
/**
 * Base case: on a leaf, hv = pv.
 * On a non-leaf node with n children: hv = pv + k centroids of the n child hvs.
 */
private static INDArray getPar2HierVector(WeightLookupTable<VocabWord> lookupTable, PatriciaTrie<String> trie, String node,
                                          int k, Map<String, INDArray> hvs, Method method) {
    if (hvs.containsKey(node)) {
        return hvs.get(node);
    }
    INDArray hv = lookupTable.vector(node);
    String[] split = node.split(REGEX);
    Collection<String> descendants = new HashSet<>();
    if (split.length == 2) {
        String separator = ".";
        String prefix = node.substring(0, node.indexOf(split[1])) + separator;
        // collect the direct children of this node from the trie's prefix map
        SortedMap<String, String> sortedMap = trie.prefixMap(prefix);
        for (Map.Entry<String, String> entry : sortedMap.entrySet()) {
            if (prefix.lastIndexOf(separator) == entry.getKey().lastIndexOf(separator)) {
                descendants.add(entry.getValue());
            }
        }
    } else {
        descendants = Collections.emptyList();
    }
    if (descendants.size() == 0) {
        // leaf node: the hierarchical vector is just the paragraph vector
        hvs.put(node, hv);
        return hv;
    } else {
        // stack the children's hierarchical vectors into a matrix, one per row
        INDArray chvs = Nd4j.zeros(descendants.size(), hv.columns());
        int i = 0;
        for (String desc : descendants) {
            // child hierarchical vector, computed recursively
            INDArray chv = getPar2HierVector(lookupTable, trie, desc, k, hvs, method);
            chvs.putRow(i, chv);
            i++;
        }
        double[][] centroids;
        if (chvs.rows() > k) {
            centroids = Par2HierUtils.getTruncatedVT(chvs, k);
        } else if (chvs.rows() == 1) {
            centroids = Par2HierUtils.getDoubles(chvs.getRow(0));
        } else {
            centroids = Par2HierUtils.getTruncatedVT(chvs, 1);
        }
        switch (method) {
            case CLUSTER:
                // put pv in row 0, the centroids below it, then reduce the matrix to one vector
                INDArray matrix = Nd4j.zeros(centroids.length + 1, hv.columns());
                matrix.putRow(0, hv);
                for (int c = 0; c < centroids.length; c++) {
                    matrix.putRow(c + 1, Nd4j.create(centroids[c]));
                }
                hv = Nd4j.create(Par2HierUtils.getTruncatedVT(matrix, 1));
                break;
            case SUM:
                // add each centroid to pv
                for (double[] centroid : centroids) {
                    hv.addi(Nd4j.create(centroid));
                }
                break;
        }
        hvs.put(node, hv);
        return hv;
    }
}
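In this example, putRow fills a pre-allocated zero matrix with the recursively computed child vectors, one per row, before the matrix is reduced to k centroids by Par2HierUtils.getTruncatedVT (a project-specific helper; judging by its name, a truncated V^T from an SVD). As a hedged aside: if a single centroid were acceptable, the column-wise mean would be a simpler substitute, though it is not what Par2HierUtils actually computes:

// hypothetical simplification, not the project's method:
// average the stacked child vectors into one centroid and add it to hv
INDArray centroid = chvs.mean(0); // mean over dimension 0, i.e. over rows
hv.addi(centroid);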
Example 2: main
import org.nd4j.linalg.api.ndarray.INDArray; // import the class the method belongs to
public static void main(String[] args) {
    final Random rng = new Random(1234); // seed for reproducible shuffling
    // Declare variables and constants
    final int patterns = 2; // number of classes, nOut
    final int trainSetSize = 4;
    final int testSetSize = 4;
    final int nIn = 2;
    /*
     * Training data for the demo (the XOR pattern):
     * class 1 : [0, 0], [1, 1] for the negative class
     * class 2 : [0, 1], [1, 0] for the positive class
     */
    INDArray trainSet = Nd4j.create(new double[] {0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0}, new int[] {trainSetSize, nIn});
    INDArray trainLabel = Nd4j.create(new double[] {0, 1, 1, 0, 1, 0, 0, 1}, new int[] {trainSetSize, patterns});
    INDArray testSet = Nd4j.create(new double[] {0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0}, new int[] {testSetSize, nIn});
    INDArray testLabel = Nd4j.create(new double[] {0, 1, 1, 0, 1, 0, 0, 1}, new int[] {testSetSize, patterns});
    final int epochs = 2000;
    double learningRate = 0.2;
    int minibatchSize = 1; // set to 1 for online training
    int minibatchNumber = trainSetSize / minibatchSize;
    List<INDArray> train_X_minibatch = new ArrayList<>();
    List<INDArray> train_T_minibatch = new ArrayList<>();
    List<Integer> minibatchIndex = new ArrayList<>();
    for (int i = 0; i < trainSetSize; i++) minibatchIndex.add(i);
    Collections.shuffle(minibatchIndex, rng);
    // create minibatches: copy shuffled rows of the training data with putRow
    for (int i = 0; i < minibatchNumber; i++) {
        INDArray tmpX = Nd4j.create(new double[minibatchSize * nIn], new int[] {minibatchSize, nIn});
        INDArray tmpT = Nd4j.create(new double[minibatchSize * patterns], new int[] {minibatchSize, patterns});
        for (int j = 0; j < minibatchSize; j++) {
            tmpX.putRow(j, trainSet.getRow(minibatchIndex.get(i * minibatchSize + j)));
            tmpT.putRow(j, trainLabel.getRow(minibatchIndex.get(i * minibatchSize + j)));
        }
        train_X_minibatch.add(tmpX);
        train_T_minibatch.add(tmpT);
    }
    // Build the logistic regression model
    OutputLayer classifier = new OutputLayer(nIn, patterns, WeightInit.ZERO, null, Activation.Softmax);
    // train, decaying the learning rate after each epoch
    for (int epoch = 0; epoch < epochs; epoch++) {
        for (int batch = 0; batch < minibatchNumber; batch++) {
            classifier.train(train_X_minibatch.get(batch), train_T_minibatch.get(batch), minibatchSize, learningRate);
        }
        learningRate *= 0.95;
    }
    // test
    INDArray predicted_T = classifier.predict(testSet);
    // evaluate
    for (int i = 0; i < testSetSize; i++) {
        System.out.print("[" + testSet.getDouble(i, 0) + ", " + testSet.getDouble(i, 1) + "] -> Prediction: ");
        if (predicted_T.getDouble(i, 0) > predicted_T.getDouble(i, 1)) {
            System.out.print("Positive, ");
            System.out.print("probability = " + predicted_T.getDouble(i, 0));
        } else {
            System.out.print("Negative, ");
            System.out.print("probability = " + predicted_T.getDouble(i, 1));
        }
        System.out.print("; Actual: ");
        if (testLabel.getDouble(i, 0) == 1) {
            System.out.println("Positive");
        } else {
            System.out.println("Negative");
        }
    }
}
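A side note on the evaluation loop above: because predicted_T holds one score per class in each row, the pairwise getDouble comparison could equivalently be written with an argmax along dimension 1 (a hypothetical rewrite, not part of the original example):

// hypothetical alternative to the getDouble comparisons above
INDArray predictedClasses = Nd4j.argMax(predicted_T, 1); // index of the max score per row
INDArray actualClasses = Nd4j.argMax(testLabel, 1);      // index of the 1 in each one-hot label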
Example 3: main
import org.nd4j.linalg.api.ndarray.INDArray; // import the class the method belongs to
public static void main(String[] args) {
    final Random rng = new Random(123); // seed for reproducible shuffling
    // Declare variables and constants
    final int patterns = 2; // number of classes, nOut
    final int trainSetSize = 4;
    final int testSetSize = 4;
    final int nIn = 2;
    final int nHidden = 3;
    INDArray trainSet = Nd4j.create(new double[] {0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0}, new int[] {trainSetSize, nIn});
    INDArray trainLabel = Nd4j.create(new double[] {0, 1, 1, 0, 1, 0, 0, 1}, new int[] {trainSetSize, patterns});
    INDArray testSet = Nd4j.create(new double[] {0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0}, new int[] {testSetSize, nIn});
    INDArray testLabel = Nd4j.create(new double[] {0, 1, 1, 0, 1, 0, 0, 1}, new int[] {testSetSize, patterns});
    final int epochs = 5000;
    double learningRate = 0.1;
    final int minibatchSize = 1; // here, we do online training
    int minibatchNumber = trainSetSize / minibatchSize;
    List<INDArray> trainSetMinibatch = new ArrayList<>();
    List<INDArray> trainLabelMinibatch = new ArrayList<>();
    List<Integer> minibatchIndex = new ArrayList<>(); // data indices for the minibatches used by SGD
    for (int i = 0; i < trainSetSize; i++) minibatchIndex.add(i);
    Collections.shuffle(minibatchIndex, rng); // shuffle the data indices for SGD
    // create minibatches with the training data
    for (int i = 0; i < minibatchNumber; i++) {
        INDArray trainX = Nd4j.create(new double[minibatchSize * nIn], new int[] {minibatchSize, nIn});
        INDArray trainT = Nd4j.create(new double[minibatchSize * patterns], new int[] {minibatchSize, patterns});
        for (int j = 0; j < minibatchSize; j++) {
            trainX.putRow(j, trainSet.getRow(minibatchIndex.get(i * minibatchSize + j)));
            trainT.putRow(j, trainLabel.getRow(minibatchIndex.get(i * minibatchSize + j)));
        }
        trainSetMinibatch.add(trainX);
        trainLabelMinibatch.add(trainT);
    }
    // Build the multi-layer perceptron model
    MultiLayerPerceptron classifier = new MultiLayerPerceptron(nIn, nHidden, patterns, rng);
    // train
    for (int epoch = 0; epoch < epochs; epoch++) {
        for (int batch = 0; batch < minibatchNumber; batch++) {
            classifier.train(trainSetMinibatch.get(batch), trainLabelMinibatch.get(batch), minibatchSize, learningRate);
        }
    }
    // test
    INDArray predicted_T = classifier.predict(testSet);
    // Evaluate the model
    Evaluation evaluation = new Evaluation(predicted_T, testLabel).fit();
    double accuracy = evaluation.getAccuracy();
    double[] precision = evaluation.getPrecision();
    double[] recall = evaluation.getRecall();
    System.out.println("MLP model evaluation");
    System.out.println("--------------------");
    System.out.printf("Accuracy: %.1f %%\n", accuracy * 100);
    System.out.println("Precision:");
    for (int i = 0; i < patterns; i++) System.out.printf(" class %d: %.1f %%\n", i + 1, precision[i] * 100);
    System.out.println("Recall:");
    for (int i = 0; i < patterns; i++) System.out.printf(" class %d: %.1f %%\n", i + 1, recall[i] * 100);
}
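One closing note on both training demos: with minibatchSize = 1, the putRow loop builds four single-row minibatches, which amounts to plain online (stochastic) training. Because minibatchNumber = trainSetSize / minibatchSize, any minibatchSize that evenly divides trainSetSize would work with the same putRow-based batching code unchanged.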