This article collects typical usage examples of the MyMatrix::array method in C++. If you are wondering how MyMatrix::array is used in practice, the curated code example here may help. You can also explore further usage examples of the enclosing class, MyMatrix.
One code example of the MyMatrix::array method is shown below.
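Before the full example, here is a minimal sketch of what the array() call typically provides, assuming MyMatrix is a typedef for an Eigen dense matrix such as Eigen::MatrixXd. This is an assumption suggested by the array(), transpose() and Zero() calls in the example; the typedef and the sample values below are illustrative and not taken from the project.

#include <Eigen/Dense>
#include <iostream>

// Assumption: MyMatrix behaves like an Eigen dense matrix.
typedef Eigen::MatrixXd MyMatrix;

int main() {
  MyMatrix g(2, 2);
  g << 1.0, -2.0,
       3.0, -4.0;

  // array() exposes a coefficient-wise (element-wise) view of the matrix, so
  // array().square() squares every entry individually -- the same pattern the
  // example below uses to turn gradients into metric terms.
  MyMatrix g_sq;
  g_sq = g.array().square();  // assigning an array expression to a matrix is allowed in Eigen

  std::cout << g_sq << std::endl;  // prints: 1  4
                                   //         9 16
  return 0;
}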
Example 1: main
//......... part of the code omitted here .........
gradB[n_layers-2] = out - one_hot_batch;

// Backpropagation
bprop(Wt, W_out, Ap, gradB);

// Backpropagation for conv layer
std::vector<MyMatrix> conv_gradB(conv_W.size());
MyMatrix layer_gradB = (gradB[0] * W[0].transpose());
MyMatrix pool_gradB;
layer2pool(curr_batch_size, pool_params[conv_W.size()-1].N, conv_params[conv_W.size()-1].n_filter, layer_gradB, pool_gradB);
convBprop(curr_batch_size, conv_params, pool_params, conv_W_T, conv_Ap, pool_gradB, conv_gradB, poolIdxX1, poolIdxY1);

if(params.algo == "bprop"){
  update(eta, gradB, A, z0, params.regularizer, params.lambda, W_out, W, Wt, B);
  convUpdate(curr_batch_size, eta, conv_params, conv_gradB, conv_A, X_batch, "", 0., conv_W, conv_W_T, conv_B);
}else{
  // Compute the metric
  std::vector<MyMatrix> metric_gradB(n_layers-1);
  std::vector<MyMatrix> metric_conv_gradB(conv_params.size());

  if(params.algo=="qdMCNat"){
    // Monte-Carlo Approximation of the metric
    std::vector<MyMatrix> mc_gradB(n_layers-1);
    computeMcError(out, mc_gradB[n_layers-2]);

    // Backpropagation
    bprop(Wt, W_out, Ap, mc_gradB);
    for(unsigned k = 0; k < gradB.size(); k++){
      metric_gradB[k] = mc_gradB[k].array().square();
    }

    // Backpropagation for conv layer
    std::vector<MyMatrix> mc_conv_gradB(conv_W.size());
    MyMatrix mc_layer_gradB = (mc_gradB[0] * W[0].transpose());
    MyMatrix mc_pool_gradB;
    layer2pool(curr_batch_size, pool_params[conv_W.size()-1].N, conv_params[conv_W.size()-1].n_filter, mc_layer_gradB, mc_pool_gradB);
    convBprop(curr_batch_size, conv_params, pool_params, conv_W_T, conv_Ap, mc_pool_gradB, mc_conv_gradB, poolIdxX1, poolIdxY1);
    for(unsigned k = 0; k < conv_params.size(); k++){
      metric_conv_gradB[k] = mc_conv_gradB[k].array().square();
    }
  }
  else if(params.algo=="qdop"){
    for(unsigned k = 0; k < conv_params.size(); k++){
      metric_conv_gradB[k] = conv_gradB[k].array().square();
    }
    for(unsigned k = 0; k < gradB.size(); k++){
      metric_gradB[k] = gradB[k].array().square();
    }
  }
  else if(params.algo=="qdNat"){
    for(unsigned k = 0; k < conv_params.size(); k++){
      metric_conv_gradB[k] = conv_gradB[k].array().square();
    }
    for(unsigned k = 0; k < metric_gradB.size(); k++){
      metric_gradB[k] = MyMatrix::Zero(gradB[k].rows(),gradB[k].cols());