本文整理汇总了C++中Mat::dims方法的典型用法代码示例。如果您正苦于以下问题:C++ Mat::dims方法的具体用法?C++ Mat::dims怎么用?C++ Mat::dims使用的例子?那么,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类Mat的用法示例。
在下文中一共展示了Mat::dims方法的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: GRAD
// Checks whether the gradients of two matrices agree element-wise within
// tolerance `tol`. Matrices with different dimensions never match.
//
// @param a    first matrix (gradient read via the GRAD accessor)
// @param b    second matrix
// @param tol  absolute tolerance forwarded to allclose
// @return true when dimensions match and all gradient entries are within tol
template<typename R>
bool Other<R>::grad_allclose(Mat<R> a, Mat<R> b, double tol) {
    // Different shapes can never be close; bail out before comparing values.
    if (a.dims() != b.dims())
        return false;
    return GRAD(a).allclose(GRAD(b), tol);
}
示例2: MAT
// Exact equality test for two matrices: same dimensions and identical
// values (compared via the MAT accessor's operator==).
//
// @param a  first matrix
// @param b  second matrix
// @return true when both shape and every value are identical
template<typename R>
bool Other<R>::equals(Mat<R> a, Mat<R> b) {
    // wrong dimensions -> cannot be equal
    if (a.dims() != b.dims())
        return false;
    return MAT(a) == MAT(b);
}
示例3: mpc
// Runs the gated stacked-LSTM over `data` and accumulates two costs:
//   * prediction error: masked softmax cross-entropy of the decoder's
//     log-probabilities against `target_data`, and
//   * memory error: the (masked) sum of the input gate's activations,
//     scaled by `memory_penalty` — a sparsity pressure on gate usage.
//
// @param data            input token indices, one column per timestep
// @param target_data     target token indices, read at timestep + temporal_offset
// @param mask            per-timestep weights multiplied into both errors
// @param drop_prob       dropout probability forwarded to the stacked LSTM
// @param temporal_offset how far ahead targets are relative to inputs
// @param softmax_offset  subtracted from targets when > 0 (e.g. to skip
//                        reserved symbols at the start of the vocabulary)
// @return MaskedActivation(total prediction error, total memory error)
//
// NOTE(review): assumes `mat` is a class-level alias for Mat<Z> and that
// `gate`, `embedding`, `stacked_lstm` and `memory_penalty` are members of
// StackedGatedModel — confirm against the class definition.
template<typename Z>
typename StackedGatedModel<Z>::MaskedActivation StackedGatedModel<Z>::masked_predict_cost(
        Mat<int> data,
        Mat<int> target_data,
        Mat<Z> mask,
        Z drop_prob,
        int temporal_offset,
        uint softmax_offset) const {
    utils::Timer mpc("masked_predict_cost");
    auto state = this->initial_states();
    mat total_error(1,1);
    mat memory;
    mat memory_error(1,1);
    auto n = data.dims(0);
    // Targets are read at timestep + temporal_offset, so both containers
    // must be long enough for the shifted access.
    assert (temporal_offset < n);
    assert (target_data.dims(0) >= data.dims(0));
    for (uint timestep = 0; timestep < n - temporal_offset; ++timestep) {
        // pick this letter from the embedding
        utils::Timer gte("get the embeddings");
        auto input_vector = this->embedding[data[timestep]];
        // Input gate: sigmoid of the gate's response to the current
        // embedding and the topmost hidden state; scales the input.
        memory = gate.activate(
            {
                input_vector,
                state.back().hidden
            }).sigmoid();
        input_vector = input_vector.eltmul_broadcast_colwise(memory);
        gte.stop();
        utils::Timer flstm("forward lstm");
        state = this->stacked_lstm.activate(
            state,
            input_vector,
            drop_prob
        );
        flstm.stop();
        // classifier takes as input the final hidden layer's activation:
        utils::Timer decode_tm("decode");
        auto logprobs = this->decode(input_vector, state);
        decode_tm.stop();
        auto target = target_data[timestep + temporal_offset];
        if (softmax_offset > 0) {
            target -= softmax_offset;
        }
        utils::Timer softmax_tm("softmax cross entropy");
        auto errors = MatOps<Z>::softmax_cross_entropy_rowwise(logprobs, target);
        softmax_tm.stop();
        // Zero out (or down-weight) contributions from masked positions;
        // the mask row matching the target timestep is applied to both costs.
        utils::Timer masking_tm("masking");
        errors *= mask[timestep + temporal_offset].T();
        memory *= mask[timestep + temporal_offset].T();
        masking_tm.stop();
        total_error += errors.sum();
        memory_error += memory.sum() * memory_penalty;
    }
    mpc.stop();
    return MaskedActivation(total_error, memory_error);
}