本文整理汇总了C++中LayerParameter::bottom方法的典型用法代码示例。如果您正苦于以下问题:C++ LayerParameter::bottom方法的具体用法?C++ LayerParameter::bottom怎么用?C++ LayerParameter::bottom使用的例子?那么,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类LayerParameter的用法示例。
在下文中一共展示了LayerParameter::bottom方法的1个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: UpgradeLayerParameter
bool UpgradeLayerParameter(const LayerParameter& v0_layer_connection,
LayerParameter* layer_param) {
bool is_fully_compatible = true;
layer_param->Clear();
for (int i = 0; i < v0_layer_connection.bottom_size(); ++i) {
layer_param->add_bottom(v0_layer_connection.bottom(i));
}
for (int i = 0; i < v0_layer_connection.top_size(); ++i) {
layer_param->add_top(v0_layer_connection.top(i));
}
if (v0_layer_connection.has_layer()) {
const V0LayerParameter& v0_layer_param = v0_layer_connection.layer();
if (v0_layer_param.has_name()) {
layer_param->set_name(v0_layer_param.name());
}
const string& type = v0_layer_param.type();
if (v0_layer_param.has_type()) {
layer_param->set_type(UpgradeV0LayerType(type));
}
for (int i = 0; i < v0_layer_param.blobs_size(); ++i) {
layer_param->add_blobs()->CopyFrom(v0_layer_param.blobs(i));
}
for (int i = 0; i < v0_layer_param.blobs_lr_size(); ++i) {
layer_param->add_blobs_lr(v0_layer_param.blobs_lr(i));
}
for (int i = 0; i < v0_layer_param.weight_decay_size(); ++i) {
layer_param->add_weight_decay(v0_layer_param.weight_decay(i));
}
if (v0_layer_param.has_num_output()) {
if (type == "conv") {
layer_param->mutable_convolution_param()->set_num_output(
v0_layer_param.num_output());
} else if (type == "innerproduct") {
layer_param->mutable_inner_product_param()->set_num_output(
v0_layer_param.num_output());
} else {
LOG(ERROR) << "Unknown parameter num_output for layer type " << type;
is_fully_compatible = false;
}
}
if (v0_layer_param.has_biasterm()) {
if (type == "conv") {
layer_param->mutable_convolution_param()->set_bias_term(
v0_layer_param.biasterm());
} else if (type == "innerproduct") {
layer_param->mutable_inner_product_param()->set_bias_term(
v0_layer_param.biasterm());
} else {
LOG(ERROR) << "Unknown parameter biasterm for layer type " << type;
is_fully_compatible = false;
}
}
if (v0_layer_param.has_weight_filler()) {
if (type == "conv") {
layer_param->mutable_convolution_param()->
mutable_weight_filler()->CopyFrom(v0_layer_param.weight_filler());
} else if (type == "innerproduct") {
layer_param->mutable_inner_product_param()->
mutable_weight_filler()->CopyFrom(v0_layer_param.weight_filler());
} else {
LOG(ERROR) << "Unknown parameter weight_filler for layer type " << type;
is_fully_compatible = false;
}
}
if (v0_layer_param.has_bias_filler()) {
if (type == "conv") {
layer_param->mutable_convolution_param()->
mutable_bias_filler()->CopyFrom(v0_layer_param.bias_filler());
} else if (type == "innerproduct") {
layer_param->mutable_inner_product_param()->
mutable_bias_filler()->CopyFrom(v0_layer_param.bias_filler());
} else {
LOG(ERROR) << "Unknown parameter bias_filler for layer type " << type;
is_fully_compatible = false;
}
}
if (v0_layer_param.has_pad()) {
if (type == "conv") {
layer_param->mutable_convolution_param()->set_pad(v0_layer_param.pad());
} else if (type == "pool") {
layer_param->mutable_pooling_param()->set_pad(v0_layer_param.pad());
} else {
LOG(ERROR) << "Unknown parameter pad for layer type " << type;
is_fully_compatible = false;
}
}
if (v0_layer_param.has_kernelsize()) {
if (type == "conv") {
layer_param->mutable_convolution_param()->set_kernel_size(
v0_layer_param.kernelsize());
} else if (type == "pool") {
layer_param->mutable_pooling_param()->set_kernel_size(
v0_layer_param.kernelsize());
} else {
LOG(ERROR) << "Unknown parameter kernelsize for layer type " << type;
is_fully_compatible = false;
}
}
if (v0_layer_param.has_group()) {
if (type == "conv") {
//.........这里部分代码省略.........