This article collects typical usage examples of the C++ NetParameter class. If you are wondering what NetParameter is for, how it is used, or what calling code looks like in practice, the curated class code examples here may help.
Below are 15 code examples of the NetParameter class, sorted by popularity by default.
Example 1: CopyTrainedLayersFrom
template <typename Dtype>
void Net<Dtype>::CopyTrainedLayersFrom(const NetParameter& param) {
  int num_source_layers = param.layers_size();
  for (int i = 0; i < num_source_layers; ++i) {
    const LayerParameter& source_layer = param.layers(i);
    const string& source_layer_name = source_layer.name();
    int target_layer_id = 0;
    while (target_layer_id != layer_names_.size() &&
           layer_names_[target_layer_id] != source_layer_name) {
      ++target_layer_id;
    }
    if (target_layer_id == layer_names_.size()) {
      DLOG(INFO) << "Ignoring source layer " << source_layer_name;
      continue;
    }
    DLOG(INFO) << "Copying source layer " << source_layer_name;
    vector<shared_ptr<Blob<Dtype> > >& target_blobs =
        layers_[target_layer_id]->blobs();
    // blob 0: weights, blob 1: bias term
    CHECK_EQ(target_blobs.size(), source_layer.blobs_size())
        << "Incompatible number of blobs for layer " << source_layer_name;
    for (int j = 0; j < target_blobs.size(); ++j) {
      CHECK_EQ(target_blobs[j]->num(), source_layer.blobs(j).num());
      CHECK_EQ(target_blobs[j]->channels(), source_layer.blobs(j).channels());
      CHECK_EQ(target_blobs[j]->height(), source_layer.blobs(j).height());
      CHECK_EQ(target_blobs[j]->width(), source_layer.blobs(j).width());
      target_blobs[j]->FromProto(source_layer.blobs(j));
    }
  }
}
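For reference, here is a minimal usage sketch for the function above. It assumes the standard Caffe helper ReadNetParamsFromBinaryFileOrDie from caffe/util/upgrade_proto.hpp and uses placeholder file names; forks may spell these helpers differently.

#include <string>
#include "caffe/net.hpp"
#include "caffe/util/upgrade_proto.hpp"

// Read a serialized .caffemodel into a NetParameter and copy its weights
// into an already-initialized net, matching layers by name.
void LoadWeights(caffe::Net<float>* net, const std::string& weights_file) {
  caffe::NetParameter trained;
  caffe::ReadNetParamsFromBinaryFileOrDie(weights_file, &trained);
  net->CopyTrainedLayersFrom(trained);
}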
Example 2: state
template <typename Dtype>
void Net<Dtype>::filterNet(const NetParameter& param, NetParameter* filtered_param) {
  NetState state(param.state());
  filtered_param->CopyFrom(param);
  // remove all layer params and then filter
  filtered_param->clear_layer();
  for (int i = 0; i < param.layer_size(); i++) {
    const LayerParameter& layer_param = param.layer(i);
    const string& layer_name = layer_param.name();
    // usually a layer has no include/exclude rules at all
    CHECK(layer_param.include_size() == 0 || layer_param.exclude_size() == 0)
        << "Specify either include or exclude rules.";
    // with no include rules the layer is included by default
    bool layer_included = (layer_param.include_size() == 0);
    // assume 'included' and check whether any exclude rule is met
    for (int j = 0; layer_included && j < layer_param.exclude_size(); j++) {
      if (stateMeetRule(state, layer_param.exclude(j), layer_name)) {
        // cancel 'included'
        layer_included = false;
      }
    }
    // assume 'excluded' and check whether any include rule is met
    for (int j = 0; !layer_included && j < layer_param.include_size(); j++) {
      if (stateMeetRule(state, layer_param.include(j), layer_name)) {
        // cancel 'excluded'
        layer_included = true;
      }
    }
    // copy the included layer to filtered_param
    if (layer_included) filtered_param->add_layer()->CopyFrom(layer_param);
  }
}
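To illustrate what the include/exclude rules look like, the sketch below (an assumption-laden example, not code from the project above) builds a NetParameter with one TRAIN-only layer and one unconditional layer, then filters it for the TEST phase. It calls upstream Caffe's static Net<Dtype>::FilterNet; the fork above exposes the same logic as the filterNet member shown here.

// A net with one TRAIN-only data layer and one unconditional layer.
caffe::NetParameter param;
param.mutable_state()->set_phase(caffe::TEST);

caffe::LayerParameter* data = param.add_layer();
data->set_name("train_data");
data->set_type("Data");
data->add_include()->set_phase(caffe::TRAIN);    // include rule: TRAIN only

caffe::LayerParameter* ip = param.add_layer();
ip->set_name("ip1");
ip->set_type("InnerProduct");                    // no rules: always included

caffe::NetParameter filtered;
caffe::Net<float>::FilterNet(param, &filtered);  // upstream spelling
// filtered now contains only "ip1"; "train_data" is dropped for TEST.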
Example 3: prepare_model
template <typename Dtype>
SolverParameter ModelServer<Dtype>::prepare_model() {
  NetParameter net;
  solver->net()->ToProto(&net);
  for (int i = 0; i < net.layer_size(); ++i) {
    LayerParameter& layer = *net.mutable_layer(i);
    layer.clear_blobs();
    if ((layer.type().find("Data") != std::string::npos)
        && (layer.has_remote_data_param())) {
      layer.set_type("RemoteData");
      for (int j = 0; j < layer.top_size(); ++j) {
        *layer.mutable_remote_data_param()->add_shape()
            = blob_shape_by_name(layer.top(j));
      }
    }
  }
  SolverParameter ret = solver->param();
  ret.clear_net();
  ret.clear_net_param();
  ret.clear_test_net();
  ret.clear_test_net_param();
  ret.clear_train_net();
  *ret.mutable_train_net_param() = net;
  return ret;
}
Example 4: to_proto
void InputProducer::to_proto(NetParameter& net) const {
  *net.add_input() = name_;
  net.add_input_dim(dims_[0]);
  net.add_input_dim(dims_[1]);
  net.add_input_dim(dims_[2]);
  net.add_input_dim(dims_[3]);
}
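The sketch below shows what those deprecated input/input_dim fields amount to. It assumes a hypothetical producer named "data" with dims {1, 3, 224, 224}, writes the same fields by hand, and prints the resulting message.

#include <iostream>
#include "caffe/proto/caffe.pb.h"

int main() {
  caffe::NetParameter net;
  net.add_input("data");                         // what *net.add_input() = name_ produces
  for (int d : {1, 3, 224, 224}) net.add_input_dim(d);
  std::cout << net.DebugString();
  // prints (prototxt form):
  //   input: "data"
  //   input_dim: 1
  //   input_dim: 3
  //   input_dim: 224
  //   input_dim: 224
  return 0;
}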
Example 5: root_net
template <typename Dtype>
Net<Dtype>::Net(const string& param_file, Phase phase, const Net* root_net)
    : root_net(root_net) {  // the default argument (root_net = NULL) belongs on the declaration
  NetParameter param;
  readNetParamsFromTextFileOrDie(param_file, &param);
  param.mutable_state()->set_phase(phase);
  Init(param);
}
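Typical call sites then build the net directly from a prototxt; a minimal sketch (with a placeholder path) follows, after which trained weights can be loaded with the copy routines shown in Examples 1 and 15.

caffe::Net<float> net("deploy.prototxt", caffe::TEST);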
Example 6: CHECK
void NGNet::Init() {
  input_layer_top_idx_ = 0;
  output_layer_top_idx_ = 0;
  /* Load the network. */
  net_.reset(new Net<float>(model_file_, TEST));
  NetParameter param;
  CHECK(ReadProtoFromTextFile(model_file_, &param))
      << "Failed to parse NetParameter file: " << model_file_;
  for (int ip = 0; ip < param.layer_size(); ip++) {
    // use mutable_layer so the change is applied to param itself
    // (modifying a plain copy of the LayerParameter would be discarded)
    LayerParameter* layer_param = param.mutable_layer(ip);
    if (layer_param->has_inner_product_param()) {
      InnerProductParameter* inner_product_param =
          layer_param->mutable_inner_product_param();
      int num_output = inner_product_param->num_output();
      if (num_output > 0) {
        inner_product_param->set_num_output(num_output * 2);
      }
    }
  }
  // param.mutable_state()->set_phase(phase);
  // note: new_net is built from the widened param but is never assigned to net_
  Net<float>* new_net = new Net<float>(param);
  net_->CopyTrainedLayersFrom(trained_file_);
  int input_layer_idx = -1;
  for (size_t layer_id = 0; layer_id < net_->layer_names().size(); ++layer_id) {
    if (net_->layer_names()[layer_id] == input_layer_name_) {
      input_layer_idx = layer_id;
      break;
    }
  }
  if (input_layer_idx == -1) {
    LOG(FATAL) << "Unknown layer name " << input_layer_name_;
  }
  input_layer_idx_ = input_layer_idx;
  input_layer_top_idx_ = 0;
  Blob<float>* input_layer = net_->top_vecs()[input_layer_idx_][input_layer_top_idx_];
  input_layer_dim_ = input_layer->shape(1);
  int output_layer_idx = -1;
  for (size_t layer_id = 0; layer_id < net_->layer_names().size(); ++layer_id) {
    if (net_->layer_names()[layer_id] == output_layer_name_) {
      output_layer_idx = layer_id;
      break;
    }
  }
  if (output_layer_idx == -1) {
    LOG(FATAL) << "Unknown layer name " << output_layer_name_;
  }
  output_layer_idx_ = output_layer_idx;
}
Example 7: NetNeedsV0ToV1Upgrade
bool NetNeedsV0ToV1Upgrade(const NetParameter& net_param) {
  for (int i = 0; i < net_param.layers_size(); ++i) {
    if (net_param.layers(i).has_layer()) {
      return true;
    }
  }
  return false;
}
Example 8: UpgradeV0PaddingLayers
void UpgradeV0PaddingLayers(const NetParameter& param,
                            NetParameter* param_upgraded_pad) {
  // Copy everything other than the layers from the original param.
  param_upgraded_pad->Clear();
  param_upgraded_pad->CopyFrom(param);
  param_upgraded_pad->clear_layers();
  // Figure out which layer each bottom blob comes from.
  map<string, int> blob_name_to_last_top_idx;
  for (int i = 0; i < param.input_size(); ++i) {
    const string& blob_name = param.input(i);
    blob_name_to_last_top_idx[blob_name] = -1;
  }
  for (int i = 0; i < param.layers_size(); ++i) {
    const V1LayerParameter& layer_connection = param.layers(i);
    const V0LayerParameter& layer_param = layer_connection.layer();
    // Add the layer to the new net, unless it's a padding layer.
    if (layer_param.type() != "padding") {
      param_upgraded_pad->add_layers()->CopyFrom(layer_connection);
    }
    for (int j = 0; j < layer_connection.bottom_size(); ++j) {
      const string& blob_name = layer_connection.bottom(j);
      if (blob_name_to_last_top_idx.find(blob_name) ==
          blob_name_to_last_top_idx.end()) {
        LOG(FATAL) << "Unknown blob input " << blob_name << " to layer " << j;
      }
      const int top_idx = blob_name_to_last_top_idx[blob_name];
      if (top_idx == -1) {
        continue;
      }
      const V1LayerParameter& source_layer = param.layers(top_idx);
      if (source_layer.layer().type() == "padding") {
        // This layer has a padding layer as input -- check that it is a conv
        // layer or a pooling layer and takes only one input. Also check that
        // the padding layer input has only one input and one output. Other
        // cases have undefined behavior in Caffe.
        CHECK((layer_param.type() == "conv") || (layer_param.type() == "pool"))
            << "Padding layer input to "
               "non-convolutional / non-pooling layer type "
            << layer_param.type();
        CHECK_EQ(layer_connection.bottom_size(), 1)
            << "Conv Layer takes a single blob as input.";
        CHECK_EQ(source_layer.bottom_size(), 1)
            << "Padding Layer takes a single blob as input.";
        CHECK_EQ(source_layer.top_size(), 1)
            << "Padding Layer produces a single blob as output.";
        int layer_index = param_upgraded_pad->layers_size() - 1;
        param_upgraded_pad->mutable_layers(layer_index)->mutable_layer()
            ->set_pad(source_layer.layer().pad());
        param_upgraded_pad->mutable_layers(layer_index)
            ->set_bottom(j, source_layer.bottom(0));
      }
    }
    for (int j = 0; j < layer_connection.top_size(); ++j) {
      const string& blob_name = layer_connection.top(j);
      blob_name_to_last_top_idx[blob_name] = i;
    }
  }
}
Example 9: NetNeedsDataUpgrade
bool NetNeedsDataUpgrade(const NetParameter& net_param) {
  for (int i = 0; i < net_param.layers_size(); ++i) {
    if (net_param.layers(i).type() == LayerParameter_LayerType_DATA) {
      DataParameter layer_param = net_param.layers(i).data_param();
      if (layer_param.has_scale()) { return true; }
      if (layer_param.has_mean_file()) { return true; }
      if (layer_param.has_crop_size()) { return true; }
      if (layer_param.has_mirror()) { return true; }
    }
    if (net_param.layers(i).type() == LayerParameter_LayerType_IMAGE_DATA) {
      ImageDataParameter layer_param = net_param.layers(i).image_data_param();
      if (layer_param.has_scale()) { return true; }
      if (layer_param.has_mean_file()) { return true; }
      if (layer_param.has_crop_size()) { return true; }
      if (layer_param.has_mirror()) { return true; }
    }
    if (net_param.layers(i).type() == LayerParameter_LayerType_WINDOW_DATA) {
      WindowDataParameter layer_param = net_param.layers(i).window_data_param();
      if (layer_param.has_scale()) { return true; }
      if (layer_param.has_mean_file()) { return true; }
      if (layer_param.has_crop_size()) { return true; }
      if (layer_param.has_mirror()) { return true; }
    }
    if (net_param.layers(i).type() == LayerParameter_LayerType_QDATA) {
      QDataParameter layer_param = net_param.layers(i).qdata_param();
      if (layer_param.has_scale()) { return true; }
      if (layer_param.has_mean_file()) { return true; }
      if (layer_param.has_crop_size()) { return true; }
      if (layer_param.has_mirror()) { return true; }
    }
  }
  return false;
}
Example 10: NetNeedsBatchNormUpgrade
bool NetNeedsBatchNormUpgrade(const NetParameter& net_param) {
  for (int i = 0; i < net_param.layer_size(); ++i) {
    // Check if BatchNorm layers declare three parameters, as required by
    // the previous BatchNorm layer definition.
    if (net_param.layer(i).type() == "BatchNorm"
        && net_param.layer(i).param_size() == 3) {
      return true;
    }
  }
  return false;
}
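The NetNeeds* checks above (Examples 7, 9 and 10) are typically chained with their matching upgrade routines. The sketch below is modeled on upstream Caffe's UpgradeNetAsNeeded and uses a placeholder file name, so treat the exact ordering and spellings as an assumption rather than the code of any project quoted here.

#include "caffe/proto/caffe.pb.h"
#include "caffe/util/io.hpp"
#include "caffe/util/upgrade_proto.hpp"

using namespace caffe;

void LoadAndUpgrade(NetParameter* param) {
  ReadProtoFromTextFileOrDie("net.prototxt", param);
  if (NetNeedsV0ToV1Upgrade(*param)) {      // Example 7
    NetParameter original = *param;
    UpgradeV0Net(original, param);          // also resolves padding layers (Example 8)
  }
  if (NetNeedsDataUpgrade(*param)) {        // Example 9
    UpgradeNetDataTransformation(param);    // move scale/mean_file/crop_size/mirror into transform_param
  }
  if (NetNeedsV1ToV2Upgrade(*param)) {
    NetParameter original = *param;
    UpgradeV1Net(original, param);          // Examples 12 and 14
  }
  if (NetNeedsBatchNormUpgrade(*param)) {   // Example 10
    UpgradeNetBatchNorm(param);             // drop the legacy three-param BatchNorm spec
  }
}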
Example 11: Init
template <typename Dtype>
void Net<Dtype>::ReInit(NetParameter& param, const int batch_size) {
  layers_.clear();
  layer_names_.clear();
  layer_need_backward_.clear();
  // blobs_ stores the blobs that hold intermediate results between the layers
  blobs_.clear();
  blob_names_.clear();
  blob_need_backward_.clear();
  // bottom_vecs stores the vectors containing the input for each layer.
  // They don't actually host the blobs (blobs_ does), so we simply store pointers.
  bottom_vecs_.clear();
  bottom_id_vecs_.clear();
  // top_vecs stores the vectors containing the output for each layer
  top_vecs_.clear();
  top_id_vecs_.clear();
  // blob indices for the input and the output of the net
  net_input_blob_indices_.clear();
  net_input_blobs_.clear();
  net_output_blobs_.clear();
  // the parameters in the network
  params_.clear();
  // the learning rate multipliers
  params_lr_.clear();
  // the weight decay multipliers
  params_weight_decay_.clear();
  param.mutable_layers(0)->mutable_layer()->set_batchsize(batch_size);
  Init(param);
}
Example 12: UpgradeV1Net
bool UpgradeV1Net(const NetParameter& v1_net_param, NetParameter* net_param) {
  bool is_fully_compatible = true;
  if (v1_net_param.layer_size() > 0) {
    LOG(ERROR) << "Input NetParameter to be upgraded already specifies 'layer' "
               << "fields; these will be ignored for the upgrade.";
    is_fully_compatible = false;
  }
  net_param->CopyFrom(v1_net_param);
  net_param->clear_layers();
  net_param->clear_layer();
  for (int i = 0; i < v1_net_param.layers_size(); ++i) {
    if (!UpgradeV1LayerParameter(v1_net_param.layers(i),
                                 net_param->add_layer())) {
      LOG(ERROR) << "Upgrade of input layer " << i << " failed.";
      is_fully_compatible = false;
    }
  }
  return is_fully_compatible;
}
Example 13: NetParameterToPrettyPrint
void NetParameterToPrettyPrint(const NetParameter& param,
                               NetParameterPrettyPrint* pretty_param) {
  pretty_param->Clear();
  if (param.has_name()) {
    pretty_param->set_name(param.name());
  }
  if (param.has_force_backward()) {
    pretty_param->set_force_backward(param.force_backward());
  }
  for (int i = 0; i < param.input_size(); ++i) {
    pretty_param->add_input(param.input(i));
  }
  for (int i = 0; i < param.input_dim_size(); ++i) {
    pretty_param->add_input_dim(param.input_dim(i));
  }
  for (int i = 0; i < param.layers_size(); ++i) {
    pretty_param->add_layers()->CopyFrom(param.layers(i));
  }
}
Example 14: UpgradeV1Net
bool UpgradeV1Net(const NetParameter& v1_net_param, NetParameter* net_param) {
  if (v1_net_param.layer_size() > 0) {
    LOG(FATAL) << "Refusing to upgrade inconsistent NetParameter input; "
               << "the definition includes both 'layer' and 'layers' fields. "
               << "The current format defines 'layer' fields with string type like "
               << "layer { type: 'Layer' ... } and not layers { type: LAYER ... }. "
               << "Manually switch the definition to 'layer' format to continue.";
  }
  bool is_fully_compatible = true;
  net_param->CopyFrom(v1_net_param);
  net_param->clear_layers();
  net_param->clear_layer();
  for (int i = 0; i < v1_net_param.layers_size(); ++i) {
    if (!UpgradeV1LayerParameter(v1_net_param.layers(i),
                                 net_param->add_layer())) {
      LOG(ERROR) << "Upgrade of input layer " << i << " failed.";
      is_fully_compatible = false;
    }
  }
  return is_fully_compatible;
}
Example 15: copyTrainedLayerFrom
template <typename Dtype>
void Net<Dtype>::copyTrainedLayerFrom(const NetParameter& param) {
  int num_layers = param.layer_size();
  for (int i = 0; i < num_layers; i++) {
    const LayerParameter& source_layer = param.layer(i);
    const string& source_layer_name = source_layer.name();
    int target_layer_id = 0;
    while (target_layer_id != layer_names.size() &&
           layer_names[target_layer_id] != source_layer_name) {
      target_layer_id++;
    }
    if (target_layer_id == layer_names.size()) continue;
    const vector<boost::shared_ptr<Blob<Dtype> > >& target_blobs =
        layers[target_layer_id]->getBlobs();
    for (int j = 0; j < target_blobs.size(); j++) {
      Blob<Dtype> source_blob;
      source_blob.FromProto(source_layer.blobs(j));
      Blob<Dtype>* target_blob = target_blobs[j].get();
      CHECK(source_blob.shape() == target_blob->shape())
          << "Incompatible shape when sharing trained params.";
      target_blob->FromProto(source_layer.blobs(j), false);
    }
  }
}