

C++ Datum::clear_float_data Method Code Examples

This article collects typical usage examples of the C++ method Datum::clear_float_data. If you are wondering what Datum::clear_float_data does, how to call it, or what real-world uses look like, the hand-picked code samples below should help. You can also explore further usage examples of the Datum class that this method belongs to.


Six code examples of the Datum::clear_float_data method are shown below, sorted by popularity by default.
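Before the full examples, here is a minimal illustrative sketch, not taken from any of the projects below, of the typical pattern: clear both data and float_data before refilling a reused Datum with a float feature vector. It assumes Caffe's generated caffe.pb.h header; the function name and arguments are hypothetical.

#include <caffe/proto/caffe.pb.h>

// Minimal sketch: reset a reused Datum, then store a float feature vector in it.
void FillFeatureDatum(const float* features, int dim, int label, caffe::Datum* datum) {
  datum->set_channels(dim);
  datum->set_height(1);
  datum->set_width(1);
  datum->clear_data();        // drop any byte-encoded pixel data from a previous use
  datum->clear_float_data();  // drop stale float features; add_float_data would append otherwise
  for (int d = 0; d < dim; ++d) {
    datum->add_float_data(features[d]);
  }
  datum->set_label(label);
}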

Example 1: assert

bool MostCV::LevelDBReader::GetNextEntry(string &key, vector<double> &retVec, int &label) {
  if (!database_iter_->Valid())
    return false;

  Datum datum;
  datum.clear_float_data();
  datum.clear_data();
  datum.ParseFromString(database_iter_->value().ToString());

  key = database_iter_->key().ToString();
  label = datum.label();

  int expected_data_size = std::max<int>(datum.data().size(), datum.float_data_size());
  const int datum_volume_size = datum.channels() * datum.height() * datum.width();
  if (expected_data_size != datum_volume_size) {
    cout << "Something wrong in saved data.";
    assert(false);
  }

  retVec.resize(datum_volume_size);

  const string& data = datum.data();
  if (data.size() != 0) {
    // Data stored in string, e.g. just pixel values of 196608 = 256 * 256 * 3
    for (int i = 0; i < datum_volume_size; ++i)
      retVec[i] = data[i];
  } else {
    // Data stored in real feature vector such as 4096 from feature extraction
    for (int i = 0; i < datum_volume_size; ++i)
      retVec[i] = datum.float_data(i);
  }

  database_iter_->Next();
  ++record_idx_;

  return true;
}
Developer: mostafa-saad, Project: deep-activity-rec, Lines of code: 37, Source: leveldb-reader.cpp

Example 2: feature_extraction_pipeline


//......... part of the code is omitted here .........
     type: DROPOUT
     dropout_param {
       dropout_ratio: 0.5
     }
     bottom: "fc7"
     top: "fc7"
   }
   */
  std::string feature_extraction_proto(argv[++arg_pos]);
  shared_ptr<Net<Dtype> > feature_extraction_net(
      new Net<Dtype>(feature_extraction_proto, caffe::TEST));
  feature_extraction_net->CopyTrainedLayersFrom(pretrained_binary_proto);

  std::string extract_feature_blob_names(argv[++arg_pos]);
  std::vector<std::string> blob_names;
  boost::split(blob_names, extract_feature_blob_names, boost::is_any_of(","));

  std::string save_feature_dataset_names(argv[++arg_pos]);
  std::vector<std::string> dataset_names;
  boost::split(dataset_names, save_feature_dataset_names,
               boost::is_any_of(","));
  CHECK_EQ(blob_names.size(), dataset_names.size()) <<
      " the number of blob names and dataset names must be equal";
  size_t num_features = blob_names.size();

  for (size_t i = 0; i < num_features; i++) {
    CHECK(feature_extraction_net->has_blob(blob_names[i]))
        << "Unknown feature blob name " << blob_names[i]
        << " in the network " << feature_extraction_proto;
  }

  int num_mini_batches = atoi(argv[++arg_pos]);

  std::vector<shared_ptr<db::DB> > feature_dbs;
  std::vector<shared_ptr<db::Transaction> > txns;
  const char* db_type = argv[++arg_pos];
  for (size_t i = 0; i < num_features; ++i) {
    LOG(INFO)<< "Opening dataset " << dataset_names[i];
    shared_ptr<db::DB> db(db::GetDB(db_type));
    db->Open(dataset_names.at(i), db::NEW);
    feature_dbs.push_back(db);
    shared_ptr<db::Transaction> txn(db->NewTransaction());
    txns.push_back(txn);
  }

  LOG(ERROR)<< "Extracting Features";

  Datum datum;
  const int kMaxKeyStrLength = 100;
  char key_str[kMaxKeyStrLength];
  std::vector<Blob<float>*> input_vec;
  std::vector<int> image_indices(num_features, 0);
  for (int batch_index = 0; batch_index < num_mini_batches; ++batch_index) {
    feature_extraction_net->Forward(input_vec);
    for (int i = 0; i < num_features; ++i) {
      const shared_ptr<Blob<Dtype> > feature_blob = feature_extraction_net
          ->blob_by_name(blob_names[i]);
      int batch_size = feature_blob->num();
      int dim_features = feature_blob->count() / batch_size;
      const Dtype* feature_blob_data;
      for (int n = 0; n < batch_size; ++n) {
        datum.set_height(feature_blob->height());
        datum.set_width(feature_blob->width());
        datum.set_channels(feature_blob->channels());
        datum.clear_data();
        datum.clear_float_data();
        feature_blob_data = feature_blob->cpu_data() +
            feature_blob->offset(n);
        for (int d = 0; d < dim_features; ++d) {
          datum.add_float_data(feature_blob_data[d]);
        }
        // int length = snprintf(key_str, kMaxKeyStrLength, "%08d",
        int length = snprintf(key_str, kMaxKeyStrLength, "%010d",
            image_indices[i]);
        string out;
        CHECK(datum.SerializeToString(&out));
        txns.at(i)->Put(std::string(key_str, length), out);
        ++image_indices[i];
        if (image_indices[i] % 1000 == 0) {
          txns.at(i)->Commit();
          txns.at(i).reset(feature_dbs.at(i)->NewTransaction());
          LOG(ERROR)<< "Extracted features of " << image_indices[i] <<
              " query images for feature blob " << blob_names[i];
        }
      }  // for (int n = 0; n < batch_size; ++n)
    }  // for (int i = 0; i < num_features; ++i)
  }  // for (int batch_index = 0; batch_index < num_mini_batches; ++batch_index)
  // write the last batch
  for (int i = 0; i < num_features; ++i) {
    if (image_indices[i] % 1000 != 0) {
      txns.at(i)->Commit();
    }
    LOG(ERROR)<< "Extracted features of " << image_indices[i] <<
        " query images for feature blob " << blob_names[i];
    feature_dbs.at(i)->Close();
  }

  LOG(ERROR)<< "Successfully extracted the features!";
  return 0;
}
Developer: koufeifei, Project: caffe, Lines of code: 101, Source: extract_features.cpp

Example 3: LOG

void FeatureExtractor<Dtype>::ExtractFeatures(const NetParameter& net_param) {
  util::Context& context = util::Context::get_instance();
  int client_id = context.get_int32("client_id");
  string weights_path = context.get_string("weights");
  string extract_feature_blob_names 
      = context.get_string("extract_feature_blob_names");

  shared_ptr<Net<Dtype> > feature_extraction_net(
      new Net<Dtype>(net_param, thread_id_, 0));
  map<string, vector<int> >::const_iterator it 
      = layer_blobs_global_idx_ptr_->begin();
  for (; it != layer_blobs_global_idx_ptr_->end(); ++it) {
    const shared_ptr<Layer<Dtype> > layer 
        = feature_extraction_net->layer_by_name(it->first);
    layer->SetUpBlobGlobalTable(it->second, false, false);
  }
  if (client_id == 0 && thread_id_ == 0) {
    LOG(INFO) << "Extracting features by " << weights_path;
    feature_extraction_net->CopyTrainedLayersFrom(weights_path, true);
  } 
  petuum::PSTableGroup::GlobalBarrier();

  feature_extraction_net->SyncWithPS(0);

  vector<string> blob_names;
  boost::split(blob_names, extract_feature_blob_names, boost::is_any_of(","));

  string save_feature_leveldb_names  
      = context.get_string("save_feature_leveldb_names");
  vector<string> leveldb_names;
  boost::split(leveldb_names, save_feature_leveldb_names,
               boost::is_any_of(","));
  CHECK_EQ(blob_names.size(), leveldb_names.size()) <<
      " the number of blob names and leveldb names must be equal";
  size_t num_features = blob_names.size();

  for (size_t i = 0; i < num_features; i++) {
    CHECK(feature_extraction_net->has_blob(blob_names[i]))
        << "Unknown feature blob name " << blob_names[i]
        << " in the network ";
  } 
  CHECK(feature_extraction_net->has_blob("label"))
      << "Fail to find label blob in the network ";

  // Differentiate leveldb names
  std::ostringstream suffix;
  suffix  << "_" << client_id << "_" << thread_id_;
  for (size_t i = 0; i < num_features; i++) {
      leveldb_names[i] = leveldb_names[i] + suffix.str();
  }
  
  leveldb::Options options;
  options.error_if_exists = true;
  options.create_if_missing = true;
  options.write_buffer_size = 268435456;
  vector<shared_ptr<leveldb::DB> > feature_dbs;
  for (size_t i = 0; i < num_features; ++i) {
    leveldb::DB* db;
    leveldb::Status status = leveldb::DB::Open(options,
                                               leveldb_names[i].c_str(),
                                               &db);
    CHECK(status.ok()) << "Failed to open leveldb " << leveldb_names[i];
    feature_dbs.push_back(shared_ptr<leveldb::DB>(db));
  }

  int num_mini_batches = context.get_int32("num_mini_batches");
 
  Datum datum;
  vector<shared_ptr<leveldb::WriteBatch> > feature_batches(
      num_features,
      shared_ptr<leveldb::WriteBatch>(new leveldb::WriteBatch()));
  const int kMaxKeyStrLength = 100;
  char key_str[kMaxKeyStrLength];
  vector<Blob<float>*> input_vec;
  vector<int> image_indices(num_features, 0);
  for (int batch_index = 0; batch_index < num_mini_batches; ++batch_index) {
    feature_extraction_net->Forward(input_vec);
    for (int i = 0; i < num_features; ++i) {
      const shared_ptr<Blob<Dtype> > feature_blob 
          = feature_extraction_net->blob_by_name(blob_names[i]);
      const shared_ptr<Blob<Dtype> > label_blob
          = feature_extraction_net->blob_by_name("label");
      const Dtype* labels = label_blob->cpu_data(); 
      int batch_size = feature_blob->num();
      int dim_features = feature_blob->count() / batch_size;
      Dtype* feature_blob_data;
      for (int n = 0; n < batch_size; ++n) {
        datum.set_height(dim_features);
        datum.set_width(1);
        datum.set_channels(1);
        datum.clear_data();
        datum.clear_float_data();
        feature_blob_data = feature_blob->mutable_cpu_data() +
            feature_blob->offset(n);
        for (int d = 0; d < dim_features; ++d) {
          datum.add_float_data(feature_blob_data[d]);
        }
        datum.set_label(static_cast<int>(labels[n]));

        string value;
//......... part of the code is omitted here .........
Developer: ZhitingHu, Project: NN, Lines of code: 101, Source: feature_extractor.cpp

Example 4: feature_extraction_pipeline


//......... part of the code is omitted here .........
     bottom: "fc7"
     top: "fc7"
   }
   */
  string feature_extraction_proto(argv[++arg_pos]);
  shared_ptr<Net<Dtype> > feature_extraction_net(
      new Net<Dtype>(feature_extraction_proto));
  feature_extraction_net->CopyTrainedLayersFrom(pretrained_binary_proto);

  string extract_feature_blob_names(argv[++arg_pos]);
  vector<string> blob_names;
  boost::split(blob_names, extract_feature_blob_names, boost::is_any_of(","));

  string save_feature_leveldb_names(argv[++arg_pos]);
  vector<string> leveldb_names;
  boost::split(leveldb_names, save_feature_leveldb_names,
               boost::is_any_of(","));
  CHECK_EQ(blob_names.size(), leveldb_names.size()) <<
      " the number of blob names and leveldb names must be equal";
  size_t num_features = blob_names.size();

  for (size_t i = 0; i < num_features; i++) {
    CHECK(feature_extraction_net->has_blob(blob_names[i]))
        << "Unknown feature blob name " << blob_names[i]
        << " in the network " << feature_extraction_proto;
  }

  leveldb::Options options;
  options.error_if_exists = true;
  options.create_if_missing = true;
  options.write_buffer_size = 268435456;
  vector<shared_ptr<leveldb::DB> > feature_dbs;
  for (size_t i = 0; i < num_features; ++i) {
    LOG(INFO)<< "Opening leveldb " << leveldb_names[i];
    leveldb::DB* db;
    leveldb::Status status = leveldb::DB::Open(options,
                                               leveldb_names[i].c_str(),
                                               &db);
    CHECK(status.ok()) << "Failed to open leveldb " << leveldb_names[i];
    feature_dbs.push_back(shared_ptr<leveldb::DB>(db));
  }

  int num_mini_batches = atoi(argv[++arg_pos]);

  LOG(ERROR)<< "Extracting Features";

  Datum datum;
  vector<shared_ptr<leveldb::WriteBatch> > feature_batches(
      num_features,
      shared_ptr<leveldb::WriteBatch>(new leveldb::WriteBatch()));
  const int kMaxKeyStrLength = 100;
  char key_str[kMaxKeyStrLength];
  vector<Blob<float>*> input_vec;
  vector<int> image_indices(num_features, 0);
  for (int batch_index = 0; batch_index < num_mini_batches; ++batch_index) {
    feature_extraction_net->Forward(input_vec);
    for (int i = 0; i < num_features; ++i) {
      const shared_ptr<Blob<Dtype> > feature_blob = feature_extraction_net
          ->blob_by_name(blob_names[i]);
      int batch_size = feature_blob->num();
      int dim_features = feature_blob->count() / batch_size;
      Dtype* feature_blob_data;
      for (int n = 0; n < batch_size; ++n) {
        datum.set_height(dim_features);
        datum.set_width(1);
        datum.set_channels(1);
        datum.clear_data();
        datum.clear_float_data();
        feature_blob_data = feature_blob->mutable_cpu_data() +
            feature_blob->offset(n);
        for (int d = 0; d < dim_features; ++d) {
          datum.add_float_data(feature_blob_data[d]);
        }
        string value;
        datum.SerializeToString(&value);
        snprintf(key_str, kMaxKeyStrLength, "%d", image_indices[i]);
        feature_batches[i]->Put(string(key_str), value);
        ++image_indices[i];
        if (image_indices[i] % 1000 == 0) {
          feature_dbs[i]->Write(leveldb::WriteOptions(),
                                feature_batches[i].get());
          LOG(ERROR)<< "Extracted features of " << image_indices[i] <<
              " query images for feature blob " << blob_names[i];
          feature_batches[i].reset(new leveldb::WriteBatch());
        }
      }  // for (int n = 0; n < batch_size; ++n)
    }  // for (int i = 0; i < num_features; ++i)
  }  // for (int batch_index = 0; batch_index < num_mini_batches; ++batch_index)
  // write the last batch
  for (int i = 0; i < num_features; ++i) {
    if (image_indices[i] % 1000 != 0) {
      feature_dbs[i]->Write(leveldb::WriteOptions(), feature_batches[i].get());
    }
    LOG(ERROR)<< "Extracted features of " << image_indices[i] <<
        " query images for feature blob " << blob_names[i];
  }

  LOG(ERROR)<< "Successfully extracted the features!";
  return 0;
}
Developer: Arroosh, Project: optical_flow_prediction, Lines of code: 101, Source: extract_features.cpp

Example 5: main

int main(int argc, char** argv)
{
  ::google::InitGoogleLogging(argv[0]);
  if (argc < 5)
  {
    printf("Convert a set of images to the leveldb format used\n"
           "as input for Caffe.\n"
           "Usage:\n"
           "    convert_imageset ROOTFOLDER/ LABELFILE CONTEXT DB_NAME"
           " RANDOM_SHUFFLE_DATA[0 or 1]\n");
    return 0;
  }

  std::vector<std::pair<string, vector<float> > > lines;
  {
    std::ifstream infile(argv[2]);

    vector<float> label(NUMLABEL, 0);
    while (infile.good())
    {
      string filename;
      infile >> filename;
      if (filename.empty())
        break;

      for (int i = 0; i < NUMLABEL; ++i)
        infile >> label[i];

      lines.push_back(std::make_pair(filename, label));
    }
    infile.close();
    if (argc == 6 && argv[5][0] == '1')
    {
      // randomly shuffle data
      LOG(INFO)<< "Shuffling data";
      std::random_shuffle(lines.begin(), lines.end());
    }
    LOG(INFO)<< "A total of " << lines.size() << " images.";
  }

  std::map<string, vector<float> > map_name_contxt;
  {
    vector<float> contxt(NUMCONTEXT, 0);
    std::ifstream input(argv[3], 0);
    while (input.good())
    {
      string filename;
      input >> filename;
      if (filename.empty())
        break;

      for (int i = 0; i < NUMCONTEXT; ++i)
        input >> contxt[i];

      map_name_contxt.insert(std::make_pair(filename, contxt));
    }
    input.close();
  }

  leveldb::DB* db;
  leveldb::Options options;
  options.error_if_exists = true;
  options.create_if_missing = true;
  options.write_buffer_size = 268435456;
  LOG(INFO)<< "Opening leveldb " << argv[4];
  leveldb::Status status = leveldb::DB::Open(options, argv[4], &db);
  CHECK(status.ok()) << "Failed to open leveldb " << argv[4];

  string root_folder(argv[1]);
  Datum datum;
  int count = 0;
  leveldb::WriteBatch* batch = new leveldb::WriteBatch();
  int data_size;
  bool data_size_initialized = false;
  for (int line_id = 0; line_id < lines.size(); ++line_id)
  {
    const std::pair<string, vector<float> >& name_label = lines[line_id];
    const string& name = name_label.first;
    const vector<float>& cur_labels = name_label.second;
    const vector<float>& cur_conxts = map_name_contxt.find(name)->second;

    // set image name
    datum.set_img_name(name);

    // set image data
    {
      const string img_full_name = root_folder + name;
      cv::Mat cv_img = cv::imread(img_full_name, CV_LOAD_IMAGE_COLOR);
      if (!cv_img.data)
      {
        LOG(ERROR)<< "Could not open or find file " << img_full_name;
        return false;
      }

      datum.set_channels(3);
      datum.set_height(cv_img.rows);
      datum.set_width(cv_img.cols);
      datum.clear_data();
      datum.clear_float_data();
      string* datum_string = datum.mutable_data();
//......... part of the code is omitted here .........
Developer: jieshen-sjtu, Project: caffe-for-DDNet, Lines of code: 101, Source: write_multilabel_context.cpp

Example 6: ReadImageRectToDatumArr

//jin: modified from caffe/util/io.hpp:ReadImageToDatum 2016-01-13 16:17:56 
//by default, only support gray scale images
bool ReadImageRectToDatumArr(const string& img_filename, const int resize_height, const int resize_width, 
							 const vector<CNN_RECT> &cand_per_img, Datum *datum_per_img) 
{
    //jin:test
    //cout << "img_filename = " << img_filename << endl;
    if(resize_height <= 0 || resize_width <= 0 || resize_height != resize_width)
    {
        cerr << "resize_height <= 0 or resize_width <= 0 or resize_height != resize_width" << endl;
        return false;
    }     
    
    int label = 0; // all negatives by default
    int cv_read_flag = CV_LOAD_IMAGE_GRAYSCALE;   //int cv_read_flag = (is_color ? CV_LOAD_IMAGE_COLOR : CV_LOAD_IMAGE_GRAYSCALE);

    cv::Mat cv_img_origin = cv::imread(img_filename, cv_read_flag);
    if (!cv_img_origin.data) 
	{
        LOG(ERROR) << "Could not open or find file " << img_filename;
        return false;
    }
    
	// test
	cv::namedWindow( "Image", 1 );  // create a display window
	cv::imshow( "Image", cv_img_origin );  // show the image
	cv::waitKey(0);  // wait for a key press
//	cv::destroyWindow( "Image" );  // destroy the window
	
    //jin: 2016-02-23 09:18:42 convert mat into unsigned char and call get_img_rect instead of using cv::Rect
    unsigned char *image = cv_img_origin.data; 
    int width = cv_img_origin.cols, height = cv_img_origin.rows;
    int num_channels = 1; //int num_channels = (is_color ? 3 : 1);
    int cand_size = cand_per_img.size();
    
    //jin: determine maximal size needed to malloc 
    int  max_width = 0;
    for(int i=0; i<cand_size; ++i)
    {
       if (max_width < cand_per_img[i].width)
       {    max_width = cand_per_img[i].width; }
    }
    unsigned char* img_rect = (unsigned char*) malloc(max_width*max_width*sizeof(unsigned char));
    if(NULL == img_rect){
        printf("Failed to malloc.\n");
        return false;
    }
    
    unsigned char* img_resize = (unsigned char*) malloc(resize_height*resize_width*sizeof(unsigned char));
    if(NULL == img_resize)
	{
        printf("Failed to malloc.\n");
        free(img_rect);
        return false;
    }
    
    for(int i=0; i<cand_size; ++i)
    {
        CNN_RECT rect = cand_per_img[i];
		//display the rectangle
		Point pt1 =  Point(rect.x, rect.y), pt2 = Point(rect.x+rect.w, rect.y+rect.h);
		rectangle(cv_img_origin, pt1, pt2, Scalar(1, 0, 0), 1, 8);
		
		
        get_img_rect(image, width, height, rect, img_rect);
        if(rect.width != resize_width && rect.height != resize_height )
        {   
            bilinear_image_resize(img_rect, rect.width, rect.height, img_resize, resize_width, resize_height);  
        }
        else
        {   
            int rect_size = rect.width*rect.height;
            for(int k=0; k<rect_size; ++k)
            {
                img_resize[k] = img_rect[k];
            }
        }
        
        Datum datum;
        datum.set_channels(num_channels);
        datum.set_height(resize_height);
        datum.set_width(resize_width);
        datum.set_label(label);
        datum.clear_data();
        datum.clear_float_data();
        string* datum_string = datum.mutable_data();
    
        for (int h = 0; h < resize_height; ++h) 
		{
            for (int w = 0; w < resize_width; ++w) 
			{
                datum_string->push_back(img_resize[h*resize_width+w]);
            }
        }
        
        datum_per_img[i] = datum;
    
    }
    
	cv::destroyWindow( "Image" );  // destroy the window
	
//......... part of the code is omitted here .........
Developer: jinmeng, Project: Face-Detection, Lines of code: 101, Source: test_neg2lmdb.cpp
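Examples 5 and 6 show the complementary pattern: float_data is cleared and the raw pixel bytes go into the string-valued data field instead. Below is a minimal illustrative sketch of that byte-packing path; the function name, buffer, and dimensions are hypothetical, and only the Datum calls already seen in the examples above are used.

#include <string>
#include <caffe/proto/caffe.pb.h>

// Minimal sketch: pack a grayscale pixel buffer (height*width bytes, row-major)
// into the Datum's byte-valued data field.
void FillPixelDatum(const unsigned char* pixels, int height, int width, int label,
                    caffe::Datum* datum) {
  datum->set_channels(1);
  datum->set_height(height);
  datum->set_width(width);
  datum->set_label(label);
  datum->clear_data();
  datum->clear_float_data();  // ensure no float features linger from a previous use
  std::string* datum_string = datum->mutable_data();
  for (int h = 0; h < height; ++h) {
    for (int w = 0; w < width; ++w) {
      datum_string->push_back(pixels[h * width + w]);
    }
  }
}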


Note: the Datum::clear_float_data examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by their respective authors, and the source code copyright belongs to those authors; for distribution and use, please refer to the license of the corresponding project. Do not reproduce without permission.