本文整理汇总了C++中Datum类的典型用法代码示例。如果您正苦于以下问题:C++ Datum类的具体用法?C++ Datum怎么用?C++ Datum使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了Datum类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: TEST_F
TEST_F(IOTest, TestCVMatToDatumReference) {
  // Round-trip a JPEG through the OpenCV path (CVMatToDatum) and through
  // the reference reader (ReadImageToDatumReference); the resulting Datums
  // must agree in shape and in every pixel byte.
  string filename = EXAMPLES_SOURCE_DIR "images/cat.jpg";
  cv::Mat cv_img = ReadImageToCVMat(filename);
  Datum datum;
  CVMatToDatum(cv_img, &datum);
  Datum datum_ref;
  ReadImageToDatumReference(filename, 0, 0, 0, true, &datum_ref);
  EXPECT_EQ(datum.channels(), datum_ref.channels());
  EXPECT_EQ(datum.height(), datum_ref.height());
  EXPECT_EQ(datum.width(), datum_ref.width());
  // ASSERT (not EXPECT): if the sizes differ, the byte loop below would
  // index past the end of the shorter string.
  ASSERT_EQ(datum.data().size(), datum_ref.data().size());
  const string& data = datum.data();
  const string& data_ref = datum_ref.data();
  // size_t matches string::size() (avoids signed/unsigned comparison), the
  // size is not re-evaluated each iteration, and EXPECT_EQ reports the
  // differing byte values on failure where EXPECT_TRUE could not.
  for (size_t i = 0; i < data.size(); ++i) {
    EXPECT_EQ(data[i], data_ref[i]);
  }
}
示例2: TEST_F
TEST_F(IOTest, TestDecodeDatumNativeGray) {
  // Read the grayscale JPEG as raw encoded bytes, decode it in place, and
  // verify the result matches the reference (non-color) image reader.
  string filename = EXAMPLES_SOURCE_DIR "images/cat_gray.jpg";
  Datum datum;
  EXPECT_TRUE(ReadFileToDatum(filename, &datum));
  // First decode succeeds; a second call finds nothing encoded and must
  // report false.
  EXPECT_TRUE(DecodeDatumNative(&datum));
  EXPECT_FALSE(DecodeDatumNative(&datum));
  Datum datum_ref;
  ReadImageToDatumReference(filename, 0, 0, 0, false, &datum_ref);
  EXPECT_EQ(datum.channels(), datum_ref.channels());
  EXPECT_EQ(datum.height(), datum_ref.height());
  EXPECT_EQ(datum.width(), datum_ref.width());
  // ASSERT so the loop below cannot read out of bounds on size mismatch.
  ASSERT_EQ(datum.data().size(), datum_ref.data().size());
  const string& data = datum.data();
  const string& data_ref = datum_ref.data();
  // size_t index avoids the int-vs-size_t comparison warning; EXPECT_EQ
  // prints the mismatching byte values on failure.
  for (size_t i = 0; i < data.size(); ++i) {
    EXPECT_EQ(data[i], data_ref[i]);
  }
}
示例3: Scope
Scope& Scope::AppendScope(const std::string& key)
{
	// Returns a reference to a child Scope stored in this Scope's table under
	// `key`.  If a Table-typed entry already exists, a new Scope is appended
	// to it; otherwise a new table entry is created.  Throws if `key` is
	// empty, or the existing entry is not a (modifiable, internal) table.
	//
	// The allocation of the child Scope is deferred until all validation has
	// passed, so no error path needs to clean up a half-built object (the
	// old code allocated first and had to `delete` before every throw).
	if (key.empty())
		throw std::exception("Key cannot be an empty string.");
	// Try to find an existing entry in this scope first.
	Datum* d = Find(key);
	if (d != nullptr)
	{
		if (d->GetType() != Datum::Table && d->GetType() != Datum::Unknown)
			throw std::exception("Found entry is not a table!");
		if (d->IsExternal())
			throw std::exception("Table entry is external. Cannot modify data owned by something else.");
		// Validation passed: append a new scope into this table datum.
		Scope* newScope = new Scope();
		newScope->mParent = this;
		std::uint32_t datumSize = d->Size();
		d->Set(newScope, datumSize);
		return *d->GetTable(datumSize);
	}
	// No entry found: create a new datum holding the new scope.
	Scope* newScope = new Scope();
	newScope->mParent = this;
	Datum scopeDatum;
	scopeDatum = newScope;
	std::pair<std::string, Datum> pair(key, scopeDatum);
	HashMap<std::string, Datum>::Iterator iterator = mTable.Insert(pair);
	mOrder.PushBack(&(*iterator));
	return *(iterator->second.GetTable());
}
示例4:
void DataTransformer<Dtype>::PostTransform(const int batch_item_id,
                                           const Datum& datum,
                                           const Dtype* mean,
                                           Dtype* transformed_data)
{
  // Post-transform hook -- NOT YET IMPLEMENTED.
  //
  // Per the original sketch this is intended to work on uint8 data only,
  // driven by these (planned) post-transform parameters:
  //   int    post_random_translation_size
  //   string post_ground_truth_pooling_param:
  //          [num_of_pooling] [pooling_h_1] [pooling_w_1] [pooling_h_2] ...
  //   int    post_channel_for_additional_translation
  //
  // if (param_.has_post_random_translation_size()) { ... }
  // if (param_.has_post_ground_truth_pooling_param()) { ... }
  // if (param_.has_post_channel_for_additional_translation()) { ... }
  //
  // The previous body only copied datum/param_ fields into unused locals,
  // generating -Wunused-variable warnings; those dead locals were removed.
  // Parameters stay intentionally unused until the hook is implemented.
  (void) batch_item_id;
  (void) datum;
  (void) mean;
  (void) transformed_data;
}
示例5: Datum
Datum BaseFrameListStimulus::getCurrentAnnounceDrawData() {
    // Start from the base dynamic-stimulus announcement and layer the
    // frame-list-specific fields on top of it.
    Datum result = StandardDynamicStimulus::getCurrentAnnounceDrawData();
    if (stimulusGroup) {
        result.addElement(STIMULUS_GROUP, stimulusGroup->getTag());
    }
    result.addElement(LOOP, loop->getValue());
    result.addElement("playing", Datum(isPlaying()));
    const int curFrame = getFrameNumber();
    result.addElement("current_frame", Datum((long)curFrame));
    // Default to 0 when no frame is active; otherwise embed the announce
    // data of the stimulus shown on the current frame.
    Datum frameStimData(0L);
    const bool frameInRange = (curFrame >= 0) && (curFrame < getNumFrames());
    if (frameInRange) {
        frameStimData = getStimulusForFrame(curFrame)->getCurrentAnnounceDrawData();
    }
    result.addElement("current_stimulus", frameStimData);
    return result;
}
示例6: b_chars
///////////////////
// String
///////////////////
// Lingo built-in chars(string, from, to): pops its three arguments off the
// Lingo stack and pushes back the substring of `string` from character
// position `from` through `to` (1-based, inclusive).
void Lingo::b_chars(int nargs) {
	// Stack order: the last-pushed argument (`to`) is popped first.
	Datum to = g_lingo->pop();
	Datum from = g_lingo->pop();
	Datum s = g_lingo->pop();
	if (s.type != STRING)
		error("Incorrect type for 'chars' function: %s", s.type2str());
	// Coerce the bounds to integers in place (toInt mutates the Datum).
	to.toInt();
	from.toInt();
	int len = strlen(s.u.s->c_str());
	// Clamp both bounds into [0, len]; Lingo positions are 1-based, hence
	// the -1 on `from`.  An empty/inverted range yields an empty string.
	int f = MAX(0, MIN(len, from.u.i - 1));
	int t = MAX(0, MIN(len, to.u.i));
	// Build the substring, then swap it into the popped Datum: the old
	// heap string is freed and replaced before `s` is pushed back.
	Common::String *res = new Common::String(&(s.u.s->c_str()[f]), &(s.u.s->c_str()[t]));
	delete s.u.s;
	s.u.s = res;
	s.type = STRING;
	g_lingo->push(s);
}
示例7: mwarning
// this routine checks that the request is a dictionary and that it contains a name that matches the calibrator
bool Calibrator::checkRequest(Datum dictionaryData) {
    // Validates an incoming calibration request: it must be a dictionary,
    // must carry a name field, and that field must be a string matching this
    // calibrator's unique name.  Returns true only if all checks pass.
    // (The unused local `Datum data;` from the old body was removed.)

    // 1) The request must be a dictionary.
    if (dictionaryData.getDataType() != M_DICTIONARY) {
        mwarning(M_SYSTEM_MESSAGE_DOMAIN,
            "Request sent to calibrator %s that was not expected dictionary type was ignored.", uniqueCalibratorName.c_str());
        return(false);
    }
    // 2) It must contain a name field so we can tell whether this
    //    calibrator should respond.
    if (!dictionaryData.hasKey(R_CALIBRATOR_NAME)) {
        mwarning(M_SYSTEM_MESSAGE_DOMAIN,
            "Request sent to calibrator %s that did not contain name field was ignored.", uniqueCalibratorName.c_str());
        return(false);
    }
    // 3) The name field must be a string.
    Datum nameData = dictionaryData.getElement(R_CALIBRATOR_NAME);
    if (nameData.getDataType() != M_STRING) {
        mwarning(M_SYSTEM_MESSAGE_DOMAIN,
            "Request sent to calibrator %s that did not contain a string in the name field was ignored.", uniqueCalibratorName.c_str());
        return(false);
    }
    // 4) The name must match this calibrator.  A mismatch is normal
    //    behavior (request meant for another calibrator) -- no warning.
    if (uniqueCalibratorName != nameData.getString()) {
        if (VERBOSE_EYE_CALIBRATORS) mprintf("Calibrator %s received a request, but name did not match.", uniqueCalibratorName.c_str());
        return(false);
    }
    if (VERBOSE_EYE_CALIBRATORS) mprintf("Calibrator %s successfully received a properly named request.", uniqueCalibratorName.c_str());
    return true;
}
示例8: getTheCast
// Returns a Datum describing `field` of cast member `id1` in the current
// score.  Only integer cast ids are supported; any error path returns a
// default (VOID/empty) Datum after logging a warning.
Datum Lingo::getTheCast(Datum &id1, int field) {
	Datum d;
	int id = 0;
	if (id1.type == INT) {
		id = id1.u.i;
	} else {
		warning("Unknown the cast id type: %s", id1.type2str());
		return d;
	}
	// Without an active score there is nothing to query.
	if (!_vm->_currentScore) {
		warning("The cast %d field %d setting over non-active score", id, field);
		return d;
	}
	Cast *cast;
	CastInfo *castInfo;
	if (!_vm->_currentScore->_casts.contains(id)) {
		// Unknown cast member: only "the loaded" is answerable here (0 =
		// not loaded); every other field yields the default Datum.
		if (field == kTheLoaded) {
			d.type = INT;
			d.u.i = 0;
		}
		return d;
	} else {
		warning("The cast %d found", id);
	}
	cast = _vm->_currentScore->_casts[id];
	castInfo = _vm->_currentScore->_castsInfo[id];
	d.type = INT;
	switch (field) {
	case kTheCastType:
		d.u.i = cast->type;
		break;
	case kTheFilename:
		// d.toString() switches the Datum into string mode; u.s is then
		// repointed at the CastInfo's string.  NOTE(review): the Datum does
		// not own that string -- presumably callers must not free it; and
		// whatever toString() allocated appears to be dropped -- confirm
		// against Datum's ownership rules.
		d.toString();
		d.u.s = &castInfo->fileName;
		break;
	case kTheName:
		d.toString();
		d.u.s = &castInfo->name;
		break;
	case kTheScriptText:
		d.toString();
		d.u.s = &castInfo->script;
		break;
	case kTheWidth:
		d.u.i = cast->initialRect.width();
		break;
	case kTheHeight:
		d.u.i = cast->initialRect.height();
		break;
	case kTheBackColor:
		{
			// Back/fore colors only exist on shape casts.
			if (cast->type != kCastShape) {
				warning("Field %d of cast %d not found", field, id);
				d.type = VOID;
				return d;
			}
			ShapeCast *shape = static_cast<ShapeCast *>(_vm->_currentScore->_casts[id]);
			d.u.i = shape->bgCol;
		}
		break;
	case kTheForeColor:
		{
			if (cast->type != kCastShape) {
				warning("Field %d of cast %d not found", field, id);
				d.type = VOID;
				return d;
			}
			ShapeCast *shape = static_cast<ShapeCast *>(_vm->_currentScore->_casts[id]);
			d.u.i = shape->fgCol;
		}
		break;
	case kTheLoaded:
		d.u.i = 1; // Not loaded handled above
		break;
	default:
		warning("Unprocessed getting field %d of cast %d", field, id);
		d.type = VOID;
		// TODO find out about String fields
	}
	return d;
}
示例9: getTheEntity
// Dispatcher for reading Lingo's "the <entity>" properties.  Sprite and
// cast queries are routed to their helpers; simple global properties are
// answered inline.  Unknown entities log a warning and yield a VOID Datum.
Datum Lingo::getTheEntity(int entity, Datum &id, int field) {
	Datum d;
	switch (entity) {
	case kTheSprite:
		d = getTheSprite(id, field);
		break;
	case kTheCast:
		d = getTheCast(id, field);
		break;
	case kThePerFrameHook:
		// Not implemented yet; returns the default (VOID) Datum.
		warning("STUB: getting the perframehook");
		break;
	case kTheFloatPrecision:
		d.type = INT;
		d.u.i = _floatPrecision;
		break;
	case kTheSqrt:
		// "the sqrt of <id>": coerce the operand to float in place first.
		id.toFloat();
		d.type = FLOAT;
		d.u.f = sqrt(id.u.f);
		break;
	case kTheKey:
		// Last key pressed, wrapped in a newly allocated string Datum.
		d.type = STRING;
		d.u.s = new Common::String(_vm->_key);
		break;
	case kTheKeyCode:
		d.type = INT;
		d.u.i = _vm->_keyCode;
		break;
	case kTheColorQD:
		// Always reports QuickDraw color support as available.
		d.type = INT;
		d.u.i = 1;
		break;
	case kTheColorDepth:
		// bpp. 1, 2, 4, 8, 32
		d.type = INT;
		d.u.i = _vm->_colorDepth;
		break;
	case kTheMachineType:
		// Director machine-type codes (trailing D2-D5 marks the Director
		// version that introduced each value):
		// 1 - Macintosh 512Ke        D2
		// 2 - Macintosh Plus         D2
		// 3 - Macintosh SE           D2
		// 4 - Macintosh II           D2
		// 5 - Macintosh IIx          D2
		// 6 - Macintosh IIcx         D2
		// 7 - Macintosh SE/30        D2
		// 8 - Macintosh Portable     D2
		// 9 - Macintosh IIci         D2
		// 11 - Macintosh IIfx        D3
		// 15 - Macintosh Classic     D3
		// 16 - Macintosh IIsi        D3
		// 17 - Macintosh LC          D3
		// 18 - Macintosh Quadra 900  D3
		// 19 - PowerBook 170         D3
		// 20 - Macintosh Quadra 700  D3
		// 21 - Classic II            D3
		// 22 - PowerBook 100         D3
		// 23 - PowerBook 140         D3
		// 24 - Macintosh Quadra 950  D4
		// 25 - Macintosh LCIII       D4
		// 27 - PowerBook Duo 210     D4
		// 28 - Macintosh Centris 650 D4
		// 30 - PowerBook Duo 230     D4
		// 31 - PowerBook 180         D4
		// 32 - PowerBook 160         D4
		// 33 - Macintosh Quadra 800  D4
		// 35 - Macintosh LC II       D4
		// 42 - Macintosh IIvi        D4
		// 45 - Power Macintosh 7100/70  D5
		// 46 - Macintosh IIvx        D4
		// 47 - Macintosh Color Classic  D4
		// 48 - PowerBook 165c        D4
		// 50 - Macintosh Centris 610 D4
		// 52 - PowerBook 145         D4
		// 53 - PowerComputing 8100/100  D5
		// 73 - Power Macintosh 6100/60  D5
		// 76 - Macintosh Quadra 840av   D4
		// 256 - IBM PC-type machine  D3
		d.type = INT;
		d.u.i = _vm->_machineType;
		break;
	default:
		warning("Unprocessed getting field %d of entity %d", field, entity);
		d.type = VOID;
	}
	return d;
}
示例10: Rand
// Transforms one Datum into `transformed_data` (CHW layout): optional crop
// to param_.crop_size() (random offset in TRAIN, center otherwise),
// optional random horizontal mirror, mean subtraction (mean file or
// per-channel mean values), and multiplication by param_.scale().
void DataTransformer<Dtype>::Transform(const Datum& datum,
                                       Dtype* transformed_data) {
  const string& data = datum.data();
  const int datum_channels = datum.channels();
  const int datum_height = datum.height();
  const int datum_width = datum.width();
  const int crop_size = param_.crop_size();
  const Dtype scale = param_.scale();
  // Mirroring is decided once per datum with a fair coin flip.
  const bool do_mirror = param_.mirror() && Rand(2);
  const bool has_mean_file = param_.has_mean_file();
  // Non-empty data() means the pixels are stored as uint8 bytes;
  // otherwise they live in the float_data repeated field.
  const bool has_uint8 = data.size() > 0;
  const bool has_mean_values = mean_values_.size() > 0;

  CHECK_GT(datum_channels, 0);
  CHECK_GE(datum_height, crop_size);
  CHECK_GE(datum_width, crop_size);

  Dtype* mean = NULL;
  if (has_mean_file) {
    // The mean blob must match the datum's full (pre-crop) dimensions.
    CHECK_EQ(datum_channels, data_mean_.channels());
    CHECK_EQ(datum_height, data_mean_.height());
    CHECK_EQ(datum_width, data_mean_.width());
    mean = data_mean_.mutable_cpu_data();
  }
  if (has_mean_values) {
    CHECK(mean_values_.size() == 1 || mean_values_.size() == datum_channels) <<
        "Specify either 1 mean_value or as many as channels: " << datum_channels;
    if (datum_channels > 1 && mean_values_.size() == 1) {
      // Replicate the mean_value for simplicity
      for (int c = 1; c < datum_channels; ++c) {
        mean_values_.push_back(mean_values_[0]);
      }
    }
  }

  // Output dimensions and crop offsets.  With no crop, the full datum is
  // copied (offsets stay 0).
  int height = datum_height;
  int width = datum_width;
  int h_off = 0;
  int w_off = 0;
  if (crop_size) {
    height = crop_size;
    width = crop_size;
    // We only do random crop when we do training.
    if (phase_ == TRAIN) {
      h_off = Rand(datum_height - crop_size + 1);
      w_off = Rand(datum_width - crop_size + 1);
    } else {
      h_off = (datum_height - crop_size) / 2;
      w_off = (datum_width - crop_size) / 2;
    }
  }

  Dtype datum_element;
  int top_index, data_index;
  for (int c = 0; c < datum_channels; ++c) {
    for (int h = 0; h < height; ++h) {
      for (int w = 0; w < width; ++w) {
        // Source index in the full-size datum; destination in the
        // (possibly cropped/mirrored) output.
        data_index = (c * datum_height + h_off + h) * datum_width + w_off + w;
        if (do_mirror) {
          // Mirror flips only the horizontal axis.
          top_index = (c * height + h) * width + (width - 1 - w);
        } else {
          top_index = (c * height + h) * width + w;
        }
        if (has_uint8) {
          // Bytes are stored as char; go through uint8_t so values >127
          // don't come out negative.
          datum_element =
              static_cast<Dtype>(static_cast<uint8_t>(data[data_index]));
        } else {
          datum_element = datum.float_data(data_index);
        }
        if (has_mean_file) {
          transformed_data[top_index] =
              (datum_element - mean[data_index]) * scale;
        } else {
          if (has_mean_values) {
            transformed_data[top_index] =
                (datum_element - mean_values_[c]) * scale;
          } else {
            transformed_data[top_index] = datum_element * scale;
          }
        }
      }
    }
  }
}
示例11: locker
// Builds the announce/draw dictionary for the moving-dots stimulus:
// base-class fields plus every stimulus parameter, the current derived
// values (radius, speed, coherence, lifetime, dot count, RNG seed), and --
// when announce_dots is set -- the raw dot-position buffer as binary data.
Datum MovingDots::getCurrentAnnounceDrawData() {
    // Hold the stimulus lock while reading mutable state.
    boost::mutex::scoped_lock locker(stim_lock);
    Datum announceData = StandardDynamicStimulus::getCurrentAnnounceDrawData();
    announceData.addElement(STIM_TYPE, "moving_dots");
    announceData.addElement(FIELD_RADIUS, currentFieldRadius);
    announceData.addElement(FIELD_CENTER_X, fieldCenterX->getValue().getFloat());
    announceData.addElement(FIELD_CENTER_Y, fieldCenterY->getValue().getFloat());
    announceData.addElement(DOT_DENSITY, dotDensity->getValue().getFloat());
    announceData.addElement(DOT_SIZE, dotSize->getValue().getFloat());
    announceData.addElement(STIM_COLOR_R, red->getValue().getFloat());
    announceData.addElement(STIM_COLOR_G, green->getValue().getFloat());
    announceData.addElement(STIM_COLOR_B, blue->getValue().getFloat());
    announceData.addElement(ALPHA_MULTIPLIER, alpha->getValue().getFloat());
    announceData.addElement(DIRECTION, direction->getValue().getFloat());
    announceData.addElement(SPEED, currentSpeed);
    announceData.addElement(COHERENCE, currentCoherence);
    announceData.addElement(LIFETIME, currentLifetime);
    announceData.addElement("num_dots", long(currentNumDots));
    announceData.addElement(RAND_SEED, randSeed);
    if (announceDots->getValue().getBool()) {
        // Pack the GLfloat position array into a raw byte-buffer Datum.
        Datum dotsData(reinterpret_cast<char *>(&(dotPositions[0])), dotPositions.size() * sizeof(GLfloat));
        announceData.addElement("dots", dotsData);
    }
    return announceData;
}
示例12: caffe_rng_rand
void DataLayer<Dtype>::DataLayerSetUp(const vector<Blob<Dtype>*>& bottom,
      const vector<Blob<Dtype>*>& top) {
  // Opens the backing DB, optionally skips a random prefix of records, and
  // uses the first datum to shape the top (data and, if enabled, label)
  // blobs and the prefetch/transform buffers.
  // Initialize DB
  db_.reset(db::GetDB(this->layer_param_.data_param().backend()));
  db_->Open(this->layer_param_.data_param().source(), db::READ);
  cursor_.reset(db_->NewCursor());
  // Check if we should randomly skip a few data points
  if (this->layer_param_.data_param().rand_skip()) {
    unsigned int skip = caffe_rng_rand() %
        this->layer_param_.data_param().rand_skip();
    LOG(INFO) << "Skipping first " << skip << " data points.";
    while (skip-- > 0) {
      cursor_->Next();
    }
  }
  // Read a data point, and use it to initialize the top blob.
  Datum datum;
  datum.ParseFromString(cursor_->value());
  bool force_color = this->layer_param_.data_param().force_encoded_color();
  if ((force_color && DecodeDatum(&datum, true)) ||
      DecodeDatumNative(&datum)) {
    LOG(INFO) << "Decoding Datum";
  }
  // image: with a crop the output is crop_size x crop_size, otherwise the
  // datum's native height x width.
  int crop_size = this->layer_param_.transform_param().crop_size();
  if (crop_size > 0) {
    top[0]->Reshape(this->layer_param_.data_param().batch_size(),
        datum.channels(), crop_size, crop_size);
    this->prefetch_data_.Reshape(this->layer_param_.data_param().batch_size(),
        datum.channels(), crop_size, crop_size);
    this->transformed_data_.Reshape(1, datum.channels(), crop_size, crop_size);
  } else {
    top[0]->Reshape(
        this->layer_param_.data_param().batch_size(), datum.channels(),
        datum.height(), datum.width());
    this->prefetch_data_.Reshape(this->layer_param_.data_param().batch_size(),
        datum.channels(), datum.height(), datum.width());
    this->transformed_data_.Reshape(1, datum.channels(),
        datum.height(), datum.width());
  }
  LOG(INFO) << "output data size: " << top[0]->num() << ","
      << top[0]->channels() << "," << top[0]->height() << ","
      << top[0]->width();
  // label: the leftover debug printf("Go here\n")/printf("%d\n", ...) calls
  // wrote to raw stdout; route the useful part through glog instead.
  LOG(INFO) << "label size: " << this->layer_param_.data_param().label_size();
  if (this->output_labels_) {
    top[1]->Reshape(this->layer_param_.data_param().batch_size(), this->layer_param_.data_param().label_size(), 1, 1);
    this->prefetch_label_.Reshape(this->layer_param_.data_param().batch_size(),
        this->layer_param_.data_param().label_size(), 1, 1);
  }
}
示例13: feature_extraction_net
void FeatureExtractor<Dtype>::ExtractFeatures(const NetParameter& net_param) {
util::Context& context = util::Context::get_instance();
int client_id = context.get_int32("client_id");
string weights_path = context.get_string("weights");
string extract_feature_blob_names
= context.get_string("extract_feature_blob_names");
shared_ptr<Net<Dtype> > feature_extraction_net(
new Net<Dtype>(net_param, thread_id_, 0));
map<string, vector<int> >::const_iterator it
= layer_blobs_global_idx_ptr_->begin();
for (; it != layer_blobs_global_idx_ptr_->end(); ++it) {
const shared_ptr<Layer<Dtype> > layer
= feature_extraction_net->layer_by_name(it->first);
layer->SetUpBlobGlobalTable(it->second, false, false);
}
if (client_id == 0 && thread_id_ == 0) {
LOG(INFO) << "Extracting features by " << weights_path;
feature_extraction_net->CopyTrainedLayersFrom(weights_path, true);
}
petuum::PSTableGroup::GlobalBarrier();
feature_extraction_net->SyncWithPS(0);
vector<string> blob_names;
boost::split(blob_names, extract_feature_blob_names, boost::is_any_of(","));
string save_feature_leveldb_names
= context.get_string("save_feature_leveldb_names");
vector<string> leveldb_names;
boost::split(leveldb_names, save_feature_leveldb_names,
boost::is_any_of(","));
CHECK_EQ(blob_names.size(), leveldb_names.size()) <<
" the number of blob names and leveldb names must be equal";
size_t num_features = blob_names.size();
for (size_t i = 0; i < num_features; i++) {
CHECK(feature_extraction_net->has_blob(blob_names[i]))
<< "Unknown feature blob name " << blob_names[i]
<< " in the network ";
}
CHECK(feature_extraction_net->has_blob("label"))
<< "Fail to find label blob in the network ";
// Differentiate leveldb names
std::ostringstream suffix;
suffix << "_" << client_id << "_" << thread_id_;
for (size_t i = 0; i < num_features; i++) {
leveldb_names[i] = leveldb_names[i] + suffix.str();
}
leveldb::Options options;
options.error_if_exists = true;
options.create_if_missing = true;
options.write_buffer_size = 268435456;
vector<shared_ptr<leveldb::DB> > feature_dbs;
for (size_t i = 0; i < num_features; ++i) {
leveldb::DB* db;
leveldb::Status status = leveldb::DB::Open(options,
leveldb_names[i].c_str(),
&db);
CHECK(status.ok()) << "Failed to open leveldb " << leveldb_names[i];
feature_dbs.push_back(shared_ptr<leveldb::DB>(db));
}
int num_mini_batches = context.get_int32("num_mini_batches");
Datum datum;
vector<shared_ptr<leveldb::WriteBatch> > feature_batches(
num_features,
shared_ptr<leveldb::WriteBatch>(new leveldb::WriteBatch()));
const int kMaxKeyStrLength = 100;
char key_str[kMaxKeyStrLength];
vector<Blob<float>*> input_vec;
vector<int> image_indices(num_features, 0);
for (int batch_index = 0; batch_index < num_mini_batches; ++batch_index) {
feature_extraction_net->Forward(input_vec);
for (int i = 0; i < num_features; ++i) {
const shared_ptr<Blob<Dtype> > feature_blob
= feature_extraction_net->blob_by_name(blob_names[i]);
const shared_ptr<Blob<Dtype> > label_blob
= feature_extraction_net->blob_by_name("label");
const Dtype* labels = label_blob->cpu_data();
int batch_size = feature_blob->num();
int dim_features = feature_blob->count() / batch_size;
Dtype* feature_blob_data;
for (int n = 0; n < batch_size; ++n) {
datum.set_height(dim_features);
datum.set_width(1);
datum.set_channels(1);
datum.clear_data();
datum.clear_float_data();
feature_blob_data = feature_blob->mutable_cpu_data() +
feature_blob->offset(n);
for (int d = 0; d < dim_features; ++d) {
datum.add_float_data(feature_blob_data[d]);
}
datum.set_label(static_cast<int>(labels[n]));
string value;
//.........这里部分代码省略.........
示例14: main
int main(int argc, char** argv) {
::google::InitGoogleLogging(argv[0]);
if (argc < 5) {
printf(
"Convert a set of images to the leveldb format used\n"
"as input for Caffe.\n"
"Usage:\n"
" convert_imageset ROOTFOLDER/ ANNOTATION DB_NAME"
" MODE[0-train, 1-val, 2-test] RANDOM_SHUFFLE_DATA[0 or 1, default 1] RESIZE_WIDTH[default 256] RESIZE_HEIGHT[default 256](0 indicates no resize)\n"
"The ImageNet dataset for the training demo is at\n"
" http://www.image-net.org/download-images\n");
return 0;
}
std::ifstream infile(argv[2]);
string root_folder(argv[1]);
string coarse_folder(argv[8]);
string local_folder(argv[9]);
string van_folder(argv[10]);
string edge_folder(argv[11]);
string layout_folder(argv[12]);
std::vector<Seg_Anno> annos;
std::set<string> fNames;
string filename;
float prop;
int cc = 0;
while (infile >> filename)
{
if (cc % 1000 == 0)
LOG(INFO)<<filename;
cc ++;
Seg_Anno seg_Anno;
seg_Anno.filename_ = filename;
int x,y;
infile >> x >> y;
for (int i = 0; i < LABEL_LEN; i++)
{
//infile >> prop;
if(!(prop < 1000000 && prop > -1000000))
{
printf("123");
}
seg_Anno.pos_.push_back(0);
}
//string labelFile = filename;
//labelFile[labelFile.size() - 1] = 't';
//labelFile[labelFile.size() - 2] = 'x';
//labelFile[labelFile.size() - 3] = 't';
//labelFile = coarse_folder + "/" + labelFile;
//FILE * tf = fopen(labelFile.c_str(), "rb");
//if(tf == NULL) continue;
//fclose(tf);
if (fNames.find(filename)== fNames.end())
{
fNames.insert(filename);
annos.push_back(seg_Anno);
}
//debug
//if(annos.size() == 10)
// break;
}
if (argc < 6 || argv[5][0] != '0') {
// randomly shuffle data
LOG(INFO)<< "Shuffling data";
std::random_shuffle(annos.begin(), annos.end());
}
LOG(INFO)<< "A total of " << annos.size() << " images.";
leveldb::DB* db;
leveldb::Options options;
options.error_if_exists = true;
options.create_if_missing = true;
options.write_buffer_size = 268435456;
LOG(INFO)<< "Opening leveldb " << argv[3];
leveldb::Status status = leveldb::DB::Open(options, argv[3], &db);
CHECK(status.ok()) << "Failed to open leveldb " << argv[3];
Datum datum;
int count = 0;
const int maxKeyLength = 256;
char key_cstr[maxKeyLength];
leveldb::WriteBatch* batch = new leveldb::WriteBatch();
int data_size;
bool data_size_initialized = false;
// resize to height * width
int width = RESIZE_LEN;
int height = RESIZE_LEN;
if (argc > 6) width = atoi(argv[6]);
if (argc > 7) height = atoi(argv[7]);
if (width == 0 || height == 0)
LOG(INFO) << "NO RESIZE SHOULD BE DONE";
else
LOG(INFO) << "RESIZE DIM: " << width << "*" << height;
for (int anno_id = 0; anno_id < annos.size(); ++anno_id)
{
string labelFile = annos[anno_id].filename_;
labelFile[labelFile.size() - 1] = 't';
labelFile[labelFile.size() - 2] = 'x';
//.........这里部分代码省略.........
示例15: main
int main(int argc, char** argv) {
  // compute_image_mean: averages every datum in a leveldb into a BlobProto
  // and writes the result to a binary proto file.
  ::google::InitGoogleLogging(argv[0]);
  if (argc != 3) {
    LOG(ERROR) << "Usage: compute_image_mean input_leveldb output_file";
    return 1;
  }
  leveldb::DB* db;
  leveldb::Options options;
  options.create_if_missing = false;
  LOG(INFO) << "Opening leveldb " << argv[1];
  leveldb::Status status = leveldb::DB::Open(
      options, argv[1], &db);
  CHECK(status.ok()) << "Failed to open leveldb " << argv[1];
  leveldb::ReadOptions read_options;
  read_options.fill_cache = false;
  leveldb::Iterator* it = db->NewIterator(read_options);
  it->SeekToFirst();
  // Guard: reading it->value() on an invalid iterator is undefined, and an
  // empty DB would later divide by count == 0.
  CHECK(it->Valid()) << "Input leveldb " << argv[1] << " is empty.";
  Datum datum;
  BlobProto sum_blob;
  int count = 0;
  // Use the first datum to fix the expected shape and accumulator size.
  datum.ParseFromString(it->value().ToString());
  sum_blob.set_num(1);
  sum_blob.set_channels(datum.channels());
  sum_blob.set_height(datum.height());
  sum_blob.set_width(datum.width());
  const int data_size = datum.channels() * datum.height() * datum.width();
  int size_in_datum = std::max<int>(datum.data().size(),
                                    datum.float_data_size());
  for (int i = 0; i < size_in_datum; ++i) {
    sum_blob.add_data(0.);
  }
  LOG(INFO) << "Starting Iteration";
  for (it->SeekToFirst(); it->Valid(); it->Next()) {
    datum.ParseFromString(it->value().ToString());
    const string& data = datum.data();
    // Every datum must match the size established by the first one.
    size_in_datum = std::max<int>(datum.data().size(), datum.float_data_size());
    CHECK_EQ(size_in_datum, data_size) << "Incorrect data field size " <<
        size_in_datum;
    if (data.size() != 0) {
      // Pixels stored as raw uint8 bytes in data().
      for (int i = 0; i < size_in_datum; ++i) {
        sum_blob.set_data(i, sum_blob.data(i) +
            static_cast<uint8_t>(data[i]));
      }
    } else {
      // Pixels stored in the float_data repeated field.
      for (int i = 0; i < size_in_datum; ++i) {
        sum_blob.set_data(i, sum_blob.data(i) +
            static_cast<float>(datum.float_data(i)));
      }
    }
    ++count;
    if (count % 10000 == 0) {
      LOG(ERROR) << "Processed " << count << " files.";
    }
  }
  // Fix: the iterator was previously leaked; it must be deleted before db.
  delete it;
  if (count % 10000 != 0) {
    LOG(ERROR) << "Processed " << count << " files.";
  }
  for (int i = 0; i < sum_blob.data_size(); ++i) {
    sum_blob.set_data(i, sum_blob.data(i) / count);
  }
  // Write to disk
  LOG(INFO) << "Write to " << argv[2];
  WriteProtoToBinaryFile(sum_blob, argv[2]);
  delete db;
  return 0;
}