This article collects typical usage examples of the C++ DataSpace::selectAll method. If you are wondering what DataSpace::selectAll does in C++, how to call it, or what real-world uses look like, the curated examples below should help. You can also browse further usage examples of the DataSpace class that this method belongs to.
A total of 6 code examples of DataSpace::selectAll are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better C++ code examples.
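Before the examples, here is a minimal, self-contained sketch of the pattern they all build on: open a dataset, get its DataSpace, call selectAll to select the full extent, and read it into a flat buffer. The file name data.h5 and dataset name /image are placeholders, not taken from any example below.

#include <vector>
#include "H5Cpp.h"
using namespace H5;

int main()
{
    H5File h5f("data.h5", H5F_ACC_RDONLY);         // hypothetical file
    DataSet dataset = h5f.openDataSet("/image");   // hypothetical dataset
    // the dataset's DataSpace describes its rank and extent
    DataSpace dspace = dataset.getSpace();
    dspace.selectAll();                            // select every element in the file space
    // size a flat buffer from the number of selected points and read
    std::vector<float> buf(dspace.getSimpleExtentNpoints());
    dataset.read(buf.data(), PredType::NATIVE_FLOAT, DataSpace::ALL, dspace);
    return 0;
}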
Example 1: read_hdf5_image
void read_hdf5_image(H5File h5f, Mat &image_out, const char *name, const Rect &roi=Rect(0,0,0,0))
{
    DataSet dataset = h5f.openDataSet(name);
    DataSpace dspace = dataset.getSpace();
    assert (dspace.getSimpleExtentNdims() == 2);
    hsize_t dims[2];
    dspace.getSimpleExtentDims(dims);
    if ((roi.width == 0) && (roi.height == 0)) {
        image_out.create(dims[0], dims[1], CV_32F);
        dspace.selectAll();
    } else {
        image_out.create(roi.height, roi.width, CV_32F);
        hsize_t _offset[2], _size[2];
        _offset[0] = roi.y; _offset[1] = roi.x;
        _size[0] = roi.height; _size[1] = roi.width;
        dspace.selectHyperslab(H5S_SELECT_SET, _size, _offset);
    }
    DataSpace imspace;
    float *imdata;
    if (image_out.isContinuous()) {
        dims[0] = image_out.size().height; dims[1] = image_out.size().width;
        imspace = DataSpace(2, dims);
        imspace.selectAll();
        imdata = image_out.ptr<float>();
    } else {
        // we are working with an ROI
        assert (image_out.isSubmatrix());
        Size parent_size; Point parent_ofs;
        image_out.locateROI(parent_size, parent_ofs);
        hsize_t parent_count[2];
        parent_count[0] = parent_size.height; parent_count[1] = parent_size.width;
        imspace.setExtentSimple(2, parent_count);
        hsize_t im_offset[2], im_size[2];
        im_offset[0] = parent_ofs.y; im_offset[1] = parent_ofs.x;
        im_size[0] = image_out.size().height; im_size[1] = image_out.size().width;
        imspace.selectHyperslab(H5S_SELECT_SET, im_size, im_offset);
        imdata = image_out.ptr<float>() - parent_ofs.x - parent_ofs.y * parent_size.width;
    }
    dataset.read(imdata, PredType::NATIVE_FLOAT, imspace, dspace);
}
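A short usage sketch for read_hdf5_image; the file name features.h5 and dataset name /probabilities are placeholders, not taken from the original project.

H5File h5f("features.h5", H5F_ACC_RDONLY);   // hypothetical file
Mat full, window;
// read the whole dataset
read_hdf5_image(h5f, full, "/probabilities");
// read only a 64x64 window whose top-left corner is at (x=10, y=20)
read_hdf5_image(h5f, window, "/probabilities", Rect(10, 20, 64, 64));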
Example 2: write_hdf5_image
void write_hdf5_image(H5File h5f, const char *name, const Mat &im)
{
    DSetCreatPropList cparms;
    hsize_t chunk_dims[2] = {256, 256};
    hsize_t dims[2];
    dims[0] = im.size().height;
    dims[1] = im.size().width;
    if (chunk_dims[0] > dims[0]) {
        chunk_dims[0] = dims[0];
    }
    if (chunk_dims[1] > dims[1]) {
        chunk_dims[1] = dims[1];
    }
    cparms.setChunk(2, chunk_dims);
    cparms.setShuffle();
    cparms.setDeflate(5);
    DataSet dataset = h5f.createDataSet(name, PredType::NATIVE_FLOAT,
                                        DataSpace(2, dims, dims),
                                        cparms);
    Mat image;
    if (im.type() != CV_32F)
        im.convertTo(image, CV_32F);
    else
        image = im;
    DataSpace imspace;
    float *imdata;
    if (image.isContinuous()) {
        imspace = dataset.getSpace(); // same size as the dataset
        imspace.selectAll();
        imdata = image.ptr<float>();
    } else {
        // we are working with an ROI
        assert (image.isSubmatrix());
        Size parent_size; Point parent_ofs;
        image.locateROI(parent_size, parent_ofs);
        hsize_t parent_count[2];
        parent_count[0] = parent_size.height; parent_count[1] = parent_size.width;
        imspace.setExtentSimple(2, parent_count);
        hsize_t im_offset[2], im_size[2];
        im_offset[0] = parent_ofs.y; im_offset[1] = parent_ofs.x;
        im_size[0] = image.size().height; im_size[1] = image.size().width;
        imspace.selectHyperslab(H5S_SELECT_SET, im_size, im_offset);
        imdata = image.ptr<float>() - parent_ofs.x - parent_ofs.y * parent_size.width;
    }
    dataset.write(imdata, PredType::NATIVE_FLOAT, imspace);
}
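And the matching write direction, again with placeholder names; write_hdf5_image converts the input to CV_32F if necessary and stores it chunked and compressed as configured above.

H5File h5f("features.h5", H5F_ACC_TRUNC);    // hypothetical file, truncated on open
Mat prob = Mat::zeros(512, 512, CV_32F);
write_hdf5_image(h5f, "/probabilities", prob);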
Example 3: write_feature
void write_feature(H5File h5f, const Mat &image_in, const char *name)
{
    // make sure the sizes match
    assert (imsize == image_in.size());
    // make sure the image is in native float
    Mat image;
    if (image_in.type() != CV_32F)
        image_in.convertTo(image, CV_32F);
    else
        image = image_in;
    DataSet dataset = create_dataset(h5f, name);
    DataSpace imspace;
    float *imdata;
    if (image.isContinuous()) {
        imspace = dataset.getSpace(); // same size as the dataset
        imspace.selectAll();
        imdata = image.ptr<float>();
    } else {
        // we are working with an ROI
        assert (image.isSubmatrix());
        Size parent_size; Point parent_ofs;
        image.locateROI(parent_size, parent_ofs);
        hsize_t parent_count[2];
        parent_count[0] = parent_size.height; parent_count[1] = parent_size.width;
        imspace.setExtentSimple(2, parent_count);
        hsize_t im_offset[2], im_size[2];
        im_offset[0] = parent_ofs.y; im_offset[1] = parent_ofs.x;
        im_size[0] = image.size().height; im_size[1] = image.size().width;
        imspace.selectHyperslab(H5S_SELECT_SET, im_size, im_offset);
        imdata = image.ptr<float>() - parent_ofs.x - parent_ofs.y * parent_size.width;
    }
    dataset.write(imdata, PredType::NATIVE_FLOAT, imspace);
}
Example 4: priv_init
//......... part of this code is omitted here .........
    set_Md_store_size(frame_count_);
    // fill in data
    // assume that the frames run from 0 to frame_count_
    for(unsigned int j = 0; j<frame_count_; ++j)
    {
        string frame_name = format_name(j+start_);
        Group * frame = new Group(file->openGroup(frame_name));
        Attr_list_hdf g_attr_list(frame);
        set_Md_store(j, new Md_store(g_attr_list));
        if(two_d_data_)
        {
            if(!g_attr_list.contains_attr("z-position"))
                throw logic_error("wrapper_i_hdf: z-position not found");
            g_attr_list.get_value("z-position", frame_zdata_[j]);
        }
        for(set<pair<D_TYPE,int> >::iterator it = data_types_.begin();
            it != data_types_.end(); ++it)
        {
            if(two_d_data_ && ((*it).first)==utilities::D_ZPOS)
                continue;
            // ***************
            DataSet * dset = new DataSet(frame->openDataSet(format_dset_name((*it).first,(*it).second)));
            // ***************
            DataSpace dspace = dset->getSpace();
            dspace.selectAll();
            int part_count = dspace.getSimpleExtentNpoints();
            // if this is the first data set for this frame, set the number of particles
            if(frame_c_.size()==j)
                frame_c_.push_back(part_count);
            // if part_count is less than a previous dataset, use the smaller
            // number. This shouldn't result in memory leaks as the bare
            // arrays are never returned
            else if(frame_c_.at(j) > part_count)
                frame_c_.at(j) = part_count;
            // if the current set has more than a previous set, keep the
            // old value. These checks are a kludge; this needs to be handled
            // better at the level of writing out the data
            else if(frame_c_.at(j) < part_count)
                continue;
            // if(frame_c_.at(j) != part_count)
            //     throw runtime_error("wrapper_i_hdf: data sets different sizes");
            D_TYPE cur_type = (*it).first;
            switch(v_type(cur_type))
            {
            case V_INT:
                data_i_.at(d_mapi_(cur_type)).at(j) = new int [part_count];
                dset->read(data_i_.at(d_mapi_(cur_type)).at(j), PredType::NATIVE_INT);
                break;
            case V_FLOAT:
                data_f_.at(d_mapf_(cur_type)).at(j) = new float [part_count];
                dset->read(data_f_.at(d_mapf_(cur_type)).at(j), PredType::NATIVE_FLOAT);
                break;
            case V_COMPLEX:
Example 5: get_dset_info
void Generic_wrapper_hdf::get_dset_info(std::vector<int> & dims, V_TYPE & vt, const std::string & dset_name) const
{
    if (!(wrapper_open_))
        throw runtime_error("wrapper must be open to add a dataset");
    dims.clear();
    // get data set
    DataSet dset;
    // open data set
    if(!group_open_ || dset_name[0] == '/')
    {
        dset = file_->openDataSet(dset_name);
    }
    else if(group_)
    {
        dset = group_->openDataSet(dset_name);
    }
    else
        throw logic_error("generic_wrapper_hdf:: can't add to a closed group");
    // identify type
    H5T_class_t dset_class_t = dset.getTypeClass();
    H5T_sign_t sign;
    switch(dset_class_t)
    {
    case H5T_INTEGER:
        sign = dset.getIntType().getSign();
        if(sign == H5T_SGN_2)
            vt = V_INT;
        else if(sign == H5T_SGN_NONE)
            vt = V_UINT;
        else
            vt = V_ERROR;
        break;
    case H5T_FLOAT:
        vt = V_FLOAT;
        break;
    case H5T_STRING:
    case H5T_TIME:
    case H5T_BITFIELD:
    case H5T_OPAQUE:
    case H5T_COMPOUND:
    case H5T_REFERENCE:
    case H5T_ENUM:
    case H5T_VLEN:
    case H5T_ARRAY:
    case H5T_NO_CLASS:
    case H5T_NCLASSES:
        vt = V_ERROR;
        break;
    }
    // get the data space
    DataSpace dataspace = dset.getSpace();
    // select everything
    dataspace.selectAll();
    // get the rank
    hsize_t rank = dataspace.getSimpleExtentNdims();
    // make dims the right size
    vector<hsize_t> tdims;
    tdims.resize(rank);
    // get the dimensionality
    dataspace.getSimpleExtentDims(tdims.data(), NULL);
    // copy to the return vector
    dims.resize(rank);
    for(hsize_t j = 0; j<rank; ++j)
        dims[j] = (unsigned int)tdims[j];
}
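The same inspection can be written directly against the HDF5 C++ API without the wrapper class; a minimal sketch, assuming a file data.h5 that contains a dataset /frame0/x (both names are hypothetical).

#include <iostream>
#include <vector>
#include "H5Cpp.h"
using namespace H5;

int main()
{
    H5File file("data.h5", H5F_ACC_RDONLY);        // hypothetical file
    DataSet dset = file.openDataSet("/frame0/x");  // hypothetical dataset
    // class of the stored elements (integer, float, ...)
    H5T_class_t cls = dset.getTypeClass();
    // rank and per-dimension extents come from the dataset's DataSpace
    DataSpace space = dset.getSpace();
    space.selectAll();
    int rank = space.getSimpleExtentNdims();
    std::vector<hsize_t> dims(rank);
    space.getSimpleExtentDims(dims.data(), NULL);
    std::cout << "class " << cls << ", rank " << rank << ", dims:";
    for (int j = 0; j < rank; ++j)
        std::cout << " " << dims[j];
    std::cout << std::endl;
    return 0;
}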
Example 6: get_dset_priv
template <class T>
void Generic_wrapper_hdf::get_dset_priv(vector<T> & data, std::vector<unsigned int> & dims, const std::string & dset_name, const DataType & mtype) const
{
    if (!(wrapper_open_))
        throw runtime_error("wrapper must be open to read a dataset");
    dims.clear();
    data.clear();
    // get data set
    DataSet dset;
    // open data set
    try
    {
        if(!group_open_ || dset_name[0] == '/')
        {
            if(file_)
                try
                {
                    dset = file_->openDataSet(dset_name);
                }
                catch(FileIException &e)
                {
                    throw runtime_error(e.getDetailMsg());
                }
            else
                throw runtime_error("there is no open file");
        }
        else if(group_)
        {
            dset = group_->openDataSet(dset_name);
        }
        else
            throw logic_error("generic_wrapper_hdf:: can't read from a closed group");
    }
    catch(Exception &e)
    {
        std::string er_msg = "error opening hdf \n" + e.getDetailMsg();
        throw runtime_error(er_msg);
    }
    // check type
    H5T_class_t dset_class_t = dset.getTypeClass();
    H5T_class_t mem_class_t = mtype.getClass();
    if(dset_class_t != mem_class_t)
        throw runtime_error("Data type mismatch");
    // if(mem_class_t == H5T_INTEGER)
    // {
    //     IntType mem_int = IntType(mtype);
    //     H5T_sign_t dsign = dset.getIntType().getSign();
    //     H5T_sign_t msign = mem_int.getSign();
    //     if(dsign != msign)
    //         throw runtime_error("int signness mismatch");
    // }
    // get the data space
    DataSpace dataspace = dset.getSpace();
    // select everything
    dataspace.selectAll();
    // get the rank
    hsize_t rank = dataspace.getSimpleExtentNdims();
    // make dims the right size
    vector<hsize_t> tdims;
    tdims.resize(rank);
    // get the dimensionality
    dataspace.getSimpleExtentDims(tdims.data(), NULL);
    // copy to the return vector
    dims.resize(rank);
    for(hsize_t j = 0; j<rank; ++j)
        dims[j] = (unsigned int)tdims[j];
    // get the number of entries
    hsize_t total = dataspace.getSimpleExtentNpoints();
    // resize the data vector
    data.resize(total);
    // read the data out
    dset.read(data.data(), mtype, dataspace, dataspace);
}