This article collects and summarizes typical usage examples of the C++ method h5::H5File::close. If you are wondering what exactly H5File::close does, how it is used, or what real calls look like, the curated code examples below may help. You can also read more about the containing class h5::H5File.
A total of 7 code examples of the H5File::close method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C++ code examples.
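Before the curated examples, a minimal self-contained sketch of the basic pattern may be useful: open a file, work with it, then call close(). The file name "example.h5" is a placeholder and is not taken from any of the examples below.
#include <H5Cpp.h>

int main() {
    // Open an existing HDF5 file read-only.
    H5::H5File file("example.h5", H5F_ACC_RDONLY);
    // ... open datasets, read attributes, etc. ...
    // Release the file handle explicitly; the destructor would also close it.
    file.close();
    return 0;
}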
Example 1: memspace
arma::Mat<uint16_t> readLUT(const std::string& path)
{
H5::H5File file (path.c_str(), H5F_ACC_RDONLY);
H5::DataSet ds = file.openDataSet("LUT");
H5::DataSpace filespace = ds.getSpace();
int ndims = filespace.getSimpleExtentNdims();
assert(ndims == 2);
hsize_t dims[2] = {1, 1};
filespace.getSimpleExtentDims(dims);
H5::DataSpace memspace (ndims, dims);
arma::Mat<uint16_t> res (dims[1], dims[0]); // allocate with swapped dimensions: the raw read below fills memory in HDF5 (row-major) order
ds.read(res.memptr(), H5::PredType::NATIVE_UINT16, memspace, filespace);
filespace.close();
memspace.close();
ds.close();
file.close();
// NOTE: Armadillo stores data in column-major order, while HDF5 uses
// row-major ordering. Reading the data directly from HDF5 into the arma
// matrix above therefore leaves it implicitly transposed; the in-place
// transpose below corrects this (see also the usage sketch after this example).
arma::inplace_trans(res);
return res;
}
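A hypothetical call site for the readLUT function above; the path "lut.h5" is a placeholder, and only the headers this sketch itself needs are included.
#include <armadillo>
#include <cstdint>
#include <iostream>
// Assumes readLUT from the example above is declared in this translation unit.
int main() {
    arma::Mat<uint16_t> lut = readLUT("lut.h5");  // "lut.h5" is a placeholder path
    std::cout << "LUT loaded: " << lut.n_rows << " x " << lut.n_cols << std::endl;
    return 0;
}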
Example 2: FileNotFound
void pyne::Material::from_hdf5(std::string filename, std::string datapath, int row, int protocol)
{
// Turn off annoying HDF5 errors
H5::Exception::dontPrint();
// Check that the file is there
if (!pyne::file_exists(filename))
throw pyne::FileNotFound(filename);
// Check to see if the file is in HDF5 format.
bool isH5 = H5::H5File::isHdf5(filename);
if (!isH5)
throw h5wrap::FileNotHDF5(filename);
// Open the database
H5::H5File db (filename, H5F_ACC_RDONLY);
bool datapath_exists = h5wrap::path_exists(&db, datapath);
if (!datapath_exists)
throw h5wrap::PathNotFound(filename, datapath);
// Clear current content
comp.clear();
// Load via various protocols
if (protocol == 0)
_load_comp_protocol0(&db, datapath, row);
else if (protocol == 1)
_load_comp_protocol1(&db, datapath, row);
else
throw pyne::MaterialProtocolError();
// Close the database
db.close();
// Renormalize the composition, just to be safe.
norm_comp();
};
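A hypothetical call of this method, assuming the pyne headers are available and the target file was written with protocol 1; the file name and data path are placeholders.
pyne::Material mat;
// Load row 0 of the dataset at "/material" using protocol 1 (file name and path are placeholders).
mat.from_hdf5("materials.h5", "/material", 0, 1);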
Example 3: save_mat_image
bool save_mat_image(cv::Mat& img, TRect& rect, std::string fname, std::string group_name,
std::string dset_name, std::string dim1, std::string dim2, int compression) {
assert((img.dims == 2) && (img.rows == rect.N_bins[0]) && (img.cols == rect.N_bins[1]));
if((compression<0) || (compression > 9)) {
std::cerr << "! Invalid gzip compression level: " << compression << std::endl;
return false;
}
H5::Exception::dontPrint();
H5::H5File *file = H5Utils::openFile(fname);
if(file == NULL) { return false; }
H5::Group *group = H5Utils::openGroup(file, group_name);
if(group == NULL) {
delete file;
return false;
}
/*
* Image Data
*/
// Creation property list
H5::DSetCreatPropList plist;
int rank = 2;
hsize_t dim[2] = {rect.N_bins[0], rect.N_bins[1]};
plist.setDeflate(compression); // gzip compression level
float fillvalue = 0;
plist.setFillValue(H5::PredType::NATIVE_FLOAT, &fillvalue);
plist.setChunk(rank, &(dim[0]));
H5::DataSpace dspace(rank, &(dim[0]));
H5::DataSet* dataset;
try {
dataset = new H5::DataSet(group->createDataSet(dset_name, H5::PredType::NATIVE_FLOAT, dspace, plist));
} catch(const H5::FileIException& create_dset_err) {
std::cerr << "Unable to create dataset '" << dset_name << "'." << std::endl;
delete group;
delete file;
return false;
}
float *buf = new float[rect.N_bins[0]*rect.N_bins[1]];
for(size_t j=0; j<rect.N_bins[0]; j++) {
for(size_t k=0; k<rect.N_bins[1]; k++) {
buf[rect.N_bins[1]*j + k] = img.at<double>(j,k);
/*float tmp = img.at<double>(j,k);
if(tmp > 0.) {
std::cerr << j << ", " << k << " --> " << j + rect.N_bins[0]*k << " --> " << tmp << std::endl;
}*/
}
}
dataset->write(buf, H5::PredType::NATIVE_FLOAT);
/*
* Attributes
*/
hsize_t att_dim = 2;
H5::DataSpace att_dspace(1, &att_dim);
H5::PredType att_dtype = H5::PredType::NATIVE_UINT32;
H5::Attribute att_N = dataset->createAttribute("N_pix", att_dtype, att_dspace);
att_N.write(att_dtype, &(rect.N_bins));
att_dtype = H5::PredType::NATIVE_DOUBLE;
H5::Attribute att_min = dataset->createAttribute("min", att_dtype, att_dspace);
att_min.write(att_dtype, &(rect.min));
att_dtype = H5::PredType::NATIVE_DOUBLE;
H5::Attribute att_max = dataset->createAttribute("max", att_dtype, att_dspace);
att_max.write(att_dtype, &(rect.max));
att_dim = 1;
H5::StrType vls_type(0, H5T_VARIABLE);
H5::DataSpace att_space_str(H5S_SCALAR);
H5::Attribute att_name_1 = dataset->createAttribute("dim_name_1", vls_type, att_space_str);
att_name_1.write(vls_type, dim1);
H5::Attribute att_name_2 = dataset->createAttribute("dim_name_2", vls_type, att_space_str);
att_name_2.write(vls_type, dim2);
file->close();
delete[] buf;
delete dataset;
delete group;
delete file;
return true;
}
Example 4: nuc_space
//......... portions of this code omitted here .........
data_desc.insertMember("atoms_per_mol", HOFFSET(pyne::material_struct, atoms_per_mol), H5::PredType::NATIVE_DOUBLE);
data_desc.insertMember("comp", HOFFSET(pyne::material_struct, comp), comp_values_array_type);
// make the data array, have to over-allocate
material_struct * mat_data = (material_struct *) malloc(material_struct_size);
int name_len = name.length();
for (int i=0; i < 20; i++)
{
if (i < name_len)
(*mat_data).name[i] = name[i];
else
(*mat_data).name[i] = '\0';
};
(*mat_data).mass = mass;
(*mat_data).atoms_per_mol = atoms_per_mol;
for (int n = 0; n != nuc_size; n++)
{
if (0 < comp.count(nuclides[n]))
(*mat_data).comp[n] = comp[nuclides[n]];
else
(*mat_data).comp[n] = 0.0;
};
// get / make the data set
bool datapath_exists = h5wrap::path_exists(&db, datapath);
if (datapath_exists)
{
data_set = db.openDataSet(datapath);
data_space = data_set.getSpace();
data_rank = data_space.getSimpleExtentDims(data_dims, data_max_dims);
// Determine the row to write to.
int row_num = (int) row;
if (std::signbit(row))
row_num = data_dims[0] + row; // careful, row is negative
if (data_dims[0] <= row_num)
{
// row == -0: extend the data set so that we can append, or
// row_num is larger than the current dimension: resize to accommodate.
data_dims[0] = row_num + 1;
data_set.extend(data_dims);
}
else if (row_num < 0)
throw h5wrap::HDF5BoundsError();
data_offset[0] = row_num;
}
else
{
// Get full space
data_space = H5::DataSpace(1, data_dims, data_max_dims);
// Make data set properties to enable chunking
H5::DSetCreatPropList data_set_params;
hsize_t chunk_dims[1] ={chunksize};
data_set_params.setChunk(1, chunk_dims);
material_struct * data_fill_value = (material_struct *) malloc(material_struct_size);
for (int i=0; i < 20; i++)
(*data_fill_value).name[i] = '\0';
(*data_fill_value).mass = -1.0;
(*data_fill_value).atoms_per_mol = -1.0;
for (int n = 0; n != nuc_size; n++)
(*data_fill_value).comp[n] = 0.0;
data_set_params.setFillValue(data_desc, data_fill_value);
// Create the data set
data_set = db.createDataSet(datapath, data_desc, data_space, data_set_params);
data_set.extend(data_dims);
// Add attribute pointing to nuc path
H5::StrType nuc_attr_type(0, nucpath.length());
H5::DataSpace nuc_attr_space(H5S_SCALAR);
H5::Attribute nuc_attr = data_set.createAttribute("nucpath", nuc_attr_type, nuc_attr_space);
nuc_attr.write(nuc_attr_type, nucpath);
// Remember to de-allocate
free(data_fill_value);
};
// Get the data hyperslab
data_hyperslab = data_set.getSpace();
hsize_t data_count[1] = {1};
data_hyperslab.selectHyperslab(H5S_SELECT_SET, data_count, data_offset);
// Get a memory space for writing
H5::DataSpace mem_space (1, data_count, data_max_dims);
// Write the row...
data_set.write(mat_data, data_desc, mem_space, data_hyperslab);
// Close out the HDF5 file
db.close();
// Remember the milk!
// ...by which I mean to deallocate
free(mat_data);
};
Example 5: save
bool TStellarData::save(const std::string& fname, const std::string& group, const std::string &dset, int compression) {
if((compression < 0) || (compression > 9)) {
std::cerr << "! Invalid gzip compression level: " << compression << std::endl;
return false;
}
hsize_t nstars = star.size();
if(nstars == 0) {
std::cerr << "! No stars to write." << std::endl;
return false;
}
H5::Exception::dontPrint();
H5::H5File *file = H5Utils::openFile(fname);
if(file == NULL) { return false; }
H5::Group *gp = H5Utils::openGroup(file, group);
if(gp == NULL) {
delete file;
return false;
}
/*
* Photometry
*/
// Datatype
hsize_t nbands = NBANDS;
H5::ArrayType f4arr(H5::PredType::NATIVE_FLOAT, 1, &nbands);
H5::ArrayType u4arr(H5::PredType::NATIVE_UINT32, 1, &nbands);
H5::CompType dtype(sizeof(TFileData));
dtype.insertMember("obj_id", HOFFSET(TFileData, obj_id), H5::PredType::NATIVE_UINT64);
dtype.insertMember("l", HOFFSET(TFileData, l), H5::PredType::NATIVE_DOUBLE);
dtype.insertMember("b", HOFFSET(TFileData, b), H5::PredType::NATIVE_DOUBLE);
dtype.insertMember("mag", HOFFSET(TFileData, mag), f4arr);
dtype.insertMember("err", HOFFSET(TFileData, err), f4arr);
dtype.insertMember("maglimit", HOFFSET(TFileData, maglimit), f4arr);
dtype.insertMember("nDet", HOFFSET(TFileData, N_det), u4arr);
dtype.insertMember("EBV", HOFFSET(TFileData, EBV), H5::PredType::NATIVE_FLOAT);
// Dataspace
hsize_t dim = nstars;
H5::DataSpace dspace(1, &dim);
// Property List
H5::DSetCreatPropList plist;
plist.setChunk(1, &nstars);
plist.setDeflate(compression);
// Dataset
H5::DataSet dataset = gp->createDataSet(dset, dtype, dspace, plist);
// Write dataset
TFileData* data = new TFileData[nstars];
for(size_t i=0; i<nstars; i++) {
data[i].obj_id = star[i].obj_id;
data[i].l = star[i].l;
data[i].b = star[i].b;
for(size_t k=0; k<NBANDS; k++) {
data[i].mag[k] = star[i].m[k];
data[i].err[k] = star[i].err[k];
data[i].maglimit[k] = star[i].maglimit[k];
}
data[i].EBV = star[i].EBV;
}
dataset.write(data, dtype);
/*
* Attributes
*/
dim = 1;
H5::DataSpace att_dspace(1, &dim);
H5::PredType att_dtype = H5::PredType::NATIVE_UINT64;
H5::Attribute att_healpix_index = dataset.createAttribute("healpix_index", att_dtype, att_dspace);
att_healpix_index.write(att_dtype, &healpix_index);
att_dtype = H5::PredType::NATIVE_UINT32;
H5::Attribute att_nside = dataset.createAttribute("nside", att_dtype, att_dspace);
att_nside.write(att_dtype, &nside);
att_dtype = H5::PredType::NATIVE_UCHAR;
H5::Attribute att_nested = dataset.createAttribute("nested", att_dtype, att_dspace);
att_nested.write(att_dtype, &nested);
att_dtype = H5::PredType::NATIVE_DOUBLE;
H5::Attribute att_l = dataset.createAttribute("l", att_dtype, att_dspace);
att_l.write(att_dtype, &l);
att_dtype = H5::PredType::NATIVE_DOUBLE;
H5::Attribute att_b = dataset.createAttribute("b", att_dtype, att_dspace);
att_b.write(att_dtype, &b);
att_dtype = H5::PredType::NATIVE_DOUBLE;
H5::Attribute att_EBV = dataset.createAttribute("EBV", att_dtype, att_dspace);
att_EBV.write(att_dtype, &EBV);
file->close();
//......... portions of this code omitted here .........
示例6: Frame
Bundle2::Bundle2(const boost::filesystem::path& fileName, bool loadGeometry):
version_(BUNDLE_VERSION), poiFirstFrame_(0) {
// Opening file
H5::H5File bundleFile;
bundleFile.openFile(fileName.string(), H5F_ACC_RDONLY);
loadParameters(bundleFile);
// Loading POI
H5::Group poiGroup = bundleFile.openGroup("/POI");
hsize_t count;
H5::Attribute attr = poiGroup.openAttribute("count");
attr.read(H5::PredType::NATIVE_HSIZE, &count);
attr.close();
for(size_t frame = 0; frame < count; ++frame) {
cout.flush();
const std::string frameGroupName = boost::str(boost::format("Frame %1$04d") % frame);
H5::Group frameGroup = poiGroup.openGroup(frameGroupName);
addPOIFrame();
for(size_t camera = 0; camera < numCameras_; ++camera)
poi_[poi_.size() - 1][camera].load(frameGroup, camera);
frameGroup.close();
}
poiGroup.close();
// Loading frames
H5::Group bundleGroup = bundleFile.openGroup("/Bundle");
H5::Group framesGroup = bundleGroup.openGroup("Frames");
attr = framesGroup.openAttribute("count");
attr.read(H5::PredType::NATIVE_HSIZE, &count);
attr.close();
for(size_t frame = 0; frame < count; ++frame) {
Frame* f = new Frame(framesGroup, frame, numCameras_);
frames_.push_back(f);
}
framesGroup.close();
// Loading tracks
H5::DataSet tracksDataset = bundleGroup.openDataSet("Tracks");
hsize_t tracksDim[2];
H5::DataSpace tracksDS = tracksDataset.getSpace();
tracksDS.getSimpleExtentDims(tracksDim);
tracksDS.close();
for(size_t i = 0; i < tracksDim[0]; ++i) {
size_t j = addTrack();
tracks_[j]->load(tracksDataset, frames_, i);
}
tracksDataset.close();
bundleGroup.close();
if(loadGeometry && checkGeometry_(bundleFile)) loadGeometry_(bundleFile);
bundleFile.close();
}
示例7: posesDS
void Bundle2::saveGeometry(const boost::filesystem::path& fileName) const {
H5::H5File bundleFile;
bundleFile.openFile(fileName.string(), H5F_ACC_RDWR);
H5::Group rootGroup = bundleFile.openGroup("/");
// If the group "Geometry" exists, delete it!
if(checkGeometry_(bundleFile)) {
rootGroup.unlink("Geometry");
}
// Creating group Geometry
H5::Group geometryGroup = rootGroup.createGroup("Geometry");
// Saving poses
const hsize_t posesChunkDim[] = { 3, 12 };
H5::DSetCreatPropList posesPropList;
posesPropList.setLayout(H5D_CHUNKED);
posesPropList.setChunk(2, posesChunkDim);
posesPropList.setDeflate(9);
const hsize_t posesMaxDim[] = { H5S_UNLIMITED, 12 };
const hsize_t posesCurDim[] = { frames_.size(), 12 };
H5::DataSpace posesDS(2, posesCurDim, posesMaxDim);
H5::DataSet posesDataSet = geometryGroup.createDataSet("Poses", H5::PredType::IEEE_F64LE, posesDS, posesPropList);
double* posesData = (double*)malloc(frames_.size()*12*sizeof(double));
size_t i = 0;
for(deque<Frame*>::const_iterator it = frames_.begin(); it != frames_.end(); it++) {
posesData[i*12] = (*it)->pose()->t().x();
posesData[i*12 + 1] = (*it)->pose()->t().y();
posesData[i*12 + 2] = (*it)->pose()->t().z();
core::Matrix<double> R = (*it)->pose()->R();
posesData[i*12 + 3] = R[0][0];
posesData[i*12 + 4] = R[1][0];
posesData[i*12 + 5] = R[2][0];
posesData[i*12 + 6] = R[0][1];
posesData[i*12 + 7] = R[1][1];
posesData[i*12 + 8] = R[2][1];
posesData[i*12 + 9] = R[0][2];
posesData[i*12 + 10] = R[1][2];
posesData[i*12 + 11] = R[2][2];
++i;
}
posesDataSet.write((const void*)posesData, H5::PredType::NATIVE_DOUBLE, H5::DataSpace::ALL, H5::DataSpace::ALL);
free((void*)posesData);
posesDataSet.close();
posesDS.close();
// Saving points
const hsize_t pointsChunkDim[] = {10, 3};
H5::DSetCreatPropList pointsPropList;
pointsPropList.setLayout(H5D_CHUNKED);
pointsPropList.setChunk(2, pointsChunkDim);
pointsPropList.setDeflate(9);
const hsize_t pointsMaxDim[] = { H5S_UNLIMITED, 3 };
const hsize_t pointsCurDim[] = { tracks_.size(), 3 };
H5::DataSpace pointsDS(2, pointsCurDim, pointsMaxDim);
H5::DataSet pointsDataSet = geometryGroup.createDataSet("Points", H5::PredType::IEEE_F64LE, pointsDS, pointsPropList);
double* pointsData = (double*)malloc(tracks_.size()*3*sizeof(double));
i = 0;
for(deque<Track*>::const_iterator it = tracks_.begin(); it != tracks_.end(); it++) {
pointsData[i*3] = (*it)->point()->coords().x();
pointsData[i*3 + 1] = (*it)->point()->coords().y();
pointsData[i*3 + 2] = (*it)->point()->coords().z();
++i;
}
pointsDataSet.write((const void*)pointsData, H5::PredType::NATIVE_DOUBLE, H5::DataSpace::ALL, H5::DataSpace::ALL);
free((void*)pointsData);
pointsDataSet.close();
pointsDS.close();
// Saving inlier information
const hsize_t inliersChunkDim[] = { 3 };
H5::DSetCreatPropList inliersPropList;
inliersPropList.setLayout(H5D_CHUNKED);
inliersPropList.setChunk(1, inliersChunkDim);
inliersPropList.setDeflate(9);
const hsize_t inliersMaxDim[] = { H5S_UNLIMITED };
const hsize_t inliersCurDim[] = { frames_.size() };
H5::DataSpace inliersDS(1, inliersCurDim, inliersMaxDim);
H5::VarLenType inliersType(&H5::PredType::STD_U8LE);
H5::DataSet inliersDataSet = geometryGroup.createDataSet("Inliers", inliersType, inliersDS, inliersPropList);
i = 0;
for(deque<Frame*>::const_iterator it = frames_.begin(); it != frames_.end(); it++) {
hvl_t inliersLine;
//......... portions of this code omitted here .........