This article collects typical usage examples of the C++ method h5::H5File::openDataSet. If you are unsure how H5File::openDataSet is meant to be used, or want to see it in context, the curated examples below may help; they also illustrate the surrounding h5::H5File class.
Five code examples of H5File::openDataSet are shown below, taken from open-source projects and listed roughly in order of popularity.
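Before the project examples, here is a minimal, self-contained sketch that is not taken from any of the projects below; the file name data.h5, the dataset path /values, and the assumption that the dataset is one-dimensional and of type double are made up for illustration. It shows the bare openDataSet call pattern: open the file, open the dataset by its in-file path, size a buffer from the dataspace, and read.

#include <iostream>
#include <vector>
#include <H5Cpp.h>

int main()
{
    // Open an existing HDF5 file read-only (the file name is hypothetical).
    H5::H5File file("data.h5", H5F_ACC_RDONLY);

    // openDataSet takes a path inside the file and returns an H5::DataSet.
    H5::DataSet dataset = file.openDataSet("/values");

    // Query the dataspace to size the destination buffer (assumes rank 1).
    H5::DataSpace space = dataset.getSpace();
    hsize_t dims[1] = {0};
    space.getSimpleExtentDims(dims, NULL);

    // Read the whole dataset as native doubles.
    std::vector<double> values(dims[0]);
    dataset.read(values.data(), H5::PredType::NATIVE_DOUBLE);

    std::cout << "read " << values.size() << " values" << std::endl;
    return 0;
}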
Example 1: LBTHROW
H5::DataSet CompartmentReportHDF5::_openDataset( const H5::H5File& file,
                                                 const uint32_t cellID )
{
    std::stringstream cellName;
    cellName << "a" << cellID;
    const std::string datasetName = "/" + cellName.str() + "/" + _reportName +
                                    "/" + dataDatasetName;

    H5::DataSet dataset;
    H5E_BEGIN_TRY
        dataset = file.openDataSet( datasetName );
    H5E_END_TRY
    if( !dataset.getId() )
    {
        LBTHROW(
            std::runtime_error( "ReportReaderHDF5: "
                                "Dataset " + datasetName + " not found "
                                "in file: " + file.getFileName( )));
    }

    if( dataset.getSpace().getSimpleExtentNdims() != 2 )
    {
        LBTHROW(
            std::runtime_error("Compartment_Report_HDF5_File_Reader: "
                               "Error, not 2 dimensional array on " +
                               datasetName));
    }

    return dataset;
}
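Example 1 wraps the openDataSet call in the C-level H5E_BEGIN_TRY / H5E_END_TRY macros so that a missing dataset does not print HDF5's error stack before the explicit getId() check. A hedged alternative sketch, not taken from the source above, stays entirely in the C++ API by silencing automatic error printing and catching the exception that openDataSet throws on failure (file and datasetName refer to the variables in the example):

// Alternative sketch: exception handling instead of the H5E_* macros.
H5::Exception::dontPrint();            // stop HDF5 from dumping its error stack
H5::DataSet dataset;
try
{
    dataset = file.openDataSet( datasetName );
}
catch( const H5::Exception& )
{
    // The dataset could not be opened; treat it as "not found".
    throw std::runtime_error( "Dataset " + datasetName +
                              " not found in file: " + file.getFileName( ));
}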
Example 2: memspace
arma::Mat<uint16_t> readLUT(const std::string& path)
{
    H5::H5File file(path.c_str(), H5F_ACC_RDONLY);
    H5::DataSet ds = file.openDataSet("LUT");

    H5::DataSpace filespace = ds.getSpace();
    int ndims = filespace.getSimpleExtentNdims();
    assert(ndims == 2);
    hsize_t dims[2] = {1, 1};
    filespace.getSimpleExtentDims(dims);

    H5::DataSpace memspace(ndims, dims);

    arma::Mat<uint16_t> res(dims[0], dims[1]);
    ds.read(res.memptr(), H5::PredType::NATIVE_UINT16, memspace, filespace);

    filespace.close();
    memspace.close();
    ds.close();
    file.close();

    // NOTE: Armadillo stores data in column-major order, while HDF5 uses
    // row-major ordering. Above, we read the data directly from HDF5 into
    // the arma matrix, so it arrives implicitly transposed. The call to
    // arma::inplace_trans below undoes that transposition.
    arma::inplace_trans(res);
    return res;
}
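A hypothetical call site for readLUT; the file name lut.h5 is an assumption. Thanks to the final transpose, the returned matrix has the same row/column layout as the dataset stored in the file:

// Hypothetical usage of readLUT (the file name is made up).
arma::Mat<uint16_t> lut = readLUT("lut.h5");
std::cout << "LUT is " << lut.n_rows << " x " << lut.n_cols << std::endl;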
Example 3: getNodeType
template <typename _Tp>
bool readDataset1D(const H5::H5File &file, const std::string &name, std::vector<_Tp> &data)
{
    H5::DataSet dataset = file.openDataSet(name);
    H5::DataSpace dataspace = dataset.getSpace();
    hsize_t dims_out[1];
    int rank = dataspace.getSimpleExtentDims(dims_out, NULL);

    int _type;
    bool read = getNodeType(dataset, _type);
    read &= (_type == StorageNode::SEQ);
    read &= (rank == 1);

    if (!read)
        return read;

    data.resize(dims_out[0]);
    dataset.read(data.data(), dataset.getDataType());
    return true;
}
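A hypothetical call site for the template above; the file name, the dataset path /features, and the element type float are assumptions, and the snippet further assumes the project-specific getNodeType / StorageNode helpers used inside readDataset1D are available:

// Hypothetical usage of readDataset1D (file name, dataset name and type are made up).
H5::H5File file("features.h5", H5F_ACC_RDONLY);
std::vector<float> features;
if (readDataset1D<float>(file, "/features", features))
    std::cout << "read " << features.size() << " floats" << std::endl;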
Example 4: init_from_datafile
void StateSet::init_from_datafile(std::string filename) {
    // open the other file read-only
    H5::H5File otherfile;
    otherfile.openFile(filename, H5F_ACC_RDONLY);
    H5::Group otherroot = otherfile.openGroup("/");

    // check that the grid properties match
    int othersx, othersy, otherN;
    double otherdx;
    otherroot.openAttribute("num_states").read(H5::PredType::NATIVE_INT, &otherN);
    otherroot.openAttribute("grid_sizex").read(H5::PredType::NATIVE_INT, &othersx);
    otherroot.openAttribute("grid_sizey").read(H5::PredType::NATIVE_INT, &othersy);
    otherroot.openAttribute("grid_delta").read(H5::PredType::NATIVE_DOUBLE, &otherdx);
    if (static_cast<int>(N) != otherN)
        throw GeneralError("Cannot copy state data from datafile: value for num_states does not match.");
    if (static_cast<int>(datalayout.sizex) != othersx)
        throw GeneralError("Cannot copy state data from datafile: value for grid_sizex does not match.");
    if (static_cast<int>(datalayout.sizey) != othersy)
        throw GeneralError("Cannot copy state data from datafile: value for grid_sizey does not match.");
    if (datalayout.dx != otherdx)
        throw GeneralError("Cannot copy state data from datafile: value for grid_delta does not match.");

    // copy data
    H5::DataSet other_states_data = otherfile.openDataSet("/states");
    other_states_data.read(state_array->get_dataptr(), other_states_data.getArrayType());
}
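For reference, a hedged sketch of the writer side that init_from_datafile expects: it creates the four scalar root-group attributes with the same names the example reads back. It is not taken from the same project; the output file name and the numeric values are made up.

// Sketch: writing the scalar attributes that init_from_datafile reads back.
H5::H5File out("states.h5", H5F_ACC_TRUNC);     // hypothetical output file
H5::Group root = out.openGroup("/");
H5::DataSpace scalar(H5S_SCALAR);

int num_states = 4, sizex = 128, sizey = 128;   // made-up values
double dx = 0.1;
root.createAttribute("num_states", H5::PredType::NATIVE_INT, scalar)
    .write(H5::PredType::NATIVE_INT, &num_states);
root.createAttribute("grid_sizex", H5::PredType::NATIVE_INT, scalar)
    .write(H5::PredType::NATIVE_INT, &sizex);
root.createAttribute("grid_sizey", H5::PredType::NATIVE_INT, scalar)
    .write(H5::PredType::NATIVE_INT, &sizey);
root.createAttribute("grid_delta", H5::PredType::NATIVE_DOUBLE, scalar)
    .write(H5::PredType::NATIVE_DOUBLE, &dx);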
Example 5: nuc_space
void pyne::Material::write_hdf5(std::string filename, std::string datapath, std::string nucpath, float row, int chunksize)
{
  // Turn off annoying HDF5 errors
  H5::Exception::dontPrint();

  // Create new/open datafile.
  H5::H5File db;
  if (pyne::file_exists(filename))
  {
    bool isH5 = H5::H5File::isHdf5(filename);
    if (!isH5)
      throw h5wrap::FileNotHDF5(filename);
    db = H5::H5File(filename, H5F_ACC_RDWR);
  }
  else
    db = H5::H5File(filename, H5F_ACC_TRUNC);

  //
  // Read in nuclist if available, write it out if not
  //
  bool nucpath_exists = h5wrap::path_exists(&db, nucpath);
  std::vector<int> nuclides;
  int nuc_size;
  hsize_t nuc_dims[1];

  if (nucpath_exists)
  {
    nuclides = h5wrap::h5_array_to_cpp_vector_1d<int>(&db, nucpath, H5::PredType::NATIVE_INT);
    nuc_size = nuclides.size();
    nuc_dims[0] = nuc_size;
  }
  else
  {
    nuclides = std::vector<int>();
    for (pyne::comp_iter i = comp.begin(); i != comp.end(); i++)
      nuclides.push_back(i->first);
    nuc_size = nuclides.size();

    // Create the data if it doesn't exist
    int nuc_data [nuc_size];
    for (int n = 0; n != nuc_size; n++)
      nuc_data[n] = nuclides[n];
    nuc_dims[0] = nuc_size;
    H5::DataSpace nuc_space(1, nuc_dims);
    H5::DataSet nuc_set = db.createDataSet(nucpath, H5::PredType::NATIVE_INT, nuc_space);
    nuc_set.write(nuc_data, H5::PredType::NATIVE_INT);
    db.flush(H5F_SCOPE_GLOBAL);
  };

  //
  // Write out to the file
  //
  H5::DataSet data_set;
  H5::DataSpace data_space, data_hyperslab;
  int data_rank = 1;
  hsize_t data_dims[1] = {1};
  hsize_t data_max_dims[1] = {H5S_UNLIMITED};
  hsize_t data_offset[1] = {0};

  size_t material_struct_size = sizeof(pyne::material_struct) + sizeof(double)*(nuc_size);
  H5::CompType data_desc(material_struct_size);
  H5::ArrayType comp_values_array_type(H5::PredType::NATIVE_DOUBLE, 1, nuc_dims);

  // make the data table type
  data_desc.insertMember("name", HOFFSET(pyne::material_struct, name), H5::StrType(0, 20));
  data_desc.insertMember("mass", HOFFSET(pyne::material_struct, mass), H5::PredType::NATIVE_DOUBLE);
  data_desc.insertMember("atoms_per_mol", HOFFSET(pyne::material_struct, atoms_per_mol), H5::PredType::NATIVE_DOUBLE);
  data_desc.insertMember("comp", HOFFSET(pyne::material_struct, comp), comp_values_array_type);

  // make the data array, have to over-allocate
  material_struct * mat_data = (material_struct *) malloc(material_struct_size);
  int name_len = name.length();
  for (int i=0; i < 20; i++)
  {
    if (i < name_len)
      (*mat_data).name[i] = name[i];
    else
      (*mat_data).name[i] = NULL;
  };
  (*mat_data).mass = mass;
  (*mat_data).atoms_per_mol = atoms_per_mol;
  for (int n = 0; n != nuc_size; n++)
  {
    if (0 < comp.count(nuclides[n]))
      (*mat_data).comp[n] = comp[nuclides[n]];
    else
      (*mat_data).comp[n] = 0.0;
  };

  // get / make the data set
  bool datapath_exists = h5wrap::path_exists(&db, datapath);
  if (datapath_exists)
  {
    data_set = db.openDataSet(datapath);
    data_space = data_set.getSpace();
    data_rank = data_space.getSimpleExtentDims(data_dims, data_max_dims);

    // Determine the row size.
//......... part of the code is omitted here .........