本文整理汇总了C++中DSetCreatPropList类的典型用法代码示例。如果您正苦于以下问题:C++ DSetCreatPropList类的具体用法?C++ DSetCreatPropList怎么用?C++ DSetCreatPropList使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了DSetCreatPropList类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: getPropList
/**
 * Builds a dataset-creation property list configured for 1-D chunked
 * storage with gzip (deflate) compression.
 * @param length Chunk extent along the single dimension.
 * @return The configured property list.
 */
DSetCreatPropList getPropList(const std::size_t length) {
  const hsize_t chunkShape[1] = {length};
  DSetCreatPropList plist;
  plist.setChunk(1, chunkShape);
  plist.setDeflate(6); // gzip level 6: balanced speed/ratio trade-off
  return plist;
}
示例2: createCompoundDataSet
//Creates a dataset: an array of HDF5 CompoundType
//If you want a dimension i to be unlimited, pass chunk_dims[i]=NCHUNK and max_dims[i]=0. If limited, pass max_dims[i]=N and chunk_dims[i]=N.
ArfRecordingData* ArfFileBase::createCompoundDataSet(CompType type, String path, int dimension, int* max_dims, int* chunk_dims)
{
ScopedPointer<DataSet> data;
DSetCreatPropList prop;
// Fixed-size rank-3 scratch arrays: assumes dimension <= 3 -- TODO confirm
// no caller passes a higher rank (would overrun these buffers).
hsize_t Hdims[3];
hsize_t Hmax_dims [3];
hsize_t Hchunk_dims[3];
for (int i=0; i < dimension; i++)
{
Hchunk_dims[i] = chunk_dims[i];
// A positive chunk size differing from max_dims marks dimension i as
// extendible: created at extent 0, growable without bound.
if (chunk_dims[i] > 0 && chunk_dims[i] != max_dims[i])
{
Hmax_dims[i] = H5S_UNLIMITED;
Hdims[i] = 0;
}
else
{
// Fixed dimension: created directly at its final size.
Hmax_dims[i] = max_dims[i];
Hdims[i] = max_dims[i];
}
}
DataSpace dSpace(dimension, Hdims, Hmax_dims);
// Chunked layout is mandatory in HDF5 whenever any dimension is unlimited.
prop.setChunk(dimension, Hchunk_dims);
data = new DataSet(file->createDataSet(path.toUTF8(),type,dSpace,prop));
// release() hands ownership of the DataSet to the returned wrapper.
return new ArfRecordingData(data.release());
}
示例3: mspace1
// * * * * * * * * * * * * * * * * * * * * * * * * * *
/**
 * Creates an empty, chunked, extendible (H5S_UNLIMITED in both dimensions)
 * dataset of doubles inside an existing HDF5 file.
 * @param FILE_NAME    Path of the HDF5 file to open read-write.
 * @param datasetName_ Name of the dataset to create.
 */
void H5_C3PO_NS::createExtendibleDataset(std::string FILE_NAME,const char* datasetName_)
{
hsize_t dims[2] = { 0, 1}; // dataset dimensions at creation
hsize_t maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED};
DataSpace mspace1( RANK, dims, maxdims);
// Stack-allocated file handle: closed automatically even if an HDF5
// call below throws (the previous heap-allocated H5File leaked then).
H5File file( FILE_NAME.c_str(), H5F_ACC_RDWR );
DSetCreatPropList cparms;
// Chunking is required because both dimensions are H5S_UNLIMITED.
hsize_t chunk_dims[2] ={6, 1};
cparms.setChunk( RANK, chunk_dims );
// Set fill value for the dataset. The buffer type must match the
// declared fill-value type: passing an int while declaring
// NATIVE_DOUBLE made HDF5 read 8 bytes from a 4-byte object.
double fill_val = 1.0;
cparms.setFillValue( PredType::NATIVE_DOUBLE, &fill_val);
DataSet dataset = file.createDataSet( datasetName_, PredType::NATIVE_DOUBLE, mspace1, cparms);
file.close();
}
示例4: hdf5ExternalArrayTestCreate
// Creation test: for each candidate chunk size (0 = contiguous layout),
// writes N consecutive hsize_t values through Hdf5ExternalArray and then
// verifies the resulting file contents via checkNumbers().
void hdf5ExternalArrayTestCreate(CuTest *testCase) {
for (hsize_t chunkIdx = 0; chunkIdx < numSizes; ++chunkIdx) {
hsize_t chunkSize = chunkSizes[chunkIdx];
setup();
try {
IntType datatype(PredType::NATIVE_HSIZE);
H5File file(H5std_string(fileName), H5F_ACC_TRUNC);
Hdf5ExternalArray myArray;
DSetCreatPropList cparms;
// Chunked layout also enables deflate compression (level 2);
// chunkSize == 0 keeps the default property list (no chunking).
if (chunkSize > 0) {
cparms.setDeflate(2);
cparms.setChunk(1, &chunkSize);
}
myArray.create(&file, datasetName, datatype, N, &cparms);
for (hsize_t i = 0; i < N; ++i) {
// getUpdate() exposes a writable slot for element i in the buffer.
hsize_t *block = reinterpret_cast<hsize_t *>(myArray.getUpdate(i));
*block = i;
}
myArray.write();
file.flush(H5F_SCOPE_LOCAL);
file.close();
checkNumbers(testCase);
} catch (Exception &exception) {
// Any HDF5 error fails the test after logging the detailed message.
cerr << exception.getCDetailMsg() << endl;
CuAssertTrue(testCase, 0);
} catch (...) {
CuAssertTrue(testCase, 0);
}
teardown();
}
}
示例5: TestCompress
/**
 * Verifies that a packet table created with compression actually carries
 * the deflate filter on its dataset-creation property list.
 * @return 0 on success (or when the deflate filter is not built in),
 *         1 on failure.
 */
int TestCompress()
{
unsigned int flags = 0;
unsigned int config = 0;
size_t cd_nelemts = 0;
TESTING("compression")
#ifdef H5_HAVE_FILTER_DEFLATE
try {
/* Create packet table with compression. */
FL_PacketTable wrapper(fileID, "/compressTest", H5T_NATIVE_CHAR, 100, 8);
/* Create an HDF5 C++ file object */
H5File file;
file.setId(fileID);
/* Make sure that the deflate filter is set by opening the packet table
* as a dataset and getting its creation property list */
DataSet dsetID = file.openDataSet("/compressTest");
DSetCreatPropList dcplID = dsetID.getCreatePlist();
// Throws if the deflate filter is absent from the property list.
dcplID.getFilterById(H5Z_FILTER_DEFLATE, flags, cd_nelemts, NULL, 0, NULL, config);
} catch (const Exception &e) {
// Catch by const reference: catching by value copies the exception
// and slices derived HDF5 exception types.
H5_FAILED();
return 1;
}
PASSED();
#else
SKIPPED();
puts(" deflate filter not enabled");
#endif /* H5_HAVE_FILTER_DEFLATE */
return 0;
}
示例6: catch
void HDF5Genome::setGenomeBottomDimensions(
const vector<Sequence::UpdateInfo>& bottomDimensions)
{
hal_size_t numBottomSegments = 0;
for (vector<Sequence::UpdateInfo>::const_iterator i
= bottomDimensions.begin(); i != bottomDimensions.end();
++i)
{
numBottomSegments += i->_numSegments;
}
H5::Exception::dontPrint();
try
{
DataSet d = _group.openDataSet(bottomArrayName);
_group.unlink(bottomArrayName);
}
catch (H5::Exception){}
hal_size_t numChildren = _alignment->getChildNames(_name).size();
// scale down the chunk size in order to keep chunks proportional to
// the size of a bottom segment with two children.
hsize_t chunk;
_dcprops.getChunk(1, &chunk);
double scale = numChildren < 10 ? 1. : 10. / numChildren;
chunk *= scale;
DSetCreatPropList botDC;
botDC.copy(_dcprops);
botDC.setChunk(1, &chunk);
_bottomArray.create(&_group, bottomArrayName,
HDF5BottomSegment::dataType(numChildren),
numBottomSegments + 1, &botDC, _numChunksInArrayBuffer);
_numChildrenInBottomArray = numChildren;
_childCache.clear();
}
示例7: test_null_filter
static void test_null_filter()
{
// Output message about test being performed
SUBTEST("'Null' filter");
try {
//hsize_t null_size; // Size of dataset with null filter
// Prepare dataset create property list
DSetCreatPropList dsplist;
dsplist.setChunk(2, chunk_size);
if (H5Zregister (H5Z_BOGUS)<0)
throw Exception("test_null_filter", "H5Zregister failed");
// Set some pretent filter
dsplist.setFilter(H5Z_FILTER_BOGUS);
// this function is just a stub right now; will work on it later - BMR
//if(test_filter_internal(file,DSET_BOGUS_NAME,dc,DISABLE_FLETCHER32,DATA_NOT_CORRUPTED,&null_size)<0)
// throw Exception("test_null_filter", "test_filter_internal failed");
// Close objects.
dsplist.close();
PASSED();
} // end of try
// catch all other exceptions
catch (Exception E)
{
issue_fail_msg("test_null_filter()", __LINE__, __FILE__, E.getCDetailMsg());
}
} // test_null_filter
示例8: FILE_NAME
/**
 * Opens (truncating) an HDF5 file and creates an empty, chunked,
 * unlimited 1-D dataset of big-endian 32-bit integers in it.
 * Propagates any HDF5 exception (FileIException, DataSetIException,
 * DataSpaceIException) to the caller.
 */
HDF5HandlerBase::HDF5HandlerBase(const std::string &fileName, const std::string &datasetName)
: FILE_NAME(H5std_string(fileName))
, DATASETNAME(H5std_string(datasetName))
{
try
{
Exception::dontPrint();
file = H5File(FILE_NAME, H5F_ACC_TRUNC);
// Start empty, allow unbounded growth, write in 10000-element chunks.
hsize_t initialDims[1] = {0};
hsize_t maximumDims[1] = {H5S_UNLIMITED};
hsize_t chunkDims[1] = {10000};
DataSpace space(1, initialDims, maximumDims);
DSetCreatPropList creationProps;
creationProps.setChunk(1, chunkDims);
dataset = file.createDataSet( DATASETNAME,
PredType::STD_I32BE, space, creationProps);
creationProps.close();
space.close();
} catch (Exception &error) {
// Throw FileIException, DataSetIException, DataSpaceIException
throw;
}
}
示例9:
// Wraps an already-created/opened DataSet, taking ownership of it, and
// caches its geometry (current extent, chunking) for subsequent writes.
// Supports datasets of rank 1-3; missing dimensions default to extent 1.
HDF5RecordingData::HDF5RecordingData(DataSet* data)
{
DataSpace dSpace;
DSetCreatPropList prop;
ScopedPointer<DataSet> dataSet = data;
hsize_t dims[3], chunk[3];
dSpace = dataSet->getSpace();
prop = dataSet->getCreatePlist();
dimension = dSpace.getSimpleExtentDims(dims);
prop.getChunk(dimension,chunk);
this->size[0] = dims[0];
if (dimension > 1)
this->size[1] = dims[1];
else
this->size[1] = 1;
// Fixed: the rank check must be > 2 before reading dims[2]; the previous
// `dimension > 1` test read an uninitialized value for rank-2 datasets.
if (dimension > 2)
this->size[2] = dims[2];
else
this->size[2] = 1;
this->xChunkSize = chunk[0];
// Start appending at the dataset's current extent along dimension 0.
this->xPos = dims[0];
// Ownership transfers from the local ScopedPointer to the member.
this->dSet = dataSet;
this->rowXPos.clear();
this->rowXPos.insertMultiple(0,0,this->size[1]);
}
示例10:
// Applies command-line chunking/compression options to a dataset-creation
// property list. Both settings are keyed off the "chunk" option; when it is
// present, the "deflate" option is also read (presumably it always has a
// value or default in that case -- verify against the option parser).
void HDF5CLParser::applyToDCProps(DSetCreatPropList& dcprops) const
{
if (!hasOption("chunk"))
{
return;
}
const hsize_t chunkLen = getOption<hsize_t>("chunk");
const hsize_t deflateLevel = getOption<hsize_t>("deflate");
dcprops.setChunk(1, &chunkLen);
dcprops.setDeflate(deflateLevel);
}
示例11: dSpace
/**
 * Creates a chunked dataset of rank 1-3 at `path`.
 * A positive chunking[i] makes dimension i extendible (H5S_UNLIMITED with
 * that chunk size); otherwise dimension i is fixed at size[i].
 * @return A new HDF5RecordingData wrapping the dataset, or nullptr when the
 *         file is not open, the rank is unsupported, or HDF5 throws.
 */
HDF5RecordingData* HDF5FileBase::createDataSet(DataTypes type, int dimension, int* size, int* chunking, String path)
{
ScopedPointer<DataSet> data;
DSetCreatPropList prop;
if (!opened) return nullptr;
//Right now this classes don't support datasets with rank > 3.
//If it's needed in the future we can extend them to be of generic rank
if ((dimension > 3) || (dimension < 1)) return nullptr;
DataType H5type = getH5Type(type);
hsize_t dims[3], chunk_dims[3], max_dims[3];
for (int i=0; i < dimension; i++)
{
dims[i] = size[i];
if (chunking[i] > 0)
{
chunk_dims[i] = chunking[i];
max_dims[i] = H5S_UNLIMITED;
}
else
{
chunk_dims[i] = size[i];
max_dims[i] = size[i];
}
}
try
{
DataSpace dSpace(dimension,dims,max_dims);
prop.setChunk(dimension,chunk_dims);
data = new DataSet(file->createDataSet(path.toUTF8(),H5type,dSpace,prop));
// release() hands DataSet ownership to the returned wrapper.
return new HDF5RecordingData(data.release());
}
// Catch by reference (was by value): catching by value copies the
// exception object and can slice derived type information.
catch (DataSetIException& error)
{
error.printError();
return nullptr;
}
catch (FileIException& error)
{
error.printError();
return nullptr;
}
catch (DataSpaceIException& error)
{
error.printError();
return nullptr;
}
}
示例12: hdf5DNATypeTest
// Round-trip test for the packed DNA type: packs two bases per byte,
// writes NEVEN/2 bytes through HDF5ExternalArray for each candidate chunk
// size, reads them back, and checks both unpacked bases per byte.
void hdf5DNATypeTest(CuTest *testCase)
{
for (hsize_t chunkIdx = 0; chunkIdx < numSizes; ++chunkIdx)
{
hsize_t chunkSize = chunkSizes[chunkIdx];
setup();
try
{
PredType datatype = HDF5DNA::dataType();
H5File file(H5std_string(fileName), H5F_ACC_TRUNC);
HDF5ExternalArray myArray;
DSetCreatPropList cparms;
// chunkSize == 0 keeps the default (contiguous) layout.
if (chunkSize > 0)
{
cparms.setChunk(1, &chunkSize);
}
// Two bases fit in one byte, so round N up to even and halve it.
hsize_t NEVEN = N % 2 ? N + 1 : N;
myArray.create(&file, datasetName, datatype, NEVEN / 2, &cparms);
for (hsize_t i = 0; i < NEVEN / 2; ++i)
{
// Pack the bases at positions 2i and 2i+1 into one byte.
unsigned char value = 0U;
HDF5DNA::pack(idxToDNA(i * 2), i * 2, value);
HDF5DNA::pack(idxToDNA((i * 2) + 1), (i * 2) + 1, value);
myArray.setValue(i, 0, value);
}
myArray.write();
file.flush(H5F_SCOPE_LOCAL);
file.close();
// Reopen read-only and verify every packed pair survives the trip.
H5File rfile(H5std_string(fileName), H5F_ACC_RDONLY);
HDF5ExternalArray readArray;
readArray.load(&rfile, datasetName);
for (hsize_t i = 0; i < NEVEN / 2; ++i)
{
unsigned char value = readArray.getValue<unsigned char>(i, 0);
char v1 = HDF5DNA::unpack(0, value);
char v2 = HDF5DNA::unpack(1, value);
CuAssertTrue(testCase, v1 == idxToDNA(i * 2));
CuAssertTrue(testCase, v2 == idxToDNA((i * 2) + 1));
}
}
catch(Exception& exception)
{
// Any HDF5 error fails the test after logging the detailed message.
cerr << exception.getCDetailMsg() << endl;
CuAssertTrue(testCase, 0);
}
catch(...)
{
CuAssertTrue(testCase, 0);
}
teardown();
}
}
示例13: write_hdf5_image
// Writes an OpenCV Mat as a 2-D float dataset named `name`, converting to
// CV_32F if needed. Uses chunked + shuffled + deflate(5) storage; handles
// both continuous mats and ROI submatrices (via a hyperslab selection).
void write_hdf5_image(H5File h5f, const char *name, const Mat &im)
{
DSetCreatPropList cparms;
hsize_t chunk_dims[2] = {256, 256};
hsize_t dims[2];
dims[0] = im.size().height;
dims[1] = im.size().width;
// HDF5 rejects chunks larger than a fixed dataset dimension, so clamp
// each chunk extent to the image extent.
if (chunk_dims[0] > dims[0]) {
chunk_dims[0] = dims[0];
}
if (chunk_dims[1] > dims[1]) {
chunk_dims[1] = dims[1];
}
cparms.setChunk(2, chunk_dims);
// Byte-shuffling typically improves the deflate compression ratio.
cparms.setShuffle();
cparms.setDeflate(5);
DataSet dataset = h5f.createDataSet(name, PredType::NATIVE_FLOAT,
DataSpace(2, dims, dims),
cparms);
// The dataset is float; convert any other pixel type before writing.
Mat image;
if (im.type() != CV_32F)
im.convertTo(image, CV_32F);
else
image = im;
DataSpace imspace;
float *imdata;
if (image.isContinuous()) {
imspace = dataset.getSpace(); // same size as
imspace.selectAll();
imdata = image.ptr<float>();
} else {
// we are working with an ROI
assert (image.isSubmatrix());
Size parent_size; Point parent_ofs;
image.locateROI(parent_size, parent_ofs);
// Describe the parent buffer as the memory space and select only the
// ROI rectangle within it.
hsize_t parent_count[2];
parent_count[0] = parent_size.height; parent_count[1] = parent_size.width;
imspace.setExtentSimple(2, parent_count);
hsize_t im_offset[2], im_size[2];
im_offset[0] = parent_ofs.y; im_offset[1] = parent_ofs.x;
im_size[0] = image.size().height; im_size[1] = image.size().width;
imspace.selectHyperslab(H5S_SELECT_SET, im_size, im_offset);
// Rewind the ROI data pointer to the parent buffer's origin so it
// matches the memory space declared above. Assumes the parent mat is
// continuous float data -- TODO confirm for all callers.
imdata = image.ptr<float>() - parent_ofs.x - parent_ofs.y * parent_size.width;
}
dataset.write(imdata, PredType::NATIVE_FLOAT, imspace);
}
示例14: create_dataset
// Creates a 2-D float dataset sized to the global image dimensions, with
// 256x256 chunking, byte-shuffling, and deflate level-5 compression.
static DataSet create_dataset(H5File h5f, const char *name)
{
hsize_t chunk_shape[2] = {256, 256};
DSetCreatPropList creation_props;
creation_props.setChunk(2, chunk_shape);
creation_props.setShuffle(); // shuffling improves deflate's ratio
creation_props.setDeflate(5);
hsize_t extent[2];
extent[0] = imsize.height;
extent[1] = imsize.width;
return h5f.createDataSet(name, PredType::NATIVE_FLOAT,
DataSpace(2, extent, extent),
creation_props);
}
示例15: end_column
// When the column header is complete, create a table with
// appropriately typed columns and prepare to write data to it.
void end_column (void* state)
{
program_state_t* s = (program_state_t*)state;
// Create a global dataspace.
s->current_dims = 0;
hsize_t max_dims = H5S_UNLIMITED;
DataSpace global_dataspace(1, &s->current_dims, &max_dims);
// Define an HDF5 datatype based on the Byfl column header.
construct_hdf5_datatype(s);
// Create a dataset. Enable chunking (required because of the
// H5S_UNLIMITED dimension) and deflate compression (optional).
DSetCreatPropList proplist;
proplist.setChunk(1, &chunk_size);
proplist.setDeflate(9); // Maximal compression
s->dataset = s->hdf5file.createDataSet(s->table_name, s->datatype,
global_dataspace, proplist);
}