本文整理汇总了C++中DSetCreatPropList::setChunk方法的典型用法代码示例。如果您正苦于以下问题:C++ DSetCreatPropList::setChunk方法的具体用法?C++ DSetCreatPropList::setChunk怎么用?C++ DSetCreatPropList::setChunk使用的例子?那么,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类DSetCreatPropList的用法示例。
在下文中一共展示了DSetCreatPropList::setChunk方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: test_null_filter
static void test_null_filter()
{
// Output message about test being performed
SUBTEST("'Null' filter");
try {
//hsize_t null_size; // Size of dataset with null filter
// Prepare dataset create property list
DSetCreatPropList dsplist;
dsplist.setChunk(2, chunk_size);
if (H5Zregister (H5Z_BOGUS)<0)
throw Exception("test_null_filter", "H5Zregister failed");
// Set some pretent filter
dsplist.setFilter(H5Z_FILTER_BOGUS);
// this function is just a stub right now; will work on it later - BMR
//if(test_filter_internal(file,DSET_BOGUS_NAME,dc,DISABLE_FLETCHER32,DATA_NOT_CORRUPTED,&null_size)<0)
// throw Exception("test_null_filter", "test_filter_internal failed");
// Close objects.
dsplist.close();
PASSED();
} // end of try
// catch all other exceptions
catch (Exception E)
{
issue_fail_msg("test_null_filter()", __LINE__, __FILE__, E.getCDetailMsg());
}
} // test_null_filter
示例2: getPropList
/**
* Sets up the chunking and compression rate.
* @param length
* @return The configured property list
*/
/**
 * Sets up the chunking and compression rate.
 * @param length Extent of the single chunk (one-dimensional dataset).
 * @return The configured property list (gzip/deflate level 6).
 */
DSetCreatPropList getPropList(const std::size_t length) {
  const hsize_t chunkExtent[1] = {length};
  DSetCreatPropList creationProps;
  creationProps.setChunk(1, chunkExtent);
  creationProps.setDeflate(6);
  return creationProps;
}
示例3: datatype
// * * * * * * * * * * * * * * * * * * * * * * * * * *
void H5_C3PO_NS::createExtendibleDataset(std::string FILE_NAME,const char* datasetName_)
{
hsize_t dims[2] = { 0, 1}; // dataset dimensions at creation
hsize_t maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED};
DataSpace mspace1( RANK, dims, maxdims);
H5File* file=new H5File( FILE_NAME.c_str(),H5F_ACC_RDWR );
IntType datatype( PredType::NATIVE_DOUBLE ); //Define datatype for the data
datatype.setOrder( H5T_ORDER_LE );
DSetCreatPropList cparms;
hsize_t chunk_dims[2] ={6, 1};
cparms.setChunk( RANK, chunk_dims );
//Set fill value for the dataset
int fill_val = 1.0;
cparms.setFillValue( PredType::NATIVE_DOUBLE, &fill_val);
DataSet dataset = file->createDataSet( datasetName_, PredType::NATIVE_DOUBLE, mspace1, cparms);
file->close();
delete file;
}
示例4: hdf5ExternalArrayTestCreate
void hdf5ExternalArrayTestCreate(CuTest *testCase) {
for (hsize_t chunkIdx = 0; chunkIdx < numSizes; ++chunkIdx) {
hsize_t chunkSize = chunkSizes[chunkIdx];
setup();
try {
IntType datatype(PredType::NATIVE_HSIZE);
H5File file(H5std_string(fileName), H5F_ACC_TRUNC);
Hdf5ExternalArray myArray;
DSetCreatPropList cparms;
if (chunkSize > 0) {
cparms.setDeflate(2);
cparms.setChunk(1, &chunkSize);
}
myArray.create(&file, datasetName, datatype, N, &cparms);
for (hsize_t i = 0; i < N; ++i) {
hsize_t *block = reinterpret_cast<hsize_t *>(myArray.getUpdate(i));
*block = i;
}
myArray.write();
file.flush(H5F_SCOPE_LOCAL);
file.close();
checkNumbers(testCase);
} catch (Exception &exception) {
cerr << exception.getCDetailMsg() << endl;
CuAssertTrue(testCase, 0);
} catch (...) {
CuAssertTrue(testCase, 0);
}
teardown();
}
}
示例5: DATASETNAME
HDF5HandlerBase::HDF5HandlerBase(const std::string &fileName, const std::string &datasetName)
: FILE_NAME(H5std_string(fileName))
, DATASETNAME(H5std_string(datasetName))
{
try
{
Exception::dontPrint();
file = H5File(FILE_NAME, H5F_ACC_TRUNC);
hsize_t dims[1] = {0};
hsize_t maxdims[1] = {H5S_UNLIMITED};
hsize_t chunk_dims[1] = {10000};
DataSpace dataspace = DataSpace(1,dims,maxdims);
DSetCreatPropList prop;
prop.setChunk(1, chunk_dims);
dataset = file.createDataSet( DATASETNAME,
PredType::STD_I32BE, dataspace, prop);
prop.close();
dataspace.close();
} catch (Exception &error) {
// Throw FileIException, DataSetIException, DataSpaceIException
throw;
}
}
示例6: catch
void HDF5Genome::setGenomeBottomDimensions(
const vector<Sequence::UpdateInfo>& bottomDimensions)
{
hal_size_t numBottomSegments = 0;
for (vector<Sequence::UpdateInfo>::const_iterator i
= bottomDimensions.begin(); i != bottomDimensions.end();
++i)
{
numBottomSegments += i->_numSegments;
}
H5::Exception::dontPrint();
try
{
DataSet d = _group.openDataSet(bottomArrayName);
_group.unlink(bottomArrayName);
}
catch (H5::Exception){}
hal_size_t numChildren = _alignment->getChildNames(_name).size();
// scale down the chunk size in order to keep chunks proportional to
// the size of a bottom segment with two children.
hsize_t chunk;
_dcprops.getChunk(1, &chunk);
double scale = numChildren < 10 ? 1. : 10. / numChildren;
chunk *= scale;
DSetCreatPropList botDC;
botDC.copy(_dcprops);
botDC.setChunk(1, &chunk);
_bottomArray.create(&_group, bottomArrayName,
HDF5BottomSegment::dataType(numChildren),
numBottomSegments + 1, &botDC, _numChunksInArrayBuffer);
_numChildrenInBottomArray = numChildren;
_childCache.clear();
}
示例7: createCompoundDataSet
//Creates a dataset: an array of HDF5 CompoundType
//If you want a dimension i to be unlimited, pass chunk_dims[i]=NCHUNK and max_dims[i]=0. If limited, pass max_dims[i]=N and chunk_dims[i]=N.
ArfRecordingData* ArfFileBase::createCompoundDataSet(CompType type, String path, int dimension, int* max_dims, int* chunk_dims)
{
ScopedPointer<DataSet> data;
DSetCreatPropList prop;
hsize_t Hdims[3];
hsize_t Hmax_dims [3];
hsize_t Hchunk_dims[3];
for (int i=0; i < dimension; i++)
{
Hchunk_dims[i] = chunk_dims[i];
if (chunk_dims[i] > 0 && chunk_dims[i] != max_dims[i])
{
Hmax_dims[i] = H5S_UNLIMITED;
Hdims[i] = 0;
}
else
{
Hmax_dims[i] = max_dims[i];
Hdims[i] = max_dims[i];
}
}
DataSpace dSpace(dimension, Hdims, Hmax_dims);
prop.setChunk(dimension, Hchunk_dims);
data = new DataSet(file->createDataSet(path.toUTF8(),type,dSpace,prop));
return new ArfRecordingData(data.release());
}
// Applies the command-line chunking/compression settings to an HDF5
// dataset creation property list. Both chunking and deflate are applied
// only when the "chunk" option was supplied.
void HDF5CLParser::applyToDCProps(DSetCreatPropList& dcprops) const
{
  if (hasOption("chunk"))
  {
    hsize_t chunk = getOption<hsize_t>("chunk");
    // NOTE(review): "deflate" is read without a hasOption("deflate")
    // guard — presumably getOption supplies a default when the option
    // is absent; verify, otherwise gate it the same way as "chunk".
    hsize_t deflate = getOption<hsize_t>("deflate");
    dcprops.setChunk(1, &chunk);
    dcprops.setDeflate(deflate);
  }
}
示例9: dSpace
HDF5RecordingData* HDF5FileBase::createDataSet(DataTypes type, int dimension, int* size, int* chunking, String path)
{
ScopedPointer<DataSet> data;
DSetCreatPropList prop;
if (!opened) return nullptr;
//Right now this classes don't support datasets with rank > 3.
//If it's needed in the future we can extend them to be of generic rank
if ((dimension > 3) || (dimension < 1)) return nullptr;
DataType H5type = getH5Type(type);
hsize_t dims[3], chunk_dims[3], max_dims[3];
for (int i=0; i < dimension; i++)
{
dims[i] = size[i];
if (chunking[i] > 0)
{
chunk_dims[i] = chunking[i];
max_dims[i] = H5S_UNLIMITED;
}
else
{
chunk_dims[i] = size[i];
max_dims[i] = size[i];
}
}
try
{
DataSpace dSpace(dimension,dims,max_dims);
prop.setChunk(dimension,chunk_dims);
data = new DataSet(file->createDataSet(path.toUTF8(),H5type,dSpace,prop));
return new HDF5RecordingData(data.release());
}
catch (DataSetIException error)
{
error.printError();
return nullptr;
}
catch (FileIException error)
{
error.printError();
return nullptr;
}
catch (DataSpaceIException error)
{
error.printError();
return nullptr;
}
}
// Round-trip test for the packed DNA datatype: writes N bases packed
// two per byte, re-opens the file read-only, and checks every base
// survives. Repeated once per configured chunk size.
void hdf5DNATypeTest(CuTest *testCase)
{
  for (hsize_t chunkIdx = 0; chunkIdx < numSizes; ++chunkIdx)
  {
    hsize_t chunkSize = chunkSizes[chunkIdx];
    setup();
    try
    {
      PredType datatype = HDF5DNA::dataType();
      H5File file(H5std_string(fileName), H5F_ACC_TRUNC);
      HDF5ExternalArray myArray;
      DSetCreatPropList cparms;
      // chunkSize == 0 means "use the default (contiguous) layout".
      if (chunkSize > 0)
      {
        cparms.setChunk(1, &chunkSize);
      }
      // Two bases share one stored byte, so the array holds NEVEN/2
      // elements; round N up to an even count first.
      hsize_t NEVEN = N % 2 ? N + 1 : N;
      myArray.create(&file, datasetName, datatype, NEVEN / 2, &cparms);
      for (hsize_t i = 0; i < NEVEN / 2; ++i)
      {
        unsigned char value = 0U;
        // Pack the bases at positions 2i and 2i+1 into the same byte.
        HDF5DNA::pack(idxToDNA(i * 2), i * 2, value);
        HDF5DNA::pack(idxToDNA((i * 2) + 1), (i * 2) + 1, value);
        myArray.setValue(i, 0, value);
      }
      myArray.write();
      file.flush(H5F_SCOPE_LOCAL);
      file.close();
      // Read back and verify both halves of every stored byte.
      H5File rfile(H5std_string(fileName), H5F_ACC_RDONLY);
      HDF5ExternalArray readArray;
      readArray.load(&rfile, datasetName);
      for (hsize_t i = 0; i < NEVEN / 2; ++i)
      {
        unsigned char value = readArray.getValue<unsigned char>(i, 0);
        char v1 = HDF5DNA::unpack(0, value);
        char v2 = HDF5DNA::unpack(1, value);
        CuAssertTrue(testCase, v1 == idxToDNA(i * 2));
        CuAssertTrue(testCase, v2 == idxToDNA((i * 2) + 1));
      }
    }
    catch(Exception& exception)
    {
      cerr << exception.getCDetailMsg() << endl;
      CuAssertTrue(testCase, 0);
    }
    catch(...)
    {
      CuAssertTrue(testCase, 0);
    }
    teardown();
  }
}
// Writes an OpenCV Mat to dataset `name` in h5f as NATIVE_FLOAT,
// chunked (at most 256x256), shuffled, and deflate-compressed (level 5).
// Handles both continuous matrices and ROI submatrices.
void write_hdf5_image(H5File h5f, const char *name, const Mat &im)
{
    DSetCreatPropList cparms;
    hsize_t chunk_dims[2] = {256, 256};
    hsize_t dims[2];
    dims[0] = im.size().height;
    dims[1] = im.size().width;
    // Chunk dimensions may not exceed the fixed dataset dimensions,
    // so clamp the default for small images.
    if (chunk_dims[0] > dims[0]) {
        chunk_dims[0] = dims[0];
    }
    if (chunk_dims[1] > dims[1]) {
        chunk_dims[1] = dims[1];
    }
    cparms.setChunk(2, chunk_dims);
    cparms.setShuffle();
    cparms.setDeflate(5);

    DataSet dataset = h5f.createDataSet(name, PredType::NATIVE_FLOAT,
                                        DataSpace(2, dims, dims),
                                        cparms);
    // Convert to 32-bit float if needed; otherwise write the Mat as-is.
    Mat image;
    if (im.type() != CV_32F)
        im.convertTo(image, CV_32F);
    else
        image = im;

    DataSpace imspace;
    float *imdata;
    if (image.isContinuous()) {
        // Whole matrix: memory layout matches the dataset exactly.
        imspace = dataset.getSpace(); // same size as dataset
        imspace.selectAll();
        imdata = image.ptr<float>();
    } else {
        // we are working with an ROI: describe the parent matrix as the
        // memory dataspace, then select just the ROI hyperslab in it.
        assert (image.isSubmatrix());
        Size parent_size; Point parent_ofs;
        image.locateROI(parent_size, parent_ofs);
        hsize_t parent_count[2];
        parent_count[0] = parent_size.height; parent_count[1] = parent_size.width;
        imspace.setExtentSimple(2, parent_count);
        hsize_t im_offset[2], im_size[2];
        im_offset[0] = parent_ofs.y; im_offset[1] = parent_ofs.x;
        im_size[0] = image.size().height; im_size[1] = image.size().width;
        imspace.selectHyperslab(H5S_SELECT_SET, im_size, im_offset);
        // Rewind the data pointer from the ROI origin back to the
        // parent origin so it lines up with the memory dataspace.
        imdata = image.ptr<float>() - parent_ofs.x - parent_ofs.y * parent_size.width;
    }
    dataset.write(imdata, PredType::NATIVE_FLOAT, imspace);
}
示例12: create_dataset
static DataSet create_dataset(H5File h5f, const char *name)
{
DSetCreatPropList cparms;
hsize_t chunk_dims[2] = {256, 256};
hsize_t dims[2];
cparms.setChunk(2, chunk_dims);
cparms.setShuffle();
cparms.setDeflate(5);
dims[0] = imsize.height;
dims[1] = imsize.width;
return h5f.createDataSet(name, PredType::NATIVE_FLOAT,
DataSpace(2, dims, dims),
cparms);
}
示例13: end_column
// When the column header is complete, create a table with
// appropriately typed columns and prepare to write data to it.
void end_column (void* state)
{
program_state_t* s = (program_state_t*)state;
// Create a global dataspace.
s->current_dims = 0;
hsize_t max_dims = H5S_UNLIMITED;
DataSpace global_dataspace(1, &s->current_dims, &max_dims);
// Define an HDF5 datatype based on the Byfl column header.
construct_hdf5_datatype(s);
// Create a dataset. Enable chunking (required because of the
// H5S_UNLIMITED dimension) and deflate compression (optional).
DSetCreatPropList proplist;
proplist.setChunk(1, &chunk_size);
proplist.setDeflate(9); // Maximal compression
s->dataset = s->hdf5file.createDataSet(s->table_name, s->datatype,
global_dataspace, proplist);
}
示例14: max
void HDF5Genome::setDimensions(
const vector<Sequence::Info>& sequenceDimensions,
bool storeDNAArrays)
{
_totalSequenceLength = 0;
hal_size_t totalSeq = sequenceDimensions.size();
hal_size_t maxName = 0;
// Copy segment dimensions to use the external interface
vector<Sequence::UpdateInfo> topDimensions;
topDimensions.reserve(sequenceDimensions.size());
vector<Sequence::UpdateInfo> bottomDimensions;
bottomDimensions.reserve(sequenceDimensions.size());
// Compute summary info from the list of sequence Dimensions
for (vector<Sequence::Info>::const_iterator i = sequenceDimensions.begin();
i != sequenceDimensions.end();
++i)
{
_totalSequenceLength += i->_length;
maxName = max(static_cast<hal_size_t>(i->_name.length()), maxName);
topDimensions.push_back(
Sequence::UpdateInfo(i->_name, i->_numTopSegments));
bottomDimensions.push_back(
Sequence::UpdateInfo(i->_name, i->_numBottomSegments));
}
// Unlink the DNA and segment arrays if they exist (using
// exceptions is the only way I know how right now). Note that
// the file needs to be refactored to take advantage of the new
// space.
H5::Exception::dontPrint();
try
{
DataSet d = _group.openDataSet(dnaArrayName);
_group.unlink(dnaArrayName);
}
catch (H5::Exception){}
try
{
DataSet d = _group.openDataSet(sequenceIdxArrayName);
_group.unlink(sequenceIdxArrayName);
}
catch (H5::Exception){}
try
{
DataSet d = _group.openDataSet(sequenceNameArrayName);
_group.unlink(sequenceNameArrayName);
}
catch (H5::Exception){}
if (_totalSequenceLength > 0 && storeDNAArrays == true)
{
hal_size_t arrayLength = _totalSequenceLength / 2;
if (_totalSequenceLength % 2)
{
++arrayLength;
_rup->set(rupGroupName, "1");
}
else
{
_rup->set(rupGroupName, "0");
}
hsize_t chunk;
_dcprops.getChunk(1, &chunk);
// enalarge chunk size because dna bases are so much smaller
// than segments. (about 30x). we default to 10x enlargement
// since the seem to compress about 3x worse.
chunk *= dnaChunkScale;
DSetCreatPropList dnaDC;
dnaDC.copy(_dcprops);
dnaDC.setChunk(1, &chunk);
_dnaArray.create(&_group, dnaArrayName, HDF5DNA::dataType(),
arrayLength, &dnaDC, _numChunksInArrayBuffer);
}
if (totalSeq > 0)
{
_sequenceIdxArray.create(&_group, sequenceIdxArrayName,
HDF5Sequence::idxDataType(),
totalSeq + 1, &_dcprops, _numChunksInArrayBuffer);
_sequenceNameArray.create(&_group, sequenceNameArrayName,
HDF5Sequence::nameDataType(maxName + 1),
totalSeq, &_dcprops, _numChunksInArrayBuffer);
writeSequences(sequenceDimensions);
}
// Do the same as above for the segments.
setGenomeTopDimensions(topDimensions);
setGenomeBottomDimensions(bottomDimensions);
_parentCache = NULL;
_childCache.clear();
}
// Round-trip test for the HDF5 sequence compound type: writes N
// sequence records, re-opens the file read-only, and verifies every
// field. Repeated for each (name length, chunk size) combination.
void hdf5SequenceTypeTest(CuTest *testCase)
{
  for (hsize_t lengthIdx = 0; lengthIdx < numLengths; ++lengthIdx)
  {
    hsize_t length = maxNameLength[lengthIdx];
    for (hsize_t chunkIdx = 0; chunkIdx < numSizes; ++chunkIdx)
    {
      hsize_t chunkSize = chunkSizes[chunkIdx];
      setup();
      try
      {
        CompType datatype = HDF5Sequence::dataType(length);
        H5File file(H5std_string(fileName), H5F_ACC_TRUNC);
        HDF5ExternalArray myArray;
        DSetCreatPropList cparms;
        // chunkSize == 0 means "use the default (contiguous) layout".
        if (chunkSize > 0)
        {
          cparms.setChunk(1, &chunkSize);
        }
        myArray.create(&file, datasetName, datatype, N, &cparms);
        // Write record i with fields derived from i (length i*2, top
        // segments i*3, bottom segments i*4), accumulating running
        // segment totals as the writer interface requires.
        hal_size_t totalTopSegments = 0;
        hal_size_t totalBottomSegments = 0;
        for (hsize_t i = 0; i < N; ++i)
        {
          HDF5Sequence sequence(NULL, &myArray, i);
          Sequence::Info seqInfo(genName(i, length), i * 2, i * 3, i * 4);
          sequence.set(i, seqInfo, totalTopSegments, totalBottomSegments);
          totalTopSegments += seqInfo._numTopSegments;
          totalBottomSegments += seqInfo._numBottomSegments;
        }
        myArray.write();
        file.flush(H5F_SCOPE_LOCAL);
        file.close();
        // Read back and verify each field of every record.
        H5File rfile(H5std_string(fileName), H5F_ACC_RDONLY);
        HDF5ExternalArray readArray;
        readArray.load(&rfile, datasetName);
        for (hsize_t i = 0; i < N; ++i)
        {
          HDF5Sequence sequence(NULL, &readArray, i);
          CuAssertTrue(testCase,
                       sequence.getName() == genName(i, length));
          CuAssertTrue(testCase,
                       sequence.getStartPosition() == i);
          CuAssertTrue(testCase,
                       sequence.getSequenceLength() == i * 2);
          CuAssertTrue(testCase,
                       sequence.getNumTopSegments() == i * 3);
          CuAssertTrue(testCase,
                       sequence.getNumBottomSegments() == i * 4);
        }
      }
      catch(Exception& exception)
      {
        cerr << exception.getCDetailMsg() << endl;
        CuAssertTrue(testCase, 0);
      }
      catch(...)
      {
        CuAssertTrue(testCase, 0);
      }
      teardown();
    }
  }
}