本文整理汇总了C++中DSetCreatPropList::setDeflate方法的典型用法代码示例。如果您正苦于以下问题:C++ DSetCreatPropList::setDeflate方法的具体用法?C++ DSetCreatPropList::setDeflate怎么用?C++ DSetCreatPropList::setDeflate使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类DSetCreatPropList
的用法示例。
在下文中一共展示了DSetCreatPropList::setDeflate方法的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: getPropList
/**
 * Builds a dataset-creation property list configured for one-dimensional
 * chunked storage with gzip (deflate) compression.
 * @param length Extent of the single chunk dimension.
 * @return The configured property list (level-6 deflate).
 */
DSetCreatPropList getPropList(const std::size_t length) {
    const hsize_t chunkExtent[1] = {length};
    DSetCreatPropList creationProps;
    creationProps.setChunk(1, chunkExtent);
    creationProps.setDeflate(6); // zlib level 6: balanced speed vs. ratio
    return creationProps;
}
示例2: hdf5ExternalArrayTestCreate
// Exercises Hdf5ExternalArray creation for every configured chunk size:
// writes N sequential values, flushes the file, and verifies the result
// via checkNumbers(). Any HDF5 exception fails the CuTest case.
void hdf5ExternalArrayTestCreate(CuTest *testCase) {
    for (hsize_t sizeIdx = 0; sizeIdx < numSizes; ++sizeIdx) {
        hsize_t curChunkSize = chunkSizes[sizeIdx];
        setup();
        try {
            IntType elemType(PredType::NATIVE_HSIZE);
            H5File outFile(H5std_string(fileName), H5F_ACC_TRUNC);
            Hdf5ExternalArray extArray;
            DSetCreatPropList createProps;
            if (curChunkSize > 0) {
                // Compression requires a chunked layout, so both filters
                // are configured together.
                createProps.setDeflate(2);
                createProps.setChunk(1, &curChunkSize);
            }
            extArray.create(&outFile, datasetName, elemType, N, &createProps);
            for (hsize_t idx = 0; idx < N; ++idx) {
                hsize_t *slot = reinterpret_cast<hsize_t *>(extArray.getUpdate(idx));
                *slot = idx;
            }
            extArray.write();
            outFile.flush(H5F_SCOPE_LOCAL);
            outFile.close();
            checkNumbers(testCase);
        } catch (Exception &exception) {
            cerr << exception.getCDetailMsg() << endl;
            CuAssertTrue(testCase, 0);
        } catch (...) {
            CuAssertTrue(testCase, 0);
        }
        teardown();
    }
}
示例3:
// Transfers the "chunk" and "deflate" command-line options onto an HDF5
// dataset-creation property list. Does nothing when chunking was not
// requested (compression requires a chunked layout).
void HDF5CLParser::applyToDCProps(DSetCreatPropList& dcprops) const
{
    if (!hasOption("chunk"))
    {
        return;
    }
    // NOTE(review): "deflate" is read without its own hasOption() guard —
    // presumably it always carries a default when "chunk" is set; confirm.
    hsize_t chunkDim = getOption<hsize_t>("chunk");
    hsize_t deflateLevel = getOption<hsize_t>("deflate");
    dcprops.setChunk(1, &chunkDim);
    dcprops.setDeflate(deflateLevel);
}
示例4: write_hdf5_image
// Writes the OpenCV matrix `im` into `h5f` as a 2-D NATIVE_FLOAT dataset
// named `name`, chunked (clamped to the image extent), byte-shuffled and
// deflate-5 compressed. Non-float inputs are converted to CV_32F first.
// A non-continuous Mat (an ROI of a larger image) is written through a
// hyperslab selection over its parent buffer instead of a full copy.
void write_hdf5_image(H5File h5f, const char *name, const Mat &im)
{
    DSetCreatPropList cparms;
    hsize_t chunk_dims[2] = {256, 256};
    hsize_t dims[2];
    dims[0] = im.size().height;
    dims[1] = im.size().width;
    // Clamp the chunk so it never exceeds the fixed dataset extent.
    if (chunk_dims[0] > dims[0]) {
        chunk_dims[0] = dims[0];
    }
    if (chunk_dims[1] > dims[1]) {
        chunk_dims[1] = dims[1];
    }
    cparms.setChunk(2, chunk_dims);
    cparms.setShuffle(); // byte-shuffle filter improves deflate on floats
    cparms.setDeflate(5);
    DataSet dataset = h5f.createDataSet(name, PredType::NATIVE_FLOAT,
                                        DataSpace(2, dims, dims),
                                        cparms);
    Mat image;
    if (im.type() != CV_32F)
        im.convertTo(image, CV_32F);
    else
        image = im;
    DataSpace imspace;
    float *imdata;
    if (image.isContinuous()) {
        imspace = dataset.getSpace(); // same size as the dataset extent
        imspace.selectAll();
        imdata = image.ptr<float>();
    } else {
        // we are working with an ROI
        assert (image.isSubmatrix());
        Size parent_size; Point parent_ofs;
        image.locateROI(parent_size, parent_ofs);
        // Memory dataspace describes the FULL parent image...
        hsize_t parent_count[2];
        parent_count[0] = parent_size.height; parent_count[1] = parent_size.width;
        imspace.setExtentSimple(2, parent_count);
        // ...and the hyperslab selects just the ROI window inside it.
        hsize_t im_offset[2], im_size[2];
        im_offset[0] = parent_ofs.y; im_offset[1] = parent_ofs.x;
        im_size[0] = image.size().height; im_size[1] = image.size().width;
        imspace.selectHyperslab(H5S_SELECT_SET, im_size, im_offset);
        // Rewind the ROI data pointer to the parent's origin so that the
        // hyperslab offsets address the correct elements.
        imdata = image.ptr<float>() - parent_ofs.x - parent_ofs.y * parent_size.width;
    }
    dataset.write(imdata, PredType::NATIVE_FLOAT, imspace);
}
示例5: create_dataset
// Creates a 2-D NATIVE_FLOAT dataset named `name`, sized to the global
// `imsize`, chunked, byte-shuffled and deflate-5 compressed.
// Fix: clamp the 256x256 chunk to the dataset extent — HDF5 rejects chunk
// dimensions larger than a fixed-size dataset (this now mirrors the
// clamping already done in write_hdf5_image).
static DataSet create_dataset(H5File h5f, const char *name)
{
    DSetCreatPropList cparms;
    hsize_t chunk_dims[2] = {256, 256};
    hsize_t dims[2];
    dims[0] = imsize.height;
    dims[1] = imsize.width;
    if (chunk_dims[0] > dims[0]) {
        chunk_dims[0] = dims[0];
    }
    if (chunk_dims[1] > dims[1]) {
        chunk_dims[1] = dims[1];
    }
    cparms.setChunk(2, chunk_dims);
    cparms.setShuffle(); // byte-shuffle improves deflate ratio on floats
    cparms.setDeflate(5);
    return h5f.createDataSet(name, PredType::NATIVE_FLOAT,
                             DataSpace(2, dims, dims),
                             cparms);
}
示例6: end_column
// When the column header is complete, create a table with
// appropriately typed columns and prepare to write data to it.
void end_column (void* state)
{
program_state_t* s = (program_state_t*)state;
// Create a global dataspace.
s->current_dims = 0;
hsize_t max_dims = H5S_UNLIMITED;
DataSpace global_dataspace(1, &s->current_dims, &max_dims);
// Define an HDF5 datatype based on the Byfl column header.
construct_hdf5_datatype(s);
// Create a dataset. Enable chunking (required because of the
// H5S_UNLIMITED dimension) and deflate compression (optional).
DSetCreatPropList proplist;
proplist.setChunk(1, &chunk_size);
proplist.setDeflate(9); // Maximal compression
s->dataset = s->hdf5file.createDataSet(s->table_name, s->datatype,
global_dataspace, proplist);
}
示例7: FILE_NAME
// Copies the last dataset found in the HDF5 file named by argv[1] into a
// new file "out.h5", re-chunked 20x20 with level-9 deflate compression,
// and mirrors the source file's float and string attributes.
// NOTE(review): argv[1] is dereferenced without checking argc — this
// crashes when no argument is supplied.
int
main(int argc, char **argv) {
    // Try block to detect exceptions raised by any of the calls inside it
    try {
        // Turn off the auto-printing when failure occurs so that we can
        // handle the errors appropriately
        H5std_string FILE_NAME(argv[1]);
        Exception::dontPrint();
        // Open the file and the dataset in the file.
        H5File file(FILE_NAME, H5F_ACC_RDONLY);
        DataSet dataset;
        H5std_string dataset_name;
        auto objCount(H5Fget_obj_count(file.getId(), H5F_OBJ_ALL));
        // Scan every open object; if several datasets exist, only the
        // LAST one found is kept (each match overwrites `dataset`).
        for (size_t i = 0; i != objCount; ++i)
            if (H5G_DATASET == file.getObjTypeByIdx(i)) {
                dataset_name = file.getObjnameByIdx(i);
                dataset = file.openDataSet(dataset_name);
            }
        auto datatype(dataset.getDataType());
        auto dataspace(dataset.getSpace());
        hsize_t dims_in[2];
        auto ndims(dataspace.getSimpleExtentDims(dims_in, NULL));
        hsize_t dims_out[2] = { DIM0, DIM1 }; // dataset dimensions
        // NOTE(review): the buffer is sized from the INPUT dims, but the
        // output dataspace uses compile-time DIM0/DIM1 — these must agree
        // or the final write is out of bounds. Confirm against callers.
        double *buf = new double[dims_in[0] * dims_in[1]];
        // Read data.
        dataset.read(buf, PredType::NATIVE_DOUBLE);//, memspace, dataspace);
        H5std_string outFileName("out.h5");
        // Create a new file using the default property lists.
        H5File outfile(outFileName, H5F_ACC_TRUNC);
        // Create the data space for the dataset.
        DataSpace *output_dataspace = new DataSpace(ndims, dims_out);
        hsize_t chunk_dims[2] = { 20, 20 }; // chunk dimensions
        // Modify dataset creation property to enable chunking
        DSetCreatPropList *plist = new DSetCreatPropList;
        plist->setChunk(2, chunk_dims);
        // Set ZLIB (DEFLATE) Compression using level 9.
        plist->setDeflate(9);
        // Create the attributes.
        const size_t numAttrs = file.getNumAttrs();
        for (size_t i = 0; i != numAttrs; ++i) {
            auto attr(file.openAttribute(i));
            auto output_attr(outfile.createAttribute(attr.getName(),
                                                     attr.getDataType(),
                                                     attr.getSpace()));
            switch (attr.getTypeClass()) {
            case H5T_FLOAT: {
                // Local `buf` shadows the outer image buffer.
                double buf;
                attr.read(attr.getDataType(), &buf);
                output_attr.write(attr.getDataType(), &buf);
            }
            break;
            case H5T_STRING: {
                char *buf = new char[(unsigned long)attr.getStorageSize()];
                attr.read(attr.getDataType(), buf);
                output_attr.write(attr.getDataType(), buf);
                // NOTE(review): allocated with new[] — this should be
                // `delete[] buf`; plain delete on an array is UB.
                delete buf;
            }
            break;
            default:
                break;
            }
        }
        // Create the dataset.
        DataSet *output_dataset = new DataSet(outfile.createDataSet(dataset_name, datatype, *output_dataspace, *plist));
        // Write data to dataset.
        output_dataset->write(buf, datatype);
        // Close objects and file. Either approach will close the HDF5 item.
        delete output_dataspace;
        delete output_dataset;
        delete plist;
        // NOTE(review): the image buffer `buf` (new double[]) is never
        // freed in the visible code — leak unless released in the omitted
        // portion below.
        file.close();
    } // end of try block
    // catch failure caused by the H5File operations
    catch(FileIException &error) {
        error.printError();
        return -1;
    }
    // catch failure caused by the DataSet operations
    catch(DataSetIException &error) {
        error.printError();
        return -1;
//......... part of the code omitted here .........
示例8: main
// Creates a chunked, deflate-compressed 2-D integer dataset, fills it
// with i+j values, then re-opens the file, prints the filters attached
// to the dataset, and reads the data back.
int main (void)
{
    hsize_t dims[2] = { DIM0, DIM1 }; // dataset dimensions
    hsize_t chunk_dims[2] = { 20, 20 }; // chunk dimensions
    int i,j, buf[DIM0][DIM1];
    // Try block to detect exceptions raised by any of the calls inside it
    try
    {
        // Turn off the auto-printing when failure occurs so that we can
        // handle the errors appropriately
        Exception::dontPrint();
        // Create a new file using the default property lists.
        H5File file(FILE_NAME, H5F_ACC_TRUNC);
        // Create the data space for the dataset.
        DataSpace *dataspace = new DataSpace(2, dims);
        // Modify dataset creation property to enable chunking
        DSetCreatPropList *plist = new DSetCreatPropList;
        plist->setChunk(2, chunk_dims);
        // Set ZLIB (DEFLATE) Compression using level 6.
        // To use SZIP compression comment out this line.
        plist->setDeflate(6);
        // Uncomment these lines to set SZIP Compression
        // unsigned szip_options_mask = H5_SZIP_NN_OPTION_MASK;
        // unsigned szip_pixels_per_block = 16;
        // plist->setSzip(szip_options_mask, szip_pixels_per_block);
        // Create the dataset.
        DataSet *dataset = new DataSet(file.createDataSet( DATASET_NAME,
                            PredType::STD_I32BE, *dataspace, *plist) );
        // Fill the write buffer with the sum of the two indices.
        for (i = 0; i< DIM0; i++)
            for (j=0; j<DIM1; j++)
                buf[i][j] = i+j;
        // Write data to dataset.
        dataset->write(buf, PredType::NATIVE_INT);
        // Close objects and file. Either approach will close the HDF5 item.
        delete dataspace;
        delete dataset;
        delete plist;
        file.close();
        // -----------------------------------------------
        // Re-open the file and dataset, retrieve filter
        // information for dataset and read the data back.
        // -----------------------------------------------
        int rbuf[DIM0][DIM1];
        int numfilt;
        size_t nelmts={1}, namelen={1};
        unsigned flags, filter_info, cd_values[1], idx;
        // NOTE(review): name[] holds a single char and namelen is 1, so
        // getFilter() cannot return a usable filter name here — only the
        // returned filter TYPE is used below.
        char name[1];
        H5Z_filter_t filter_type;
        // Open the file and the dataset in the file.
        file.openFile(FILE_NAME, H5F_ACC_RDONLY);
        dataset = new DataSet(file.openDataSet( DATASET_NAME));
        // Get the create property list of the dataset.
        plist = new DSetCreatPropList(dataset->getCreatePlist ());
        // Get the number of filters associated with the dataset.
        numfilt = plist->getNfilters();
        cout << "Number of filters associated with dataset: " << numfilt << endl;
        // NOTE(review): idx is unsigned while numfilt is int — a signed/
        // unsigned comparison; harmless since getNfilters() is >= 0 here.
        for (idx=0; idx < numfilt; idx++) {
            nelmts = 0;
            filter_type = plist->getFilter(idx, flags, nelmts, cd_values, namelen, name , filter_info);
            cout << "Filter Type: ";
            switch (filter_type) {
            case H5Z_FILTER_DEFLATE:
                cout << "H5Z_FILTER_DEFLATE" << endl;
                break;
            case H5Z_FILTER_SZIP:
                cout << "H5Z_FILTER_SZIP" << endl;
                break;
            default:
                cout << "Other filter type included." << endl;
            }
        }
        // Read data.
        dataset->read(rbuf, PredType::NATIVE_INT);
        delete plist;
        delete dataset;
        file.close(); // can be skipped
    } // end of try block
//......... part of the code omitted here .........
示例9: Exception
/*-------------------------------------------------------------------------
 * Function: test_compression
 *
 * Purpose: Tests dataset compression. If compression is requested when
 * it hasn't been compiled into the library (such as when
 * updating an existing compressed dataset) then data is sent to
 * the file uncompressed but no errors are returned.
 *
 * Return: Success: 0
 *
 * Failure: -1
 *
 * Programmer: Binh-Minh Ribler (using C version)
 * Friday, January 5, 2001
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
static herr_t
test_compression(H5File& file)
{
#ifndef H5_HAVE_FILTER_DEFLATE
    // Message used by later subtests when deflate was not compiled in.
    const char *not_supported;
    not_supported = " Deflate compression is not enabled.";
#endif /* H5_HAVE_FILTER_DEFLATE */
    int points[100][200];
    int check[100][200];
    hsize_t i, j, n;
    // Initialize the dataset with sequential values 0, 1, 2, ...
    for (i = n = 0; i < 100; i++)
    {
        for (j = 0; j < 200; j++) {
            points[i][j] = (int)n++;
        }
    }
    // Deliberately small conversion buffer to exercise strip mining.
    // NOTE(review): freed only in the omitted portion below — confirm
    // `delete [] tconv_buf` runs on all paths (including exceptions).
    char* tconv_buf = new char [1000];
    DataSet* dataset = NULL;
    try
    {
        const hsize_t size[2] = {100, 200};
        // Create the data space
        DataSpace space1(2, size, NULL);
        // Create a small conversion buffer to test strip mining
        DSetMemXferPropList xfer;
        xfer.setBuffer (1000, tconv_buf, NULL);
        // Use chunked storage with compression
        DSetCreatPropList dscreatplist;
        const hsize_t chunk_size[2] = {2, 25};
        dscreatplist.setChunk (2, chunk_size);
        dscreatplist.setDeflate (6);
#ifdef H5_HAVE_FILTER_DEFLATE
        SUBTEST("Compression (setup)");
        // Create the dataset
        dataset = new DataSet (file.createDataSet
            (DSET_COMPRESS_NAME, PredType::NATIVE_INT, space1, dscreatplist));
        PASSED();
        /*----------------------------------------------------------------------
        * STEP 1: Read uninitialized data. It should be zero.
        *----------------------------------------------------------------------
        */
        SUBTEST("Compression (uninitialized read)");
        dataset->read ((void*) check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);
        for (i=0; i<size[0]; i++) {
            for (j=0; j<size[1]; j++) {
                if (0!=check[i][j]) {
                    H5_FAILED();
                    cerr << "    Read a non-zero value." << endl;
                    cerr << "    At index " << (unsigned long)i << "," <<
                        (unsigned long)j << endl;
                    throw Exception("test_compression", "Failed in uninitialized read");
                }
            }
        }
        PASSED();
        /*----------------------------------------------------------------------
        * STEP 2: Test compression by setting up a chunked dataset and writing
        * to it.
        *----------------------------------------------------------------------
        */
        SUBTEST("Compression (write)");
        // Re-fill the buffer with the same sequential pattern before writing.
        for (i=n=0; i<size[0]; i++)
        {
            for (j=0; j<size[1]; j++)
            {
                points[i][j] = (int)n++;
            }
//......... part of the code omitted here .........
示例10: data_type
// Writes one frame of `aData` into the HDF5 file wrapped by `f`. On the
// first call per file it also records the acquisition start time, dumps
// the saving header into the detector-parameters group, and creates the
// extensible 3-D image dataset (frame, height, width) with the optional
// gzip or bitshuffle-LZ4 filter selected by `aFormat`.
long SaveContainerHdf5::_writeFile(void* f,Data &aData,
                                   CtSaving::HeaderMap &aHeader,
                                   CtSaving::FileFormat aFormat) {
    DEB_MEMBER_FUNCT();
    _File* file = (_File*)f;
    size_t buf_size = 0;
    // Map the Lima pixel type onto the matching native HDF5 datatype;
    // defaults to unsigned 8-bit, rejects undefined types.
    PredType data_type(PredType::NATIVE_UINT8);
    switch (aData.type) {
    case Data::UINT8:
        break;
    case Data::INT8:
        data_type = PredType::NATIVE_INT8;
        break;
    case Data::UINT16:
        data_type = PredType::NATIVE_UINT16;
        break;
    case Data::INT16:
        data_type = PredType::NATIVE_INT16;
        break;
    case Data::UINT32:
        data_type = PredType::NATIVE_UINT32;
        break;
    case Data::INT32:
        data_type = PredType::NATIVE_INT32;
        break;
    case Data::UINT64:
        data_type = PredType::NATIVE_UINT64;
        break;
    case Data::INT64:
        data_type = PredType::NATIVE_INT64;
        break;
    case Data::FLOAT:
        data_type = PredType::NATIVE_FLOAT;
        break;
    case Data::DOUBLE:
        data_type = PredType::NATIVE_DOUBLE;
        break;
    case Data::UNDEF:
    default:
        THROW_CTL_ERROR(Error) << "Invalid image type";
    }
    try {
        // One-time per-file initialization: metadata, header, dataset.
        if (!file->m_format_written) {
            // ISO 8601 Time format
            time_t now;
            time(&now);
            char buf[sizeof("2011-10-08T07:07:09Z")];
#ifdef WIN32
            struct tm gmtime_now;
            gmtime_s(&gmtime_now, &now);
            strftime(buf, sizeof(buf), "%FT%TZ", &gmtime_now);
#else
            strftime(buf, sizeof(buf), "%FT%TZ", gmtime(&now));
#endif
            string stime = string(buf);
            write_h5_dataset(*file->m_entry,"start_time",stime);
            // write header only once into "parameters" group
            // but we should write some keys into measurement, like motor_pos counter_pos (spec)???
            if (!aHeader.empty()) {
                for (map<string, string>::const_iterator it = aHeader.begin(); it != aHeader.end(); it++) {
                    string key = it->first;
                    string value = it->second;
                    write_h5_dataset(*file->m_measurement_detector_parameters,
                                     key.c_str(),value);
                }
            }
            // The parameters group is no longer needed after the dump.
            delete file->m_measurement_detector_parameters;
            file->m_measurement_detector_parameters = NULL;
            // create the image data structure in the file
            // Layout is (frame, height, width); the frame axis is
            // unlimited so the dataset can grow past m_nbframes.
            hsize_t data_dims[3], max_dims[3];
            data_dims[1] = aData.dimensions[1];
            data_dims[2] = aData.dimensions[0];
            data_dims[0] = m_nbframes;
            max_dims[1] = aData.dimensions[1];
            max_dims[2] = aData.dimensions[0];
            max_dims[0] = H5S_UNLIMITED;
            // Create property list for the dataset and setup chunk size
            DSetCreatPropList plist;
            hsize_t chunk_dims[RANK_THREE];
            // test direct chunk write, so chunk dims is 1 image size
            chunk_dims[0] = 1; chunk_dims[1] = data_dims[1]; chunk_dims[2] = data_dims[2];
            plist.setChunk(RANK_THREE, chunk_dims);
#if defined(WITH_Z_COMPRESSION)
            // gzip filter, level taken from the container configuration.
            if (aFormat == CtSaving::HDF5GZ)
                plist.setDeflate(m_compression_level);
#endif
#if defined(WITH_BS_COMPRESSION)
            // bitshuffle + LZ4 filter registered under BSHUF_H5FILTER.
            if (aFormat == CtSaving::HDF5BS) {
                unsigned int opt_vals[2]= {0, BSHUF_H5_COMPRESS_LZ4};
                plist.setFilter(BSHUF_H5FILTER, H5Z_FLAG_MANDATORY, 2, opt_vals);
            }
//......... part of the code omitted here .........