本文整理汇总了Java中ncsa.hdf.hdf5lib.exceptions.HDF5Exception类的典型用法代码示例。如果您正苦于以下问题:Java HDF5Exception类的具体用法?Java HDF5Exception怎么用?Java HDF5Exception使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
HDF5Exception类属于ncsa.hdf.hdf5lib.exceptions包,在下文中一共展示了HDF5Exception类的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: readExchangeSymbolQuotes
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception; //导入依赖的package包/类
/**
 * Reads the entire quote dataset for the given exchange/symbol pair into a
 * local buffer.
 *
 * @param exchange exchange code used to locate the dataset.
 * @param symbol   symbol whose quotes should be read.
 * @throws EodDataSinkException if any HDF5 call fails.
 */
private void readExchangeSymbolQuotes(String exchange, String symbol) throws EodDataSinkException {
	openQuoteDataset(exchange, symbol);
	int fileDataspaceHandle = -1;
	try {
		fileDataspaceHandle = H5.H5Dget_space(quoteDatasetHandle);
		long dimensions[] = new long[1];
		long maxDimensions[] = new long[1];
		@SuppressWarnings("unused")
		int status = H5.H5Sget_simple_extent_dims(fileDataspaceHandle, dimensions, maxDimensions);
		// Buffer sized from the dataset's current extent (rank-1 dataset).
		final byte[] readBuffer = new byte[Hdf5QuoteDatatype.QUOTE_DATATYPE_SIZE * (int) dimensions[0]];
		H5.H5Dread(quoteDatasetHandle,
				HDF5Constants.H5T_NATIVE_INT,
				HDF5Constants.H5S_ALL,
				HDF5Constants.H5S_ALL,
				HDF5Constants.H5P_DEFAULT,
				readBuffer);
	}
	catch (HDF5Exception e) {
		// Carry the failure context instead of throwing a bare exception.
		throw new EodDataSinkException("Failed to read quote dataset for "
				+ exchange + "/" + symbol + ": " + e.getMessage());
	}
	finally {
		// The dataspace handle is a native resource: always release it,
		// whether or not the read succeeded (the original leaked it).
		if (fileDataspaceHandle >= 0) {
			try {
				H5.H5Sclose(fileDataspaceHandle);
			} catch (HDF5Exception ignored) {
				// Best-effort cleanup; nothing useful can be done here.
			}
		}
	}
}
示例2: close
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception; //导入依赖的package包/类
/**
 * Closes all exchange datatype handles.
 *
 * <p>Each handle is closed independently so that a failure on one handle no
 * longer prevents the remaining handles from being released (this resolves
 * the old TODO: previously the first exception aborted all later closes and
 * leaked every handle after it).
 *
 * @throws EodDataSinkException if closing any of the handles failed; all
 *         handles are still attempted first.
 */
void close() throws EodDataSinkException {
	final int[] datatypeHandles = {
		codeDatatypeHandle,
		nameDatatypeHandle,
		countryDatatypeHandle,
		currencyDatatypeHandle,
		suffixDatatypeHandle,
		timezoneDatatypeHandle,
		isIntradayDatatypeHandle,
		intradayStartDateDatatypeHandle,
		hasIntradayProductDatatypeHandle,
		exchangeDatatypeHandle
	};
	boolean anyFailure = false;
	for (int handle : datatypeHandles) {
		try {
			H5.H5Tclose(handle);
		}
		catch (HDF5Exception ex) {
			// Log and continue: the remaining handles must still be closed.
			logger.error(ex);
			anyFailure = true;
		}
	}
	if (anyFailure) {
		throw new EodDataSinkException("Encountered problem while attempting to close exchange data types.");
	}
}
示例3: makeDataset
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception; //导入依赖的package包/类
/**
 * General dataset making recipe.
 * @param fullPath the dataset full path.
 * @param typeIdSupplier type id supplier lambda; the returned id is copied
 *        and is always closed by this method before returning.
 * @param dimensions array with the dimensions of the data.
 * @param data the data. It must be an array of the appropriate type given the type that is
 * going to be returned by the {@code typeIdSupplier}.
 * @return true iff the data-set needed to be created (it did not exist previously). It will
 * return false if the data-set existed even if it was modified in the process.
 * @throws HDF5LibException if the target path collides with a non-dataset object.
 */
private boolean makeDataset(final String fullPath, final IntSupplier typeIdSupplier, final long[] dimensions, final Object data) {
    checkCanWrite();
    int typeCopyId = -1;
    try {
        typeCopyId = typeIdSupplier.getAsInt();
        final Pair<String, String> pathAndName = splitPathInParentAndName(fullPath);
        final String groupPath = pathAndName.getLeft();
        final String dataSetName = pathAndName.getRight();
        makeGroup(groupPath);
        final int childType = findOutGroupChildType(groupPath, dataSetName, fullPath);
        if (childType == HDF5Constants.H5G_UNKNOWN) {
            // No child with that name yet: create the dataset, then fill it.
            createDataset(fullPath, typeCopyId, dimensions);
            writeDataset(fullPath, typeCopyId, data);
            return true;
        } else if (childType == HDF5Constants.H5G_DATASET) {
            // Dataset already exists: overwrite its contents in place.
            writeDataset(fullPath, typeCopyId, data);
            return false;
        } else {
            throw new HDF5LibException(String.format("problem trying to write dataset %s in file %s: there is a collision with a non-dataset object", fullPath, file));
        }
    } finally {
        if (typeCopyId != -1) {
            try {
                H5.H5Tclose(typeCopyId);
            } catch (final HDF5Exception ignored) {
                // Deliberately ignored: best-effort cleanup of the copied
                // type id; a close failure must not mask the real outcome.
            }
        }
    }
}
示例4: dispose
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception; //导入依赖的package包/类
// Releases the HDF5 file and image data held by this instance.
// NOTE(review): statement order matters below — h5file is nulled only
// after a successful close(), so a failed close leaves the field set.
@Override
public void dispose(){
super.dispose();
if(h5file!=null){
try {
h5file.close();
h5file=null;
} catch (HDF5Exception e) {
// Deliberately swallowed: dispose() must not propagate close failures.
}
}
if(imagedata!=null){
// close(0) — presumably releases the dataset/image handle; the exact
// meaning of the 0 argument is not visible here — TODO confirm.
imagedata.close(0);
imagedata=null;
}
}
示例5: hdf5_close
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception; //导入依赖的package包/类
/**
 * Closes the given HDF5 file, swallowing any HDF5 error so that callers do
 * not need their own try/catch. Failures are reported to stdout.
 *
 * @param h5 the open HDF5 file to close.
 */
public static void hdf5_close(H5File h5) {
    try {
        h5.close();
    } catch (HDF5Exception ex) {
        // Report and continue; a close failure is not recoverable here.
        System.out.println("Could not close HDF5 file?");
        ex.printStackTrace();
    }
}
示例6: createExchangeDataset
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception; //导入依赖的package包/类
/**
 * Creates the (chunked, extensible) exchange dataset in the HDF5 file.
 *
 * @param dimension initial extent of the rank-1 dataset.
 * @throws HDF5Exception        if any native HDF5 call fails.
 * @throws EodDataSinkException if the sink is closed or a required handle
 *                              is invalid.
 */
private synchronized void createExchangeDataset(long dimension)
		throws HDF5Exception, EodDataSinkException {
	if (!isOpen) {
		throw new EodDataSinkException("HDF5 File data sink closed!");
	}
	long dimensions[] = { dimension };
	long maxDimensions[] = { HDF5Constants.H5S_UNLIMITED };
	int exchangeDataspaceHandle = H5.H5Screate_simple(
			EXCHANGE_DATASET_RANK, dimensions, maxDimensions);
	int createProperties = -1;
	try {
		Hdf5ExchangeDatatype exchangeDatatype = new Hdf5ExchangeDatatype();
		exchangeDatatypeHandle = exchangeDatatype.getFileDatatypeHandle();
		createProperties = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
		@SuppressWarnings("unused")
		int status = H5.H5Pset_chunk(createProperties, EXCHANGE_DATASET_RANK,
				dimensions);
		if ((fileHandle >= 0) && (exchangeDataspaceHandle >= 0)
				&& (exchangeDatatypeHandle >= 0)) {
			exchangeDatasetHandle = H5.H5Dcreate(fileHandle,
					EXCHANGE_DATASET_NAME, exchangeDatatypeHandle,
					exchangeDataspaceHandle, HDF5Constants.H5P_DEFAULT,
					createProperties, HDF5Constants.H5P_DEFAULT);
		} else {
			throw new EodDataSinkException(
					"Failed to create exchange dataset from scratch.");
		}
	} finally {
		// Release intermediate native handles on every path (the original
		// leaked both); the dataset handle itself intentionally stays open.
		if (createProperties >= 0) {
			try { H5.H5Pclose(createProperties); } catch (HDF5Exception ignored) { }
		}
		if (exchangeDataspaceHandle >= 0) {
			try { H5.H5Sclose(exchangeDataspaceHandle); } catch (HDF5Exception ignored) { }
		}
	}
	logger.info("Successfully created new exchange dataset.");
}
示例7: createExchangeDataset
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception; //导入依赖的package包/类
/**
 * Creates the (chunked, extensible) exchange dataset in the HDF5 file.
 *
 * @param dimension initial extent of the rank-1 dataset.
 * @throws HDF5Exception        if any native HDF5 call fails.
 * @throws EodDataSinkException if a required handle is invalid.
 */
private void createExchangeDataset(long dimension) throws HDF5Exception, EodDataSinkException {
	long dimensions[] = { dimension };
	long maxDimensions[] = { HDF5Constants.H5S_UNLIMITED };
	int exchangeDataspaceHandle = H5.H5Screate_simple(EXCHANGE_DATASET_RANK, dimensions, maxDimensions);
	int createProperties = -1;
	try {
		Hdf5ExchangeDatatype exchangeDatatype = new Hdf5ExchangeDatatype();
		exchangeDatatypeHandle = exchangeDatatype.getFileDatatypeHandle();
		createProperties = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
		@SuppressWarnings("unused")
		int status = H5.H5Pset_chunk(createProperties, EXCHANGE_DATASET_RANK, dimensions);
		if ((fileHandle >= 0)
				&& (exchangeDataspaceHandle >= 0)
				&& (exchangeDatatypeHandle >= 0)) {
			exchangeDatasetHandle = H5.H5Dcreate(fileHandle,
					EXCHANGE_DATASET_NAME,
					exchangeDatatypeHandle,
					exchangeDataspaceHandle,
					HDF5Constants.H5P_DEFAULT,
					createProperties,
					HDF5Constants.H5P_DEFAULT);
		}
		else {
			throw new EodDataSinkException("Failed to create exchange dataset from scratch.");
		}
	} finally {
		// Release intermediate native handles on every path (the original
		// leaked both); the dataset handle itself intentionally stays open.
		if (createProperties >= 0) {
			try { H5.H5Pclose(createProperties); } catch (HDF5Exception ignored) { }
		}
		if (exchangeDataspaceHandle >= 0) {
			try { H5.H5Sclose(exchangeDataspaceHandle); } catch (HDF5Exception ignored) { }
		}
	}
	logger.info("Successfully created new exchange dataset.");
}
示例8: hdf5_close
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception; //导入依赖的package包/类
/**
 * Closes the given HDF5 file. Exists purely to centralize the try/catch so
 * callers can close without their own handler; failures go to stdout.
 *
 * @param h5 the open HDF5 file to close.
 */
public static void hdf5_close(H5File h5)
{
    try
    {
        h5.close();
    }
    catch (HDF5Exception ex)
    {
        // Report and continue; nothing more can be done at this point.
        System.out.println("Could not close HDF5 file?");
        ex.printStackTrace();
    }
}
示例9: createQuoteDataset
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception; //导入依赖的package包/类
/**
 * Creates the (chunked, extensible) quote dataset under the given location.
 *
 * @param dimension      initial extent of the rank-1 dataset.
 * @param locationHandle handle of the group/file under which to create it.
 * @throws HDF5Exception        if any native HDF5 call fails.
 * @throws EodDataSinkException if a required handle is invalid.
 */
private void createQuoteDataset(long dimension, int locationHandle) throws HDF5Exception, EodDataSinkException {
	long dimensions[] = { dimension };
	long maxDimensions[] = { HDF5Constants.H5S_UNLIMITED };
	int quoteDataspaceHandle = H5.H5Screate_simple(QUOTE_DATASET_RANK, dimensions, maxDimensions);
	quoteFileDatatypeHandle = Hdf5QuoteDatatype.getFileDatatypeHandle();
	quoteMemoryDatatypeHandle = Hdf5QuoteDatatype.getMemoryDatatypeHandle();
	int createProperties = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
	@SuppressWarnings("unused")
	int status = H5.H5Pset_chunk(createProperties, QUOTE_DATASET_RANK, QUOTEDATASET_CHUNK_DIMENSIONS);
	try {
		if ((fileHandle >= 0)
				&& (quoteDataspaceHandle >= 0)
				&& (quoteFileDatatypeHandle >= 0)) {
			// The redundant catch-and-rethrow of HDF5Exception was removed;
			// the exception propagates unchanged either way.
			quoteDatasetHandle = H5.H5Dcreate(locationHandle,
					QUOTE_DATASET_NAME,
					quoteFileDatatypeHandle,
					quoteDataspaceHandle,
					HDF5Constants.H5P_DEFAULT,
					createProperties,
					HDF5Constants.H5P_DEFAULT);
		}
		else {
			// Fixed copy-paste error: this is the quote dataset, not the
			// exchange dataset.
			throw new EodDataSinkException("Failed to create quote dataset from scratch.");
		}
	} finally {
		// Close intermediate handles on every path: the original leaked the
		// dataspace in the else-branch and leaked createProperties always.
		if (quoteDataspaceHandle >= 0) {
			try { H5.H5Sclose(quoteDataspaceHandle); } catch (HDF5Exception ignored) { }
		}
		if (createProperties >= 0) {
			try { H5.H5Pclose(createProperties); } catch (HDF5Exception ignored) { }
		}
	}
	logger.info("Successfully created new quote dataset.");
}
示例10: recursiveGetInfo
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception; //导入依赖的package包/类
/**
 * Walks the HDF5 group hierarchy rooted at {@code link}, logging every
 * member and collecting dataset descriptions into {@code dataSets_}.
 *
 * @param reader open HDF5 reader.
 * @param link   group whose members are inspected (recursing into subgroups).
 */
private void recursiveGetInfo(IHDF5Reader reader, HDF5LinkInformation link)
{
    final List<HDF5LinkInformation> members =
            reader.object().getGroupMemberInformation(link.getPath(), true);
    for (final HDF5LinkInformation info : members)
    {
        IJ.log(info.getPath() + ":" + info.getType());
        switch (info.getType())
        {
        case DATASET:
        {
            final HDF5DataSetInformation dsInfo =
                    reader.object().getDataSetInformation(info.getPath());
            final HDF5DataTypeInformation dsType = dsInfo.getTypeInformation();
            // Render dimensions as "d0xd1x..."; a scalar (rank 0) shows as "1".
            final String dimText;
            if (dsInfo.getRank() == 0)
            {
                dimText = "1";
            }
            else
            {
                final StringBuilder dims = new StringBuilder();
                dims.append(dsInfo.getDimensions()[0]);
                for (int i = 1; i < dsInfo.getRank(); ++i)
                {
                    dims.append('x').append(dsInfo.getDimensions()[i]);
                }
                dimText = dims.toString();
            }
            final String typeText = HDF5ImageJ.dsInfoToTypeString(dsInfo);
            // The voxel-size attribute is optional; fall back to "unknown".
            String element_size_um_text = "unknown";
            try {
                final float[] element_size_um =
                        reader.float32().getArrayAttr(info.getPath(), "element_size_um");
                element_size_um_text = "" + element_size_um[0] + "x"
                        + element_size_um[1] + "x" + element_size_um[2];
            }
            catch (HDF5Exception err) {
                IJ.log("Warning: Can't read attribute 'element_size_um' from dataset '" + info.getPath() + "':\n"
                        + err );
            }
            IJ.log(info.getPath() + ":" + dsInfo);
            dataSets_.add( new DataSetInfo( info.getPath(), dimText, typeText,
                    element_size_um_text));
            break;
        }
        case SOFT_LINK:
            IJ.log(info.getPath() + " -> " + info.tryGetSymbolicLinkTarget());
            break;
        case GROUP:
            // Depth-first descent into subgroups.
            recursiveGetInfo( reader, info);
            break;
        default:
            break;
        }
    }
}