This article collects typical usage examples of the Python method tables.open_file. If you have been wondering how exactly to use tables.open_file in Python, or what it is good for, the curated code examples here may help. You can also explore further usage examples from the tables module it belongs to.
Below, 15 code examples of the tables.open_file method are shown, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
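Before the examples, here is a minimal, self-contained sketch of the basic tables.open_file pattern (the file and node names are placeholders, not taken from any example below):

import tables

# Open (or create) an HDF5 file; modes mirror the built-in open():
# 'r' read-only, 'w' create/truncate, 'a' append, 'r+' read/write.
with tables.open_file('example.h5', mode='w', title='demo') as h5file:
    group = h5file.create_group('/', 'detector', 'Detector data')
    h5file.create_array(group, 'readings', [1, 2, 3], 'Raw readings')

with tables.open_file('example.h5', mode='r') as h5file:
    print(h5file.get_node('/detector/readings')[:])  # -> [1 2 3]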
Example 1: get_dbnames
# Required import: import tables [as alias]
# Or: from tables import open_file [as alias]
def get_dbnames(filepath):
    '''Returns the database names of the given Gm database (HDF5 file).
    The file should have been created with the `GMTableParser.parse`
    method.

    :param filepath: the path to the HDF5 file
    :return: a list of strings identifying the database names in the file
    '''
    with tables.open_file(filepath, 'r') as h5file:
        root = h5file.get_node('/')
        return [group._v_name for group in  # pylint: disable=protected-access
                h5file.list_nodes(root, classname=tables.Group.__name__)]
    # note: h5file.walk_groups() might raise a ClosedNodeError.
    # This error is badly documented (as is much of PyTables);
    # the only mention is in the PyTables PDF doc: "ClosedNodeError: The
    # operation can not be completed because the node is closed. For
    # instance, listing the children of a closed group is not allowed".
    # I suspect it relates to groups deleted / overwritten and the way
    # HDF5 files mark portions of files as "empty". However,
    # the list_nodes call above seems not to raise anymore.
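A quick usage sketch for the function above (the file name is a placeholder):

# Hypothetical call; 'gmdb.h5' stands in for a real GM database file
# produced by GMTableParser.parse:
names = get_dbnames('gmdb.h5')
print(names)  # e.g. ['db1', 'db2']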
Example 2: prepare
# Required import: import tables [as alias]
# Or: from tables import open_file [as alias]
def prepare(self):
    # (assumes `from os import path, makedirs` at module level)
    assert self.inputs, "No input connected to the hdf_saver!"
    assert len(self.inputs) == 1, \
        "Cannot link more than one block to a hdf_saver!"
    d = path.dirname(self.filename)
    if not path.exists(d):
        # Create the folder if it does not exist
        try:
            makedirs(d)
        except OSError:
            assert path.exists(d), "Error creating " + d
    if path.exists(self.filename):
        # If the file already exists, append a number to the name
        print("[hdf_saver] WARNING!", self.filename, "already exists!")
        name, ext = path.splitext(self.filename)
        i = 1
        while path.exists(name + "_%05d" % i + ext):
            i += 1
        self.filename = name + "_%05d" % i + ext
        print("[hdf_saver] Using", self.filename, "instead!")
    self.hfile = tables.open_file(self.filename, "w")
    for name, value in self.metadata.items():
        self.hfile.create_array(self.hfile.root, name, value)
Example 3: load_files
# Required import: import tables [as alias]
# Or: from tables import open_file [as alias]
def load_files(self):
    # note: the files are deliberately kept open, so that the stored
    # nodes remain readable
    if self.target_lfiles is not None:
        for target_lfile in self.target_lfiles:
            target_lang = tables.open_file(target_lfile, 'r')
            self.target_langs.append([target_lang.get_node(self.table_name),
                                      target_lang.get_node(self.index_name)])
    for source_lfile in self.source_lfiles:
        source_lang = tables.open_file(source_lfile, 'r')
        self.source_langs.append([source_lang.get_node(self.table_name),
                                  source_lang.get_node(self.index_name)])
    self.data_len = self.source_langs[-1][1].shape[0]
    self.idxs = np.arange(self.data_len)
    if self.shuffle:
        np.random.shuffle(self.idxs)
Example 4: touch
# Required import: import tables [as alias]
# Or: from tables import open_file [as alias]
def touch(path: Union[str, Path]):
    """Creates an HDF file, wiping an existing file if necessary.

    If the given path is valid for an HDF file, a new (empty) HDF file
    is created at that location.

    Parameters
    ----------
    path
        The path to the HDF file.

    Raises
    ------
    ValueError
        If the given path is not valid for creating an HDF file.
    """
    path = _get_valid_hdf_path(path)
    with tables.open_file(str(path), mode='w'):
        pass
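The _get_valid_hdf_path helper is not shown in this example; a plausible sketch (an assumption, not the project's actual implementation) would validate the file extension and return a Path:

from pathlib import Path

def _get_valid_hdf_path(path):
    # Hypothetical reconstruction: accept only typical HDF5 suffixes.
    path = Path(path)
    if path.suffix.lower() not in ('.hdf', '.h5'):
        raise ValueError(f"{path} does not point to a valid HDF file.")
    return path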
Example 5: remove
# Required import: import tables [as alias]
# Or: from tables import open_file [as alias]
def remove(path: Union[str, Path], entity_key: str):
    """Removes a piece of data from an HDF file.

    Parameters
    ----------
    path
        The path to the HDF file to remove the data from.
    entity_key
        A representation of the internal HDF path where the data is located.

    Raises
    ------
    ValueError
        If the path or entity_key are improperly formatted.
    """
    path = _get_valid_hdf_path(path)
    entity_key = EntityKey(entity_key)
    with tables.open_file(str(path), mode='a') as file:
        file.remove_node(entity_key.path, recursive=True)
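A usage sketch (the file name and key are placeholders; EntityKey presumably maps a dotted key such as 'population.structure' to the internal path '/population/structure'):

# Hypothetical call: removes the mapped node, and all of its children,
# from artifact.hdf.
remove('artifact.hdf', 'population.structure')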
Example 6: get_keys
# Required import: import tables [as alias]
# Or: from tables import open_file [as alias]
def get_keys(path: str) -> List[str]:
    """Gets key representations of all paths in an HDF file.

    Parameters
    ----------
    path
        The path to the HDF file.

    Returns
    -------
    A list of key representations of the internal paths in the HDF.
    """
    path = _get_valid_hdf_path(path)
    with tables.open_file(str(path)) as file:
        keys = _get_keys(file.root)
    return keys
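The private helper _get_keys is not shown here; a minimal sketch (an assumption about its behavior) that recursively collects the internal paths of all leaf nodes could look like this:

def _get_keys(node):
    # Hypothetical reconstruction: walk groups depth-first and return
    # the HDF5 path of every non-group child.
    keys = []
    for child in node._v_children.values():
        if isinstance(child, tables.Group):
            keys.extend(_get_keys(child))
        else:
            keys.append(child._v_pathname)
    return keys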
Example 7: __init__
# Required import: import tables [as alias]
# Or: from tables import open_file [as alias]
def __init__(self, path, name, logging_struct, T_per_file=500000):
    name = "__".join(name.split("/"))  # escape slash character in name
    try:
        from tables import open_file
        self.path = path
        self.name = name
        self.T_per_file = T_per_file
        self.hdf_path = os.path.join(path, "hdf")
        self.folder_name = os.path.join(self.hdf_path, name)
        if not os.path.isdir(self.folder_name):
            os.makedirs(self.folder_name)
        self.logging_struct = logging_struct
    except Exception as e:
        self.logging_struct.py_logger.warning(
            "Could not execute HDF logger save - no disk space, "
            "or no permissions? Error message: {}, path: {}, name: {}"
            .format(e, path, name))
Example 8: __init__
# Required import: import tables [as alias]
# Or: from tables import open_file [as alias]
def __init__(self, *args, **kwargs):
    """
    Create a new datawriter for hdf5 files
    :param args:
    :param kwargs:
    """
    # init base class
    super(hdf5, self).__init__(*args, **kwargs)
    # store init item only if dbinit
    if not kwargs.get('dbappend', False):
        # Create a new file, which needs to be closed after the sampling
        self.db = tables.open_file(self.dbname + '.h5', 'w', self.dbname)
        self.table = self.db.create_table('/', self.dbname,
                                          description=self.get_table_def())
    else:
        # Continue writing to the existing file
        self.db = tables.open_file(self.dbname + '.h5', 'a')
        self.table = self.db.root[self.dbname]
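For context, rows are typically appended to such a table through its row accessor and flushed before the file is closed; a minimal sketch (assuming a hypothetical record dict whose keys match the table description):

row = self.table.row
for key, value in record.items():  # `record` is a hypothetical sample
    row[key] = value
row.append()
self.table.flush()  # and eventually self.db.close() when sampling ends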
Example 9: main
# Required import: import tables [as alias]
# Or: from tables import open_file [as alias]
def main():
    parser = argparse.ArgumentParser(description="Writes names of samples "
                                     "contained in HDF5 file to stdout")
    parser.add_argument("h5file", help="HDF5 file containing /samples table")
    options = parser.parse_args()
    h5f = tables.open_file(options.h5file)
    for node in h5f.root:
        if node.name.startswith("samples"):
            _, chr_name = node.name.split("_", 1)
            sys.stdout.write("%s:\n" % chr_name)
            for row in node:
                sys.stdout.write(" %s\n" % row['name'])
            sys.stdout.write("\n")
        else:
            sys.stderr.write("%s does not contain samples table\n"
                             % options.h5file)
            exit(2)
    h5f.close()
Example 10: load_model
# Required import: import tables [as alias]
# Or: from tables import open_file [as alias]
def load_model(self):
    model_file = os.path.join(self.forest_dir, self.forest_name)
    with tables.open_file(model_file, filters=self.comp_filt) as mfile:
        self.model = {
            "thrs": mfile.get_node("/thrs")[:],
            "fids": mfile.get_node("/fids")[:],
            "cids": mfile.get_node("/cids")[:],
            "edge_bnds": mfile.get_node("/edge_bnds")[:].flatten(),
            "edge_pts": mfile.get_node("/edge_pts")[:].flatten(),
            "n_seg": mfile.get_node("/n_seg")[:].flatten(),
            "segs": mfile.get_node("/segs")[:],
        }
    self.trained = True
    return self.model
Example 11: setUp
# Required import: import tables [as alias]
# Or: from tables import open_file [as alias]
def setUp(self):
    # Load data
    Y = np.loadtxt(os.path.join(base_path, self.datafile))
    m = deepgp.DeepGP([Y.shape[1], 5, 2], Y,
                      kernels=[GPy.kern.RBF(5, ARD=True),
                               GPy.kern.RBF(2, ARD=True)],
                      num_inducing=2, back_constraint=False)
    if not os.path.exists(os.path.join(base_path, self.modelfile)):
        # Create the model file
        m.randomize()
        m._trigger_params_changed()
        m.save(os.path.join(base_path, self.modelfile))
        # the with-block closes the file; no explicit close() needed
        with h5py.File(os.path.join(base_path, self.modelfile), 'r+') as f:
            L = f.create_dataset("L", (1,), dtype=float)
            L[:] = m._log_marginal_likelihood
    # Load model parameters
    with tables.open_file(os.path.join(base_path, self.modelfile), 'r') as f:
        m.param_array[:] = f.root.param_array[:]
        L = float(f.root.L[:])
    m._trigger_params_changed()
    self.model = m
    self.L = L
Example 12: test_muon_reconstruction
# Required import: import tables [as alias]
# Or: from tables import open_file [as alias]
def test_muon_reconstruction(tmpdir):
    from ctapipe.tools.muon_reconstruction import MuonAnalysis
    with tempfile.NamedTemporaryFile(suffix=".hdf5") as f:
        assert (
            run_tool(
                MuonAnalysis(),
                argv=[f"--input={LST_MUONS}", f"--output={f.name}", "--overwrite"],
            )
            == 0
        )
        with tables.open_file(f.name) as t:
            table = t.root.dl1.event.telescope.parameters.muons[:]
        assert len(table) > 20
        assert np.count_nonzero(np.isnan(table["muonring_radius"])) == 0
    assert run_tool(MuonAnalysis(), ["--help-all"]) == 0
Example 13: test_units
# Required import: import tables [as alias]
# Or: from tables import open_file [as alias]
def test_units():
    class WithUnits(Container):
        inverse_length = Field(5 / u.m, "foo")
        time = Field(1 * u.s, "bar", unit=u.s)
        grammage = Field(2 * u.g / u.cm ** 2, "baz", unit=u.g / u.cm ** 2)

    c = WithUnits()

    with tempfile.NamedTemporaryFile() as f:
        with HDF5TableWriter(f.name, "data") as writer:
            writer.write("units", c)

        # renamed the handle to avoid shadowing the temp file `f`
        with tables.open_file(f.name, "r") as h5file:
            assert h5file.root.data.units.attrs["inverse_length_UNIT"] == "m-1"
            assert h5file.root.data.units.attrs["time_UNIT"] == "s"
            assert h5file.root.data.units.attrs["grammage_UNIT"] == "cm-2 g"
Example 14: write_image_annotation_pairs_to_h5
# Required import: import tables [as alias]
# Or: from tables import open_file [as alias]
def write_image_annotation_pairs_to_h5(filename_pairs, h5_filename):
    h = 512
    w = 1024
    atom = tables.Int8Atom()
    h5_file = tables.open_file(h5_filename, mode='a')
    array_x = h5_file.create_earray(h5_file.root, 'X', atom, (0, h, w, 3))
    array_y = h5_file.create_earray(h5_file.root, 'Y', atom, (0, h, w))
    for img_path, annotation_path in tqdm(filename_pairs):
        img = misc.imread(img_path)
        img = misc.imresize(img, (h, w))
        annotation = misc.imread(annotation_path)
        annotation = custom_ignore_labels(annotation)
        annotation = misc.imresize(annotation, (h, w), 'nearest')
        array_x.append(np.expand_dims(img, 0))
        array_y.append(np.expand_dims(annotation, 0))
    h5_file.close()
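A usage sketch (the paths below are placeholders for real image/annotation pairs):

pairs = [('images/0001.png', 'labels/0001.png'),
         ('images/0002.png', 'labels/0002.png')]  # hypothetical paths
write_image_annotation_pairs_to_h5(pairs, 'segmentation_512x1024.h5')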
Example 15: write
# Required import: import tables [as alias]
# Or: from tables import open_file [as alias]
def write(self, frames):
    """
    Write the frames to the target HDF5 file, using the format used by
    ``pd.Panel.to_hdf``.

    Parameters
    ----------
    frames : iter[(int, DataFrame)] or dict[int -> DataFrame]
        An iterable or other mapping of sid to the corresponding OHLCV
        pricing data.
    """
    # note: pd.Panel was removed in pandas 0.25, so this example
    # requires an older pandas version
    with HDFStore(self._path, 'w',
                  complevel=self._complevel, complib=self._complib) \
            as store:
        panel = pd.Panel.from_dict(dict(frames))
        panel.to_hdf(store, 'updates')
    with tables.open_file(self._path, mode='r+') as h5file:
        h5file.set_node_attr('/', 'version', 0)