本文整理匯總了Python中tensorflow.python.platform.gfile.Stat方法的典型用法代碼示例。如果您正苦於以下問題:Python gfile.Stat方法的具體用法?Python gfile.Stat怎麽用?Python gfile.Stat使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類tensorflow.python.platform.gfile
的用法示例。
在下文中一共展示了gfile.Stat方法的5個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Python代碼示例。
示例1: _SetPath
# 需要導入模塊: from tensorflow.python.platform import gfile [as 別名]
# 或者: from tensorflow.python.platform.gfile import Stat [as 別名]
def _SetPath(self, path):
  """Switches the watcher to a new event file path.

  Before switching, the size of the previously watched file (if any, and
  if it is not on GCS) is recorded in `self._finalized_sizes` so that
  out-of-order writes to it can be detected later. A failure to stat the
  old file is logged as an error but is not fatal.

  Args:
    path: The full path of the file to watch.
  """
  previous = self._path
  if previous and not io_wrapper.IsGCSPath(previous):
    try:
      # We're done with the old path, so remember its final size.
      final_size = gfile.Stat(previous).length
      logging.debug('Setting latest size of %s to %d', previous, final_size)
      self._finalized_sizes[previous] = final_size
    except errors.OpError as err:
      logging.error('Unable to get size of %s: %s', previous, err)

  self._path = path
  self._loader = self._loader_factory(path)
示例2: _HasOOOWrite
# 需要導入模塊: from tensorflow.python.platform import gfile [as 別名]
# 或者: from tensorflow.python.platform.gfile import Stat [as 別名]
def _HasOOOWrite(self, path):
  """Returns whether the path has had an out-of-order write."""
  # Compare the file's current size to the size recorded when we finished
  # reading it (None if we never finalized it).
  current_size = gfile.Stat(path).length
  finalized_size = self._finalized_sizes.get(path)
  if current_size == finalized_size:
    return False

  # A mismatch (or a file we never finalized) means the file changed after
  # we had already moved past it lexicographically.
  if finalized_size is None:
    logging.error('File %s created after file %s even though it\'s '
                  'lexicographically earlier', path, self._path)
  else:
    logging.error('File %s updated even though the current file is %s',
                  path, self._path)
  return True
示例3: __init__
# 需要導入模塊: from tensorflow.python.platform import gfile [as 別名]
# 或者: from tensorflow.python.platform.gfile import Stat [as 別名]
def __init__(self, dump_root, debug_dump_rel_path):
  """`DebugTensorDatum` constructor.

  Args:
    dump_root: (`str`) Debug dump root directory.
    debug_dump_rel_path: (`str`) Path to a debug dump file, relative to the
      `dump_root`. For example, suppose the debug dump root
      directory is `/tmp/tfdbg_1` and the dump file is at
      `/tmp/tfdbg_1/ns_1/node_a_0_DebugIdentity_123456789`, then
      the value of the debug_dump_rel_path should be
      `ns_1/node_a_0_DebugIdentity_123456789`.

  Raises:
    ValueError: If the base file name of the dump file does not conform to
      the dump file naming pattern:
      `node_name`_`output_slot`_`debug_op`_`timestamp`
  """
  base = os.path.basename(debug_dump_rel_path)
  if base.count("_") < 3:
    raise ValueError(
        "Dump file path does not conform to the naming pattern: %s" % base)

  # TODO(cais): Add hostname and pid to support dumps from distributed
  # sessions.

  # Split the file name once; from right to left it encodes: timestamp,
  # debug op, output slot, and the (possibly underscore-containing) node
  # base name.
  parts = base.split("_")
  self._extended_timestamp = parts[-1]
  # The extended timestamp may carry a "-<index>" suffix appended to
  # disambiguate files whose raw timestamps collided.
  if "-" in self._extended_timestamp:
    self._timestamp = int(
        self._extended_timestamp[:self._extended_timestamp.find("-")])
  else:
    self._timestamp = int(self._extended_timestamp)

  self._debug_op = parts[-2]
  self._output_slot = int(parts[-3])

  namespace = os.path.dirname(debug_dump_rel_path).replace("\\", "/")
  node_base_name = "_".join(parts[:-3])
  if not namespace or namespace == ".":
    self._node_name = node_base_name
  else:
    self._node_name = namespace + "/" + node_base_name

  self._file_path = os.path.join(dump_root, debug_dump_rel_path)
  # Best-effort: record the dump file's size only if it exists.
  self._dump_size_bytes = (gfile.Stat(self._file_path).length if
                           gfile.Exists(self._file_path) else None)

  self._run_fetches_info = None
  self._run_feed_keys_info = None
示例4: __init__
# 需要導入模塊: from tensorflow.python.platform import gfile [as 別名]
# 或者: from tensorflow.python.platform.gfile import Stat [as 別名]
def __init__(self, dump_root, debug_dump_rel_path):
  """`DebugTensorDatum` constructor.

  Args:
    dump_root: (`str`) Debug dump root directory.
    debug_dump_rel_path: (`str`) Path to a debug dump file, relative to the
      `dump_root`. For example, suppose the debug dump root
      directory is `/tmp/tfdbg_1` and the dump file is at
      `/tmp/tfdbg_1/ns_1/node_a_0_DebugIdentity_123456789`, then
      the value of the debug_dump_rel_path should be
      `ns_1/node_a_0_DebugIdentity_123456789`.

  Raises:
    ValueError: If the base file name of the dump file does not conform to
      the dump file naming pattern:
      `node_name`_`output_slot`_`debug_op`_`timestamp`
  """
  base = os.path.basename(debug_dump_rel_path)
  if base.count("_") < 3:
    raise ValueError(
        "Dump file path does not conform to the naming pattern: %s" % base)

  # TODO(cais): Add hostname and pid to support dumps from distributed
  # sessions.

  # Split the file name once; from right to left it encodes: timestamp,
  # debug op, output slot, and the (possibly underscore-containing) node
  # base name.
  parts = base.split("_")
  self._timestamp = int(parts[-1])
  self._debug_op = parts[-2]
  self._output_slot = int(parts[-3])

  namespace = os.path.dirname(debug_dump_rel_path).replace("\\", "/")
  node_base_name = "_".join(parts[:-3])
  if not namespace or namespace == ".":
    self._node_name = node_base_name
  else:
    self._node_name = namespace + "/" + node_base_name

  self._file_path = os.path.join(dump_root, debug_dump_rel_path)
  # Best-effort: record the dump file's size only if it exists.
  self._dump_size_bytes = (gfile.Stat(self._file_path).length if
                           gfile.Exists(self._file_path) else None)

  self._run_fetches_info = None
  self._run_feed_keys_info = None
示例5: __init__
# 需要導入模塊: from tensorflow.python.platform import gfile [as 別名]
# 或者: from tensorflow.python.platform.gfile import Stat [as 別名]
def __init__(self, dump_root, debug_dump_rel_path):
  """`DebugTensorDatum` constructor.

  Args:
    dump_root: (`str`) Debug dump root directory. This path should not include
      the path component that represents the device name (see also below).
    debug_dump_rel_path: (`str`) Path to a debug dump file, relative to the
      `dump_root`. The first item of this relative path is assumed to be
      a path representing the name of the device that the Tensor belongs to.
      See `device_path_to_device_name` for more details on the device path.
      For example, suppose the debug dump root
      directory is `/tmp/tfdbg_1` and the dump file is at
      `/tmp/tfdbg_1/<device_path>/ns_1/node_a_0_DebugIdentity_123456789`,
      then the value of the debug_dump_rel_path should be
      `<device_path>/ns_1/node_a_0_DebugIdentity_123456789`.

  Raises:
    ValueError: If the base file name of the dump file does not conform to
      the dump file naming pattern:
      `node_name`_`output_slot`_`debug_op`_`timestamp`
  """
  # The first path component names the device; the last is the dump file.
  path_components = os.path.normpath(debug_dump_rel_path).split(os.sep)
  self._device_name = device_path_to_device_name(path_components[0])
  base = path_components[-1]
  if base.count("_") < 3:
    raise ValueError(
        "Dump file path does not conform to the naming pattern: %s" % base)

  # Split the file name once; from right to left it encodes: timestamp,
  # debug op, output slot, and the (possibly underscore-containing) node
  # base name.
  parts = base.split("_")
  self._extended_timestamp = parts[-1]
  # The extended timestamp may carry a "-<index>" suffix appended to
  # disambiguate files whose raw timestamps collided.
  if "-" in self._extended_timestamp:
    self._timestamp = int(
        self._extended_timestamp[:self._extended_timestamp.find("-")])
  else:
    self._timestamp = int(self._extended_timestamp)

  self._debug_op = parts[-2]
  self._output_slot = int(parts[-3])

  node_base_name = "_".join(parts[:-3])
  # Interior path components (between device and file) form the node's
  # name scope.
  self._node_name = "/".join(path_components[1:-1] + [node_base_name])

  self._file_path = os.path.join(dump_root, debug_dump_rel_path)
  # Best-effort: record the dump file's size only if it exists.
  self._dump_size_bytes = (gfile.Stat(self._file_path).length if
                           gfile.Exists(self._file_path) else None)
開發者ID:PacktPublishing,項目名稱:Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda,代碼行數:49,代碼來源:debug_data.py