This article collects typical usage examples of the tensorflow.python.platform.gfile.Stat method in Python. If you have been wondering what gfile.Stat does, how to call it, and what it looks like in real code, the curated examples below may help. You can also explore the other members of the tensorflow.python.platform.gfile module.
Below are 5 code examples of gfile.Stat, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
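Before the examples, here is a minimal sketch of the behavior they all rely on: gfile.Stat(path) returns a file-statistics object whose length attribute holds the file size in bytes. The path below is a made-up placeholder, and the sketch assumes a TensorFlow installation in which this internal module is importable.

# Minimal sketch; '/tmp/some_file.txt' is a hypothetical path.
from tensorflow.python.platform import gfile

path = '/tmp/some_file.txt'
if gfile.Exists(path):
  size = gfile.Stat(path).length  # file size in bytes
  print('size of %s: %d bytes' % (path, size))
else:
  print('%s does not exist' % path)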
Example 1: _SetPath
# Required import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import Stat [as alias]
def _SetPath(self, path):
  """Sets the current path to watch for new events.

  This also records the size of the old path, if any. If the size can't be
  found, an error is logged.

  Args:
    path: The full path of the file to watch.
  """
  old_path = self._path
  if old_path and not io_wrapper.IsGCSPath(old_path):
    try:
      # We're done with the path, so store its size.
      size = gfile.Stat(old_path).length
      logging.debug('Setting latest size of %s to %d', old_path, size)
      self._finalized_sizes[old_path] = size
    except errors.OpError as e:
      logging.error('Unable to get size of %s: %s', old_path, e)

  self._path = path
  self._loader = self._loader_factory(path)
Example 2: _HasOOOWrite
# Required import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import Stat [as alias]
def _HasOOOWrite(self, path):
  """Returns whether the path has had an out-of-order write."""
  # Check the sizes of each path before the current one.
  size = gfile.Stat(path).length
  old_size = self._finalized_sizes.get(path, None)
  if size != old_size:
    if old_size is None:
      logging.error('File %s created after file %s even though it\'s '
                    'lexicographically earlier', path, self._path)
    else:
      logging.error('File %s updated even though the current file is %s',
                    path, self._path)
    return True
  else:
    return False
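Taken together, Examples 1 and 2 implement a simple bookkeeping scheme: record a file's size once you are done reading it, then treat any later size change as an out-of-order write. The standalone sketch below shows the same idea outside the class; the function and variable names are hypothetical and not part of the original code.

# Hedged standalone sketch of the size-tracking pattern; names are hypothetical.
from tensorflow.python.platform import gfile

_finalized_sizes = {}

def finalize(path):
  # Record the size of a file we are done reading.
  _finalized_sizes[path] = gfile.Stat(path).length

def has_out_of_order_write(path):
  # A finalized file whose size has since changed was written out of order.
  current_size = gfile.Stat(path).length
  return path in _finalized_sizes and current_size != _finalized_sizes[path]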
Example 3: __init__
# Required import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import Stat [as alias]
def __init__(self, dump_root, debug_dump_rel_path):
  """`DebugTensorDatum` constructor.

  Args:
    dump_root: (`str`) Debug dump root directory.
    debug_dump_rel_path: (`str`) Path to a debug dump file, relative to the
      `dump_root`. For example, suppose the debug dump root
      directory is `/tmp/tfdbg_1` and the dump file is at
      `/tmp/tfdbg_1/ns_1/node_a_0_DebugIdentity_123456789`, then
      the value of the debug_dump_rel_path should be
      `ns_1/node_a_0_DebugIdentity_123456789`.

  Raises:
    ValueError: If the base file name of the dump file does not conform to
      the dump file naming pattern:
      `node_name`_`output_slot`_`debug_op`_`timestamp`
  """
  base = os.path.basename(debug_dump_rel_path)
  if base.count("_") < 3:
    raise ValueError(
        "Dump file path does not conform to the naming pattern: %s" % base)

  # TODO(cais): Add hostname and pid to support dumps from distributed
  # sessions.

  self._extended_timestamp = base.split("_")[-1]
  # It may include an index suffix at the end if file path collision happened
  # due to identical timestamps.
  if "-" in self._extended_timestamp:
    self._timestamp = int(
        self._extended_timestamp[:self._extended_timestamp.find("-")])
  else:
    self._timestamp = int(self._extended_timestamp)

  self._debug_op = base.split("_")[-2]
  self._output_slot = int(base.split("_")[-3])

  namespace = os.path.dirname(debug_dump_rel_path).replace("\\", "/")
  node_base_name = "_".join(base.split("_")[:-3])
  if not namespace or namespace == ".":
    self._node_name = node_base_name
  else:
    self._node_name = namespace + "/" + node_base_name

  self._file_path = os.path.join(dump_root, debug_dump_rel_path)
  self._dump_size_bytes = (gfile.Stat(self._file_path).length if
                           gfile.Exists(self._file_path) else None)

  self._run_fetches_info = None
  self._run_feed_keys_info = None
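Most of this constructor is string surgery on the dump file name. The hedged sketch below walks the same parse for a made-up path outside the class, ending with the gfile.Stat call that reads the dump size (None whenever the file does not actually exist):

# Illustrative parse of a hypothetical dump path; not part of the original class.
import os
from tensorflow.python.platform import gfile

dump_root = "/tmp/tfdbg_1"
rel_path = "ns_1/node_a_0_DebugIdentity_123456789"

base = os.path.basename(rel_path)       # "node_a_0_DebugIdentity_123456789"
timestamp = int(base.split("_")[-1])    # 123456789
debug_op = base.split("_")[-2]          # "DebugIdentity"
output_slot = int(base.split("_")[-3])  # 0
node_name = os.path.dirname(rel_path) + "/" + "_".join(base.split("_")[:-3])
# node_name == "ns_1/node_a"

file_path = os.path.join(dump_root, rel_path)
dump_size = gfile.Stat(file_path).length if gfile.Exists(file_path) else None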
Example 4: __init__
# Required import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import Stat [as alias]
def __init__(self, dump_root, debug_dump_rel_path):
  """`DebugTensorDatum` constructor.

  Args:
    dump_root: (`str`) Debug dump root directory.
    debug_dump_rel_path: (`str`) Path to a debug dump file, relative to the
      `dump_root`. For example, suppose the debug dump root
      directory is `/tmp/tfdbg_1` and the dump file is at
      `/tmp/tfdbg_1/ns_1/node_a_0_DebugIdentity_123456789`, then
      the value of the debug_dump_rel_path should be
      `ns_1/node_a_0_DebugIdentity_123456789`.

  Raises:
    ValueError: If the base file name of the dump file does not conform to
      the dump file naming pattern:
      `node_name`_`output_slot`_`debug_op`_`timestamp`
  """
  base = os.path.basename(debug_dump_rel_path)
  if base.count("_") < 3:
    raise ValueError(
        "Dump file path does not conform to the naming pattern: %s" % base)

  # TODO(cais): Add hostname and pid to support dumps from distributed
  # sessions.
  self._timestamp = int(base.split("_")[-1])
  self._debug_op = base.split("_")[-2]
  self._output_slot = int(base.split("_")[-3])

  namespace = os.path.dirname(debug_dump_rel_path).replace("\\", "/")
  node_base_name = "_".join(base.split("_")[:-3])
  if not namespace or namespace == ".":
    self._node_name = node_base_name
  else:
    self._node_name = namespace + "/" + node_base_name

  self._file_path = os.path.join(dump_root, debug_dump_rel_path)
  self._dump_size_bytes = (gfile.Stat(self._file_path).length if
                           gfile.Exists(self._file_path) else None)

  self._run_fetches_info = None
  self._run_feed_keys_info = None
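Example 4 is an earlier variant of the same constructor: it parses the trailing timestamp directly and does not handle the "-N" collision suffix that Examples 3 and 5 strip off. The difference boils down to one line (the file name below is made up):

# Hypothetical file name; this plain parse fails if a "-N" suffix is present.
base = "node_a_0_DebugIdentity_123456789"
timestamp = int(base.split("_")[-1])  # 123456789; would raise ValueError on "123456789-1"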
Example 5: __init__
# Required import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import Stat [as alias]
def __init__(self, dump_root, debug_dump_rel_path):
  """`DebugTensorDatum` constructor.

  Args:
    dump_root: (`str`) Debug dump root directory. This path should not include
      the path component that represents the device name (see also below).
    debug_dump_rel_path: (`str`) Path to a debug dump file, relative to the
      `dump_root`. The first item of this relative path is assumed to be
      a path representing the name of the device that the Tensor belongs to.
      See `device_path_to_device_name` for more details on the device path.
      For example, suppose the debug dump root
      directory is `/tmp/tfdbg_1` and the dump file is at
      `/tmp/tfdbg_1/<device_path>/ns_1/node_a_0_DebugIdentity_123456789`,
      then the value of the debug_dump_rel_path should be
      `<device_path>/ns_1/node_a_0_DebugIdentity_123456789`.

  Raises:
    ValueError: If the base file name of the dump file does not conform to
      the dump file naming pattern:
      `node_name`_`output_slot`_`debug_op`_`timestamp`
  """
  path_components = os.path.normpath(debug_dump_rel_path).split(os.sep)
  self._device_name = device_path_to_device_name(path_components[0])
  base = path_components[-1]
  if base.count("_") < 3:
    raise ValueError(
        "Dump file path does not conform to the naming pattern: %s" % base)

  self._extended_timestamp = base.split("_")[-1]
  # It may include an index suffix at the end if file path collision happened
  # due to identical timestamps.
  if "-" in self._extended_timestamp:
    self._timestamp = int(
        self._extended_timestamp[:self._extended_timestamp.find("-")])
  else:
    self._timestamp = int(self._extended_timestamp)

  self._debug_op = base.split("_")[-2]
  self._output_slot = int(base.split("_")[-3])

  node_base_name = "_".join(base.split("_")[:-3])
  self._node_name = "/".join(path_components[1:-1] + [node_base_name])

  self._file_path = os.path.join(dump_root, debug_dump_rel_path)
  self._dump_size_bytes = (gfile.Stat(self._file_path).length if
                           gfile.Exists(self._file_path) else None)
Author: PacktPublishing, Project: Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda, Lines: 49, Source file: debug_data.py
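Example 5 additionally peels a device-name directory off the front of the relative path and keeps the collision-suffix timestamp handling. Below is a hedged sketch of just that path handling; the device directory name is a made-up placeholder, and the real mapping goes through device_path_to_device_name, which is not shown in this section.

# Hedged sketch of Example 5's path handling; all paths here are made up.
import os

rel_path = os.path.join("device_CPU_0",  # placeholder device directory
                        "ns_1", "node_a_0_DebugIdentity_123456789-1")

path_components = os.path.normpath(rel_path).split(os.sep)
device_dir = path_components[0]  # would be passed to device_path_to_device_name()
base = path_components[-1]       # "node_a_0_DebugIdentity_123456789-1"

extended_timestamp = base.split("_")[-1]           # "123456789-1"
timestamp = int(extended_timestamp.split("-")[0])  # 123456789, with or without a suffix

node_base_name = "_".join(base.split("_")[:-3])    # "node_a"
node_name = "/".join(path_components[1:-1] + [node_base_name])  # "ns_1/node_a"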