This article collects typical usage examples of the Python method tensorflow.python.platform.gfile.ListDirectory. If you are unsure what gfile.ListDirectory does, how to call it, or what real code that uses it looks like, the curated examples below may help. You can also browse the enclosing module, tensorflow.python.platform.gfile, for further usage examples.
The following shows 12 code examples of gfile.ListDirectory, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code samples.
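As a quick orientation before the examples, here is a minimal sketch of the call itself (the directory path is hypothetical): gfile.ListDirectory takes a directory path and returns the entry names directly under it, without joining them back onto the parent path.

from tensorflow.python.platform import gfile

log_dir = "/tmp/my_logdir"  # hypothetical path
for name in gfile.ListDirectory(log_dir):
  # Each item is a bare entry name, e.g. an events file or a subdirectory name.
  print(name)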
Example 1: generators_from_logdir
# Module to import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import ListDirectory [as alias]
def generators_from_logdir(logdir):
  """Returns a list of event generators for subdirectories with event files.

  The number of generators returned should equal the number of directories
  within logdir that contain event files. If only logdir contains event files,
  returns a list of length one.

  Args:
    logdir: A log directory that contains event files.

  Returns:
    List of event generators for each subdirectory with event files.
  """
  subdirs = event_multiplexer.GetLogdirSubdirectories(logdir)
  generators = [itertools.chain(*[
      generator_from_event_file(os.path.join(subdir, f))
      for f in gfile.ListDirectory(subdir)
      if event_accumulator.IsTensorFlowEventsFile(os.path.join(subdir, f))
  ]) for subdir in subdirs]
  return generators
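A possible way to consume the generators returned above, assuming the helper is defined as in Example 1 and that "/tmp/train_logs" is a hypothetical log directory:

for gen in generators_from_logdir("/tmp/train_logs"):  # hypothetical path
  for event in gen:
    # Each item is expected to be an Event protocol buffer read from an
    # events file in one subdirectory.
    print(event.wall_time, event.step)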
Example 2: ListPlugins
# Module to import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import ListDirectory [as alias]
def ListPlugins(logdir):
  """List all the plugins that have registered assets in logdir.

  If the plugins_dir does not exist, it returns an empty list. This maintains
  compatibility with old directories that have no plugins written.

  Args:
    logdir: A directory that was created by a TensorFlow events writer.

  Returns:
    a list of plugin names, as strings
  """
  plugins_dir = os.path.join(logdir, _PLUGINS_DIR)
  if not gfile.IsDirectory(plugins_dir):
    return []
  entries = gfile.ListDirectory(plugins_dir)
  return [x for x in entries if _IsDirectory(plugins_dir, x)]
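_IsDirectory is referenced but not shown in this example; a plausible helper with that behavior (an assumption, not the original implementation) could look like this:

import os
from tensorflow.python.platform import gfile

def _IsDirectory(parent, item):
  """Returns whether the path parent/item is a directory."""
  return gfile.IsDirectory(os.path.join(parent, item))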
Example 3: ListDirectoryAbsolute
# Module to import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import ListDirectory [as alias]
def ListDirectoryAbsolute(directory):
  """Yields all files in the given directory. The paths are absolute."""
  return (os.path.join(directory, path)
          for path in gfile.ListDirectory(directory))
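Since ListDirectory returns bare entry names, the generator above simply joins each name back onto the directory. A self-contained equivalent with a hypothetical path:

import os
from tensorflow.python.platform import gfile

log_dir = "/tmp/logs"  # hypothetical path
absolute_paths = (os.path.join(log_dir, name)
                  for name in gfile.ListDirectory(log_dir))
for path in absolute_paths:
  print(path)  # e.g. "/tmp/logs/<entry name>"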
Example 4: get_paths
# Module to import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import ListDirectory [as alias]
def get_paths(base_dir, parser):
  """Gets a list of Paths in a given directory.

  Args:
    base_dir: directory.
    parser: a function which gets the raw Path and can augment it with
      information such as the export_version, or ignore the path by returning
      None. An example parser may extract the export version from a path
      such as "/tmp/exports/100", and another may extract it from a full file
      name such as "/tmp/checkpoint-99.out".

  Returns:
    A list of Paths contained in the base directory with the parsing function
    applied.
    By default the following fields are populated,
      - Path.path
    The parsing function is responsible for populating,
      - Path.export_version
  """
  raw_paths = gfile.ListDirectory(base_dir)
  paths = []
  for r in raw_paths:
    p = parser(Path(os.path.join(base_dir, r), None))
    if p:
      paths.append(p)
  return sorted(paths)
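A hypothetical parser that could be passed to get_paths, assuming Path is the usual namedtuple with fields path and export_version (the helper name below is made up for illustration):

import os

def _version_parser(path):
  """Fills in Path.export_version when the last path component is an integer."""
  basename = os.path.basename(path.path)
  if basename.isdigit():
    return path._replace(export_version=int(basename))
  return None  # get_paths drops entries for which the parser returns None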
Example 5: __init__
# Module to import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import ListDirectory [as alias]
def __init__(self, sess, session_root, watch_fn=None, log_usage=True):
  """Constructor of DumpingDebugWrapperSession.

  Args:
    sess: The TensorFlow `Session` object being wrapped.
    session_root: (`str`) Path to the session root directory. Must be a
      directory that does not exist or an empty directory. If the directory
      does not exist, it will be created by the debugger core during debug
      [`Session.run()`](../../../g3doc/api_docs/python/client.md#session.run)
      calls.
      As the `run()` calls occur, subdirectories will be added to
      `session_root`. The subdirectories' names have the following pattern:
        run_<epoch_time_stamp>_<uuid>
      E.g., run_1480734393835964_ad4c953a85444900ae79fc1b652fb324
    watch_fn: (`Callable`) A Callable that can be used to define per-run
      debug ops and watched tensors. See the doc of
      `NonInteractiveDebugWrapperSession.__init__()` for details.
    log_usage: (`bool`) whether the usage of this class is to be logged.

  Raises:
    ValueError: If `session_root` is an existing and non-empty directory or
      if `session_root` is a file.
  """

  if log_usage:
    pass  # No logging for open-source.

  framework.NonInteractiveDebugWrapperSession.__init__(
      self, sess, watch_fn=watch_fn)

  if gfile.Exists(session_root):
    if not gfile.IsDirectory(session_root):
      raise ValueError(
          "session_root path points to a file: %s" % session_root)
    elif gfile.ListDirectory(session_root):
      raise ValueError(
          "session_root path points to a non-empty directory: %s" %
          session_root)

  self._session_root = session_root
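A minimal usage sketch of the wrapper whose constructor is shown above, assuming it is exposed through the TensorFlow 1.x debugger module and that the dump directory path is hypothetical:

import tensorflow as tf
from tensorflow.python import debug as tf_debug

sess = tf.Session()
# session_root must not exist yet or must be an empty directory,
# which is exactly what the constructor checks above.
sess = tf_debug.DumpingDebugWrapperSession(sess, "/tmp/tfdbg_dumps")
# Each subsequent sess.run(...) writes a run_<timestamp>_<suffix> subdirectory.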
Example 6: testGC
# Module to import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import ListDirectory [as alias]
def testGC(self):
  export_path = os.path.join(tf.test.get_temp_dir(), "gc")
  self.doBasicsOneExportPath(export_path, global_step=100)
  self.assertEquals(gfile.ListDirectory(export_path), ["00000100"])
  self.doBasicsOneExportPath(export_path, global_step=101)
  self.assertEquals(
      sorted(gfile.ListDirectory(export_path)), ["00000100", "00000101"])
  self.doBasicsOneExportPath(export_path, global_step=102)
  self.assertEquals(
      sorted(gfile.ListDirectory(export_path)), ["00000101", "00000102"])
Example 7: _get_paths
# Module to import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import ListDirectory [as alias]
def _get_paths(base_dir, parser):
  """Gets a list of Paths in a given directory.

  Args:
    base_dir: directory.
    parser: a function which gets the raw Path and can augment it with
      information such as the export_version, or ignore the path by returning
      None. An example parser may extract the export version from a path such
      as "/tmp/exports/100", and another may extract it from a full file name
      such as "/tmp/checkpoint-99.out".

  Returns:
    A list of Paths contained in the base directory with the parsing function
    applied.
    By default the following fields are populated,
      - Path.path
    The parsing function is responsible for populating,
      - Path.export_version
  """
  # We are mocking this in the test, hence we should not use the public API.
  raw_paths = gfile.ListDirectory(base_dir)
  paths = []
  for r in raw_paths:
    # ListDirectory() returns entries with a trailing "/" if base_dir is a
    # GCS URL.
    r = tf.compat.as_str_any(r)
    if r[-1] == '/':
      r = r[0:len(r) - 1]
    p = parser(Path(os.path.join(tf.compat.as_str_any(base_dir), r), None))
    if p:
      paths.append(p)
  return sorted(paths)
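The trailing-separator handling in the loop above can also be expressed with rstrip; a small equivalent sketch of the same idea (not the original code):

import tensorflow as tf

def _strip_trailing_separator(name):
  """GCS listings can return entries such as "0/"; drop the trailing slash."""
  return tf.compat.as_str_any(name).rstrip('/')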
Example 8: testGcsDirWithSeparator
# Module to import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import ListDirectory [as alias]
def testGcsDirWithSeparator(self):
  base_dir = "gs://bucket/foo"
  with tf.compat.v1.test.mock.patch.object(
      gfile, "ListDirectory") as mock_list_directory:
    # gfile.ListDirectory returns directory names with the separator '/'.
    mock_list_directory.return_value = ["0/", "1/"]
    self.assertEqual(
        gc._get_paths(base_dir, _create_parser(base_dir)), [
            gc.Path(os.path.join(base_dir, "0"), 0),
            gc.Path(os.path.join(base_dir, "1"), 1)
        ])
Example 9: _get_paths
# Module to import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import ListDirectory [as alias]
def _get_paths(base_dir, parser):
  """Gets a list of Paths in a given directory.

  Args:
    base_dir: directory.
    parser: a function which gets the raw Path and can augment it with
      information such as the export_version, or ignore the path by returning
      None. An example parser may extract the export version from a path
      such as "/tmp/exports/100", and another may extract it from a full file
      name such as "/tmp/checkpoint-99.out".

  Returns:
    A list of Paths contained in the base directory with the parsing function
    applied.
    By default the following fields are populated,
      - Path.path
    The parsing function is responsible for populating,
      - Path.export_version
  """
  raw_paths = gfile.ListDirectory(base_dir)
  paths = []
  for r in raw_paths:
    p = parser(Path(os.path.join(compat.as_str_any(base_dir),
                                 compat.as_str_any(r)),
                    None))
    if p:
      paths.append(p)
  return sorted(paths)
Author: PacktPublishing, Project: Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda, Lines of code: 30, Source file: gc.py
Example 10: __init__
# Module to import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import ListDirectory [as alias]
def __init__(self,
             sess,
             session_root,
             watch_fn=None,
             thread_name_filter=None,
             log_usage=True):
  """Constructor of DumpingDebugWrapperSession.

  Args:
    sess: The TensorFlow `Session` object being wrapped.
    session_root: (`str`) Path to the session root directory. Must be a
      directory that does not exist or an empty directory. If the directory
      does not exist, it will be created by the debugger core during debug
      @{tf.Session.run}
      calls.
      As the `run()` calls occur, subdirectories will be added to
      `session_root`. The subdirectories' names have the following pattern:
        run_<epoch_time_stamp>_<zero_based_run_counter>
      E.g., run_1480734393835964_ad4c953a85444900ae79fc1b652fb324
    watch_fn: (`Callable`) A Callable that can be used to define per-run
      debug ops and watched tensors. See the doc of
      `NonInteractiveDebugWrapperSession.__init__()` for details.
    thread_name_filter: Regular-expression white list for threads on which the
      wrapper session will be active. See doc of `BaseDebugWrapperSession` for
      more details.
    log_usage: (`bool`) whether the usage of this class is to be logged.

  Raises:
    ValueError: If `session_root` is an existing and non-empty directory or
      if `session_root` is a file.
  """

  if log_usage:
    pass  # No logging for open-source.

  framework.NonInteractiveDebugWrapperSession.__init__(
      self, sess, watch_fn=watch_fn, thread_name_filter=thread_name_filter)

  if gfile.Exists(session_root):
    if not gfile.IsDirectory(session_root):
      raise ValueError(
          "session_root path points to a file: %s" % session_root)
    elif gfile.ListDirectory(session_root):
      raise ValueError(
          "session_root path points to a non-empty directory: %s" %
          session_root)

  self._session_root = session_root

  self._run_counter = 0
  self._run_counter_lock = threading.Lock()
Example 11: get_inspection_units
# Module to import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import ListDirectory [as alias]
def get_inspection_units(logdir='', event_file='', tag=''):
  """Returns a list of InspectionUnit objects given either logdir or event_file.

  If logdir is given, the number of InspectionUnits should equal the
  number of directories or subdirectories that contain event files.
  If event_file is given, the number of InspectionUnits should be 1.

  Args:
    logdir: A log directory that contains event files.
    event_file: Or, a particular event file path.
    tag: An optional tag name to query for.

  Returns:
    A list of InspectionUnit objects.
  """
  if logdir:
    subdirs = event_multiplexer.GetLogdirSubdirectories(logdir)
    inspection_units = []
    for subdir in subdirs:
      generator = itertools.chain(*[
          generator_from_event_file(os.path.join(subdir, f))
          for f in gfile.ListDirectory(subdir)
          if event_accumulator.IsTensorFlowEventsFile(os.path.join(subdir, f))
      ])
      inspection_units.append(InspectionUnit(
          name=subdir,
          generator=generator,
          field_to_obs=get_field_to_observations_map(generator, tag)))
    if inspection_units:
      print('Found event files in:\n{}\n'.format('\n'.join(
          [u.name for u in inspection_units])))
    elif event_accumulator.IsTensorFlowEventsFile(logdir):
      print(
          'It seems that {} may be an event file instead of a logdir. If this '
          'is the case, use --event_file instead of --logdir to pass '
          'it in.'.format(logdir))
    else:
      print('No event files found within logdir {}'.format(logdir))
    return inspection_units
  elif event_file:
    generator = generator_from_event_file(event_file)
    return [InspectionUnit(
        name=event_file,
        generator=generator,
        field_to_obs=get_field_to_observations_map(generator, tag))]
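A hypothetical call to the function above, assuming the surrounding event-inspection helpers are importable and that the log directory path is made up:

units = get_inspection_units(logdir='/tmp/train_logs')  # hypothetical path
for unit in units:
  # Each InspectionUnit pairs a directory name with its event generator and
  # a mapping from event fields to observations.
  print(unit.name, sorted(unit.field_to_obs.keys()))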
Example 12: __init__
# Module to import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import ListDirectory [as alias]
def __init__(self,
             sess,
             session_root,
             watch_fn=None,
             thread_name_filter=None,
             pass_through_operrors=None,
             log_usage=True):
  """Constructor of DumpingDebugWrapperSession.

  Args:
    sess: The TensorFlow `Session` object being wrapped.
    session_root: (`str`) Path to the session root directory. Must be a
      directory that does not exist or an empty directory. If the directory
      does not exist, it will be created by the debugger core during debug
      @{tf.Session.run}
      calls.
      As the `run()` calls occur, subdirectories will be added to
      `session_root`. The subdirectories' names have the following pattern:
        run_<epoch_time_stamp>_<zero_based_run_counter>
      E.g., run_1480734393835964_ad4c953a85444900ae79fc1b652fb324
    watch_fn: (`Callable`) A Callable that can be used to define per-run
      debug ops and watched tensors. See the doc of
      `NonInteractiveDebugWrapperSession.__init__()` for details.
    thread_name_filter: Regular-expression white list for threads on which the
      wrapper session will be active. See doc of `BaseDebugWrapperSession` for
      more details.
    pass_through_operrors: If true, all captured OpErrors will be
      propagated. By default this captures all OpErrors.
    log_usage: (`bool`) whether the usage of this class is to be logged.

  Raises:
    ValueError: If `session_root` is an existing and non-empty directory or
      if `session_root` is a file.
  """

  if log_usage:
    pass  # No logging for open-source.

  framework.NonInteractiveDebugWrapperSession.__init__(
      self, sess, watch_fn=watch_fn, thread_name_filter=thread_name_filter,
      pass_through_operrors=pass_through_operrors)

  if gfile.Exists(session_root):
    if not gfile.IsDirectory(session_root):
      raise ValueError(
          "session_root path points to a file: %s" % session_root)
    elif gfile.ListDirectory(session_root):
      raise ValueError(
          "session_root path points to a non-empty directory: %s" %
          session_root)
  else:
    gfile.MakeDirs(session_root)

  self._session_root = session_root

  self._run_counter = 0
  self._run_counter_lock = threading.Lock()
Author: PacktPublishing, Project: Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda, Lines of code: 58, Source file: dumping_wrapper.py