This article collects typical usage examples of the Python function tensorflow.python.util.compat.as_str_any. If you have been wondering what as_str_any does, how to use it, or where to find real usage examples, the curated code samples below may help.
The following shows 15 code examples of as_str_any, sorted by popularity by default. You can vote up the examples you like or find useful; your feedback helps the system recommend better Python code examples.
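Before diving into the examples, a minimal sketch of what as_str_any itself does may help (a hedged illustration assuming the TensorFlow 1.x-era import path used throughout this page): bytes are decoded to str, and any other value falls back to str().

from tensorflow.python.util import compat

print(compat.as_str_any(b"/tmp/model"))  # bytes decoded to '/tmp/model'
print(compat.as_str_any("/tmp/model"))   # str passes through unchanged
print(compat.as_str_any(42))             # non-bytes values fall back to str(): '42'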
Example 1: get_matching_files_v2
def get_matching_files_v2(pattern):
  """Returns a list of files that match the given pattern(s).

  Args:
    pattern: string or iterable of strings. The glob pattern(s).

  Returns:
    A list of strings containing filenames that match the given pattern(s).

  Raises:
    errors.OpError: If there are filesystem / directory listing errors.
  """
  if isinstance(pattern, six.string_types):
    return [
        # Convert the filenames to string from bytes.
        compat.as_str_any(matching_filename)
        for matching_filename in pywrap_tensorflow.GetMatchingFiles(
            compat.as_bytes(pattern))
    ]
  else:
    return [
        # Convert the filenames to string from bytes.
        compat.as_str_any(matching_filename)  # pylint: disable=g-complex-comprehension
        for single_filename in pattern
        for matching_filename in pywrap_tensorflow.GetMatchingFiles(
            compat.as_bytes(single_filename))
    ]
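Example 1 is the implementation behind TensorFlow's public glob API. A hedged usage sketch, assuming TensorFlow is installed and the function is exposed as tf.io.gfile.glob (as in recent TF releases):

import tensorflow as tf

# Matching filenames come back as str, not bytes, thanks to as_str_any.
for filename in tf.io.gfile.glob("/tmp/*.txt"):
  print(filename)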
Example 2: get_paths
def get_paths(base_dir, parser):
  """Gets a list of Paths in a given directory.

  Args:
    base_dir: directory.
    parser: a function which gets the raw Path and can augment it with
      information such as the export_version, or ignore the path by returning
      None. An example parser may extract the export version from a path
      such as "/tmp/exports/100"; another may extract it from a full file
      name such as "/tmp/checkpoint-99.out".

  Returns:
    A list of Paths contained in the base directory with the parsing function
    applied.
    By default the following fields are populated,
      - Path.path
    The parsing function is responsible for populating,
      - Path.export_version
  """
  raw_paths = gfile.ListDirectory(base_dir)
  paths = []
  for r in raw_paths:
    p = parser(Path(os.path.join(compat.as_str_any(base_dir),
                                 compat.as_str_any(r)),
                    None))
    if p:
      paths.append(p)
  return sorted(paths)
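A hedged usage sketch for get_paths: the Path namedtuple and the parser below are assumptions modeled on TensorFlow's export garbage-collection utilities, where a parser maps a path like "/tmp/exports/100" to export_version 100.

import collections
import os

# Assumed shape of the Path record used by get_paths.
Path = collections.namedtuple('Path', ['path', 'export_version'])

def export_version_parser(path):
  # Interpret the basename as an integer export version; ignore other paths.
  try:
    return path._replace(export_version=int(os.path.basename(path.path)))
  except ValueError:
    return None

# paths = get_paths('/tmp/exports', export_version_parser)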
Example 3: get_matching_files_v2
def get_matching_files_v2(pattern):
  """Returns a list of files that match the given pattern(s).

  Args:
    pattern: string or iterable of strings. The glob pattern(s).

  Returns:
    A list of strings containing filenames that match the given pattern(s).

  Raises:
    errors.OpError: If there are filesystem / directory listing errors.
  """
  with errors.raise_exception_on_not_ok_status() as status:
    if isinstance(pattern, six.string_types):
      return [
          # Convert the filenames to string from bytes.
          compat.as_str_any(matching_filename)
          for matching_filename in pywrap_tensorflow.GetMatchingFiles(
              compat.as_bytes(pattern), status)
      ]
    else:
      return [
          # Convert the filenames to string from bytes.
          compat.as_str_any(matching_filename)
          for single_filename in pattern
          for matching_filename in pywrap_tensorflow.GetMatchingFiles(
              compat.as_bytes(single_filename), status)
      ]
Example 4: request_stop
def request_stop(self, ex=None):
  """Request that the threads stop.

  After this is called, calls to `should_stop()` will return `True`.

  Note: If an exception is being passed in, it must be in the context of
  handling the exception (i.e. `try: ... except Exception as ex: ...`) and not
  a newly created one.

  Args:
    ex: Optional `Exception`, or Python `exc_info` tuple as returned by
      `sys.exc_info()`. If this is the first call to `request_stop()` the
      corresponding exception is recorded and re-raised from `join()`.
  """
  with self._lock:
    ex = self._filter_exception(ex)
    # If we have already joined the coordinator the exception will not have a
    # chance to be reported, so just raise it normally. This can happen if
    # you continue to use a session after having stopped and joined the
    # coordinator threads.
    if self._joined:
      if isinstance(ex, tuple):
        six.reraise(*ex)
      elif ex is not None:
        # NOTE(touts): This is bogus if request_stop() is not called
        # from the exception handler that raised ex.
        six.reraise(*sys.exc_info())
    if not self._stop_event.is_set():
      if ex and self._exc_info_to_raise is None:
        if isinstance(ex, tuple):
          logging.info("Error reported to Coordinator: %s, %s",
                       type(ex[1]),
                       compat.as_str_any(ex[1]))
          self._exc_info_to_raise = ex
        else:
          logging.info("Error reported to Coordinator: %s, %s",
                       type(ex),
                       compat.as_str_any(ex))
          self._exc_info_to_raise = sys.exc_info()
        # self._exc_info_to_raise should contain a tuple containing exception
        # (type, value, traceback)
        if (len(self._exc_info_to_raise) != 3 or
            not self._exc_info_to_raise[0] or
            not self._exc_info_to_raise[1]):
          # Raise, catch and record the exception here so that error happens
          # where expected.
          try:
            raise ValueError(
                "ex must be a tuple or sys.exc_info must return the current "
                "exception: %s"
                % self._exc_info_to_raise)
          except ValueError:
            # Record this error so it kills the coordinator properly.
            # NOTE(touts): As above, this is bogus if request_stop() is not
            # called from the exception handler that raised ex.
            self._exc_info_to_raise = sys.exc_info()
      self._stop_event.set()
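Example 4 is the heart of tf.train.Coordinator's error handling. A minimal sketch of the canonical usage pattern it supports (hedged; worker_loop is a stand-in for your own thread body):

import threading
import tensorflow as tf

coord = tf.train.Coordinator()

def worker_loop():
  try:
    while not coord.should_stop():
      pass  # do work here
  except Exception as e:  # pylint: disable=broad-except
    # Reported from inside the handler, so sys.exc_info() is still valid.
    coord.request_stop(e)

thread = threading.Thread(target=worker_loop)
thread.start()
coord.request_stop()  # ask all threads to stop
coord.join([thread])  # re-raises any exception recorded by request_stop()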
Example 5: test_add_pruned_collection_proto_in_bytes_list
def test_add_pruned_collection_proto_in_bytes_list(self):
  # Note: This also tests _is_removed_mentioned().
  collection_name = 'proto_collection'
  base_meta_graph_def = meta_graph_pb2.MetaGraphDef()
  base_meta_graph_def.collection_def[collection_name].bytes_list.value.extend(
      [compat.as_bytes(compat.as_str_any(_make_asset_file_def_any('node1'))),
       compat.as_bytes(compat.as_str_any(_make_asset_file_def_any('node2'))),
       compat.as_bytes(compat.as_str_any(_make_asset_file_def_any('node3'))),
       compat.as_bytes(compat.as_str_any(_make_asset_file_def_any('node4'))),
       compat.as_bytes(compat.as_str_any(_make_asset_file_def_any('/a/a_1'))),
       compat.as_bytes(compat.as_str_any(_make_asset_file_def_any('/b/b_1')))
      ])
  meta_graph_def = meta_graph_pb2.MetaGraphDef()
  removed_op_names = ['node2', 'node4', 'node5', '/a', '/b/b_1']
  meta_graph_transform._add_pruned_collection(
      base_meta_graph_def, meta_graph_def, collection_name, removed_op_names)
  collection = meta_graph_def.collection_def[collection_name]
  expected_values = [
      compat.as_bytes(compat.as_str_any(_make_asset_file_def_any('node1'))),
      compat.as_bytes(compat.as_str_any(_make_asset_file_def_any('node3'))),
      compat.as_bytes(compat.as_str_any(_make_asset_file_def_any('/a/a_1'))),
  ]
  self.assertEqual(expected_values, collection.bytes_list.value[:])
Example 6: test_asset_path_returned
def test_asset_path_returned(self):
  root = tracking.AutoTrackable()
  root.path = tracking.TrackableAsset(self._vocab_path)
  save_dir = os.path.join(self.get_temp_dir(), "saved_model")
  root.get_asset = def_function.function(lambda: root.path.asset_path)
  save.save(root, save_dir, signatures=root.get_asset.get_concrete_function())
  second_dir = os.path.join(self.get_temp_dir(), "second_dir")
  file_io.rename(save_dir, second_dir)
  imported_path = _import_and_infer(second_dir, {})["output_0"]
  self.assertIn(compat.as_str_any(second_dir),
                compat.as_str_any(imported_path))
def parser(path):
# Modify the path object for RegEx match for Windows Paths
if os.name == 'nt':
match = re.match("^" + compat.as_str_any(base_dir).replace('\\','/') + "/(\\d+)$",
compat.as_str_any(path.path).replace('\\','/'))
else:
match = re.match("^" + compat.as_str_any(base_dir) + "/(\\d+)$",
compat.as_str_any(path.path))
if not match:
return None
return path._replace(export_version=int(match.group(1)))
Example 8: _export_eval_result
def _export_eval_result(self, eval_result, checkpoint_path):
  """Export `eval_result` according to strategies in `EvalSpec`."""
  export_dir_base = os.path.join(
      compat.as_str_any(self._estimator.model_dir),
      compat.as_str_any('export'))
  for strategy in self._eval_spec.export_strategies:
    strategy.export(
        self._estimator,
        os.path.join(
            compat.as_str_any(export_dir_base),
            compat.as_str_any(strategy.name)),
        checkpoint_path=checkpoint_path,
        eval_result=eval_result)
Example 9: _export_eval_result
def _export_eval_result(self, eval_result, is_the_final_export):
  """Export `eval_result` according to exporters in `EvalSpec`."""
  export_dir_base = os.path.join(
      compat.as_str_any(self._estimator.model_dir),
      compat.as_str_any('export'))
  for exporter in self._eval_spec.exporters:
    exporter.export(
        estimator=self._estimator,
        export_path=os.path.join(
            compat.as_str_any(export_dir_base),
            compat.as_str_any(exporter.name)),
        checkpoint_path=eval_result.checkpoint_path,
        eval_result=eval_result.metrics,
        is_the_final_export=is_the_final_export)
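Examples 8 and 9 wrap every path component in as_str_any before calling os.path.join. A short sketch of why (the TypeError below is standard Python 3 behavior):

import os
from tensorflow.python.util import compat

model_dir = b"/tmp/model"  # paths coming back from the C++ layer are often bytes
# os.path.join(model_dir, "export")  # TypeError in Python 3: can't mix bytes and str
print(os.path.join(compat.as_str_any(model_dir), "export"))  # '/tmp/model/export'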
Example 10: get_summary_description
def get_summary_description(node_def):
  """Given a TensorSummary node_def, retrieve its SummaryDescription.

  When a Summary op is instantiated, a SummaryDescription of associated
  metadata is stored in its NodeDef. This method retrieves the description.

  Args:
    node_def: the node_def_pb2.NodeDef of a TensorSummary op

  Returns:
    a summary_pb2.SummaryDescription

  Raises:
    ValueError: if the node is not a summary op.

  @compatibility(eager)
  Not compatible with eager execution. To write TensorBoard
  summaries under eager execution, use `tf.contrib.summary` instead.
  @end_compatibility
  """
  if node_def.op != 'TensorSummary':
    raise ValueError("Can't get_summary_description on %s" % node_def.op)
  description_str = _compat.as_str_any(node_def.attr['description'].s)
  summary_description = SummaryDescription()
  _json_format.Parse(description_str, summary_description)
  return summary_description
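A hedged sketch of feeding get_summary_description: the JSON payload below is an assumption about the stored description format, illustrating only the mechanics (a NodeDef whose 'description' attr holds serialized JSON).

from tensorflow.core.framework import node_def_pb2

node_def = node_def_pb2.NodeDef(op='TensorSummary')
node_def.attr['description'].s = b'{"typeHint": "image"}'  # hypothetical payload
# get_summary_description(node_def) would parse this JSON into a
# summary_pb2.SummaryDescription; any other op value raises ValueError.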
Example 11: list_directory
def list_directory(dirname):
  """Returns a list of entries contained within a directory.

  The list is in arbitrary order. It does not contain the special entries "."
  and "..".

  Args:
    dirname: string, path to a directory

  Returns:
    [filename1, filename2, ... filenameN] as strings

  Raises:
    errors.NotFoundError if directory doesn't exist
  """
  if not is_directory(dirname):
    raise errors.NotFoundError(None, None, "Could not find directory")
  with errors.raise_exception_on_not_ok_status() as status:
    # Convert each element to string, since the return values of the
    # vector of string should be interpreted as strings, not bytes.
    return [
        compat.as_str_any(filename)
        for filename in pywrap_tensorflow.GetChildren(
            compat.as_bytes(dirname), status)
    ]
Example 12: _is_removed_mentioned
def _is_removed_mentioned(s, removed_op_names):
  """Determine whether any removed op is mentioned in the given object.

  This relies on the string representation of the object. This is used for
  proto messages that may mention ops by name in nested fields. The string
  representation of the proto includes those field values, so this string
  search approach is sufficient.

  Args:
    s: an object to search for removed op names.
    removed_op_names: An iterable of names of ops that were removed.

  Returns:
    True if any removed op is mentioned in the given object, False otherwise.
  """
  # A common approach taken by some of the transforms in gtt is to add new
  # nodes that have the same prefix as the node they are removing. For
  # example, if the original node name was /foo, they may remove that node
  # and add in /foo/bar. This regex ensures that we handle these two nodes
  # as separate entities. It matches on nodes having names in the form of
  # '/foo/bar_x' as well as nodes having names in the form of 'foo.'
  s_names = _re.findall(r'((?:[\/]?[a-zA-Z0-9\_]*)*)', compat.as_str_any(s))
  for removed_op_name in removed_op_names:
    for s_name in s_names:
      if s_name.endswith(removed_op_name):
        return True
  return False
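To see what the regex in Example 12 actually extracts, a standalone sketch (plain re and a literal string, so it runs without TensorFlow):

import re

s_names = re.findall(r'((?:[\/]?[a-zA-Z0-9\_]*)*)', '/foo/bar_1 -> other')
# Non-empty matches: ['/foo/bar_1', 'other'] (findall also yields empty strings
# at non-matching positions). '/foo/bar_1'.endswith('/foo') is False, so a
# removed op '/foo' does not falsely flag the surviving node '/foo/bar_1'.
print([n for n in s_names if n])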
Example 13: _normalize_outputs
def _normalize_outputs(outputs, function_name, signature_key):
  """Construct an output dictionary from unnormalized function outputs."""
  if isinstance(outputs, collections.Mapping):
    for key, value in outputs.items():
      if not isinstance(value, ops.Tensor):
        raise ValueError(
            ("Got a dictionary containing non-Tensor value {} for key {} "
             "in the output of the function {} used to generate a SavedModel "
             "signature. Dictionary outputs for functions used as signatures "
             "should have one Tensor output per string key.")
            .format(value, key, compat.as_str_any(function_name)))
    return outputs
  else:
    original_outputs = outputs
    if not isinstance(outputs, collections.Sequence):
      outputs = [outputs]
    if not _is_flat(outputs):
      raise ValueError(
          ("Got non-flat outputs '{}' from '{}' for SavedModel "
           "signature '{}'. Signatures have one Tensor per output, so "
           "to have predictable names Python functions used to generate "
           "these signatures should avoid outputting Tensors in nested "
           "structures.")
          .format(original_outputs, function_name, signature_key))
    return {("output_{}".format(output_index)): output
            for output_index, output in enumerate(outputs)}
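A hedged illustration of the naming convention Example 13 enforces: a flat sequence of outputs is keyed "output_0", "output_1", and so on, which is why the test in Example 6 reads its signature result via ["output_0"].

# Stand-ins for Tensors; only the dict-construction logic is illustrated here.
outputs = ["tensor_a", "tensor_b"]
named = {"output_{}".format(i): out for i, out in enumerate(outputs)}
print(named)  # {'output_0': 'tensor_a', 'output_1': 'tensor_b'}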
Example 14: list_directory_v2
def list_directory_v2(path):
  """Returns a list of entries contained within a directory.

  The list is in arbitrary order. It does not contain the special entries "."
  and "..".

  Args:
    path: string, path to a directory

  Returns:
    [filename1, filename2, ... filenameN] as strings

  Raises:
    errors.NotFoundError if directory doesn't exist
  """
  if not is_directory(path):
    raise errors.NotFoundError(
        node_def=None,
        op=None,
        message="Could not find directory {}".format(path))
  # Convert each element to string, since the return values of the
  # vector of string should be interpreted as strings, not bytes.
  return [
      compat.as_str_any(filename)
      for filename in pywrap_tensorflow.GetChildren(compat.as_bytes(path))
  ]
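Example 14 backs TensorFlow's public directory-listing API. A hedged usage sketch, assuming it is exposed as tf.io.gfile.listdir:

import tensorflow as tf

for entry in tf.io.gfile.listdir("/tmp"):
  print(entry)  # entries arrive as str, not bytes, thanks to as_str_any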
Example 15: IsTensorFlowEventsFile
def IsTensorFlowEventsFile(path):
  """Check the path name to see if it is probably a TF Events file."""
  if 'tfevents' not in compat.as_str_any(os.path.basename(path)):
    return False
  if _CNS_DELETED_FILE_PATTERN.search(path):
    logging.info('Ignoring deleted Colossus file: %s', path)
    return False
  return True
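A quick check of Example 15's heuristic on a typical event-file name (hedged: only the basename test is reproduced here, since _CNS_DELETED_FILE_PATTERN is a module-level regex not shown on this page, and the file name below is illustrative).

import os
from tensorflow.python.util import compat

path = '/logs/run1/events.out.tfevents.1559054593.host'
print('tfevents' in compat.as_str_any(os.path.basename(path)))  # True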