This page collects typical usage examples of the Python function tensorflow.python.framework.ops.add_to_collections. If you are wondering what add_to_collections does, how to call it, or what real-world uses look like, the curated examples below should help.
The examples below show 15 uses of add_to_collections, ordered roughly by popularity.
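Before the examples, a minimal sketch of the behavior they all rely on: ops.add_to_collections registers one value under several graph collection keys at once. The sketch below uses tf.add_to_collections, the public tf.compat.v1 wrapper over the same internal call, and assumes TensorFlow 1.x-style graph mode:

import tensorflow.compat.v1 as tf
tf.disable_eager_execution()

with tf.Graph().as_default() as g:
  x = tf.constant(1.0, name="x")
  # One value, several collections in a single call.
  tf.add_to_collections(["my_outputs", "my_summaries"], x)
  assert g.get_collection("my_outputs") == [x]
  assert g.get_collection("my_summaries") == [x]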
Example 1: var_creator
def var_creator(*args, **kwargs):
  """Create an AggregatingVariable and fix up collections."""
  # Record what collections this variable should be added to.
  collections = kwargs.pop("collections", None)
  if collections is None:
    collections = [ops.GraphKeys.GLOBAL_VARIABLES]
  kwargs["collections"] = []

  # Create and wrap the variable.
  v = next_creator(*args, **kwargs)
  wrapped = values.AggregatingVariable(v, aggregation)

  # Add the wrapped variable to the requested collections.
  # The handling of eager mode and the global step matches
  # ResourceVariable._init_from_args().
  if not context.executing_eagerly():
    g = ops.get_default_graph()
    # If "trainable" is True, next_creator() will add the contained
    # variable to the TRAINABLE_VARIABLES collection, so we manually
    # remove it and replace with the wrapper. We can't set "trainable"
    # to False for next_creator() since that causes functions like
    # implicit_gradients to skip those variables.
    if kwargs.get("trainable", True):
      collections.append(ops.GraphKeys.TRAINABLE_VARIABLES)
      l = g.get_collection_ref(ops.GraphKeys.TRAINABLE_VARIABLES)
      l.remove(v)
    g.add_to_collections(collections, wrapped)
  elif ops.GraphKeys.GLOBAL_STEP in collections:
    ops.add_to_collections(ops.GraphKeys.GLOBAL_STEP, wrapped)

  return wrapped
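To see how a creator in this style plugs in, here is a hedged sketch (the AggregatingVariable wrapper is dropped, and collection_fixup_creator is a hypothetical stand-in): the creator intercepts the collections kwarg, creates the variable with no collections, then registers it manually, mirroring the pattern above. It assumes tf.compat.v1.variable_creator_scope routes tf.Variable construction through the creator:

import tensorflow.compat.v1 as tf
tf.disable_eager_execution()

def collection_fixup_creator(next_creator, **kwargs):
  # Intercept the requested collections and suppress automatic registration.
  collections = kwargs.pop("collections", None)
  if collections is None:
    collections = [tf.GraphKeys.GLOBAL_VARIABLES]
  kwargs["collections"] = []
  v = next_creator(**kwargs)
  # Re-register the variable under the originally requested collections.
  tf.get_default_graph().add_to_collections(collections, v)
  return v

with tf.Graph().as_default() as g:
  with tf.variable_creator_scope(collection_fixup_creator):
    v = tf.Variable(1.0, name="v")
  assert v in g.get_collection(tf.GraphKeys.GLOBAL_VARIABLES)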
Example 2: variable_creator_scope
def variable_creator_scope(self, next_creator, **kwargs):
  """Creates variables & adds them to collections to match legacy code."""
  collections = kwargs.pop("collections", None)
  v = None

  # Get expected variable name.
  name = kwargs.get("name", None)
  with ops.name_scope(name, "Variable") as name_scope:
    name = name_scope

  if self._share_variables:
    v = self._variables_by_name.get(name, None)

  if v is None:
    v = next_creator(**kwargs)
    self._variables.append(v)
    if self._share_variables:
      self._variables_by_name[name] = v

  if collections is None:
    collections = [ops.GraphKeys.GLOBAL_VARIABLES]
  if v.trainable and ops.GraphKeys.TRAINABLE_VARIABLES not in collections:
    collections = list(collections) + [ops.GraphKeys.TRAINABLE_VARIABLES]
  ops.add_to_collections(collections, v)

  return v
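The invariant this creator maintains is easy to check: a trainable variable ends up in both GLOBAL_VARIABLES and TRAINABLE_VARIABLES. A quick hedged check using stock tf.compat.v1 variables, which maintain the same invariant by default:

import tensorflow.compat.v1 as tf
tf.disable_eager_execution()

with tf.Graph().as_default() as g:
  v = tf.Variable(2.0, trainable=True, name="w")
  assert v in g.get_collection(tf.GraphKeys.GLOBAL_VARIABLES)
  assert v in g.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)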
Example 3: __init__
def __init__(self, initial_value, trainable=True, collections=None,
             validate_shape=True, name=None):
  """Creates a new variable with value `initial_value`.

  The new variable is added to the graph collections listed in `collections`,
  which defaults to `[GraphKeys.VARIABLES]`.

  If `trainable` is `True` the variable is also added to the graph collection
  `GraphKeys.TRAINABLE_VARIABLES`.

  This constructor creates both a `variable` Op and an `assign` Op to set the
  variable to its initial value.

  Args:
    initial_value: A `Tensor`, or Python object convertible to a `Tensor`.
      The initial value for the Variable. Must have a shape specified unless
      `validate_shape` is set to False.
    trainable: If `True`, the default, also adds the variable to the graph
      collection `GraphKeys.TRAINABLE_VARIABLES`. This collection is used as
      the default list of variables to use by the `Optimizer` classes.
    collections: List of graph collections keys. The new variable is added to
      these collections. Defaults to `[GraphKeys.VARIABLES]`.
    validate_shape: If `False`, allows the variable to be initialized with a
      value of unknown shape. If `True`, the default, the shape of
      `initial_value` must be known.
    name: Optional name for the variable. Defaults to `'Variable'` and gets
      uniquified automatically.

  Returns:
    A Variable.

  Raises:
    ValueError: If the initial value does not have a shape and
      `validate_shape` is `True`.
  """
  if collections is None:
    collections = [ops.GraphKeys.VARIABLES]
  if trainable and ops.GraphKeys.TRAINABLE_VARIABLES not in collections:
    collections = list(collections) + [ops.GraphKeys.TRAINABLE_VARIABLES]
  with ops.control_dependencies(None):
    with ops.op_scope([initial_value], name, "Variable") as name:
      self._initial_value = ops.convert_to_tensor(initial_value,
                                                  name="initial_value")
      initial_value_shape = self._initial_value.get_shape()
      if validate_shape and not initial_value_shape.is_fully_defined():
        raise ValueError("initial_value must have a shape specified: %s"
                         % self._initial_value)
      shape_to_set = initial_value_shape if validate_shape else []
      self._variable = state_ops.variable_op(
          shape_to_set, self._initial_value.dtype.base_dtype,
          set_shape=validate_shape, name=name)
      with ops.device(self._variable.device):
        self._initializer_op = state_ops.assign(
            self._variable, self._initial_value,
            validate_shape=validate_shape).op
        self._snapshot = array_ops.identity(self._variable, name="read")
  ops.add_to_collections(collections, self)
  self._save_slice_info = None
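The collections semantics documented above can be verified directly. A small sketch, using the modern key name GLOBAL_VARIABLES (which replaced the legacy GraphKeys.VARIABLES used in this older snippet):

import tensorflow.compat.v1 as tf
tf.disable_eager_execution()

with tf.Graph().as_default() as g:
  v = tf.Variable(1.0, collections=["my_vars"], trainable=True)
  # An explicit collections list replaces the default...
  assert v in g.get_collection("my_vars")
  assert v not in g.get_collection(tf.GraphKeys.GLOBAL_VARIABLES)
  # ...but trainable=True still appends TRAINABLE_VARIABLES.
  assert v in g.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)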
Example 4: _init_from_args
def _init_from_args(self, initial_value=None, trainable=True,
                    collections=None, validate_shape=True,
                    caching_device=None, name=None):
  """Creates a new variable from arguments.

  Args:
    initial_value: A `Tensor`, or Python object convertible to a `Tensor`.
      The initial value for the Variable. Must have a shape specified unless
      `validate_shape` is set to False.
    trainable: If `True`, the default, also adds the variable to the graph
      collection `GraphKeys.TRAINABLE_VARIABLES`. This collection is used as
      the default list of variables to use by the `Optimizer` classes.
    collections: List of graph collections keys. The new variable is added to
      these collections. Defaults to `[GraphKeys.VARIABLES]`.
    validate_shape: If `False`, allows the variable to be initialized with a
      value of unknown shape. If `True`, the default, the shape of
      `initial_value` must be known.
    caching_device: Optional device string or function describing where the
      Variable should be cached for reading. Defaults to the Variable's
      device. If not `None`, caches on another device. Typical use is to
      cache on the device where the Ops using the Variable reside, to
      deduplicate copying through `Switch` and other conditional statements.
    name: Optional name for the variable. Defaults to `'Variable'` and gets
      uniquified automatically.

  Raises:
    ValueError: If the initial value is not specified, or does not have a
      shape and `validate_shape` is `True`.
  """
  if initial_value is None:
    raise ValueError("initial_value must be specified.")
  if collections is None:
    collections = [ops.GraphKeys.VARIABLES]
  if trainable and ops.GraphKeys.TRAINABLE_VARIABLES not in collections:
    collections = list(collections) + [ops.GraphKeys.TRAINABLE_VARIABLES]
  with ops.control_dependencies(None):
    with ops.op_scope([initial_value], name, "Variable") as name:
      self._initial_value = ops.convert_to_tensor(initial_value,
                                                  name="initial_value")
      initial_value_shape = self._initial_value.get_shape()
      if validate_shape and not initial_value_shape.is_fully_defined():
        raise ValueError("initial_value must have a shape specified: %s"
                         % self._initial_value)
      shape_to_set = initial_value_shape if validate_shape else []
      self._variable = state_ops.variable_op(
          shape_to_set, self._initial_value.dtype.base_dtype,
          set_shape=validate_shape, name=name)
      with ops.device(self._variable.device):
        self._initializer_op = state_ops.assign(
            self._variable, self._initial_value,
            validate_shape=validate_shape).op
      with ops.device(caching_device if caching_device is not None
                      else self._variable.device):
        self._snapshot = array_ops.identity(self._variable, name="read")
  ops.add_to_collections(collections, self)
  self._caching_device = caching_device
  self._save_slice_info = None
Example 5: _register_dense_variable_read
def _register_dense_variable_read(read, collections, trainable):
  """Helper function to put a read from a dense variable in the collections."""
  if collections is None:
    collections = []
  if (trainable and
      ops.GraphKeys.TRAINABLE_RESOURCE_VARIABLES not in collections):
    collections = (list(collections) +
                   [ops.GraphKeys.TRAINABLE_RESOURCE_VARIABLES])
  ops.add_to_collections(collections, read)
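For context, a hedged stand-in for what this helper effects (the helper itself is private to TensorFlow; here a plain tensor plays the role of the variable read):

import tensorflow.compat.v1 as tf
tf.disable_eager_execution()

with tf.Graph().as_default() as g:
  read = tf.identity(tf.constant(0.0), name="read")  # stand-in for a variable read
  tf.add_to_collections([tf.GraphKeys.TRAINABLE_RESOURCE_VARIABLES], read)
  assert read in g.get_collection(tf.GraphKeys.TRAINABLE_RESOURCE_VARIABLES)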
Example 6: _init_from_args
def _init_from_args(self, name):
  """Initialize the CriticalSection from constructor arguments."""
  with ops.name_scope(name, "CriticalSection", []) as name:
    with ops.control_dependencies(None):
      # pylint: disable=protected-access
      handle_name = ops._name_from_scope_name(name)
      container = ops.get_default_graph()._container
      # pylint: enable=protected-access
      if container is None:
        container = ""
      self._handle = gen_resource_variable_ops.critical_section_op(
          shared_name=handle_name, name=name)
  if context.in_graph_mode():
    ops.add_to_collections(CRITICAL_SECTIONS, self)
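Note that the value registered here is a Python object (the CriticalSection itself), not a tensor; collections accept arbitrary values, and a single string key works as well as a list. A hedged sketch, where the string "critical_sections" is assumed to match the CRITICAL_SECTIONS constant in tensorflow/python/ops/critical_section_ops.py and FakeSection is a hypothetical stand-in:

import tensorflow.compat.v1 as tf
tf.disable_eager_execution()

CRITICAL_SECTIONS = "critical_sections"  # assumed value of the module constant

class FakeSection(object):
  """Stand-in for a CriticalSection resource object."""

with tf.Graph().as_default() as g:
  cs = FakeSection()
  tf.add_to_collections(CRITICAL_SECTIONS, cs)  # single key, non-tensor value
  assert g.get_collection(CRITICAL_SECTIONS) == [cs]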
Example 7: variable_creator_scope
def variable_creator_scope(self, next_creator, **kwargs):
  """Creates variables & adds them to collections to match legacy code."""
  v = next_creator(**kwargs)
  self._variables.append(v)

  collections = kwargs.get("collections")
  trainable = v.trainable

  if collections is None:
    collections = [ops.GraphKeys.GLOBAL_VARIABLES]
  if trainable and ops.GraphKeys.TRAINABLE_VARIABLES not in collections:
    collections = list(collections) + [ops.GraphKeys.TRAINABLE_VARIABLES]

  ops.add_to_collections(collections, v)
  return v
Example 8: _init_from_args
def _init_from_args(self, name, shared_name):  # pylint: disable=invalid-name
  """Initialize the CriticalSection from constructor arguments."""
  with ops.name_scope(name, "CriticalSection", []) as name:
    with ops.init_scope():
      # pylint: disable=protected-access
      container = ops.get_default_graph()._container
      # pylint: enable=protected-access
      if shared_name is None:
        shared_name = name
      if container is None:
        container = ""
      self._handle = gen_resource_variable_ops.mutex_v2(
          shared_name=shared_name, container=container, name=name)

  if not context.executing_eagerly():
    ops.add_to_collections(CRITICAL_SECTIONS, self)
Example 9: _init_from_args
def _init_from_args(self, name, shared_name):  # pylint: disable=invalid-name
  """Initialize the Notification from constructor arguments."""
  with ops.name_scope(name, "Notification", []) as name:
    with ops.init_scope():
      # pylint: disable=protected-access
      container = ops.get_default_graph()._container
      # pylint: enable=protected-access
      if shared_name is None:
        shared_name = name
      if container is None:
        container = ""

      # Build the notification resource outside of any control dependencies.
      with ops.control_dependencies(None):
        self._handle = gen_resource_variable_ops.notification(
            shared_name=shared_name, container=container, name=name)

  if not context.executing_eagerly():
    ops.add_to_collections(NOTIFICATIONS, self)
Example 10: collect_named_outputs
def collect_named_outputs(collections, alias, outputs):
  """Add `Tensor` outputs tagged with alias to collections.

  It is useful to collect end-points or tags for summaries. Example of usage:

  logits = collect_named_outputs('end_points', 'inception_v3/logits', logits)
  assert 'inception_v3/logits' in logits.aliases

  Args:
    collections: A collection or list of collections. If None skip collection.
    alias: String to append to the list of aliases of outputs, for example,
      'inception_v3/conv1'.
    outputs: Tensor, an output tensor to collect

  Returns:
    The outputs Tensor to allow inline call.
  """
  append_tensor_alias(outputs, alias)
  if collections:
    ops.add_to_collections(collections, outputs)
  return outputs
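A usage sketch matching the docstring. append_tensor_alias belongs to tf.contrib.layers' utils module; here its effect, appending to an aliases list attribute on the tensor, is inlined by hand:

import tensorflow.compat.v1 as tf
tf.disable_eager_execution()

with tf.Graph().as_default() as g:
  logits = tf.constant([1.0], name="logits")
  logits.aliases = ["inception_v3/logits"]  # what append_tensor_alias does
  tf.add_to_collections("end_points", logits)
  assert g.get_collection("end_points") == [logits]
  assert "inception_v3/logits" in logits.aliases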
Example 11: collect_named_outputs
def collect_named_outputs(collections, name, outputs):
  """Add tuple (name, outputs) to collections.

  It is useful to collect end-points or tags for summaries. Example of usage:

  logits = collect_named_outputs('end_points', 'inception_v3/logits', logits)

  Args:
    collections: A collection or list of collections. If None skip collection.
    name: String, name to represent the outputs, ex. 'inception_v3/conv1'
    outputs: Tensor, an output tensor to collect

  Returns:
    The outputs Tensor to allow inline call.
  """
  if collections:
    # Remove ending '/' if present.
    if name[-1] == '/':
      name = name[:-1]
    ops.add_to_collections(collections, (name, outputs))
  return outputs
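Retrieval for this tuple-based variant looks slightly different from Example 10, since the collection stores (name, tensor) pairs rather than tagged tensors; a short sketch:

import tensorflow.compat.v1 as tf
tf.disable_eager_execution()

with tf.Graph().as_default() as g:
  t = tf.constant([1.0])
  tf.add_to_collections("end_points", ("inception_v3/conv1", t))
  names = [n for n, _ in g.get_collection("end_points")]
  assert names == ["inception_v3/conv1"]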
Example 12: collect_named_outputs
def collect_named_outputs(collections, alias, outputs):
  """Add `Tensor` outputs tagged with alias to collections.

  It is useful to collect end-points or tags for summaries. Example of usage:

  logits = collect_named_outputs('end_points', 'inception_v3/logits', logits)
  assert logits.alias == 'inception_v3/logits'

  Args:
    collections: A collection or list of collections. If None skip collection.
    alias: String, alias to name the outputs, ex. 'inception_v3/conv1'
    outputs: Tensor, an output tensor to collect

  Returns:
    The outputs Tensor to allow inline call.
  """
  # Remove ending '/' if present.
  if alias[-1] == '/':
    alias = alias[:-1]
  outputs.alias = alias
  if collections:
    ops.add_to_collections(collections, outputs)
  return outputs
Example 13: _init_from_args
#......... (some code omitted here) .........
        shape and `validate_shape` is `True`.
    """
    if initial_value is None:
      raise ValueError("initial_value must be specified.")
    init_from_fn = callable(initial_value)
    if init_from_fn and dtype is None:
      raise ValueError(
          "dtype must also be specified when initial_value is callable.")

    if collections is None:
      collections = [ops.GraphKeys.GLOBAL_VARIABLES]
    if not isinstance(collections, (list, tuple, set)):
      raise ValueError(
          "collections argument to Variable constructor must be a list, tuple, "
          "or set. Got %s of type %s" % (collections, type(collections)))
    if trainable and ops.GraphKeys.TRAINABLE_VARIABLES not in collections:
      collections = list(collections) + [ops.GraphKeys.TRAINABLE_VARIABLES]
    expected_shape = tensor_shape.as_shape(expected_shape)
    with ops.control_dependencies(None):
      with ops.name_scope(name, "Variable", [] if init_from_fn else
                          [initial_value]) as name:

        # Get the initial value from a callable function. The real shape of the
        # variable will be set later, since under the init_from_fn case, the
        # shape won't be known until after the function is invoked.
        #
        # NOTE: The current Variable OpKernel does not support
        # partially defined shapes, so we only set the shape if it is
        # fully defined. For historical reasons, we use the scalar
        # shape (`[]`) to represent an unknown or partially known
        # shape. A future version of the Variable ops will remove this
        # limitation.
        def full_shape_to_list(shape):
          """Returns shape as a list if shape is fully defined."""
          if shape and shape.is_fully_defined():
            return shape.as_list()
          else:
            return []

        def assert_expected_shape():
          """Asserts that the initial value has the expected shape."""
          if expected_shape:
            expected_shape.assert_is_compatible_with(
                self._initial_value.get_shape())

        if init_from_fn:
          expected_shape_list = full_shape_to_list(expected_shape)
          set_shape = validate_shape and expected_shape.is_fully_defined()
          self._variable = state_ops.variable_op(
              expected_shape_list, dtype.base_dtype, set_shape=set_shape,
              name=name)
          with ops.colocate_with(self._variable.op):
            with ops.name_scope("Initializer"):
              # Colocate the tensors created by the initial_value() function
              # with the variable itself.
              self._initial_value = ops.convert_to_tensor(
                  initial_value(), name="initial_value", dtype=dtype)
              assert_expected_shape()

        # Or get the initial value from a Tensor or Python object.
        else:
          self._initial_value = ops.convert_to_tensor(
              initial_value, name="initial_value", dtype=dtype)
          assert_expected_shape()
          set_shape = (validate_shape and
                       self._initial_value.get_shape().is_fully_defined())
          # In this case, the variable op can't be created until after the
          # initial_value has been converted to a Tensor with a known type.
          self._variable = state_ops.variable_op(
              full_shape_to_list(self._initial_value.get_shape()),
              self._initial_value.dtype.base_dtype,
              set_shape=set_shape,
              name=name)

        # Manually overrides the variable's shape with the initial value's.
        if validate_shape:
          initial_value_shape = self._initial_value.get_shape()
          if not initial_value_shape.is_fully_defined():
            raise ValueError("initial_value must have a shape specified: %s"
                             % self._initial_value)
          self._variable.set_shape(initial_value_shape)
          # TODO(b/28152992): Remove the below hack modifying the node_def shape
          # directly once set_shape() handles it.
          self._variable.op.node_def.attr["shape"].shape.CopyFrom(
              initial_value_shape.as_proto())

        # Assigns initial value.
        self._initializer_op = state_ops.assign(
            self._variable, self._initial_value,
            validate_shape=validate_shape).op

        # TODO(vrv): Change this class to not take caching_device, but
        # to take the op to colocate the snapshot with, so we can use
        # colocation rather than devices.
        if caching_device is not None:
          with ops.device(caching_device):
            self._snapshot = array_ops.identity(self._variable, name="read")
        else:
          with ops.colocate_with(self._variable.op):
            self._snapshot = array_ops.identity(self._variable, name="read")

    ops.add_to_collections(collections, self)
    self._caching_device = caching_device
    self._save_slice_info = None
Example 14: _init_from_args
#......... (some code omitted here) .........
            with ops.name_scope("Initializer"):
              initial_value = ops.convert_to_tensor(
                  initial_value, name="initial_value", dtype=dtype)
            self._handle = _eager_safe_variable_handle(
                shape=initial_value.get_shape(),
                dtype=initial_value.dtype.base_dtype,
                shared_name=handle_name,
                name=name,
                graph_mode=False)
            self._handle_device = (
                self._handle.device if self._in_graph_mode else
                context.get_default_context().device_name)
            self._shape = initial_value.get_shape()
        # pylint: enable=protected-access

        # Or get the initial value from a Tensor or Python object.
        else:
          with ops.name_scope("Initializer"):
            initial_value = ops.convert_to_tensor(
                initial_value, name="initial_value", dtype=dtype)
          # pylint: disable=protected-access
          if (self._in_graph_mode and initial_value is not None and
              initial_value.op._get_control_flow_context() is not None):
            raise ValueError(
                "Initializer for variable %s is from inside a control-flow "
                "construct, such as a loop or conditional. When creating a "
                "variable inside a loop or conditional, use a lambda as the "
                "initializer." % name)
          # pylint: enable=protected-access
          self._handle = _eager_safe_variable_handle(
              shape=initial_value.get_shape(),
              dtype=initial_value.dtype.base_dtype,
              shared_name=handle_name,
              name=name,
              graph_mode=self._in_graph_mode)
          self._handle_device = (self._handle.device if self._in_graph_mode else
                                 context.get_default_context().device_name)
          self._shape = initial_value.get_shape()

        self._initial_value = initial_value if self._in_graph_mode else None
        self._handle_name = handle_name + ":0"
        self._dtype = initial_value.dtype.base_dtype
        self._constraint = constraint

        if self._in_graph_mode:
          with ops.name_scope("IsInitialized"):
            self._is_initialized_op = (
                gen_resource_variable_ops.var_is_initialized_op(self._handle))
          if initial_value is not None:
            with ops.name_scope("Assign") as n, ops.colocate_with(self._handle):
              self._initializer_op = (
                  gen_resource_variable_ops.assign_variable_op(
                      self._handle,
                      self._try_guard_against_uninitialized_dependencies(
                          initial_value),
                      name=n))
          with ops.name_scope("Read"), ops.colocate_with(self._handle):
            # Manually assign reads to the handle's device to avoid log
            # messages.
            with ops.device(self._handle_device):
              value = self._read_variable_op()
            self._graph_element = value
            if caching_device is not None:
              # Variables may be created in a tf.device() or ops.colocate_with()
              # context. At the same time, users would expect caching device to
              # be independent of this context, and/or would not expect the
              # current device context to be merged with the caching device
              # spec. Therefore we reset the colocation stack before creating
              # the cached value. Note that resetting the colocation stack will
              # also reset the device stack.
              with ops.colocate_with(None, ignore_existing=True):
                with ops.device(caching_device):
                  self._cached_value = array_ops.identity(value)
            else:
              self._cached_value = None
        else:
          gen_resource_variable_ops.assign_variable_op(self._handle,
                                                       initial_value)
          self._is_initialized_op = None
          self._initializer_op = None
          self._graph_element = None
          if caching_device:
            with ops.device(caching_device):
              self._cached_value = self._read_variable_op()
          else:
            self._cached_value = None

        if context.in_graph_mode():
          ops.add_to_collections(collections, self)
        elif ops.GraphKeys.GLOBAL_STEP in collections:
          ops.add_to_collections(ops.GraphKeys.GLOBAL_STEP, self)

    if not self._in_graph_mode:
      # After the handle has been created, set up a way to clean it up when
      # executing eagerly. We'll hold the only reference to the deleter, so that
      # when this object is garbage collected the deleter will be too. This
      # means ResourceVariables can be part of reference cycles without those
      # cycles being uncollectable, and means that no __del__ will be defined at
      # all in graph mode.
      self._handle_deleter = EagerResourceDeleter(
          handle=self._handle, handle_device=self._handle_device)
Example 15: _init_from_args
#......... (some code omitted here) .........
      which is the initial value for the Variable. The initial value must have
      a shape specified unless `validate_shape` is set to False. Can also be a
      callable with no argument that returns the initial value when called.
      (Note that initializer functions from init_ops.py must first be bound
      to a shape before being used here.)
    trainable: If `True`, the default, also adds the variable to the graph
      collection `GraphKeys.TRAINABLE_VARIABLES`. This collection is used as
      the default list of variables to use by the `Optimizer` classes.
    collections: List of graph collections keys. The new variable is added to
      these collections. Defaults to `[GraphKeys.GLOBAL_VARIABLES]`.
    validate_shape: Ignored. Provided for compatibility with tf.Variable.
    caching_device: Optional device string or function describing where the
      Variable should be cached for reading. Defaults to the Variable's
      device. If not `None`, caches on another device. Typical use is to
      cache on the device where the Ops using the Variable reside, to
      deduplicate copying through `Switch` and other conditional statements.
    name: Optional name for the variable. Defaults to `'Variable'` and gets
      uniquified automatically.
    dtype: If set, initial_value will be converted to the given type.
      If None, either the datatype will be kept (if initial_value is
      a Tensor) or float32 will be used (if it is a Python object convertible
      to a Tensor).

  Raises:
    ValueError: If the initial value is not specified, or does not have a
      shape and `validate_shape` is `True`.
  """
  if initial_value is None:
    raise ValueError("initial_value must be specified.")
  init_from_fn = callable(initial_value)

  if collections is None:
    collections = [ops.GraphKeys.GLOBAL_VARIABLES]
  if not isinstance(collections, (list, tuple, set)):
    raise ValueError(
        "collections argument to Variable constructor must be a list, tuple, "
        "or set. Got %s of type %s" % (collections, type(collections)))
  if trainable and ops.GraphKeys.TRAINABLE_VARIABLES not in collections:
    collections = list(collections) + [ops.GraphKeys.TRAINABLE_VARIABLES]
  self._save_slice_info = None
  with ops.control_dependencies(None):
    with ops.name_scope(name, "Variable", [] if init_from_fn else
                        [initial_value]) as name:
      # pylint: disable=protected-access
      true_name = ops._name_from_scope_name(name)
      if init_from_fn:
        # Use attr_scope and device(None) to simulate the behavior of
        # colocate_with when the variable we want to colocate with doesn't
        # yet exist.
        attr = attr_value_pb2.AttrValue(
            list=attr_value_pb2.AttrValue.ListValue(
                s=[compat.as_bytes("loc:@%s" % true_name)]))
        with ops.get_default_graph()._attr_scope({"_class": attr}):
          with ops.name_scope("Initializer"), ops.device(None):
            self._initial_value = ops.convert_to_tensor(
                initial_value(), name="initial_value", dtype=dtype)
          self._handle = gen_resource_variable_ops.var_handle_op(
              shape=self._initial_value.get_shape(),
              dtype=self._initial_value.dtype.base_dtype,
              shared_name=true_name, name=name)
      # pylint: enable=protected-access

      # Or get the initial value from a Tensor or Python object.
      else:
        self._initial_value = ops.convert_to_tensor(
            initial_value, name="initial_value", dtype=dtype)
        self._handle = gen_resource_variable_ops.var_handle_op(
            shape=self._initial_value.get_shape(),
            dtype=self._initial_value.dtype.base_dtype,
            shared_name=true_name, name=name)

      self._dtype = self._initial_value.dtype.base_dtype

      with ops.name_scope("IsInitialized"):
        self._is_initialized_op = (
            gen_resource_variable_ops.var_is_initialized_op(self._handle))
      if initial_value is not None:
        with ops.name_scope("Assign") as n, ops.colocate_with(self._handle):
          self._initialize_op = gen_resource_variable_ops.assign_variable_op(
              self._handle, self._initial_value, name=n)

      with ops.name_scope("Read"), ops.colocate_with(self._handle):
        # Manually assign reads to the handle's device to avoid log messages.
        with ops.device(self._handle.device):
          value = gen_resource_variable_ops.read_variable_op(
              self._handle, dtype=self._dtype)
        self._graph_element = value
        if caching_device is not None:
          # Variables may be created in a tf.device() or ops.colocate_with()
          # context. At the same time, users would expect caching device to be
          # independent of this context, and/or would not expect the current
          # device context to be merged with the caching device spec.
          # Therefore we reset the colocation stack before creating the cached
          # value. Note that resetting the colocation stack will also reset
          # the device stack.
          with ops.colocate_with(None, ignore_existing=True):
            with ops.device(caching_device):
              self._cached_value = array_ops.identity(value)
        else:
          self._cached_value = None

      ops.add_to_collections(collections, self)
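Whichever constructor variant above runs, downstream code reads the result back through the same collection API. A closing sketch with a resource variable, assuming TF 1.x graph mode via tf.compat.v1:

import tensorflow.compat.v1 as tf
tf.disable_eager_execution()

with tf.Graph().as_default() as g:
  v = tf.get_variable("v", shape=[], use_resource=True)
  # Registered via ops.add_to_collections inside _init_from_args.
  assert v in g.get_collection(tf.GraphKeys.GLOBAL_VARIABLES)
  assert v in g.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)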