This article collects typical usage examples of the Python method tensorflow.contrib.rnn.RNNCell. If you are wondering what rnn.RNNCell does, how to use it, or want to see it in context, the curated code samples below may help; you can also explore further usage examples from the containing module, tensorflow.contrib.rnn.
The following shows 15 code examples of rnn.RNNCell, sorted by popularity by default. Upvoting the examples you like or find useful helps the system recommend better Python code samples.
Example 1: get_rnn_cell_trainable_variables
# Required import: from tensorflow.contrib import rnn [as alias]
# Or: from tensorflow.contrib.rnn import RNNCell [as alias]
def get_rnn_cell_trainable_variables(cell):
    """Returns the list of trainable variables of an RNN cell.

    Args:
        cell: an instance of :tf_main:`RNNCell <nn/rnn_cell/RNNCell>`.

    Returns:
        list: trainable variables of the cell.
    """
    cell_ = cell
    while True:
        try:
            return cell_.trainable_variables
        except AttributeError:
            # Cell wrappers (e.g., `DropoutWrapper`) cannot directly access
            # `trainable_variables` because they do not initialize the
            # superclass (tf==v1.3), so fall back to the wrapped cell.
            cell_ = cell_._cell  # pylint: disable=protected-access
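A minimal usage sketch (not part of the quoted source), assuming TensorFlow 1.x with tf.contrib available; the cell sizes and placeholder shape are purely illustrative:

import tensorflow as tf
from tensorflow.contrib import rnn as contrib_rnn

# The cell's variables only exist once the cell has been applied.
base_cell = contrib_rnn.GRUCell(num_units=64)
wrapped_cell = contrib_rnn.DropoutWrapper(base_cell, input_keep_prob=0.9)

inputs = tf.placeholder(tf.float32, [None, 10, 32])  # [batch, time, features]
outputs, _ = tf.nn.dynamic_rnn(wrapped_cell, inputs, dtype=tf.float32)

# The wrapper may not expose `trainable_variables` itself (tf==v1.3),
# so the helper walks down to the inner GRUCell.
train_vars = get_rnn_cell_trainable_variables(wrapped_cell)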
Example 2: _get_single_cell
# Required import: from tensorflow.contrib import rnn [as alias]
# Or: from tensorflow.contrib.rnn import RNNCell [as alias]
def _get_single_cell(cell_type, num_units):
    """Constructs and returns a single `RNNCell`.

    Args:
        cell_type: Either a string identifying the `RNNCell` type or a subclass
            of `RNNCell`.
        num_units: The number of units in the `RNNCell`.

    Returns:
        An initialized `RNNCell`.

    Raises:
        ValueError: `cell_type` is an invalid `RNNCell` name.
        TypeError: `cell_type` is not a string or a subclass of `RNNCell`.
    """
    cell_type = _CELL_TYPES.get(cell_type, cell_type)
    if not cell_type or not issubclass(cell_type, contrib_rnn.RNNCell):
        raise ValueError('The supported cell types are {}; got {}'.format(
            list(_CELL_TYPES.keys()), cell_type))
    return cell_type(num_units=num_units)
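The helper relies on a module-level `_CELL_TYPES` registry and a `contrib_rnn` import that are not shown above. The sketch below is an assumed reconstruction of that context plus two illustrative calls; the exact registry contents may differ in the real module:

from tensorflow.contrib import rnn as contrib_rnn

# Assumed name -> cell class registry (the real module defines its own).
_CELL_TYPES = {
    'basic_rnn': contrib_rnn.BasicRNNCell,
    'lstm': contrib_rnn.LSTMCell,
    'gru': contrib_rnn.GRUCell,
}

gru_cell = _get_single_cell('gru', num_units=128)        # by registered name
lstm_cell = _get_single_cell(contrib_rnn.LSTMCell, 128)  # by RNNCell subclass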
Example 3: apply_dropout
# Required import: from tensorflow.contrib import rnn [as alias]
# Or: from tensorflow.contrib.rnn import RNNCell [as alias]
def apply_dropout(
        cell, input_keep_probability, output_keep_probability, random_seed=None):
    """Applies dropout to the outputs and inputs of `cell`.

    Args:
        cell: An `RNNCell`.
        input_keep_probability: Probability to keep inputs to `cell`. If `None`,
            no dropout is applied.
        output_keep_probability: Probability to keep outputs of `cell`. If
            `None`, no dropout is applied.
        random_seed: Seed for random dropout.

    Returns:
        An `RNNCell`, the result of applying the supplied dropouts to `cell`.
    """
    input_prob_none = input_keep_probability is None
    output_prob_none = output_keep_probability is None
    if input_prob_none and output_prob_none:
        return cell
    if input_prob_none:
        input_keep_probability = 1.0
    if output_prob_none:
        output_keep_probability = 1.0
    return contrib_rnn.DropoutWrapper(
        cell, input_keep_probability, output_keep_probability, random_seed)
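A short usage sketch (illustrative, not from the source), assuming TensorFlow 1.x and the `contrib_rnn` alias the function body uses (`from tensorflow.contrib import rnn as contrib_rnn`):

from tensorflow.contrib import rnn as contrib_rnn

cell = contrib_rnn.LSTMCell(num_units=256)

# Keep 90% of the inputs; `None` for the outputs means no output dropout.
cell = apply_dropout(cell, input_keep_probability=0.9,
                     output_keep_probability=None, random_seed=42)

# Passing None for both probabilities returns the cell unchanged.
cell = apply_dropout(cell, None, None)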
Example 4: __init__
# Required import: from tensorflow.contrib import rnn [as alias]
# Or: from tensorflow.contrib.rnn import RNNCell [as alias]
def __init__(self, prenet, attention_mechanism, rnn_cell, frame_projection,
             stop_projection, mask_finished=False):
    """Initializes decoder parameters.

    Args:
        prenet: A tensorflow fully connected layer acting as the decoder pre-net.
        attention_mechanism: A _BaseAttentionMechanism instance, useful to
            learn encoder-decoder alignments.
        rnn_cell: Instance of RNNCell, main body of the decoder.
        frame_projection: tensorflow fully connected layer with r * num_mels
            output units.
        stop_projection: tensorflow fully connected layer, expected to project
            to a scalar through a sigmoid activation.
        mask_finished: Boolean, whether to mask decoder frames after the
            <stop_token>.
    """
    super(TacotronDecoderCell, self).__init__()
    # Initialize decoder layers.
    self._prenet = prenet
    self._attention_mechanism = attention_mechanism
    self._cell = rnn_cell
    self._frame_projection = frame_projection
    self._stop_projection = stop_projection
    self._mask_finished = mask_finished
    self._attention_layer_size = self._attention_mechanism.values.get_shape()[-1].value
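A rough construction sketch, not taken from the repository: it assumes the full `TacotronDecoderCell` class is available and substitutes `tf.layers.Dense` layers for the project's own pre-net and projection modules and `tf.contrib.seq2seq.BahdanauAttention` for its attention mechanism; all sizes are illustrative.

import tensorflow as tf
from tensorflow.contrib import rnn, seq2seq

encoder_outputs = tf.placeholder(tf.float32, [None, None, 512])  # [batch, time, units]

decoder_cell = TacotronDecoderCell(
    prenet=tf.layers.Dense(256, activation=tf.nn.relu),           # stand-in pre-net
    attention_mechanism=seq2seq.BahdanauAttention(128, encoder_outputs),
    rnn_cell=rnn.MultiRNNCell([rnn.LSTMCell(1024) for _ in range(2)]),
    frame_projection=tf.layers.Dense(80 * 2),                     # r * num_mels outputs
    stop_projection=tf.layers.Dense(1, activation=tf.nn.sigmoid),
    mask_finished=False)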
Example 5: __init__
# Required import: from tensorflow.contrib import rnn [as alias]
# Or: from tensorflow.contrib.rnn import RNNCell [as alias]
def __init__(self, is_training, attention_mechanism, rnn_cell,
             frame_projection=None, stop_projection=None):
    """Initializes decoder parameters.

    Args:
        is_training: Boolean, whether the decoder is used in training mode.
        attention_mechanism: A _BaseAttentionMechanism instance, useful to
            learn encoder-decoder alignments.
        rnn_cell: Instance of RNNCell, main body of the decoder.
        frame_projection: tensorflow fully connected layer with r * num_mels
            output units.
        stop_projection: tensorflow fully connected layer, expected to project
            to a scalar through a sigmoid activation.
    """
    super(TacotronDecoderWrapper, self).__init__()
    # Initialize decoder layers.
    self._training = is_training
    self._attention_mechanism = attention_mechanism
    self._cell = rnn_cell
    self._frame_projection = frame_projection
    self._stop_projection = stop_projection
    self._attention_layer_size = self._attention_mechanism.values.get_shape()[-1].value
Example 6: __init__
# Required import: from tensorflow.contrib import rnn [as alias]
# Or: from tensorflow.contrib.rnn import RNNCell [as alias]
def __init__(self, prenet, attention_mechanism, rnn_cell, frame_projection,
             stop_projection):
    """Initializes decoder parameters.

    Args:
        prenet: A tensorflow fully connected layer acting as the decoder pre-net.
        attention_mechanism: A _BaseAttentionMechanism instance, useful to
            learn encoder-decoder alignments.
        rnn_cell: Instance of RNNCell, main body of the decoder.
        frame_projection: tensorflow fully connected layer with r * num_mels
            output units.
        stop_projection: tensorflow fully connected layer, expected to project
            to a scalar through a sigmoid activation.
    """
    super(TacotronDecoderCell, self).__init__()
    # Initialize decoder layers.
    self._prenet = prenet
    self._attention_mechanism = attention_mechanism
    self._cell = rnn_cell
    self._frame_projection = frame_projection
    self._stop_projection = stop_projection
    self._attention_layer_size = self._attention_mechanism.values.get_shape()[-1].value
Example 7: get_rnn_cell_trainable_variables
# Required import: from tensorflow.contrib import rnn [as alias]
# Or: from tensorflow.contrib.rnn import RNNCell [as alias]
def get_rnn_cell_trainable_variables(cell):
    """Returns the list of trainable variables of an RNN cell.

    Args:
        cell: an instance of :tf_main:`RNNCell <nn/rnn_cell/RNNCell>`.

    Returns:
        list: trainable variables of the cell.
    """
    cell_ = cell
    while True:
        try:
            return cell_.trainable_variables
        except AttributeError:
            # Cell wrappers (e.g., `DropoutWrapper`) cannot directly access
            # `trainable_variables` because they do not initialize the
            # superclass (tf==v1.3), so fall back to the wrapped cell.
            cell_ = cell_._cell  # pylint: disable=protected-access
Example 8: __init__
# Required import: from tensorflow.contrib import rnn [as alias]
# Or: from tensorflow.contrib.rnn import RNNCell [as alias]
def __init__(self, prenet, attention_mechanism, rnn_cell, frame_projection,
             stop_projection):
    """Initializes decoder parameters.

    Args:
        prenet: A tensorflow fully connected layer acting as the decoder pre-net.
        attention_mechanism: A _BaseAttentionMechanism instance, useful to
            learn encoder-decoder alignments.
        rnn_cell: Instance of RNNCell, main body of the decoder.
        frame_projection: tensorflow fully connected layer with r * num_mels
            output units.
        stop_projection: tensorflow fully connected layer, expected to project
            to a scalar through a sigmoid activation.
    """
    super(TacotronDecoderCell, self).__init__()
    # Initialize decoder layers.
    self._prenet = prenet
    self._attention_mechanism = attention_mechanism
    self._cell = rnn_cell
    self._frame_projection = frame_projection
    self._stop_projection = stop_projection
    self._attention_layer_size = self._attention_mechanism.values.get_shape()[-1].value
Example 9: construct_rnn_cell
# Required import: from tensorflow.contrib import rnn [as alias]
# Or: from tensorflow.contrib.rnn import RNNCell [as alias]
def construct_rnn_cell(num_units, cell_type='basic_rnn',
                       dropout_keep_probabilities=None):
    """Constructs cells, applies dropout and assembles a `MultiRNNCell`.

    The cell type chosen by DynamicRNNEstimator.__init__() is the same as
    returned by this function when called with the same arguments.

    Args:
        num_units: A single `int` or a list/tuple of `int`s. The size of the
            `RNNCell`s.
        cell_type: A string identifying the `RNNCell` type or a subclass of
            `RNNCell`.
        dropout_keep_probabilities: a list of dropout probabilities or `None`.
            If a list is given, it must have length `len(num_units) + 1`.

    Returns:
        An initialized `RNNCell`.
    """
    if not isinstance(num_units, (list, tuple)):
        num_units = (num_units,)
    cells = [_get_single_cell(cell_type, n) for n in num_units]
    if dropout_keep_probabilities:
        cells = apply_dropout(cells, dropout_keep_probabilities)
    if len(cells) == 1:
        return cells[0]
    return contrib_rnn.MultiRNNCell(cells)
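A usage sketch (illustrative only), assuming the `_get_single_cell` and list-based `apply_dropout` helpers from Examples 2 and 10 are in scope and that 'lstm' and 'gru' are registered cell names:

# Two stacked LSTM layers assembled into a MultiRNNCell; the keep-probability
# list has len(num_units) + 1 entries (one per cell input, one for the output).
cell = construct_rnn_cell(num_units=[128, 128],
                          cell_type='lstm',
                          dropout_keep_probabilities=[1.0, 0.8, 1.0])

# A single int builds one cell and returns it directly (no MultiRNNCell).
single_cell = construct_rnn_cell(num_units=64, cell_type='gru')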
Example 10: apply_dropout
# Required import: from tensorflow.contrib import rnn [as alias]
# Or: from tensorflow.contrib.rnn import RNNCell [as alias]
def apply_dropout(cells, dropout_keep_probabilities, random_seed=None):
    """Applies dropout to the outputs and inputs of `cells`.

    Args:
        cells: A list of `RNNCell`s.
        dropout_keep_probabilities: a list whose elements are either floats in
            `[0.0, 1.0]` or `None`. It must have length one greater than
            `cells`.
        random_seed: Seed for random dropout.

    Returns:
        A list of `RNNCell`s, the result of applying the supplied dropouts.

    Raises:
        ValueError: If `len(dropout_keep_probabilities) != len(cells) + 1`.
    """
    if len(dropout_keep_probabilities) != len(cells) + 1:
        raise ValueError(
            'The number of dropout probabilities must be one greater than the '
            'number of cells. Got {} cells and {} dropout probabilities.'.format(
                len(cells), len(dropout_keep_probabilities)))
    wrapped_cells = [
        contrib_rnn.DropoutWrapper(cell, prob, 1.0, seed=random_seed)
        for cell, prob in zip(cells[:-1], dropout_keep_probabilities[:-2])
    ]
    wrapped_cells.append(
        contrib_rnn.DropoutWrapper(cells[-1], dropout_keep_probabilities[-2],
                                   dropout_keep_probabilities[-1]))
    return wrapped_cells
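A calling sketch (not from the source), assuming TensorFlow 1.x; note that the probability list is one element longer than the cell list:

from tensorflow.contrib import rnn as contrib_rnn

cells = [contrib_rnn.GRUCell(128), contrib_rnn.GRUCell(128)]

# Three probabilities for two cells: inputs of cell 0, inputs of cell 1,
# and the outputs of the last cell.
wrapped = apply_dropout(cells, [0.9, 0.9, 0.8], random_seed=7)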
Example 11: _to_rnn_cell
# Required import: from tensorflow.contrib import rnn [as alias]
# Or: from tensorflow.contrib.rnn import RNNCell [as alias]
def _to_rnn_cell(cell_or_type, num_units, num_layers):
    """Constructs and returns an `RNNCell`.

    Args:
        cell_or_type: Either a string identifying the `RNNCell` type, a
            subclass of `RNNCell` or an instance of an `RNNCell`.
        num_units: The number of units in the `RNNCell`.
        num_layers: The number of layers in the RNN.

    Returns:
        An initialized `RNNCell`.

    Raises:
        ValueError: `cell_or_type` is an invalid `RNNCell` name.
        TypeError: `cell_or_type` is not a string or a subclass of `RNNCell`.
    """
    if isinstance(cell_or_type, contrib_rnn.RNNCell):
        return cell_or_type
    if isinstance(cell_or_type, str):
        cell_or_type = _CELL_TYPES.get(cell_or_type)
        if cell_or_type is None:
            raise ValueError('The supported cell types are {}; got {}'.format(
                list(_CELL_TYPES.keys()), cell_or_type))
    if not issubclass(cell_or_type, contrib_rnn.RNNCell):
        raise TypeError(
            'cell_or_type must be a subclass of RNNCell or one of {}.'.format(
                list(_CELL_TYPES.keys())))
    cell = cell_or_type(num_units=num_units)
    if num_layers > 1:
        cell = contrib_rnn.MultiRNNCell(
            [cell] * num_layers, state_is_tuple=True)
    return cell
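A sketch of the three accepted forms of `cell_or_type` (illustrative calls, assuming the module's `_CELL_TYPES` registry includes 'lstm'):

from tensorflow.contrib import rnn as contrib_rnn

# 1) An existing RNNCell instance is returned unchanged
#    (num_units and num_layers are ignored in that case).
cell = _to_rnn_cell(contrib_rnn.GRUCell(64), num_units=64, num_layers=1)

# 2) A registered name builds a fresh cell; num_layers > 1 stacks it.
cell = _to_rnn_cell('lstm', num_units=128, num_layers=2)

# 3) An RNNCell subclass works the same way as a registered name.
cell = _to_rnn_cell(contrib_rnn.GRUCell, num_units=128, num_layers=1)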
Example 12: __init__
# Required import: from tensorflow.contrib import rnn [as alias]
# Or: from tensorflow.contrib.rnn import RNNCell [as alias]
def __init__(self, cell, mem_size, embed_size, max_n_valid_indices):
    """Constructs a `ResidualWrapper` for `cell`.

    Args:
        cell: An instance of `RNNCell`.
        mem_size: size of the memory.
        embed_size: the size/dimension of the embedding in each memory location.
        max_n_valid_indices: maximum number of valid_indices.
    """
    self._cell = cell
    self._mem_size = mem_size
    self._embed_size = embed_size
    self._max_n_valid_indices = max_n_valid_indices
Example 13: define_rnn_cell
# Required import: from tensorflow.contrib import rnn [as alias]
# Or: from tensorflow.contrib.rnn import RNNCell [as alias]
def define_rnn_cell(cell_class, num_units, num_layers=1, keep_prob=1.0,
                    input_keep_prob=None, output_keep_prob=None):
    if input_keep_prob is None:
        input_keep_prob = keep_prob
    if output_keep_prob is None:
        output_keep_prob = keep_prob

    cells = []
    for _ in range(num_layers):
        if cell_class == 'GRU':
            cell = GRUCell(num_units=num_units)
        elif cell_class == 'LSTM':
            cell = LSTMCell(num_units=num_units)
        else:
            cell = RNNCell(num_units=num_units)

        # Dropout is only attached when keep_prob < 1.0.
        if keep_prob < 1.0:
            cell = DropoutWrapper(cell=cell, input_keep_prob=input_keep_prob,
                                  output_keep_prob=output_keep_prob)
        cells.append(cell)

    if len(cells) > 1:
        final_cell = MultiRNNCell(cells)
    else:
        final_cell = cells[0]
    return final_cell
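A usage sketch (illustrative), assuming `GRUCell`, `LSTMCell`, `RNNCell`, `DropoutWrapper`, and `MultiRNNCell` were imported from `tensorflow.contrib.rnn`, as the unqualified names suggest. Because the fallback branch instantiates the abstract `RNNCell` directly, `cell_class` is in practice expected to be 'GRU' or 'LSTM':

from tensorflow.contrib.rnn import (GRUCell, LSTMCell, RNNCell,
                                    DropoutWrapper, MultiRNNCell)

# Two GRU layers with dropout on layer inputs and outputs.
cell = define_rnn_cell('GRU', num_units=256, num_layers=2, keep_prob=0.8)

# A single LSTM layer; dropout on inputs only (outputs kept with prob 1.0).
cell = define_rnn_cell('LSTM', num_units=512, keep_prob=0.9,
                       output_keep_prob=1.0)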
Example 14: __init__
# Required import: from tensorflow.contrib import rnn [as alias]
# Or: from tensorflow.contrib.rnn import RNNCell [as alias]
def __init__(self, cell, attention_mechanism, dropout, attn_cell_config,
             num_proj, dtype=tf.float32):
    """
    Args:
        cell: (RNNCell)
        attention_mechanism: (AttentionMechanism)
        dropout: (tf.float)
        attn_cell_config: (dict) hyper params
        num_proj: (int) size of the output projection
        dtype: (tf.DType) dtype of the cell, defaults to tf.float32
    """
    # variables and tensors
    self._cell = cell
    self._attention_mechanism = attention_mechanism
    self._dropout = dropout

    # hyperparameters and shapes
    self._n_channels = self._attention_mechanism._n_channels
    self._dim_e = attn_cell_config["dim_e"]
    self._dim_o = attn_cell_config["dim_o"]
    self._num_units = attn_cell_config["num_units"]
    self._dim_embeddings = attn_cell_config["dim_embeddings"]
    self._num_proj = num_proj
    self._dtype = dtype

    # for RNNCell
    self._state_size = AttentionState(self._cell._state_size, self._dim_o)
Example 15: __init__
# Required import: from tensorflow.contrib import rnn [as alias]
# Or: from tensorflow.contrib.rnn import RNNCell [as alias]
def __init__(self, cell: RNNCell, prenets: Tuple[PreNet]):
    super(DecoderPreNetWrapper, self).__init__()
    self._cell = cell
    self.prenets = prenets