This article collects typical usage examples of the layer_norm method from tensorflow.contrib.layers.python.layers in Python. If you are unsure what layers.layer_norm does or how to use it, the curated code examples below may help. You can also explore the containing module, tensorflow.contrib.layers.python.layers, for further details.
Shown below are 7 code examples of layers.layer_norm, ordered by popularity by default.
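Before the examples, here is a minimal standalone sketch of what layer_norm does, assuming TF 1.x with tf.contrib available; the tensor shape, activation, and scope name are illustrative assumptions, not taken from the examples below.

# A minimal sketch, assuming TF 1.x with tf.contrib; shapes and names are illustrative.
import tensorflow as tf
from tensorflow.contrib.layers.python.layers import layer_norm

inp = tf.random_normal([8, 32, 32, 64])        # [batch, height, width, channels], dummy data
out = layer_norm(inp,
                 activation_fn=tf.nn.relu,     # optional activation applied after normalization
                 scope='conv1_layer_norm')     # variables are created under this scope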
Example 1: normalize
# Required import: from tensorflow.contrib.layers.python import layers [as alias]
# Or: from tensorflow.contrib.layers.python.layers import layer_norm [as alias]
def normalize(inp, activation, reuse, scope):
    """Apply the configured normalization to the input.
    Args:
      inp: the input feature maps.
      activation: the activation function for this conv layer.
      reuse: whether to reuse the variables for the batch norm.
      scope: the label for this conv layer.
    Returns:
      The processed feature maps.
    """
    if FLAGS.norm == 'batch_norm':
        return tf_layers.batch_norm(inp, activation_fn=activation, reuse=reuse, scope=scope)
    elif FLAGS.norm == 'layer_norm':
        return tf_layers.layer_norm(inp, activation_fn=activation, reuse=reuse, scope=scope)
    elif FLAGS.norm == 'None':
        if activation is not None:
            return activation(inp)
        return inp
    else:
        raise ValueError('Please set a valid normalization type.')
## Loss functions
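For context, a hypothetical call site (not part of the original project): this normalize helper is typically applied right after a convolution inside a conv block, with FLAGS.norm selecting the normalization type at run time. The names conv_block, cweight, and bweight below are illustrative assumptions.

# Hypothetical usage sketch; assumes `import tensorflow as tf` and the normalize function above.
def conv_block(inp, cweight, bweight, reuse, scope, activation=tf.nn.relu):
    conv_out = tf.nn.conv2d(inp, cweight, strides=[1, 1, 1, 1], padding='SAME') + bweight
    return normalize(conv_out, activation, reuse, scope)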
Example 2: __init__
# Required import: from tensorflow.contrib.layers.python import layers [as alias]
# Or: from tensorflow.contrib.layers.python.layers import layer_norm [as alias]
def __init__(self, num_units, forget_bias=1.0, reuse_norm=False,
             input_size=None, activation=nn_ops.relu,
             layer_norm=True, norm_gain=1.0, norm_shift=0.0,
             loop_steps=1, decay_rate=0.9, learning_rate=0.5,
             dropout_keep_prob=1.0, dropout_prob_seed=None):
    if input_size is not None:
        logging.warn("%s: The input_size parameter is deprecated.", self)
    self._num_units = num_units
    self._activation = activation
    self._forget_bias = forget_bias
    self._reuse_norm = reuse_norm
    self._keep_prob = dropout_keep_prob
    self._seed = dropout_prob_seed
    self._layer_norm = layer_norm
    self._S = loop_steps          # number of inner fast-weight loop steps
    self._eta = learning_rate     # fast-weight learning rate
    self._lambda = decay_rate     # fast-weight decay rate
    self._g = norm_gain           # layer-norm gain
    self._b = norm_shift          # layer-norm shift
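The constructor above appears to configure a fast-weights style recurrent cell with optional layer normalization. A hypothetical instantiation sketch follows; the class name FastWeightsLNCell and the input shape are assumptions, not given in the snippet.

# Hypothetical usage; FastWeightsLNCell is an assumed name for the class owning __init__ above.
import tensorflow as tf

inputs = tf.random_normal([4, 10, 64])           # [batch, time, features], dummy data
cell = FastWeightsLNCell(num_units=128,
                         layer_norm=True,        # normalize pre-activations inside the cell
                         loop_steps=1,           # S: inner fast-weight iterations
                         decay_rate=0.9,         # lambda: fast-weight decay
                         learning_rate=0.5)      # eta: fast-weight learning rate
outputs, state = tf.nn.dynamic_rnn(cell, inputs, dtype=tf.float32)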
Example 3: normalize
# Required import: from tensorflow.contrib.layers.python import layers [as alias]
# Or: from tensorflow.contrib.layers.python.layers import layer_norm [as alias]
def normalize(inp, activation, reuse, scope):
    if FLAGS.norm == 'batch_norm':
        return tf_layers.batch_norm(inp, activation_fn=activation, reuse=reuse, scope=scope)
    elif FLAGS.norm == 'layer_norm':
        return tf_layers.layer_norm(inp, activation_fn=activation, reuse=reuse, scope=scope)
    elif FLAGS.norm == 'None':
        if activation is not None:
            return activation(inp)
        else:
            return inp
# Loss functions
Example 4: normalize
# Required import: from tensorflow.contrib.layers.python import layers [as alias]
# Or: from tensorflow.contrib.layers.python.layers import layer_norm [as alias]
def normalize(inp, activation, reuse, scope):
    if FLAGS.norm == 'batch_norm':
        return tf_layers.batch_norm(inp, activation_fn=activation, reuse=reuse, scope=scope)
    elif FLAGS.norm == 'layer_norm':
        return tf_layers.layer_norm(inp, activation_fn=activation, reuse=reuse, scope=scope)
    elif FLAGS.norm == 'None':
        if activation is not None:
            return activation(inp)
        else:
            return inp
## Loss functions
Example 5: normalize
# Required import: from tensorflow.contrib.layers.python import layers [as alias]
# Or: from tensorflow.contrib.layers.python.layers import layer_norm [as alias]
def normalize(inp, activation, reuse, scope):
    if FLAGS.norm == 'batch_norm':
        return tf_layers.batch_norm(inp, activation_fn=activation, reuse=reuse, scope=scope)
    elif FLAGS.norm == 'layer_norm':
        return tf_layers.layer_norm(inp, activation_fn=activation, reuse=reuse, scope=scope)
    elif FLAGS.norm == 'None':
        return activation(inp)
## Loss functions
Example 6: _norm
# Required import: from tensorflow.contrib.layers.python import layers [as alias]
# Or: from tensorflow.contrib.layers.python.layers import layer_norm [as alias]
def _norm(self, inp, scope=None):
    reuse = tf.get_variable_scope().reuse
    with vs.variable_scope(scope or "Norm") as scope:
        normalized = layer_norm(inp, reuse=reuse, scope=scope)
        return normalized
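A helper like _norm is typically called on each pre-activation inside the cell's recurrence. The following sketch is hypothetical; self._kernel and self._bias are assumed names that do not appear in the snippet, and `import tensorflow as tf` is assumed at module level.

# Hypothetical usage inside the same cell class as _norm above.
def _linear_with_norm(self, inputs, state):
    concat = tf.concat([inputs, state], axis=1)              # join input and previous state
    pre_act = tf.matmul(concat, self._kernel) + self._bias   # assumed linear map
    pre_act = self._norm(pre_act, scope="pre_activation")    # layer-normalize before activation
    return self._activation(pre_act)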
Example 7: normalize
# Required import: from tensorflow.contrib.layers.python import layers [as alias]
# Or: from tensorflow.contrib.layers.python.layers import layer_norm [as alias]
def normalize(inp, activation, reuse, scope):
    if FLAGS.norm == 'batch_norm':
        return tf_layers.batch_norm(inp, activation_fn=activation, reuse=reuse, scope=scope)
    elif FLAGS.norm == 'layer_norm':
        return tf_layers.layer_norm(inp, activation_fn=activation, reuse=reuse, scope=scope)
    elif FLAGS.norm == 'None':
        if activation is not None:
            return activation(inp)
        else:
            return inp
## Loss functions