This page collects typical usage examples of the Python method tensorflow.contrib.layers.python.layers.layer_norm. If you are unsure what layers.layer_norm does, how to call it, or what it looks like in practice, the curated code examples below should help. You can also read more about the containing module, tensorflow.contrib.layers.python.layers.
Below are 7 code examples of layers.layer_norm, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
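Before the project-specific examples, here is a minimal, self-contained sketch of calling layer_norm directly (TF 1.x with tf.contrib assumed; the tensor shape and scope name are illustrative and not taken from the projects below):

import tensorflow as tf
from tensorflow.contrib.layers.python.layers import layer_norm

# Normalize each sample over its feature axis, then apply ReLU.
x = tf.placeholder(tf.float32, shape=[None, 64])
y = layer_norm(x, activation_fn=tf.nn.relu, scope='ln')

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # sess.run(y, feed_dict={x: ...}) returns the normalized, activated features.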
Example 1: normalize
# Module to import: from tensorflow.contrib.layers.python import layers [as alias]
# Or: from tensorflow.contrib.layers.python.layers import layer_norm [as alias]
def normalize(inp, activation, reuse, scope):
    """Apply the configured normalization (and activation) to the input.
    Args:
      inp: the input feature maps.
      activation: the activation function for this conv layer.
      reuse: whether to reuse the normalization variables.
      scope: the variable scope label for this conv layer.
    Returns:
      The processed feature maps.
    """
    if FLAGS.norm == 'batch_norm':
        return tf_layers.batch_norm(inp, activation_fn=activation, reuse=reuse, scope=scope)
    elif FLAGS.norm == 'layer_norm':
        return tf_layers.layer_norm(inp, activation_fn=activation, reuse=reuse, scope=scope)
    elif FLAGS.norm == 'None':
        if activation is not None:
            return activation(inp)
        return inp
    else:
        raise ValueError('Please set a valid normalization: batch_norm, layer_norm, or None.')
## Loss functions
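The snippet above (and the near-identical Examples 3-5 and 7 below) assumes a module-level FLAGS object and a tf_layers alias for the contrib layers package. A hedged sketch of that surrounding context and one call site follows; the flag default, tensor shape, and scope name are illustrative assumptions, not taken from the original project.

import tensorflow as tf
from tensorflow.contrib.layers.python import layers as tf_layers

flags = tf.app.flags
flags.DEFINE_string('norm', 'layer_norm', 'batch_norm, layer_norm, or None')
FLAGS = flags.FLAGS

# Feature maps from a conv layer; normalize them and apply ReLU.
inp = tf.placeholder(tf.float32, [None, 14, 14, 32])
out = normalize(inp, activation=tf.nn.relu, reuse=False, scope='conv1_norm')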
Example 2: __init__
# Module to import: from tensorflow.contrib.layers.python import layers [as alias]
# Or: from tensorflow.contrib.layers.python.layers import layer_norm [as alias]
def __init__(self, num_units, forget_bias=1.0, reuse_norm=False,
             input_size=None, activation=nn_ops.relu,
             layer_norm=True, norm_gain=1.0, norm_shift=0.0,
             loop_steps=1, decay_rate=0.9, learning_rate=0.5,
             dropout_keep_prob=1.0, dropout_prob_seed=None):
    if input_size is not None:
        logging.warn("%s: The input_size parameter is deprecated.", self)
    self._num_units = num_units
    self._activation = activation
    self._forget_bias = forget_bias
    self._reuse_norm = reuse_norm
    self._keep_prob = dropout_keep_prob
    self._seed = dropout_prob_seed
    self._layer_norm = layer_norm
    self._S = loop_steps          # number of inner-loop steps
    self._eta = learning_rate     # inner-loop learning rate
    self._lambda = decay_rate     # inner-loop decay rate
    self._g = norm_gain           # initial gain (gamma) for layer normalization
    self._b = norm_shift          # initial shift (beta) for layer normalization
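The stored norm_gain and norm_shift are typically used as the initial values of layer_norm's gamma and beta variables. A hedged sketch of how a _norm helper in a cell like this might wire them up, following the pattern of tf.contrib's layer-normalized LSTM cell (this is not the original project's code):

from tensorflow.python.ops import init_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.contrib.layers.python.layers import layer_norm

def _norm(self, inp, scope):
    # Create gamma/beta initialized to the configured gain/shift, then let
    # layer_norm pick them up via reuse inside the same scope.
    shape = inp.get_shape()[-1:]
    gamma_init = init_ops.constant_initializer(self._g)
    beta_init = init_ops.constant_initializer(self._b)
    with vs.variable_scope(scope):
        vs.get_variable("gamma", shape=shape, initializer=gamma_init)
        vs.get_variable("beta", shape=shape, initializer=beta_init)
    return layer_norm(inp, reuse=True, scope=scope)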
Example 3: normalize
# Module to import: from tensorflow.contrib.layers.python import layers [as alias]
# Or: from tensorflow.contrib.layers.python.layers import layer_norm [as alias]
def normalize(inp, activation, reuse, scope):
    if FLAGS.norm == 'batch_norm':
        return tf_layers.batch_norm(inp, activation_fn=activation, reuse=reuse, scope=scope)
    elif FLAGS.norm == 'layer_norm':
        return tf_layers.layer_norm(inp, activation_fn=activation, reuse=reuse, scope=scope)
    elif FLAGS.norm == 'None':
        if activation is not None:
            return activation(inp)
        else:
            return inp
# Loss functions
Example 4: normalize
# Module to import: from tensorflow.contrib.layers.python import layers [as alias]
# Or: from tensorflow.contrib.layers.python.layers import layer_norm [as alias]
def normalize(inp, activation, reuse, scope):
    if FLAGS.norm == 'batch_norm':
        return tf_layers.batch_norm(inp, activation_fn=activation, reuse=reuse, scope=scope)
    elif FLAGS.norm == 'layer_norm':
        return tf_layers.layer_norm(inp, activation_fn=activation, reuse=reuse, scope=scope)
    elif FLAGS.norm == 'None':
        if activation is not None:
            return activation(inp)
        else:
            return inp
## Loss functions
Example 5: normalize
# Module to import: from tensorflow.contrib.layers.python import layers [as alias]
# Or: from tensorflow.contrib.layers.python.layers import layer_norm [as alias]
def normalize(inp, activation, reuse, scope):
    if FLAGS.norm == 'batch_norm':
        return tf_layers.batch_norm(inp, activation_fn=activation, reuse=reuse, scope=scope)
    elif FLAGS.norm == 'layer_norm':
        return tf_layers.layer_norm(inp, activation_fn=activation, reuse=reuse, scope=scope)
    elif FLAGS.norm == 'None':
        # Unlike the other examples, this variant assumes activation is not None.
        return activation(inp)
## Loss functions
Example 6: _norm
# Module to import: from tensorflow.contrib.layers.python import layers [as alias]
# Or: from tensorflow.contrib.layers.python.layers import layer_norm [as alias]
def _norm(self, inp, scope=None):
    reuse = tf.get_variable_scope().reuse
    with vs.variable_scope(scope or "Norm") as scope:
        normalized = layer_norm(inp, reuse=reuse, scope=scope)
        return normalized
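This helper picks up the reuse flag from the enclosing variable scope, so the layer-norm variables are created on the first call and shared on later calls made under a reusing scope. A hedged, standalone sketch of that behaviour follows; the scope names and feature size are illustrative assumptions.

import tensorflow as tf
from tensorflow.python.ops import variable_scope as vs
from tensorflow.contrib.layers.python.layers import layer_norm

def norm(inp, scope=None):
    reuse = tf.get_variable_scope().reuse
    with vs.variable_scope(scope or "Norm") as scope:
        return layer_norm(inp, reuse=reuse, scope=scope)

x = tf.placeholder(tf.float32, [None, 16])
with tf.variable_scope("cell"):
    y1 = norm(x)   # creates the layer-norm variables under cell/Norm
with tf.variable_scope("cell", reuse=True):
    y2 = norm(x)   # reuses the variables created above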
Example 7: normalize
# Module to import: from tensorflow.contrib.layers.python import layers [as alias]
# Or: from tensorflow.contrib.layers.python.layers import layer_norm [as alias]
def normalize(inp, activation, reuse, scope):
    if FLAGS.norm == 'batch_norm':
        return tf_layers.batch_norm(inp, activation_fn=activation, reuse=reuse, scope=scope)
    elif FLAGS.norm == 'layer_norm':
        return tf_layers.layer_norm(inp, activation_fn=activation, reuse=reuse, scope=scope)
    elif FLAGS.norm == 'None':
        if activation is not None:
            return activation(inp)
        else:
            return inp
## Loss functions