This article collects typical usage examples of the Python method tensorflow.keras.layers.multiply. If you have been wondering what layers.multiply does, how to call it, and what it looks like in practice, the curated examples below may help. You can also explore the tensorflow.keras.layers module for related methods.

The following shows 8 code examples of layers.multiply, sorted by popularity by default.
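Before the examples, here is a minimal sketch of what layers.multiply itself does: it takes a list of same-shaped tensors and returns their element-wise product (the shapes below are illustrative, not from any of the examples).

import tensorflow as tf
from tensorflow.keras import layers

a = layers.Input(shape=(8, 8, 16))
b = layers.Input(shape=(8, 8, 16))

# layers.multiply is the functional interface to the Multiply layer.
out = layers.multiply([a, b])

model = tf.keras.Model(inputs=[a, b], outputs=out)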
Example 1: spatial_squeeze_excite_block

# Required module: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import multiply [as alias]
def spatial_squeeze_excite_block(input_tensor):
    """ Create a spatial squeeze-excite block

    Args:
        input_tensor: input Keras tensor

    Returns: a Keras tensor

    References
    -   [Concurrent Spatial and Channel Squeeze & Excitation in Fully Convolutional Networks](https://arxiv.org/abs/1803.02579)
    """
    # A 1x1 sigmoid convolution produces a per-location attention map.
    se = Conv2D(1, (1, 1), activation='sigmoid', use_bias=False,
                kernel_initializer='he_normal')(input_tensor)

    x = multiply([input_tensor, se])
    return x
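A quick usage sketch for this block (the input shape is illustrative; it assumes Conv2D and multiply are imported from tensorflow.keras.layers as noted above):

from tensorflow.keras.layers import Input
from tensorflow.keras.models import Model

inp = Input(shape=(32, 32, 64))
out = spatial_squeeze_excite_block(inp)  # same shape as inp, re-weighted per spatial location
model = Model(inp, out)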
Example 2: spatial_squeeze_excite_block

# Required module: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import multiply [as alias]
def spatial_squeeze_excite_block(input):
    ''' Create a spatial squeeze-excite block

    Args:
        input: input tensor

    Returns: a keras tensor

    References
    -   [Concurrent Spatial and Channel Squeeze & Excitation in Fully Convolutional Networks](https://arxiv.org/abs/1803.02579)
    '''
    # The 1x1 sigmoid convolution applied to the input yields the spatial attention map.
    se = Conv2D(1, (1, 1), activation='sigmoid', use_bias=False,
                kernel_initializer='he_normal')(input)

    x = multiply([input, se])
    return x
Example 3: squeeze_excite_block

# Required module: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import multiply [as alias]
def squeeze_excite_block(input, ratio=16):
    ''' Create a channel-wise squeeze-excite block

    Args:
        input: input tensor
        ratio: reduction ratio of the bottleneck Dense layer

    Returns: a keras tensor

    References
    -   [Squeeze and Excitation Networks](https://arxiv.org/abs/1709.01507)
    '''
    init = input
    filters = init.shape[-1]  # channel count, assuming channels_last data format
    se_shape = (1, 1, filters)

    se = GlobalAveragePooling2D()(init)
    se = Reshape(se_shape)(se)
    se = Dense(filters // ratio, activation='relu', kernel_initializer='he_normal', use_bias=False)(se)
    se = Dense(filters, activation='sigmoid', kernel_initializer='he_normal', use_bias=False)(se)

    x = multiply([init, se])
    return x
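A brief usage sketch (illustrative shapes; it assumes GlobalAveragePooling2D, Reshape, Dense, and multiply are imported from tensorflow.keras.layers):

from tensorflow.keras.layers import Input
from tensorflow.keras.models import Model

inp = Input(shape=(32, 32, 64))
out = squeeze_excite_block(inp, ratio=16)  # each channel scaled by a learned gate in (0, 1)
model = Model(inp, out)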
Example 4: squeeze_excite_block

# Required module: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import multiply [as alias]
def squeeze_excite_block(input_tensor, ratio=16):
    """ Create a channel-wise squeeze-excite block

    Args:
        input_tensor: input Keras tensor
        ratio: reduction ratio of the bottleneck Dense layer

    Returns: a Keras tensor

    References
    -   [Squeeze and Excitation Networks](https://arxiv.org/abs/1709.01507)
    """
    init = input_tensor
    channel_axis = 1 if K.image_data_format() == "channels_first" else -1
    # _tensor_shape is a small helper defined elsewhere in the source module;
    # it returns the static shape of the tensor.
    filters = _tensor_shape(init)[channel_axis]
    se_shape = (1, 1, filters)

    se = GlobalAveragePooling2D()(init)
    se = Reshape(se_shape)(se)
    se = Dense(filters // ratio, activation='relu', kernel_initializer='he_normal', use_bias=False)(se)
    se = Dense(filters, activation='sigmoid', kernel_initializer='he_normal', use_bias=False)(se)

    if K.image_data_format() == 'channels_first':
        se = Permute((3, 1, 2))(se)

    x = multiply([init, se])
    return x
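Note the Permute at the end: GlobalAveragePooling2D followed by Reshape always produces a (1, 1, filters) excitation tensor, so when the backend uses channels_first it is permuted to (filters, 1, 1) before the broadcasted multiply against the input.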
Example 5: squeeze_excitation_layer

# Required module: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import multiply [as alias]
def squeeze_excitation_layer(self, x, out_dim):
    '''
    SE module performs inter-channel weighting.
    '''
    # self.ratio is the bottleneck reduction factor; self.activation applies
    # a configurable activation (the class default, or 'sigmoid' on request).
    squeeze = GlobalAveragePooling2D()(x)

    excitation = Dense(units=out_dim // self.ratio)(squeeze)
    excitation = self.activation(excitation)
    excitation = Dense(units=out_dim)(excitation)
    excitation = self.activation(excitation, 'sigmoid')
    excitation = Reshape((1, 1, out_dim))(excitation)

    scale = multiply([x, excitation])
    return scale
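Example 5 is a method, so it assumes an enclosing class that provides self.ratio and self.activation. A minimal illustrative sketch of that context (the class name, ratio default, and activation helper are assumptions, not from the source):

from tensorflow.keras.layers import Activation

class SEBlockBuilder:
    # Illustrative context only; the real class in the source defines more.
    def __init__(self, ratio=16):
        self.ratio = ratio

    def activation(self, x, name='relu'):
        # Helper assumed by squeeze_excitation_layer: applies the named activation.
        return Activation(name)(x)

The squeeze_excitation_layer method above would then be defined on this class.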
Example 6: squeeze_excite_block

# Required module: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import multiply [as alias]
def squeeze_excite_block(input, ratio=16):
    ''' Create a channel-wise squeeze-excite block

    Args:
        input: input tensor
        ratio: reduction ratio of the bottleneck Dense layer

    Returns: a keras tensor

    References
    -   [Squeeze and Excitation Networks](https://arxiv.org/abs/1709.01507)
    '''
    init = input
    channel_axis = 1 if K.image_data_format() == "channels_first" else -1
    filters = K.int_shape(init)[channel_axis]  # static channel count
    se_shape = (1, 1, filters)

    se = GlobalAveragePooling2D()(init)
    se = Reshape(se_shape)(se)
    se = Dense(filters // ratio, activation='relu', kernel_initializer='he_normal', use_bias=False)(se)
    se = Dense(filters, activation='sigmoid', kernel_initializer='he_normal', use_bias=False)(se)

    if K.image_data_format() == 'channels_first':
        se = Permute((3, 1, 2))(se)

    x = multiply([init, se])
    return x
Example 7: spatial_squeeze_excite_block

# Required module: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import multiply [as alias]
def spatial_squeeze_excite_block(input):
    ''' Create a spatial squeeze-excite block

    Args:
        input: input tensor

    Returns: a keras tensor

    References
    -   [Concurrent Spatial and Channel Squeeze & Excitation in Fully Convolutional Networks](https://arxiv.org/abs/1803.02579)
    '''
    # The 1x1 sigmoid convolution applied to the input yields the spatial attention map.
    se = Conv2D(1, (1, 1), activation='sigmoid', use_bias=False,
                kernel_initializer='he_normal')(input)

    x = multiply([input, se])
    return x
Example 8: add_prior

# Required module: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import multiply [as alias]
# (also uses: import sys, import tensorflow as tf, tensorflow.keras.layers as KL,
#  and Model from tensorflow.keras.models)
def add_prior(input_model,
              prior_shape,
              name='prior_model',
              prefix=None,
              use_logp=True,
              final_pred_activation='softmax',
              add_prior_layer_reg=0):
    """
    Append post-prior layer to a given model
    """

    # naming
    model_name = name
    if prefix is None:
        prefix = model_name

    # prior input layer
    prior_input_name = '%s-input' % prefix
    prior_tensor = KL.Input(shape=prior_shape, name=prior_input_name)
    prior_tensor_input = prior_tensor
    like_tensor = input_model.output

    # operation varies depending on whether we log() prior or not.
    if use_logp:
        # name = '%s-log' % prefix
        # prior_tensor = KL.Lambda(_log_layer_wrap(add_prior_layer_reg), name=name)(prior_tensor)
        print("Breaking change: use_logp option now requires log input!", file=sys.stderr)
        merge_op = KL.add
    else:
        # using sigmoid to get the likelihood values between 0 and 1
        # note: they won't add up to 1.
        name = '%s_likelihood_sigmoid' % prefix
        like_tensor = KL.Activation('sigmoid', name=name)(like_tensor)
        merge_op = KL.multiply

    # merge the likelihood and prior layers into posterior layer
    name = '%s_posterior' % prefix
    post_tensor = merge_op([prior_tensor, like_tensor], name=name)

    # output prediction layer
    # we use a softmax to compute P(L_x|I) where x is each location
    pred_name = '%s_prediction' % prefix
    if final_pred_activation == 'softmax':
        assert use_logp, 'cannot do softmax when adding prior via P()'
        print("using final_pred_activation %s for %s" % (final_pred_activation, model_name))
        softmax_lambda_fcn = lambda x: tf.keras.activations.softmax(x, axis=-1)
        pred_tensor = KL.Lambda(softmax_lambda_fcn, name=pred_name)(post_tensor)
    else:
        pred_tensor = KL.Activation('linear', name=pred_name)(post_tensor)

    # create the model; compiling is left to the caller
    model_inputs = [*input_model.inputs, prior_tensor_input]
    model = Model(inputs=model_inputs, outputs=[pred_tensor], name=model_name)
    return model
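A usage sketch for add_prior, with illustrative shapes (as in the source module, it assumes import sys, import tensorflow as tf, from tensorflow.keras import layers as KL, and from tensorflow.keras.models import Model):

import sys
import tensorflow as tf
from tensorflow.keras import layers as KL
from tensorflow.keras.models import Model

# A toy model emitting per-location log-likelihoods (illustrative only).
inp = KL.Input(shape=(32, 32, 4))
log_likelihood = KL.Conv2D(4, (1, 1))(inp)
base_model = Model(inp, log_likelihood)

# With use_logp=True the prior input must already be in log space; the
# returned model takes [image, log_prior] and outputs the posterior prediction.
posterior_model = add_prior(base_model, prior_shape=(32, 32, 4))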