This page collects typical usage examples of the Python method tensorflow.keras.layers.Conv2DTranspose. If you are wondering what layers.Conv2DTranspose does, how to call it, or what real-world usage looks like, the curated code samples below should help. You can also read further about the containing module, tensorflow.keras.layers.
Fifteen code examples of layers.Conv2DTranspose are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code samples.
Example 1: tconv_layer
# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import Conv2DTranspose [as alias]
def tconv_layer(inputs,
                filters=32,
                kernel_size=3,
                strides=2,
                postfix=None):
    """Helper function to build a Conv2DTranspose-BN-ReLU layer."""
    x = Conv2DTranspose(filters=filters,
                        kernel_size=kernel_size,
                        strides=strides,
                        padding='same',
                        kernel_initializer='he_normal',
                        name='tconv_' + postfix)(inputs)
    x = BatchNormalization(name="bn_" + postfix)(x)
    x = Activation('relu', name='relu_' + postfix)(x)
    return x
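A minimal usage sketch (assuming tconv_layer above is in scope together with the imports below; the 16x16x64 input shape is purely illustrative):

import tensorflow as tf
from tensorflow.keras.layers import Input, Conv2DTranspose, BatchNormalization, Activation
from tensorflow.keras.models import Model

inputs = Input(shape=(16, 16, 64))
x = tconv_layer(inputs, filters=32, kernel_size=3, strides=2, postfix='1')
model = Model(inputs, x, name='tconv_demo')
model.summary()  # strides=2 with padding='same' doubles the spatial size to 32x32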
Example 2: expanding_layer_2D
# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import Conv2DTranspose [as alias]
def expanding_layer_2D(input, neurons, concatenate_link, ba_norm,
                       ba_norm_momentum):
    up = concatenate([Conv2DTranspose(neurons, (2, 2), strides=(2, 2),
                                      padding='same')(input), concatenate_link], axis=-1)
    conv1 = Conv2D(neurons, (3, 3), activation='relu', padding='same')(up)
    if ba_norm: conv1 = BatchNormalization(momentum=ba_norm_momentum)(conv1)
    conc1 = concatenate([up, conv1], axis=-1)
    conv2 = Conv2D(neurons, (3, 3), activation='relu', padding='same')(conc1)
    if ba_norm: conv2 = BatchNormalization(momentum=ba_norm_momentum)(conv2)
    conc2 = concatenate([up, conv2], axis=-1)
    return conc2
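A usage sketch for this expanding block (assumes the function above is in scope; the shapes are illustrative, with the skip-connection tensor at twice the spatial resolution of the decoder input):

import tensorflow as tf
from tensorflow.keras.layers import Input, Conv2D, Conv2DTranspose, BatchNormalization, concatenate
from tensorflow.keras.models import Model

low_res = Input(shape=(16, 16, 128))   # output of the deeper decoder level
skip = Input(shape=(32, 32, 64))       # matching contracting-path feature map
out = expanding_layer_2D(low_res, neurons=64, concatenate_link=skip,
                         ba_norm=True, ba_norm_momentum=0.99)
model = Model([low_res, skip], out)
print(model.output_shape)              # upsampled to 32x32 before the dense concatenations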
Example 3: expanding_layer_2D
# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import Conv2DTranspose [as alias]
def expanding_layer_2D(input, neurons, concatenate_link, ba_norm,
                       ba_norm_momentum):
    up = concatenate([Conv2DTranspose(neurons, (2, 2), strides=(2, 2),
                                      padding='same')(input), concatenate_link], axis=-1)
    conv1 = Conv2D(neurons, (3, 3), activation='relu', padding='same')(up)
    if ba_norm: conv1 = BatchNormalization(momentum=ba_norm_momentum)(conv1)
    conv2 = Conv2D(neurons, (3, 3), activation='relu', padding='same')(conv1)
    if ba_norm: conv2 = BatchNormalization(momentum=ba_norm_momentum)(conv2)
    shortcut = Conv2D(neurons, (1, 1), activation='relu', padding="same")(up)
    add_layer = add([shortcut, conv2])
    return add_layer
Example 4: trans_conv2d_bn
# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import Conv2DTranspose [as alias]
def trans_conv2d_bn(x, filters, num_row, num_col, padding='same', strides=(2, 2), name=None):
    '''
    2D Transposed Convolutional layers

    Arguments:
        x {keras layer} -- input layer
        filters {int} -- number of filters
        num_row {int} -- number of rows in filters
        num_col {int} -- number of columns in filters

    Keyword Arguments:
        padding {str} -- mode of padding (default: {'same'})
        strides {tuple} -- stride of convolution operation (default: {(2, 2)})
        name {str} -- name of the layer (default: {None})

    Returns:
        [keras layer] -- [output layer]
    '''
    x = Conv2DTranspose(filters, (num_row, num_col), strides=strides, padding=padding)(x)
    x = BatchNormalization(axis=3, scale=False)(x)
    return x
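A quick sketch of how this helper might be called (the 32x32x64 input is illustrative; assumes the imports below plus the function above):

import tensorflow as tf
from tensorflow.keras.layers import Input, Conv2DTranspose, BatchNormalization
from tensorflow.keras.models import Model

inp = Input(shape=(32, 32, 64))
out = trans_conv2d_bn(inp, filters=32, num_row=2, num_col=2)
model = Model(inp, out)
print(model.output_shape)  # (None, 64, 64, 32): strides=(2, 2) with 'same' padding doubles H and W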
Example 5: create_model
# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import Conv2DTranspose [as alias]
def create_model(self):
    print('[ImgDecoder] Starting create_model')
    dense = Dense(units=1024, name='p_img_dense')
    reshape = Reshape((1, 1, 1024))

    # for 64x64 img
    deconv1 = Conv2DTranspose(filters=128, kernel_size=4, strides=1, padding='valid', activation='relu')
    deconv2 = Conv2DTranspose(filters=64, kernel_size=5, strides=1, padding='valid', activation='relu', dilation_rate=3)
    deconv3 = Conv2DTranspose(filters=64, kernel_size=6, strides=1, padding='valid', activation='relu', dilation_rate=2)
    deconv4 = Conv2DTranspose(filters=32, kernel_size=5, strides=2, padding='valid', activation='relu', dilation_rate=1)
    deconv5 = Conv2DTranspose(filters=16, kernel_size=5, strides=1, padding='valid', activation='relu', dilation_rate=1)
    # deconv6 = Conv2DTranspose(filters=8, kernel_size=6, strides=2, padding='valid', activation='relu')
    deconv7 = Conv2DTranspose(filters=3, kernel_size=6, strides=1, padding='valid', activation='tanh')

    self.network = tf.keras.Sequential([
        dense,
        reshape,
        deconv1,
        deconv2,
        deconv3,
        deconv4,
        deconv5,
        deconv7],
        name='p_img')
    print('[ImgDecoder] Done with create_model')
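A rough standalone check of this decoder stack, rebuilt outside the class (a sketch only; the decoder class itself is not shown here, and the latent size below is just a placeholder):

import tensorflow as tf
from tensorflow.keras.layers import Dense, Reshape, Conv2DTranspose

decoder = tf.keras.Sequential([
    Dense(units=1024),
    Reshape((1, 1, 1024)),
    Conv2DTranspose(filters=128, kernel_size=4, strides=1, padding='valid', activation='relu'),
    Conv2DTranspose(filters=64, kernel_size=5, strides=1, padding='valid', activation='relu', dilation_rate=3),
    Conv2DTranspose(filters=64, kernel_size=6, strides=1, padding='valid', activation='relu', dilation_rate=2),
    Conv2DTranspose(filters=32, kernel_size=5, strides=2, padding='valid', activation='relu', dilation_rate=1),
    Conv2DTranspose(filters=16, kernel_size=5, strides=1, padding='valid', activation='relu', dilation_rate=1),
    Conv2DTranspose(filters=3, kernel_size=6, strides=1, padding='valid', activation='tanh'),
])

z = tf.zeros((1, 10))   # dummy latent vector; the latent size is not fixed by this snippet
img = decoder(z)
print(img.shape)        # expected (1, 64, 64, 3), per the "for 64x64 img" comment above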
Example 6: __init__
# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import Conv2DTranspose [as alias]
def __init__(self, up_scale, **kwargs):
    super(UpConvBlock, self).__init__(**kwargs)
    constant_features = 16
    # w_decay and weight_init come from the enclosing module in the original source
    k_reg = None if w_decay is None else l2(w_decay)
    features = []
    total_up_scale = 2 ** up_scale
    for i in range(up_scale):
        out_features = 1 if i == up_scale - 1 else constant_features
        if i == up_scale - 1:
            features.append(layers.Conv2D(
                filters=out_features, kernel_size=(1, 1), strides=(1, 1), padding='same',
                activation='relu', kernel_initializer=tf.initializers.TruncatedNormal(stddev=0.1),
                kernel_regularizer=k_reg, use_bias=True))  # tf.initializers.TruncatedNormal(mean=0.)
            features.append(layers.Conv2DTranspose(
                out_features, kernel_size=(total_up_scale, total_up_scale),
                strides=(2, 2), padding='same',
                kernel_initializer=tf.initializers.TruncatedNormal(stddev=0.1),
                kernel_regularizer=k_reg, use_bias=True))  # stddev=0.1
        else:
            features.append(layers.Conv2D(
                filters=out_features, kernel_size=(1, 1), strides=(1, 1), padding='same',
                activation='relu', kernel_initializer=weight_init,
                kernel_regularizer=k_reg, use_bias=True))
            features.append(layers.Conv2DTranspose(
                out_features, kernel_size=(total_up_scale, total_up_scale),
                strides=(2, 2), padding='same', use_bias=True,
                kernel_initializer=weight_init, kernel_regularizer=k_reg))
    self.features = keras.Sequential(features)
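A sketch of instantiating this block (assumes the full UpConvBlock class from the source is defined in the same script; w_decay and weight_init are module-level settings in the original file, so illustrative values are supplied here, and only the features stack built in __init__ is exercised because the class's call method is not shown):

import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
from tensorflow.keras.regularizers import l2

w_decay = None                                              # assumed module-level setting
weight_init = tf.initializers.TruncatedNormal(stddev=0.1)   # assumed module-level setting

block = UpConvBlock(up_scale=2)                # upsamples by 2**2 = 4x overall
y = block.features(tf.zeros((1, 32, 32, 8)))
print(y.shape)                                 # (1, 128, 128, 1): single-channel edge map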
Example 7: __init__
# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import Conv2DTranspose [as alias]
def __init__(self,
             dim2,
             classes,
             out_size,
             bn_eps,
             data_format="channels_last",
             **kwargs):
    super(SBDecoder, self).__init__(**kwargs)
    self.decode1 = SBDecodeBlock(
        channels=classes,
        out_size=((out_size[0] // 8, out_size[1] // 8) if out_size else None),
        bn_eps=bn_eps,
        data_format=data_format,
        name="decode1")
    self.decode2 = SBDecodeBlock(
        channels=classes,
        out_size=((out_size[0] // 4, out_size[1] // 4) if out_size else None),
        bn_eps=bn_eps,
        data_format=data_format,
        name="decode2")
    self.conv3c = conv1x1_block(
        in_channels=dim2,
        out_channels=classes,
        bn_eps=bn_eps,
        activation=(lambda: PReLU2(classes, data_format=data_format, name="activ")),
        data_format=data_format,
        name="conv3c")
    self.output_conv = nn.Conv2DTranspose(
        filters=classes,
        kernel_size=2,
        strides=2,
        padding="valid",
        output_padding=0,
        use_bias=False,
        data_format=data_format,
        name="output_conv")
    self.up = InterpolationBlock(
        scale_factor=2,
        out_size=out_size,
        data_format=data_format,
        name="up")
Example 8: __init__
# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import Conv2DTranspose [as alias]
def __init__(self,
             in_channels,
             out_channels,
             kernel_size,
             strides=1,
             padding=0,
             out_padding=0,
             dilation=1,
             groups=1,
             use_bias=True,
             data_format="channels_last",
             **kwargs):
    super(Deconv2d, self).__init__(**kwargs)
    assert (dilation == 1)
    assert (groups == 1)
    assert (in_channels is not None)
    if isinstance(padding, int):
        padding = (padding, padding)
    self.use_crop = (padding[0] > 0) or (padding[1] > 0)
    if self.use_crop:
        self.crop = nn.Cropping2D(
            cropping=padding,
            data_format=data_format,
            name="crop")
    self.conv = nn.Conv2DTranspose(
        filters=out_channels,
        kernel_size=kernel_size,
        strides=strides,
        padding="valid",
        output_padding=out_padding,
        data_format=data_format,
        dilation_rate=dilation,
        use_bias=use_bias,
        name="conv")
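The crop-after-deconv trick used above can be illustrated on its own: a PyTorch-style integer padding for a transposed convolution is emulated by running Conv2DTranspose with padding="valid" and then trimming the border with Cropping2D (a minimal standalone sketch, not part of the original source):

import tensorflow as tf
import tensorflow.keras.layers as nn

x = tf.zeros((1, 16, 16, 32))
y = nn.Conv2DTranspose(filters=64, kernel_size=4, strides=2, padding="valid",
                       output_padding=0, use_bias=False)(x)
y = nn.Cropping2D(cropping=(1, 1))(y)   # emulates padding=1 on both spatial dims
print(y.shape)                          # (1, 32, 32, 64), same as a stride-2 "same" deconv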
Example 9: CAE
# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import Conv2DTranspose [as alias]
def CAE(input_shape=(28, 28, 1), filters=[32, 64, 128, 10]):
    model = Sequential()
    if input_shape[0] % 8 == 0:
        pad3 = 'same'
    else:
        pad3 = 'valid'
    model.add(InputLayer(input_shape))
    model.add(Conv2D(filters[0], 5, strides=2, padding='same', activation='relu', name='conv1'))
    model.add(Conv2D(filters[1], 5, strides=2, padding='same', activation='relu', name='conv2'))
    model.add(Conv2D(filters[2], 3, strides=2, padding=pad3, activation='relu', name='conv3'))
    model.add(Flatten())
    model.add(Dense(units=filters[3], name='embedding'))
    model.add(Dense(units=filters[2] * int(input_shape[0] / 8) * int(input_shape[0] / 8), activation='relu'))
    model.add(Reshape((int(input_shape[0] / 8), int(input_shape[0] / 8), filters[2])))
    model.add(Conv2DTranspose(filters[1], 3, strides=2, padding=pad3, activation='relu', name='deconv3'))
    model.add(Conv2DTranspose(filters[0], 5, strides=2, padding='same', activation='relu', name='deconv2'))
    model.add(Conv2DTranspose(input_shape[2], 5, strides=2, padding='same', name='deconv1'))
    encoder = Model(inputs=model.input, outputs=model.get_layer('embedding').output)
    return model, encoder
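A minimal pretraining sketch for this convolutional autoencoder (assumes the function above plus the standard Keras imports it relies on; x_train below is random placeholder data standing in for your own dataset):

import numpy as np
from tensorflow.keras.models import Model, Sequential
from tensorflow.keras.layers import InputLayer, Conv2D, Conv2DTranspose, Dense, Flatten, Reshape

model, encoder = CAE(input_shape=(28, 28, 1), filters=[32, 64, 128, 10])
model.compile(optimizer='adam', loss='mse')

x_train = np.random.rand(16, 28, 28, 1).astype('float32')   # placeholder data
model.fit(x_train, x_train, epochs=1, batch_size=8)          # reconstruct the 28x28x1 input
features = encoder.predict(x_train)                          # 10-dimensional embeddings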
Example 10: decoder_layer
# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import Conv2DTranspose [as alias]
def decoder_layer(inputs,
                  paired_inputs,
                  filters=16,
                  kernel_size=3,
                  strides=2,
                  activation='relu',
                  instance_norm=True):
    """Builds a generic decoder layer made of Conv2DTranspose-IN-LeakyReLU.
    IN is optional, and LeakyReLU may be replaced by ReLU.

    Arguments (partial):
        inputs (tensor): the decoder layer input
        paired_inputs (tensor): the encoder layer output
            provided by the U-Net skip connection and
            concatenated to inputs.
    """
    conv = Conv2DTranspose(filters=filters,
                           kernel_size=kernel_size,
                           strides=strides,
                           padding='same')
    x = inputs
    if instance_norm:
        x = InstanceNormalization()(x)
    if activation == 'relu':
        x = Activation('relu')(x)
    else:
        x = LeakyReLU(alpha=0.2)(x)
    x = conv(x)
    x = concatenate([x, paired_inputs])
    return x
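A usage sketch with the U-Net skip connection (instance_norm is turned off here so the snippet does not depend on an external InstanceNormalization layer; the shapes are illustrative):

import tensorflow as tf
from tensorflow.keras.layers import Input, Conv2DTranspose, Activation, LeakyReLU, concatenate
from tensorflow.keras.models import Model

decoder_in = Input(shape=(8, 8, 64))      # deeper decoder feature map
encoder_out = Input(shape=(16, 16, 32))   # skip connection from the encoder
x = decoder_layer(decoder_in, encoder_out, filters=32, kernel_size=3,
                  strides=2, activation='relu', instance_norm=False)
model = Model([decoder_in, encoder_out], x)
print(model.output_shape)                 # (None, 16, 16, 64) after upsampling + concatenation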
Example 11: decode
# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import Conv2DTranspose [as alias]
def decode(filters):
    """Upsample sequential model."""
    net = Seq()
    net.add(
        layers.Conv2DTranspose(
            filters, 3, strides=2, padding="same", kernel_initializer="he_normal"
        )
    )
    net.add(layers.ReLU())
    return net
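A quick sketch of calling this helper (Seq is assumed to be the source module's alias for tf.keras.Sequential; the input shape is illustrative):

import tensorflow as tf
from tensorflow.keras import layers
from tensorflow.keras import Sequential as Seq   # assumed alias used by the source module

net = decode(64)
y = net(tf.zeros((1, 16, 16, 32)))
print(y.shape)   # (1, 32, 32, 64): strides=2 with "same" padding doubles the resolution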
Example 12: last_layer
# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import Conv2DTranspose [as alias]
def last_layer(out_channels=1):
    """Last layer of the U-Net."""
    return layers.Conv2DTranspose(
        filters=out_channels,
        kernel_size=1,
        strides=2,
        padding="same",
        activation="sigmoid",
        kernel_initializer="he_normal",
    )
Example 13: upsample
# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import Conv2DTranspose [as alias]
def upsample(N, input_layer, base_filters=64):
    """Deconv defaults."""
    return Conv2DTranspose(
        filters=base_filters * N,
        kernel_size=3,
        strides=(2, 2),
        padding="same",
        kernel_initializer="he_normal",
    )(input_layer)
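A sketch of the helper in use (N scales the channel count relative to base_filters; the input shape is illustrative):

import tensorflow as tf
from tensorflow.keras.layers import Input, Conv2DTranspose
from tensorflow.keras.models import Model

inp = Input(shape=(16, 16, 256))
out = upsample(2, inp)             # 2 * base_filters = 128 output channels
model = Model(inp, out)
print(model.output_shape)          # (None, 32, 32, 128)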
Example 14: expanding_layer_2D
# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import Conv2DTranspose [as alias]
def expanding_layer_2D(input, neurons, concatenate_link, ba_norm,
                       ba_norm_momentum):
    up = concatenate([Conv2DTranspose(neurons, (2, 2), strides=(2, 2),
                                      padding='same')(input), concatenate_link], axis=-1)
    conv1 = Conv2D(neurons, (3, 3), activation='relu', padding='same')(up)
    if ba_norm: conv1 = BatchNormalization(momentum=ba_norm_momentum)(conv1)
    conv2 = Conv2D(neurons, (3, 3), activation='relu', padding='same')(conv1)
    if ba_norm: conv2 = BatchNormalization(momentum=ba_norm_momentum)(conv2)
    conc = concatenate([up, conv2], axis=-1)
    return conc
Example 15: expanding_layer_2D
# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import Conv2DTranspose [as alias]
def expanding_layer_2D(input, neurons, concatenate_link, ba_norm,
                       ba_norm_momentum):
    up = concatenate([Conv2DTranspose(neurons, (2, 2), strides=(2, 2),
                                      padding='same')(input), concatenate_link], axis=-1)
    conv1 = Conv2D(neurons, (3, 3), activation='relu', padding='same')(up)
    if ba_norm: conv1 = BatchNormalization(momentum=ba_norm_momentum)(conv1)
    conv2 = Conv2D(neurons, (3, 3), activation='relu', padding='same')(conv1)
    if ba_norm: conv2 = BatchNormalization(momentum=ba_norm_momentum)(conv2)
    return conv2