This article collects typical usage examples of keras.layers.LeakyReLU in Python. If you are unsure what layers.LeakyReLU does or how to use it, the curated code examples below may help. You can also explore further examples for the containing module, keras.layers.
Below are 15 code examples of layers.LeakyReLU, sorted by popularity by default.
Example 1: g_block
# Required import: from keras import layers [as alias]
# Or: from keras.layers import LeakyReLU [as alias]
def g_block(inp, fil, u = True):
    # Optional bilinear upsampling, then a residual block with a 1x1 skip projection
    if u:
        out = UpSampling2D(interpolation = 'bilinear')(inp)
    else:
        out = Activation('linear')(inp)
    skip = Conv2D(fil, 1, padding = 'same', kernel_initializer = 'he_normal')(out)
    out = Conv2D(filters = fil, kernel_size = 3, padding = 'same', kernel_initializer = 'he_normal')(out)
    out = LeakyReLU(0.2)(out)
    out = Conv2D(filters = fil, kernel_size = 3, padding = 'same', kernel_initializer = 'he_normal')(out)
    out = LeakyReLU(0.2)(out)
    out = Conv2D(fil, 1, padding = 'same', kernel_initializer = 'he_normal')(out)
    out = add([out, skip])
    out = LeakyReLU(0.2)(out)
    return out
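A minimal usage sketch for g_block (the input shape and filter counts are assumptions for illustration, not from the source):

from keras.layers import Input, UpSampling2D, Activation, Conv2D, LeakyReLU, add
from keras.models import Model

inp = Input(shape = (8, 8, 128))   # hypothetical 8x8 feature map
out = g_block(inp, 64)             # upsample to 16x16 with 64 filters
out = g_block(out, 32)             # upsample to 32x32 with 32 filters
m = Model(inp, out)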
Example 2: d_block
# Required import: from keras import layers [as alias]
# Or: from keras.layers import LeakyReLU [as alias]
def d_block(inp, fil, p = True):
    # Residual block with a 1x1 skip projection, optionally followed by 2x2 average pooling
    skip = Conv2D(fil, 1, padding = 'same', kernel_initializer = 'he_normal')(inp)
    out = Conv2D(filters = fil, kernel_size = 3, padding = 'same', kernel_initializer = 'he_normal')(inp)
    out = LeakyReLU(0.2)(out)
    out = Conv2D(filters = fil, kernel_size = 3, padding = 'same', kernel_initializer = 'he_normal')(out)
    out = LeakyReLU(0.2)(out)
    out = Conv2D(fil, 1, padding = 'same', kernel_initializer = 'he_normal')(out)
    out = add([out, skip])
    out = LeakyReLU(0.2)(out)
    if p:
        out = AveragePooling2D()(out)
    return out
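As with g_block, a hypothetical call chain (shapes assumed for illustration):

from keras.layers import Input, Conv2D, LeakyReLU, AveragePooling2D, add
from keras.models import Model

inp = Input(shape = (64, 64, 3))
x = d_block(inp, 32)           # 64x64 -> 32x32
x = d_block(x, 64)             # 32x32 -> 16x16
x = d_block(x, 64, p = False)  # stays 16x16, no pooling
m = Model(inp, x)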
Example 3: encoder
# Required import: from keras import layers [as alias]
# Or: from keras.layers import LeakyReLU [as alias]
def encoder(self):
    if self.E:
        return self.E
    inp = Input(shape = [im_size, im_size, 3])
    # Downsampling stack; trailing comments give the resulting feature-map size
    x = d_block(inp, 1 * cha) #64
    x = d_block(x, 2 * cha) #32
    x = d_block(x, 3 * cha) #16
    x = d_block(x, 4 * cha) #8
    x = d_block(x, 8 * cha) #4
    x = d_block(x, 16 * cha, p = False) #4
    x = Flatten()(x)
    x = Dense(16 * cha, kernel_initializer = 'he_normal')(x)
    x = LeakyReLU(0.2)(x)
    x = Dense(latent_size, kernel_initializer = 'he_normal', bias_initializer = 'zeros')(x)
    self.E = Model(inputs = inp, outputs = x)
    return self.E
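encoder relies on module-level constants not shown in the excerpt. Plausible values (assumptions; im_size is inferred from the 64/32/16/8/4 shape comments above, the others are guesses):

im_size = 128      # halved by each pooling d_block: 128 -> 64 -> ... -> 4
cha = 16           # base channel multiplier; assumed
latent_size = 64   # width of the encoded vector; assumed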
Example 4: _conv_block
# Required import: from keras import layers [as alias]
# Or: from keras.layers import LeakyReLU [as alias]
def _conv_block(inp, convs, skip=True):
    x = inp
    count = 0
    len_convs = len(convs)
    for conv in convs:
        # Remember the activation two convs from the end as the skip connection
        if count == (len_convs - 2) and skip:
            skip_connection = x
        count += 1
        if conv['stride'] > 1: x = ZeroPadding2D(((1,0),(1,0)))(x) # peculiar padding: darknet prefers left and top
        x = Conv2D(conv['filter'],
                   conv['kernel'],
                   strides=conv['stride'],
                   padding='valid' if conv['stride'] > 1 else 'same', # peculiar padding: darknet prefers left and top
                   name='conv_' + str(conv['layer_idx']),
                   use_bias=False if conv['bnorm'] else True)(x)
        if conv['bnorm']: x = BatchNormalization(epsilon=0.001, name='bnorm_' + str(conv['layer_idx']))(x)
        if conv['leaky']: x = LeakyReLU(alpha=0.1, name='leaky_' + str(conv['layer_idx']))(x)
    return add([skip_connection, x]) if skip else x
# The SPP block uses three pooling layers of sizes [5, 9, 13] with stride one; their outputs,
# together with the input, are concatenated and fed to the FC block.
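A hypothetical call mirroring how YOLOv3-style loaders drive this helper with per-layer dicts (all values assumed):

from keras.layers import Input, Conv2D, BatchNormalization, LeakyReLU, ZeroPadding2D, add
from keras.models import Model

inp = Input(shape=(416, 416, 3))
x = _conv_block(inp, [
    {'filter': 32, 'kernel': 3, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 0},
    {'filter': 64, 'kernel': 3, 'stride': 2, 'bnorm': True, 'leaky': True, 'layer_idx': 1},
    {'filter': 32, 'kernel': 1, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 2},
    {'filter': 64, 'kernel': 3, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 3}])
m = Model(inp, x)   # the residual add joins the outputs of conv_1 and conv_3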
Example 5: _conv_block
# Required import: from keras import layers [as alias]
# Or: from keras.layers import LeakyReLU [as alias]
def _conv_block(inp, convs, do_skip=True):
    x = inp
    count = 0
    for conv in convs:
        if count == (len(convs) - 2) and do_skip:
            skip_connection = x
        count += 1
        if conv['stride'] > 1: x = ZeroPadding2D(((1,0),(1,0)))(x) # unlike TensorFlow, darknet prefers left and top padding
        x = Conv2D(conv['filter'],
                   conv['kernel'],
                   strides=conv['stride'],
                   padding='valid' if conv['stride'] > 1 else 'same', # unlike TensorFlow, darknet prefers left and top padding
                   name='conv_' + str(conv['layer_idx']),
                   use_bias=False if conv['bnorm'] else True)(x)
        if conv['bnorm']: x = BatchNormalization(epsilon=0.001, name='bnorm_' + str(conv['layer_idx']))(x)
        if conv['leaky']: x = LeakyReLU(alpha=0.1, name='leaky_' + str(conv['layer_idx']))(x)
    return add([skip_connection, x]) if do_skip else x
Example 6: build_discriminator
# Required import: from keras import layers [as alias]
# Or: from keras.layers import LeakyReLU [as alias]
def build_discriminator(self):
    """Discriminator network with PatchGAN."""
    inp_img = Input(shape = (self.image_size, self.image_size, 3))
    x = ZeroPadding2D(padding = 1)(inp_img)
    x = Conv2D(filters = self.d_conv_dim, kernel_size = 4, strides = 2, padding = 'valid', use_bias = False)(x)
    x = LeakyReLU(0.01)(x)
    curr_dim = self.d_conv_dim
    for i in range(1, self.d_repeat_num):
        x = ZeroPadding2D(padding = 1)(x)
        x = Conv2D(filters = curr_dim*2, kernel_size = 4, strides = 2, padding = 'valid')(x)
        x = LeakyReLU(0.01)(x)
        curr_dim = curr_dim * 2
    kernel_size = int(self.image_size / np.power(2, self.d_repeat_num))
    # Two heads: a PatchGAN real/fake map and a domain-classification vector
    out_src = ZeroPadding2D(padding = 1)(x)
    out_src = Conv2D(filters = 1, kernel_size = 3, strides = 1, padding = 'valid', use_bias = False)(out_src)
    out_cls = Conv2D(filters = self.c_dim, kernel_size = kernel_size, strides = 1, padding = 'valid', use_bias = False)(x)
    out_cls = Reshape((self.c_dim, ))(out_cls)
    return Model(inp_img, [out_src, out_cls])
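The self.* hyperparameters come from the surrounding class and are not shown in the excerpt; typical StarGAN defaults would be (assumed here, for orientation only):

self.image_size = 128   # input resolution
self.d_conv_dim = 64    # width of the first conv layer
self.d_repeat_num = 6   # number of strided conv blocks
self.c_dim = 5          # number of domain labels; makes kernel_size = 128 / 2**6 = 2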
Example 7: build_model
# Required import: from keras import layers [as alias]
# Or: from keras.layers import LeakyReLU [as alias]
def build_model():
    x = Input((28 * 28,), name="x")
    hidden_dim = 512
    h = x
    h = Dense(hidden_dim)(h)
    h = BatchNormalization()(h)
    h = LeakyReLU(0.2)(h)
    h = Dropout(0.5)(h)
    h = Dense(hidden_dim // 2)(h)  # integer division: Dense expects an int unit count
    h = BatchNormalization()(h)
    h = LeakyReLU(0.2)(h)
    h = Dropout(0.5)(h)
    h = Dense(10)(h)
    h = Activation('softmax')(h)
    m = Model(x, h)
    m.compile('adam', 'categorical_crossentropy', metrics=['accuracy'])
    return m
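Since the model is already compiled, training on MNIST is straightforward; a sketch (hyperparameters assumed):

from keras.datasets import mnist
from keras.utils import to_categorical

(x_train, y_train), _ = mnist.load_data()
x_train = x_train.reshape(-1, 28 * 28).astype('float32') / 255.0
y_train = to_categorical(y_train, 10)
m = build_model()
m.fit(x_train, y_train, batch_size=128, epochs=5, validation_split=0.1)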
Example 8: residual_layer
# Required import: from keras import layers [as alias]
# Or: from keras.layers import LeakyReLU [as alias]
def residual_layer(self, x, filters, kernel_size):
    conv_1 = self.conv_layer(x, filters, kernel_size)
    conv_2 = Conv2D(
        filters = filters,
        kernel_size = kernel_size,
        strides = (1, 1),
        padding = 'same',
        data_format = 'channels_first',
        use_bias = False,
        activation = 'linear',
        kernel_regularizer = regularizers.l2(self.reg_const)
    )(conv_1)
    bn = BatchNormalization(axis=1)(conv_2)
    merge_layer = add([x, bn])
    lrelu = LeakyReLU()(merge_layer)
    return lrelu
Example 9: value_head
# Required import: from keras import layers [as alias]
# Or: from keras.layers import LeakyReLU [as alias]
def value_head(self, x):
    x = self.conv_layer(x, 1, (1, 1))
    x = Flatten()(x)
    x = Dense(
        self.value_head_hidden_layer_size,
        use_bias = False,
        activation = 'linear',
        kernel_regularizer = regularizers.l2(self.reg_const)
    )(x)
    x = LeakyReLU()(x)
    x = Dense(
        1,
        use_bias = False,
        activation = 'tanh',
        kernel_regularizer = regularizers.l2(self.reg_const),
        name = 'value_head'
    )(x)
    return x
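Examples 8 and 9 both call self.conv_layer, which the excerpt omits. A plausible definition in the same channels_first, L2-regularized style (an assumption, not the source's code):

def conv_layer(self, x, filters, kernel_size):
    x = Conv2D(
        filters = filters,
        kernel_size = kernel_size,
        strides = (1, 1),
        padding = 'same',
        data_format = 'channels_first',
        use_bias = False,
        activation = 'linear',
        kernel_regularizer = regularizers.l2(self.reg_const)
    )(x)
    x = BatchNormalization(axis=1)(x)
    return LeakyReLU()(x)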
Example 10: _conv_block
# Required import: from keras import layers [as alias]
# Or: from keras.layers import LeakyReLU [as alias]
def _conv_block(inp, convs, skip=True):
    x = inp
    count = 0
    for conv in convs:
        if count == (len(convs) - 2) and skip:
            skip_connection = x
        count += 1
        if conv['stride'] > 1: x = ZeroPadding2D(((1,0),(1,0)))(x) # peculiar padding: darknet prefers left and top
        x = Conv2D(conv['filter'],
                   conv['kernel'],
                   strides=conv['stride'],
                   padding='valid' if conv['stride'] > 1 else 'same', # peculiar padding: darknet prefers left and top
                   name='conv_' + str(conv['layer_idx']),
                   use_bias=False if conv['bnorm'] else True)(x)
        if conv['bnorm']: x = BatchNormalization(epsilon=0.001, name='bnorm_' + str(conv['layer_idx']))(x)
        if conv['leaky']: x = LeakyReLU(alpha=0.1, name='leaky_' + str(conv['layer_idx']))(x)
    return add([skip_connection, x]) if skip else x
Example 11: discriminator
# Required import: from keras import layers [as alias]
# Or: from keras.layers import LeakyReLU [as alias]
def discriminator(self):
    if self.D:
        return self.D
    inp = Input(shape = [im_size, im_size, 3])
    inpl = Input(shape = [latent_size])
    # Latent input
    l = Dense(512, kernel_initializer = 'he_normal')(inpl)
    l = LeakyReLU(0.2)(l)
    l = Dense(512, kernel_initializer = 'he_normal')(l)
    l = LeakyReLU(0.2)(l)
    l = Dense(512, kernel_initializer = 'he_normal')(l)
    l = LeakyReLU(0.2)(l)
    # Image input, downsampled as in the encoder of Example 3
    x = d_block(inp, 1 * cha) #64
    x = d_block(x, 2 * cha) #32
    x = d_block(x, 3 * cha) #16
    x = d_block(x, 4 * cha) #8
    x = d_block(x, 8 * cha) #4
    x = d_block(x, 16 * cha, p = False) #4
    x = Flatten()(x)
    x = concatenate([x, l])
    x = Dense(16 * cha, kernel_initializer = 'he_normal')(x)
    x = LeakyReLU(0.2)(x)
    x = Dense(1, kernel_initializer = 'he_normal')(x)
    self.D = Model(inputs = [inp, inpl], outputs = x)
    return self.D
Example 12: init_model
# Required import: from keras import layers [as alias]
# Or: from keras.layers import LeakyReLU [as alias]
def init_model(self):
    x = Input(shape = (IMGWIDTH, IMGWIDTH, 3))
    x1 = Conv2D(8, (3, 3), padding='same', activation = 'relu')(x)
    x1 = BatchNormalization()(x1)
    x1 = MaxPooling2D(pool_size=(2, 2), padding='same')(x1)
    x2 = Conv2D(8, (5, 5), padding='same', activation = 'relu')(x1)
    x2 = BatchNormalization()(x2)
    x2 = MaxPooling2D(pool_size=(2, 2), padding='same')(x2)
    x3 = Conv2D(16, (5, 5), padding='same', activation = 'relu')(x2)
    x3 = BatchNormalization()(x3)
    x3 = MaxPooling2D(pool_size=(2, 2), padding='same')(x3)
    x4 = Conv2D(16, (5, 5), padding='same', activation = 'relu')(x3)
    x4 = BatchNormalization()(x4)
    x4 = MaxPooling2D(pool_size=(4, 4), padding='same')(x4)
    y = Flatten()(x4)
    y = Dropout(0.5)(y)
    y = Dense(16)(y)
    y = LeakyReLU(alpha=0.1)(y)
    y = Dropout(0.5)(y)
    y = Dense(1, activation = 'sigmoid')(y)
    return KerasModel(inputs = x, outputs = y)
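KerasModel is presumably keras.models.Model imported under an alias, and IMGWIDTH a module-level constant; a hypothetical way to compile the returned binary classifier:

from keras.models import Model as KerasModel
from keras.optimizers import Adam

IMGWIDTH = 256                   # assumed input resolution
model = classifier.init_model()  # classifier: hypothetical instance of the host class
model.compile(optimizer = Adam(lr = 0.001), loss = 'binary_crossentropy', metrics = ['accuracy'])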
Example 13: initial_conv
# Required import: from keras import layers [as alias]
# Or: from keras.layers import LeakyReLU [as alias]
def initial_conv(input):
    x = Conv2D(16, (3, 3), padding='same', **conv_params)(input)
    x = BatchNormalization(**bn_params)(x)
    x = LeakyReLU(leakiness)(x)
    return x
Example 14: expand_conv
# Required import: from keras import layers [as alias]
# Or: from keras.layers import LeakyReLU [as alias]
def expand_conv(init, base, k, strides=(1, 1)):
    x = Conv2D(base * k, (3, 3), padding='same',
               strides=strides, **conv_params)(init)
    x = BatchNormalization(**bn_params)(x)
    x = LeakyReLU(leakiness)(x)
    x = Conv2D(base * k, (3, 3), padding='same', **conv_params)(x)
    # 1x1 projection so the skip branch matches the widened main branch
    skip = Conv2D(base * k, (1, 1), padding='same',
                  strides=strides, **conv_params)(init)
    m = Add()([x, skip])
    return m
Example 15: conv1_block
# Required import: from keras import layers [as alias]
# Or: from keras.layers import LeakyReLU [as alias]
def conv1_block(input, k=1, dropout=0.0):
    init = input
    x = BatchNormalization(**bn_params)(input)
    x = LeakyReLU(leakiness)(x)
    x = Conv2D(16 * k, (3, 3), padding='same', **conv_params)(x)
    if dropout > 0.0: x = Dropout(dropout)(x)
    x = BatchNormalization(**bn_params)(x)
    x = LeakyReLU(leakiness)(x)
    x = Conv2D(16 * k, (3, 3), padding='same', **conv_params)(x)
    m = Add()([init, x])
    return m
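Examples 13-15 come from a Wide-ResNet-style model and share module-level conv_params, bn_params, and leakiness, none of which appear in the excerpt. Plausible settings and a stacking sketch (all values assumed):

conv_params = {'kernel_initializer': 'he_normal', 'use_bias': False}
bn_params = {'momentum': 0.9, 'epsilon': 1e-5}
leakiness = 0.1

from keras.layers import Input, Conv2D, BatchNormalization, LeakyReLU, Add, Dropout
inp = Input(shape=(32, 32, 3))
x = initial_conv(inp)                 # 16 channels
x = expand_conv(x, base=16, k=2)      # widen to 16*k = 32 channels
x = conv1_block(x, k=2, dropout=0.3)  # pre-activation residual block at the same width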