This article compiles typical usage examples of the Python method keras.layers.Permute. If you have been wondering what layers.Permute does, how to call it, or what it looks like in real code, the curated examples below may help. You can also explore the other members of the keras.layers
module.
The following presents 15 code examples of layers.Permute, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
Example 1: duc
# Required import: from keras import layers [as alias]
# Alternatively: from keras.layers import Permute [as alias]
def duc(x, factor=8, output_shape=(512, 512, 1)):
    # Dense Upsampling Convolution: predict r*r sub-pixel values per location,
    # then rearrange the channel dimension into an r-times larger feature map.
    if K.image_data_format() == 'channels_last':
        bn_axis = 3
    else:
        bn_axis = 1
    H, W, c, r = output_shape[0], output_shape[1], output_shape[2], factor
    h = H // r  # integer division: Reshape target dims must be ints
    w = W // r
    x = Conv2D(
        c * r * r,
        (3, 3),
        padding='same',
        name='conv_duc_%s' % factor)(x)
    x = BatchNormalization(axis=bn_axis, name='bn_duc_%s' % factor)(x)
    x = Activation('relu')(x)
    x = Permute((3, 1, 2))(x)        # (h, w, c*r*r) -> (c*r*r, h, w)
    x = Reshape((c, r, r, h, w))(x)
    x = Permute((1, 4, 2, 5, 3))(x)  # (c, h, r, w, r)
    x = Reshape((c, H, W))(x)
    x = Permute((2, 3, 1))(x)        # back to channels_last
    return x
    # interpolation
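A hypothetical smoke test for duc, assuming K, Conv2D, BatchNormalization, Activation, Permute, and Reshape are imported as the header comments suggest (the shapes are illustrative, not from the source):

from keras.layers import Input
from keras.models import Model

feat = Input(shape=(64, 64, 256))      # coarse feature map, channels_last
out = duc(feat, factor=8, output_shape=(512, 512, 1))
print(Model(feat, out).output_shape)   # expected: (None, 512, 512, 1)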
Example 2: softmax_by_row
# Required import: from keras import layers [as alias]
# Alternatively: from keras.layers import Permute [as alias]
def softmax_by_row(self, z: typing.Any) -> tuple:
    """Conduct softmax on each dimension across the four gates."""
    # z_transform: [B, U, 4]
    z_transform = Permute((2, 1))(Reshape((4, self._units))(z))
    size = [-1, 1, -1]
    # Perform softmax on each slice
    for i in range(0, self._units):
        begin = [0, i, 0]
        # z_slice: [B, 1, 4]
        z_slice = tf.slice(z_transform, begin, size)
        if i == 0:
            z_s = tf.nn.softmax(z_slice)
        else:
            z_s = tf.concat([z_s, tf.nn.softmax(z_slice)], 1)
    # zi, zl, zt, zd: [B, U]
    zi, zl, zt, zd = tf.unstack(z_s, axis=2)
    return zi, zl, zt, zd
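Since each [B, 1, 4] slice is softmaxed over its last axis, the loop is equivalent to a single softmax over the gate axis. A minimal sketch of that equivalence, assuming TensorFlow 2.x eager mode and using `units` as a stand-in for self._units:

import tensorflow as tf

units = 8
z = tf.random.normal([2, 4 * units])   # [B, 4*U], the raw gate pre-activations
z_t = tf.transpose(tf.reshape(z, [-1, 4, units]), [0, 2, 1])  # [B, U, 4]
z_s = tf.nn.softmax(z_t, axis=2)       # softmax across the four gates at once
zi, zl, zt, zd = tf.unstack(z_s, axis=2)  # each [B, U]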
Example 3: interp_net
# Required import: from keras import layers [as alias]
# Alternatively: from keras.layers import Permute [as alias]
def interp_net():
    if gpu_num > 1:
        dev = "/cpu:0"
    else:
        dev = "/gpu:0"
    with tf.device(dev):
        main_input = Input(shape=(4 * num_features, timestamp), name='input')
        sci = single_channel_interp(ref_points, hours_look_ahead)
        cci = cross_channel_interp()
        interp = cci(sci(main_input))
        reconst = cci(sci(main_input, reconstruction=True),
                      reconstruction=True)
        aux_output = Lambda(lambda x: x, name='aux_output')(reconst)
        z = Permute((2, 1))(interp)
        z = GRU(hid, activation='tanh', recurrent_dropout=0.2, dropout=0.2)(z)
        main_output = Dense(1, activation='sigmoid', name='main_output')(z)
        orig_model = Model([main_input], [main_output, aux_output])
    if gpu_num > 1:
        model = multi_gpu_model(orig_model, gpus=gpu_num)
    else:
        model = orig_model
    print(orig_model.summary())
    return model
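interp_net reads several module-level globals (gpu_num, num_features, timestamp, ref_points, hours_look_ahead, hid) plus the single_channel_interp and cross_channel_interp layers from the same project. A hypothetical set of placeholder values for a standalone run; every number below is an assumption, not from the source:

gpu_num = 1               # placeholder: run on a single GPU
num_features = 12         # placeholder: number of input variables
timestamp = 200           # placeholder: observations per channel
ref_points = 192          # placeholder: interpolation reference points
hours_look_ahead = 24     # placeholder: interpolation horizon
hid = 100                 # placeholder: GRU hidden units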
Example 4: softmax
# Required import: from keras import layers [as alias]
# Alternatively: from keras.layers import Permute [as alias]
def softmax(x, restore_shape=True):
    """
    Softmax activation for a tensor x. No need to unroll the input first.
    :param x: x is a tensor with shape (None, channels, h, w)
    :param restore_shape: if False, output is returned unrolled (None, h * w, channels)
    :return: softmax activation of tensor x
    """
    _, c, h, w = x._keras_shape
    x = Permute(dims=(2, 3, 1))(x)
    x = Reshape(target_shape=(h * w, c))(x)
    x = Activation('softmax')(x)
    if restore_shape:
        x = Reshape(target_shape=(h, w, c))(x)
        x = Permute(dims=(3, 1, 2))(x)
    return x
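A hypothetical usage sketch on a channels_first segmentation head, assuming an older Keras where tensors carry _keras_shape (shapes are illustrative):

from keras.layers import Input
from keras.models import Model

logits = Input(shape=(21, 32, 32))        # (channels, h, w)
probs = softmax(logits)                   # per-pixel softmax over the 21 classes
print(Model(logits, probs).output_shape)  # expected: (None, 21, 32, 32)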
Example 5: DUC
# Required import: from keras import layers [as alias]
# Alternatively: from keras.layers import Permute [as alias]
def DUC(factor=(8, 8)):
    if factor[0] != factor[1]:
        raise ValueError('DUC upconvolution supports only equal factors, '
                         'got {}'.format(factor))
    factor = factor[0]

    def layer(input_tensor):
        h, w, c = int_shape(input_tensor)[1:]
        H = h * factor
        W = w * factor
        x = Conv2DBlock(c * factor ** 2, (1, 1),
                        padding='same',
                        name='duc_{}'.format(factor))(input_tensor)
        x = Permute((3, 1, 2))(x)        # channels first for the rearrangement
        x = Reshape((c, factor, factor, h, w))(x)
        x = Permute((1, 4, 2, 5, 3))(x)  # (c, h, factor, w, factor)
        x = Reshape((c, H, W))(x)
        x = Permute((2, 3, 1))(x)        # back to channels_last
        return x

    return layer
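A hypothetical usage sketch, assuming Conv2DBlock and int_shape are importable from the same module as DUC (the shapes are illustrative):

from keras.layers import Input
from keras.models import Model

feats = Input(shape=(32, 32, 64))
up = DUC(factor=(8, 8))(feats)         # 32x32 -> 256x256 without transposed convolution
print(Model(feats, up).output_shape)   # expected: (None, 256, 256, 64)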
Example 6: model_definition
# Required import: from keras import layers [as alias]
# Alternatively: from keras.layers import Permute [as alias]
def model_definition():
    """ Keras RNetwork for MTCNN """
    input_ = Input(shape=(24, 24, 3))
    var_x = Conv2D(28, (3, 3), strides=1, padding='valid', name='conv1')(input_)
    var_x = PReLU(shared_axes=[1, 2], name='prelu1')(var_x)
    var_x = MaxPool2D(pool_size=3, strides=2, padding='same')(var_x)
    var_x = Conv2D(48, (3, 3), strides=1, padding='valid', name='conv2')(var_x)
    var_x = PReLU(shared_axes=[1, 2], name='prelu2')(var_x)
    var_x = MaxPool2D(pool_size=3, strides=2)(var_x)
    var_x = Conv2D(64, (2, 2), strides=1, padding='valid', name='conv3')(var_x)
    var_x = PReLU(shared_axes=[1, 2], name='prelu3')(var_x)
    var_x = Permute((3, 2, 1))(var_x)
    var_x = Flatten()(var_x)
    var_x = Dense(128, name='conv4')(var_x)
    var_x = PReLU(name='prelu4')(var_x)
    classifier = Dense(2, activation='softmax', name='conv5-1')(var_x)
    bbox_regress = Dense(4, name='conv5-2')(var_x)
    return [input_], [classifier, bbox_regress]
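A hypothetical assembly of the graph returned above (not from the original project):

from keras.models import Model

inputs, outputs = model_definition()
rnet = Model(inputs, outputs)   # 24x24x3 -> [face score, bbox offsets]
rnet.summary()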
Example 7: create_Kao_Onet
# Required import: from keras import layers [as alias]
# Alternatively: from keras.layers import Permute [as alias]
def create_Kao_Onet(weight_path='model48.h5'):
    input = Input(shape=[48, 48, 3])
    x = Conv2D(32, (3, 3), strides=1, padding='valid', name='conv1')(input)
    x = PReLU(shared_axes=[1, 2], name='prelu1')(x)
    x = MaxPool2D(pool_size=3, strides=2, padding='same')(x)
    x = Conv2D(64, (3, 3), strides=1, padding='valid', name='conv2')(x)
    x = PReLU(shared_axes=[1, 2], name='prelu2')(x)
    x = MaxPool2D(pool_size=3, strides=2)(x)
    x = Conv2D(64, (3, 3), strides=1, padding='valid', name='conv3')(x)
    x = PReLU(shared_axes=[1, 2], name='prelu3')(x)
    x = MaxPool2D(pool_size=2)(x)
    x = Conv2D(128, (2, 2), strides=1, padding='valid', name='conv4')(x)
    x = PReLU(shared_axes=[1, 2], name='prelu4')(x)
    x = Permute((3, 2, 1))(x)
    x = Flatten()(x)
    x = Dense(256, name='conv5')(x)
    x = PReLU(name='prelu5')(x)
    classifier = Dense(2, activation='softmax', name='conv6-1')(x)
    bbox_regress = Dense(4, name='conv6-2')(x)
    landmark_regress = Dense(10, name='conv6-3')(x)
    model = Model([input], [classifier, bbox_regress, landmark_regress])
    model.load_weights(weight_path, by_name=True)
    return model
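A hypothetical inference sketch; the weights file path comes from the function's default and the random crop is a placeholder:

import numpy as np

onet = create_Kao_Onet('model48.h5')
crop = np.random.rand(1, 48, 48, 3).astype('float32')  # one 48x48 face crop
cls, bbox, pts = onet.predict(crop)   # shapes: (1, 2), (1, 4), (1, 10)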
Example 8: create_Kao_Rnet
# Required import: from keras import layers [as alias]
# Alternatively: from keras.layers import Permute [as alias]
def create_Kao_Rnet(weight_path='model24.h5'):
    input = Input(shape=[24, 24, 3])  # change this shape to [None, None, 3] to enable arbitrary input shapes
    x = Conv2D(28, (3, 3), strides=1, padding='valid', name='conv1')(input)
    x = PReLU(shared_axes=[1, 2], name='prelu1')(x)
    x = MaxPool2D(pool_size=3, strides=2, padding='same')(x)
    x = Conv2D(48, (3, 3), strides=1, padding='valid', name='conv2')(x)
    x = PReLU(shared_axes=[1, 2], name='prelu2')(x)
    x = MaxPool2D(pool_size=3, strides=2)(x)
    x = Conv2D(64, (2, 2), strides=1, padding='valid', name='conv3')(x)
    x = PReLU(shared_axes=[1, 2], name='prelu3')(x)
    x = Permute((3, 2, 1))(x)
    x = Flatten()(x)
    x = Dense(128, name='conv4')(x)
    x = PReLU(name='prelu4')(x)
    classifier = Dense(2, activation='softmax', name='conv5-1')(x)
    bbox_regress = Dense(4, name='conv5-2')(x)
    model = Model([input], [classifier, bbox_regress])
    model.load_weights(weight_path, by_name=True)
    return model
Example 9: softmax
# Required import: from keras import layers [as alias]
# Alternatively: from keras.layers import Permute [as alias]
def softmax(x, restore_shape=True):
    """
    Softmax activation for a tensor x. No need to unroll the input first.
    :param x: x is a tensor with shape (None, channels, h, w)
    :param restore_shape: if False, output is returned unrolled (None, h * w, channels)
    :return: softmax activation of tensor x
    """
    _, c, h, w = x._keras_shape
    x = Permute(dims=(2, 3, 1))(x)
    x = Reshape(target_shape=(h * w, c))(x)
    x = Activation('softmax')(x)
    if restore_shape:
        x = Reshape(target_shape=(h, w, c))(x)
        x = Permute(dims=(3, 1, 2))(x)
    return x
Example 10: build_model
# Required import: from keras import layers [as alias]
# Alternatively: from keras.layers import Permute [as alias]
def build_model(state_size, num_actions):
    input_shape = (4,) + state_size
    model = Sequential()
    if K.image_data_format() == 'channels_last':
        # (width, height, channels)
        model.add(Permute((2, 3, 1), input_shape=input_shape))
    elif K.image_data_format() == 'channels_first':
        # (channels, width, height)
        model.add(Permute((1, 2, 3), input_shape=input_shape))
    else:
        raise RuntimeError('Unknown image_data_format.')
    model.add(Conv2D(32, (8, 8), strides=(4, 4)))
    model.add(Activation('relu'))
    model.add(Conv2D(64, (4, 4), strides=(2, 2)))
    model.add(Activation('relu'))
    model.add(Conv2D(64, (3, 3), strides=(1, 1)))
    model.add(Activation('relu'))
    model.add(Flatten())
    model.add(Dense(512))
    model.add(Activation('relu'))
    model.add(Dense(num_actions))
    model.add(Activation('linear'))
    print(model.summary())
    return model
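A hypothetical call for an Atari-style agent with four stacked frames; the frame size and action count are assumptions, not from the source:

model = build_model(state_size=(84, 84), num_actions=6)  # Permute moves the 4-frame stack to the channel axis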
Example 11: channel_shuffle
# Required import: from keras import layers [as alias]
# Alternatively: from keras.layers import Permute [as alias]
def channel_shuffle(self, x):
    n, h, w, c = x.shape.as_list()
    x_reshaped = layers.Reshape([h, w, self.groups, int(c // self.groups)])(x)
    x_transposed = layers.Permute((1, 2, 4, 3))(x_reshaped)
    output = layers.Reshape([h, w, c])(x_transposed)
    return output
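A standalone sketch of the same shuffle with concrete numbers (groups is assumed to be 4 here):

from keras import layers
from keras.models import Model

inp = layers.Input(shape=(16, 16, 32))
g = layers.Reshape((16, 16, 4, 8))(inp)  # split 32 channels into 4 groups of 8
g = layers.Permute((1, 2, 4, 3))(g)      # transpose group and within-group axes
out = layers.Reshape((16, 16, 32))(g)    # flatten back: channels are now interleaved
print(Model(inp, out).output_shape)      # (None, 16, 16, 32)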
Example 12: attention_temporal
# Required import: from keras import layers [as alias]
# Alternatively: from keras.layers import Permute [as alias]
def attention_temporal(self, input_data, sequence_length):
    """
    A temporal attention layer
    :param input_data: Network input of shape (batch, sequence_length, features)
    :param sequence_length: Length of the input sequence
    :return: The output of the attention layer
    """
    a = Permute((2, 1))(input_data)                      # (features, time)
    a = Dense(sequence_length, activation='sigmoid')(a)  # score each timestep
    a_probs = Permute((2, 1))(a)                         # back to (time, features)
    output_attention_mul = Multiply()([input_data, a_probs])
    return output_attention_mul
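A hypothetical usage sketch, assuming `net` is an instance of the class defining the method (names and shapes are illustrative):

from keras.layers import Input

seq = Input(shape=(20, 64))  # 20 timesteps, 64 features
attended = net.attention_temporal(seq, sequence_length=20)  # same shape, reweighted over time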
Example 13: squeeze_excite_block
# Required import: from keras import layers [as alias]
# Alternatively: from keras.layers import Permute [as alias]
def squeeze_excite_block(input, ratio=16):
    ''' Create a channel-wise squeeze-excite block
    Args:
        input: input tensor
        ratio: reduction ratio of the squeeze bottleneck
    Returns: a Keras tensor
    References
    -   [Squeeze and Excitation Networks](https://arxiv.org/abs/1709.01507)
    '''
    init = input
    channel_axis = 1 if K.image_data_format() == "channels_first" else -1
    filters = init._keras_shape[channel_axis]
    se_shape = (1, 1, filters)

    se = GlobalAveragePooling2D()(init)
    se = Reshape(se_shape)(se)
    se = Dense(filters // ratio, activation='relu', kernel_initializer='he_normal', use_bias=False)(se)
    se = Dense(filters, activation='sigmoid', kernel_initializer='he_normal', use_bias=False)(se)

    if K.image_data_format() == 'channels_first':
        se = Permute((3, 1, 2))(se)

    x = multiply([init, se])
    return x
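A hypothetical usage sketch inside a channels_last CNN, assuming the same imports the function itself uses (the layer sizes are illustrative):

from keras.layers import Input, Conv2D

img = Input(shape=(32, 32, 3))
x = Conv2D(64, (3, 3), padding='same', activation='relu')(img)
x = squeeze_excite_block(x, ratio=16)  # reweights the 64 feature maps channel-wise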
Example 14: squeeze_excite_block
# Required import: from keras import layers [as alias]
# Alternatively: from keras.layers import Permute [as alias]
def squeeze_excite_block(input_tensor, ratio=16):
    """ Create a channel-wise squeeze-excite block
    Args:
        input_tensor: input Keras tensor
        ratio: reduction ratio of the squeeze bottleneck
    Returns: a Keras tensor
    References
    -   [Squeeze and Excitation Networks](https://arxiv.org/abs/1709.01507)
    """
    init = input_tensor
    channel_axis = 1 if K.image_data_format() == "channels_first" else -1
    filters = _tensor_shape(init)[channel_axis]
    se_shape = (1, 1, filters)

    se = GlobalAveragePooling2D()(init)
    se = Reshape(se_shape)(se)
    se = Dense(filters // ratio, activation='relu', kernel_initializer='he_normal', use_bias=False)(se)
    se = Dense(filters, activation='sigmoid', kernel_initializer='he_normal', use_bias=False)(se)

    if K.image_data_format() == 'channels_first':
        se = Permute((3, 1, 2))(se)

    x = multiply([init, se])
    return x
Example 15: create_Rnet
# Required import: from keras import layers [as alias]
# Alternatively: from keras.layers import Permute [as alias]
def create_Rnet(weight_path):
    input = Input(shape=[24, 24, 3])
    # 24,24,3 -> 11,11,28
    x = Conv2D(28, (3, 3), strides=1, padding='valid', name='conv1')(input)
    x = PReLU(shared_axes=[1, 2], name='prelu1')(x)
    x = MaxPool2D(pool_size=3, strides=2, padding='same')(x)
    # 11,11,28 -> 4,4,48
    x = Conv2D(48, (3, 3), strides=1, padding='valid', name='conv2')(x)
    x = PReLU(shared_axes=[1, 2], name='prelu2')(x)
    x = MaxPool2D(pool_size=3, strides=2)(x)
    # 4,4,48 -> 3,3,64
    x = Conv2D(64, (2, 2), strides=1, padding='valid', name='conv3')(x)
    x = PReLU(shared_axes=[1, 2], name='prelu3')(x)
    # 3,3,64 -> 64,3,3
    x = Permute((3, 2, 1))(x)
    x = Flatten()(x)
    # 576 -> 128
    x = Dense(128, name='conv4')(x)
    x = PReLU(name='prelu4')(x)
    # 128 -> 2 and 128 -> 4
    classifier = Dense(2, activation='softmax', name='conv5-1')(x)
    bbox_regress = Dense(4, name='conv5-2')(x)
    model = Model([input], [classifier, bbox_regress])
    model.load_weights(weight_path, by_name=True)
    return model
#-----------------------------#
#   Stage three of MTCNN:
#   refine the boxes and predict five facial landmarks
#-----------------------------#