

Python advanced_activations.PReLU method code examples

This article collects and summarizes typical usage examples of the keras.layers.advanced_activations.PReLU method in Python. If you are asking yourself how exactly advanced_activations.PReLU is used, or are looking for working examples of it, the curated code samples below may help. You can also explore further usage examples of the containing module, keras.layers.advanced_activations.


The following presents 14 code examples of the advanced_activations.PReLU method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
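
Before the full examples, here is a minimal, self-contained sketch of the basic PReLU usage pattern, assuming the Keras 2 API; the layer sizes and toy data below are placeholders and are not taken from any of the examples that follow.

import numpy as np
from keras.models import Sequential
from keras.layers import Dense
from keras.layers.advanced_activations import PReLU

model = Sequential()
model.add(Dense(64, input_dim=20))          # linear projection
model.add(PReLU())                          # learnable negative slope, one alpha per unit
model.add(Dense(1, activation='sigmoid'))
model.compile(loss='binary_crossentropy', optimizer='adam')

x = np.random.rand(8, 20)                   # toy data, for illustration only
y = np.random.randint(0, 2, size=(8, 1))
model.fit(x, y, epochs=1, verbose=0)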

Example 1: deep_mlp

# Required import: from keras.layers import advanced_activations [as alias]
# Or: from keras.layers.advanced_activations import PReLU [as alias]
def deep_mlp(self):
        """
        Deep Multilayer Perceptron.
        """
        if self._config.num_mlp_layers == 0:
            self.add(Dropout(0.5))
        else:
            for j in xrange(self._config.num_mlp_layers):
                self.add(Dense(self._config.mlp_hidden_dim))
                if self._config.mlp_activation == 'elu':
                    self.add(ELU())
                elif self._config.mlp_activation == 'leaky_relu':
                    self.add(LeakyReLU())
                elif self._config.mlp_activation == 'prelu':
                    self.add(PReLU())
                else:
                    self.add(Activation(self._config.mlp_activation))
                self.add(Dropout(0.5)) 
Author: mateuszmalinowski, Project: visual_turing_test-tutorial, Lines: 20, Source: model_zoo.py
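
The method above reads everything from a configuration object. The sketch below shows the attributes it expects; the attribute names come from the code above, while the class name and the values are assumptions for illustration only.

class MLPConfig(object):
    num_mlp_layers = 2          # number of Dense blocks to stack (0 means a single Dropout only)
    mlp_hidden_dim = 512        # units in each Dense layer
    mlp_activation = 'prelu'    # 'elu', 'leaky_relu', 'prelu', or any built-in Keras activation name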

Example 2: build_model

# Required import: from keras.layers import advanced_activations [as alias]
# Or: from keras.layers.advanced_activations import PReLU [as alias]
def build_model():
    """
    Define the model.
    """
    model = Sequential()

    model.add(LSTM(units=Conf.LAYERS[1], input_shape=(Conf.LAYERS[1], Conf.LAYERS[0]), return_sequences=True))
    model.add(Dropout(0.2))

    model.add(LSTM(Conf.LAYERS[2], return_sequences=False))
    model.add(Dropout(0.2))

    model.add(Dense(units=Conf.LAYERS[3]))
    # model.add(BatchNormalization(weights=None, epsilon=1e-06, momentum=0.9))
    model.add(Activation("tanh"))
    # act = PReLU(alpha_initializer='zeros', weights=None)
    # act = LeakyReLU(alpha=0.3)
    # model.add(act)

    start = time.time()
    model.compile(loss="mse", optimizer="rmsprop")
    print("> Compilation Time : ", time.time() - start)
    return model 
Author: liyinwei, Project: copper_price_forecast, Lines: 25, Source: co_lstm_predict_day.py

Example 3: test_tiny_conv_prelu_random

# Required import: from keras.layers import advanced_activations [as alias]
# Or: from keras.layers.advanced_activations import PReLU [as alias]
def test_tiny_conv_prelu_random(self):
        np.random.seed(1988)

        # Define a model
        from keras.layers.advanced_activations import PReLU

        model = Sequential()
        model.add(
            Convolution2D(
                input_shape=(10, 10, 3),
                nb_filter=3,
                nb_row=5,
                nb_col=5,
                border_mode="same",
            )
        )
        model.add(PReLU(shared_axes=[1, 2]))

        model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

        # Get the coreml model
        self._test_keras_model(model) 
Author: apple, Project: coremltools, Lines: 24, Source: test_keras_numeric.py

Example 4: test_tiny_conv_prelu_random

# Required import: from keras.layers import advanced_activations [as alias]
# Or: from keras.layers.advanced_activations import PReLU [as alias]
def test_tiny_conv_prelu_random(self, model_precision=_MLMODEL_FULL_PRECISION):
        np.random.seed(1988)

        # Define a model
        from keras.layers.advanced_activations import PReLU

        model = Sequential()
        model.add(
            Conv2D(
                input_shape=(10, 10, 3), filters=3, kernel_size=(5, 5), padding="same"
            )
        )
        model.add(PReLU(shared_axes=[1, 2]))

        model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

        # Get the coreml model
        self._test_model(model, model_precision=model_precision) 
Author: apple, Project: coremltools, Lines: 20, Source: test_keras2_numeric.py
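
Examples 3 and 4 both pass shared_axes=[1, 2] to PReLU. For a channels-last tensor of shape (H, W, C) this shares the learned slope across the spatial axes, leaving one alpha per channel. A short sketch of the difference, for illustration only and not part of either test:

from keras.layers.advanced_activations import PReLU

prelu_per_channel = PReLU(shared_axes=[1, 2])   # one alpha per channel; slope shared over H and W
prelu_per_unit = PReLU()                        # one alpha per individual activation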

Example 5: build_model

# Required import: from keras.layers import advanced_activations [as alias]
# Or: from keras.layers.advanced_activations import PReLU [as alias]
def build_model(self):
            model = Sequential()
            model.add(Dropout(0.1, input_shape=(nn_input_dim_NN,)))
            model.add(Dense(input_dim=nn_input_dim_NN, output_dim=310, init='he_normal'))
            model.add(LeakyReLU(alpha=.001))
            model.add(BatchNormalization())
            model.add(Dropout(0.6))
            model.add(Dense(input_dim=310,output_dim=252, init='he_normal'))
            model.add(PReLU(init='zero'))
            model.add(BatchNormalization())
            model.add(Dropout(0.5))
            model.add(Dense(input_dim=252,output_dim=128, init='he_normal'))
            model.add(LeakyReLU(alpha=.001))
            model.add(BatchNormalization())
            model.add(Dropout(0.4))
            model.add(Dense(input_dim=128,output_dim=2, init='he_normal', activation='softmax'))
            #model.add(Activation('softmax'))
            sgd = SGD(lr=0.02, decay=1e-6, momentum=0.9, nesterov=True)

            model.compile(optimizer=sgd, loss='binary_crossentropy',class_mode='binary')

            return KerasClassifier(nn=model,**self.params) 
Author: ikki407, Project: stacking, Lines: 24, Source: ikki_NN_1.py

Example 6: create_Kao_Onet

# Required import: from keras.layers import advanced_activations [as alias]
# Or: from keras.layers.advanced_activations import PReLU [as alias]
def create_Kao_Onet( weight_path = 'model48.h5'):
    input = Input(shape = [48,48,3])
    x = Conv2D(32, (3, 3), strides=1, padding='valid', name='conv1')(input)
    x = PReLU(shared_axes=[1,2],name='prelu1')(x)
    x = MaxPool2D(pool_size=3, strides=2, padding='same')(x)
    x = Conv2D(64, (3, 3), strides=1, padding='valid', name='conv2')(x)
    x = PReLU(shared_axes=[1,2],name='prelu2')(x)
    x = MaxPool2D(pool_size=3, strides=2)(x)
    x = Conv2D(64, (3, 3), strides=1, padding='valid', name='conv3')(x)
    x = PReLU(shared_axes=[1,2],name='prelu3')(x)
    x = MaxPool2D(pool_size=2)(x)
    x = Conv2D(128, (2, 2), strides=1, padding='valid', name='conv4')(x)
    x = PReLU(shared_axes=[1,2],name='prelu4')(x)
    x = Permute((3,2,1))(x)
    x = Flatten()(x)
    x = Dense(256, name='conv5') (x)
    x = PReLU(name='prelu5')(x)

    classifier = Dense(2, activation='softmax',name='conv6-1')(x)
    bbox_regress = Dense(4,name='conv6-2')(x)
    landmark_regress = Dense(10,name='conv6-3')(x)
    model = Model([input], [classifier, bbox_regress, landmark_regress])
    model.load_weights(weight_path, by_name=True)

    return model 
Author: wotchin, Project: SmooFaceEngine, Lines: 27, Source: mtcnn_model.py

Example 7: create_Kao_Rnet

# Required import: from keras.layers import advanced_activations [as alias]
# Or: from keras.layers.advanced_activations import PReLU [as alias]
def create_Kao_Rnet (weight_path = 'model24.h5'):
    input = Input(shape=[24, 24, 3])  # change this shape to [None,None,3] to enable arbitrary shape input
    x = Conv2D(28, (3, 3), strides=1, padding='valid', name='conv1')(input)
    x = PReLU(shared_axes=[1, 2], name='prelu1')(x)
    x = MaxPool2D(pool_size=3,strides=2, padding='same')(x)

    x = Conv2D(48, (3, 3), strides=1, padding='valid', name='conv2')(x)
    x = PReLU(shared_axes=[1, 2], name='prelu2')(x)
    x = MaxPool2D(pool_size=3, strides=2)(x)

    x = Conv2D(64, (2, 2), strides=1, padding='valid', name='conv3')(x)
    x = PReLU(shared_axes=[1, 2], name='prelu3')(x)
    x = Permute((3, 2, 1))(x)
    x = Flatten()(x)
    x = Dense(128, name='conv4')(x)
    x = PReLU( name='prelu4')(x)
    classifier = Dense(2, activation='softmax', name='conv5-1')(x)
    bbox_regress = Dense(4, name='conv5-2')(x)
    model = Model([input], [classifier, bbox_regress])
    model.load_weights(weight_path, by_name=True)
    return model 
Author: wotchin, Project: SmooFaceEngine, Lines: 23, Source: mtcnn_model.py

Example 8: create_Pnet

# Required import: from keras.layers import advanced_activations [as alias]
# Or: from keras.layers.advanced_activations import PReLU [as alias]
def create_Pnet(weight_path):
    input = Input(shape=[None, None, 3])

    x = Conv2D(10, (3, 3), strides=1, padding='valid', name='conv1')(input)
    x = PReLU(shared_axes=[1,2],name='PReLU1')(x)
    x = MaxPool2D(pool_size=2)(x)

    x = Conv2D(16, (3, 3), strides=1, padding='valid', name='conv2')(x)
    x = PReLU(shared_axes=[1,2],name='PReLU2')(x)

    x = Conv2D(32, (3, 3), strides=1, padding='valid', name='conv3')(x)
    x = PReLU(shared_axes=[1,2],name='PReLU3')(x)

    classifier = Conv2D(2, (1, 1), activation='softmax', name='conv4-1')(x)
    # No activation function; the output is linear.
    bbox_regress = Conv2D(4, (1, 1), name='conv4-2')(x)

    model = Model([input], [classifier, bbox_regress])
    model.load_weights(weight_path, by_name=True)
    return model

#-----------------------------#
#   Stage 2 of MTCNN
#   refines the bounding boxes
#-----------------------------# 
Author: bubbliiiing, Project: mtcnn-keras, Lines: 27, Source: mtcnn.py

Example 9: build

# Required import: from keras.layers import advanced_activations [as alias]
# Or: from keras.layers.advanced_activations import PReLU [as alias]
def build(inp, dropout_rate=0.01):
    enet = initial_block(inp)
    enet = BatchNormalization(momentum=0.1)(enet)  # enet_unpooling uses momentum of 0.1, keras default is 0.99
    enet = PReLU(shared_axes=[1, 2])(enet)
    enet = bottleneck(enet, 64, downsample=True, dropout_rate=dropout_rate)  # bottleneck 1.0
    for _ in range(4):
        enet = bottleneck(enet, 64, dropout_rate=dropout_rate)  # bottleneck 1.i
    
    enet = bottleneck(enet, 128, downsample=True)  # bottleneck 2.0
    # bottleneck 2.x and 3.x
    for _ in range(2):
        enet = bottleneck(enet, 128)  # bottleneck 2.1
        enet = bottleneck(enet, 128, dilated=2)  # bottleneck 2.2
        enet = bottleneck(enet, 128, asymmetric=5)  # bottleneck 2.3
        enet = bottleneck(enet, 128, dilated=4)  # bottleneck 2.4
        enet = bottleneck(enet, 128)  # bottleneck 2.5
        enet = bottleneck(enet, 128, dilated=8)  # bottleneck 2.6
        enet = bottleneck(enet, 128, asymmetric=5)  # bottleneck 2.7
        enet = bottleneck(enet, 128, dilated=16)  # bottleneck 2.8
    return enet 
Author: PavlosMelissinos, Project: enet-keras, Lines: 22, Source: encoder.py

Example 10: build

# Required import: from keras.layers import advanced_activations [as alias]
# Or: from keras.layers.advanced_activations import PReLU [as alias]
def build(inp, dropout_rate=0.01):
    pooling_indices = []
    enet, indices_single = initial_block(inp)
    enet = BatchNormalization(momentum=0.1)(enet)  # enet_unpooling uses momentum of 0.1, keras default is 0.99
    enet = PReLU(shared_axes=[1, 2])(enet)
    pooling_indices.append(indices_single)
    enet, indices_single = bottleneck(enet, 64, downsample=True, dropout_rate=dropout_rate)  # bottleneck 1.0
    pooling_indices.append(indices_single)
    for _ in range(4):
        enet = bottleneck(enet, 64, dropout_rate=dropout_rate)  # bottleneck 1.i
    
    enet, indices_single = bottleneck(enet, 128, downsample=True)  # bottleneck 2.0
    pooling_indices.append(indices_single)
    # bottleneck 2.x and 3.x
    for _ in range(2):
        enet = bottleneck(enet, 128)  # bottleneck 2.1
        enet = bottleneck(enet, 128, dilated=2)  # bottleneck 2.2
        enet = bottleneck(enet, 128, asymmetric=5)  # bottleneck 2.3
        enet = bottleneck(enet, 128, dilated=4)  # bottleneck 2.4
        enet = bottleneck(enet, 128)  # bottleneck 2.5
        enet = bottleneck(enet, 128, dilated=8)  # bottleneck 2.6
        enet = bottleneck(enet, 128, asymmetric=5)  # bottleneck 2.7
        enet = bottleneck(enet, 128, dilated=16)  # bottleneck 2.8
    return enet, pooling_indices 
Author: PavlosMelissinos, Project: enet-keras, Lines: 26, Source: encoder.py

Example 11: nn_model

# Required import: from keras.layers import advanced_activations [as alias]
# Or: from keras.layers.advanced_activations import PReLU [as alias]
def nn_model(dims):
    model = Sequential()

    model.add(Dense(400, input_dim=dims, kernel_initializer='he_normal'))
    model.add(PReLU())
    model.add(BatchNormalization())
    model.add(Dropout(0.4))

    model.add(Dense(200, kernel_initializer='he_normal'))
    model.add(PReLU())
    model.add(BatchNormalization())
    model.add(Dropout(0.2))

    model.add(Dense(50, kernel_initializer='he_normal'))
    model.add(PReLU())
    model.add(BatchNormalization())
    model.add(Dropout(0.2))

    model.add(Dense(1, kernel_initializer='he_normal', activation='sigmoid'))
    model.compile(loss = 'binary_crossentropy', optimizer = 'adadelta')
    return(model) 
Author: jeongyoonlee, Project: kaggler-template, Lines: 23, Source: train_predict_krs1.py
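
A hypothetical way to train the model returned by nn_model above; the feature dimension and the data are placeholders, not part of the original project.

import numpy as np

X = np.random.rand(100, 50)                     # 100 samples, 50 features (placeholder data)
y = np.random.randint(0, 2, size=(100,))        # binary targets, matching the sigmoid output
model = nn_model(dims=X.shape[1])
model.fit(X, y, epochs=2, batch_size=32, verbose=0)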

Example 12: conv_bn_prelu

# Required import: from keras.layers import advanced_activations [as alias]
# Or: from keras.layers.advanced_activations import PReLU [as alias]
def conv_bn_prelu(bottom, w_size, name, strides=(1,1), dilation_rate=(1,1)):
    if dilation_rate == (1,1):
        conv_type = 'conv'
    else:
        conv_type = 'atrousconv'

    top = Conv2D(w_size[0], (w_size[1],w_size[2]),
        kernel_regularizer=l2(5e-5),
        padding='same',
        strides=strides,
        dilation_rate=dilation_rate,
        name=conv_type+name)(bottom)
    top = BatchNormalization(name='bn-'+name)(top)
    top = PReLU(alpha_initializer='zero', shared_axes=[1,2], name='prelu-'+name)(top)
    # top = Dropout(0.25)(top)
    return top 
Author: luannd, Project: MinutiaeNet, Lines: 18, Source: CoarseNet_model.py
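
A hypothetical usage sketch of conv_bn_prelu above with the Keras functional API; the input shape, filter counts, and layer names are assumptions, not taken from the original project.

from keras.layers import Input
from keras.models import Model

inp = Input(shape=(128, 128, 1))                         # single-channel image patch (assumed shape)
x = conv_bn_prelu(inp, (64, 3, 3), name='1_1')           # 64 filters, 3x3 kernel
x = conv_bn_prelu(x, (64, 3, 3), name='1_2', dilation_rate=(2, 2))
model = Model(inputs=inp, outputs=x)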

Example 13: create_Pnet

# Required import: from keras.layers import advanced_activations [as alias]
# Or: from keras.layers.advanced_activations import PReLU [as alias]
def create_Pnet(weight_path):
    # h,w
    input = Input(shape=[None, None, 3])

    # h,w,3 -> h/2,w/2,10
    x = Conv2D(10, (3, 3), strides=1, padding='valid', name='conv1')(input)
    x = PReLU(shared_axes=[1,2],name='PReLU1')(x)
    x = MaxPool2D(pool_size=2)(x)

    # h/2,w/2,10 -> h/2,w/2,16
    x = Conv2D(16, (3, 3), strides=1, padding='valid', name='conv2')(x)
    x = PReLU(shared_axes=[1,2],name='PReLU2')(x)
    # h/2,w/2,32
    x = Conv2D(32, (3, 3), strides=1, padding='valid', name='conv3')(x)
    x = PReLU(shared_axes=[1,2],name='PReLU3')(x)

    # h/2, w/2, 2
    classifier = Conv2D(2, (1, 1), activation='softmax', name='conv4-1')(x)
    # No activation function; the output is linear.
    # h/2, w/2, 4
    bbox_regress = Conv2D(4, (1, 1), name='conv4-2')(x)

    model = Model([input], [classifier, bbox_regress])
    model.load_weights(weight_path, by_name=True)
    return model

#-----------------------------#
#   Stage 2 of MTCNN
#   refines the bounding boxes
#-----------------------------# 
Author: bubbliiiing, Project: keras-face-recognition, Lines: 32, Source: mtcnn.py

Example 14: create_Rnet

# Required import: from keras.layers import advanced_activations [as alias]
# Or: from keras.layers.advanced_activations import PReLU [as alias]
def create_Rnet(weight_path):
    input = Input(shape=[24, 24, 3])
    # 24,24,3 -> 11,11,28
    x = Conv2D(28, (3, 3), strides=1, padding='valid', name='conv1')(input)
    x = PReLU(shared_axes=[1, 2], name='prelu1')(x)
    x = MaxPool2D(pool_size=3,strides=2, padding='same')(x)

    # 11,11,28 -> 4,4,48
    x = Conv2D(48, (3, 3), strides=1, padding='valid', name='conv2')(x)
    x = PReLU(shared_axes=[1, 2], name='prelu2')(x)
    x = MaxPool2D(pool_size=3, strides=2)(x)

    # 4,4,48 -> 3,3,64
    x = Conv2D(64, (2, 2), strides=1, padding='valid', name='conv3')(x)
    x = PReLU(shared_axes=[1, 2], name='prelu3')(x)
    # 3,3,64 -> 64,3,3
    x = Permute((3, 2, 1))(x)
    x = Flatten()(x)
    # 576 -> 128
    x = Dense(128, name='conv4')(x)
    x = PReLU( name='prelu4')(x)
    # 128 -> 2 128 -> 4
    classifier = Dense(2, activation='softmax', name='conv5-1')(x)
    bbox_regress = Dense(4, name='conv5-2')(x)
    model = Model([input], [classifier, bbox_regress])
    model.load_weights(weight_path, by_name=True)
    return model

#-----------------------------#
#   Stage 3 of MTCNN
#   refines the bounding boxes and predicts five facial landmarks
#-----------------------------# 
Author: bubbliiiing, Project: keras-face-recognition, Lines: 34, Source: mtcnn.py


Note: The keras.layers.advanced_activations.PReLU examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by various developers; copyright of the source code belongs to the original authors, and distribution and use should follow the License of the corresponding project. Do not reproduce without permission.