

Python advanced_activations.PReLU Method Code Examples

This article compiles typical usage examples of the Python method keras.layers.advanced_activations.PReLU. If you are wondering what advanced_activations.PReLU does, how to use it, or where to find examples of it, the curated code samples below may help. You can also browse further usage examples from the module it belongs to, keras.layers.advanced_activations.


The following presents 14 code examples of advanced_activations.PReLU, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
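Before the collected examples, here is a minimal, self-contained sketch of the pattern nearly all of them follow: add a layer with no built-in activation, then add PReLU as a separate layer so that the slope applied to negative inputs is learned during training. The layer sizes, optimizer, and random data below are illustrative assumptions for this sketch only and are not taken from any of the cited projects.

import numpy as np
from keras.models import Sequential
from keras.layers import Dense
from keras.layers.advanced_activations import PReLU

model = Sequential()
model.add(Dense(64, input_dim=20))          # linear Dense layer, no activation yet
model.add(PReLU())                          # learnable negative-slope activation, one alpha per unit
model.add(Dense(1, activation='sigmoid'))
model.compile(loss='binary_crossentropy', optimizer='adam')

x = np.random.rand(8, 20)                   # dummy batch just to verify the wiring
print(model.predict(x).shape)               # expected output shape: (8, 1)

For convolutional feature maps, passing shared_axes=[1, 2] (as most of the examples below do) makes all spatial positions of a channel share a single alpha, which keeps the number of PReLU parameters small.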

Example 1: deep_mlp

# Required module import: from keras.layers import advanced_activations [as alias]
# Or: from keras.layers.advanced_activations import PReLU [as alias]
def deep_mlp(self):
        """
        Deep Multilayer Perceptron.
        """
        if self._config.num_mlp_layers == 0:
            self.add(Dropout(0.5))
        else:
            for j in xrange(self._config.num_mlp_layers):
                self.add(Dense(self._config.mlp_hidden_dim))
                if self._config.mlp_activation == 'elu':
                    self.add(ELU())
                elif self._config.mlp_activation == 'leaky_relu':
                    self.add(LeakyReLU())
                elif self._config.mlp_activation == 'prelu':
                    self.add(PReLU())
                else:
                    self.add(Activation(self._config.mlp_activation))
                self.add(Dropout(0.5)) 
Developer: mateuszmalinowski, Project: visual_turing_test-tutorial, Lines of code: 20, Source file: model_zoo.py

Example 2: build_model

# Required module import: from keras.layers import advanced_activations [as alias]
# Or: from keras.layers.advanced_activations import PReLU [as alias]
def build_model():
    """
    Define the model.
    """
    model = Sequential()

    model.add(LSTM(units=Conf.LAYERS[1], input_shape=(Conf.LAYERS[1], Conf.LAYERS[0]), return_sequences=True))
    model.add(Dropout(0.2))

    model.add(LSTM(Conf.LAYERS[2], return_sequences=False))
    model.add(Dropout(0.2))

    model.add(Dense(units=Conf.LAYERS[3]))
    # model.add(BatchNormalization(weights=None, epsilon=1e-06, momentum=0.9))
    model.add(Activation("tanh"))
    # act = PReLU(alpha_initializer='zeros', weights=None)
    # act = LeakyReLU(alpha=0.3)
    # model.add(act)

    start = time.time()
    model.compile(loss="mse", optimizer="rmsprop")
    print("> Compilation Time : ", time.time() - start)
    return model 
Developer: liyinwei, Project: copper_price_forecast, Lines of code: 25, Source file: co_lstm_predict_day.py

Example 3: test_tiny_conv_prelu_random

# Required module import: from keras.layers import advanced_activations [as alias]
# Or: from keras.layers.advanced_activations import PReLU [as alias]
def test_tiny_conv_prelu_random(self):
        np.random.seed(1988)

        # Define a model
        from keras.layers.advanced_activations import PReLU

        model = Sequential()
        model.add(
            Convolution2D(
                input_shape=(10, 10, 3),
                nb_filter=3,
                nb_row=5,
                nb_col=5,
                border_mode="same",
            )
        )
        model.add(PReLU(shared_axes=[1, 2]))

        model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

        # Get the coreml model
        self._test_keras_model(model) 
Developer: apple, Project: coremltools, Lines of code: 24, Source file: test_keras_numeric.py

Example 4: test_tiny_conv_prelu_random

# Required module import: from keras.layers import advanced_activations [as alias]
# Or: from keras.layers.advanced_activations import PReLU [as alias]
def test_tiny_conv_prelu_random(self, model_precision=_MLMODEL_FULL_PRECISION):
        np.random.seed(1988)

        # Define a model
        from keras.layers.advanced_activations import PReLU

        model = Sequential()
        model.add(
            Conv2D(
                input_shape=(10, 10, 3), filters=3, kernel_size=(5, 5), padding="same"
            )
        )
        model.add(PReLU(shared_axes=[1, 2]))

        model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

        # Get the coreml model
        self._test_model(model, model_precision=model_precision) 
Developer: apple, Project: coremltools, Lines of code: 20, Source file: test_keras2_numeric.py

Example 5: build_model

# Required module import: from keras.layers import advanced_activations [as alias]
# Or: from keras.layers.advanced_activations import PReLU [as alias]
def build_model(self):
            model = Sequential()
            model.add(Dropout(0.1, input_shape=(nn_input_dim_NN,)))
            model.add(Dense(input_dim=nn_input_dim_NN, output_dim=310, init='he_normal'))
            model.add(LeakyReLU(alpha=.001))
            model.add(BatchNormalization())
            model.add(Dropout(0.6))
            model.add(Dense(input_dim=310,output_dim=252, init='he_normal'))
            model.add(PReLU(init='zero'))
            model.add(BatchNormalization())
            model.add(Dropout(0.5))
            model.add(Dense(input_dim=252,output_dim=128, init='he_normal'))
            model.add(LeakyReLU(alpha=.001))
            model.add(BatchNormalization())
            model.add(Dropout(0.4))
            model.add(Dense(input_dim=128,output_dim=2, init='he_normal', activation='softmax'))
            #model.add(Activation('softmax'))
            sgd = SGD(lr=0.02, decay=1e-6, momentum=0.9, nesterov=True)

            model.compile(optimizer=sgd, loss='binary_crossentropy',class_mode='binary')

            return KerasClassifier(nn=model,**self.params) 
Developer: ikki407, Project: stacking, Lines of code: 24, Source file: ikki_NN_1.py

Example 6: create_Kao_Onet

# Required module import: from keras.layers import advanced_activations [as alias]
# Or: from keras.layers.advanced_activations import PReLU [as alias]
def create_Kao_Onet( weight_path = 'model48.h5'):
    input = Input(shape = [48,48,3])
    x = Conv2D(32, (3, 3), strides=1, padding='valid', name='conv1')(input)
    x = PReLU(shared_axes=[1,2],name='prelu1')(x)
    x = MaxPool2D(pool_size=3, strides=2, padding='same')(x)
    x = Conv2D(64, (3, 3), strides=1, padding='valid', name='conv2')(x)
    x = PReLU(shared_axes=[1,2],name='prelu2')(x)
    x = MaxPool2D(pool_size=3, strides=2)(x)
    x = Conv2D(64, (3, 3), strides=1, padding='valid', name='conv3')(x)
    x = PReLU(shared_axes=[1,2],name='prelu3')(x)
    x = MaxPool2D(pool_size=2)(x)
    x = Conv2D(128, (2, 2), strides=1, padding='valid', name='conv4')(x)
    x = PReLU(shared_axes=[1,2],name='prelu4')(x)
    x = Permute((3,2,1))(x)
    x = Flatten()(x)
    x = Dense(256, name='conv5') (x)
    x = PReLU(name='prelu5')(x)

    classifier = Dense(2, activation='softmax',name='conv6-1')(x)
    bbox_regress = Dense(4,name='conv6-2')(x)
    landmark_regress = Dense(10,name='conv6-3')(x)
    model = Model([input], [classifier, bbox_regress, landmark_regress])
    model.load_weights(weight_path, by_name=True)

    return model 
Developer: wotchin, Project: SmooFaceEngine, Lines of code: 27, Source file: mtcnn_model.py

Example 7: create_Kao_Rnet

# Required module import: from keras.layers import advanced_activations [as alias]
# Or: from keras.layers.advanced_activations import PReLU [as alias]
def create_Kao_Rnet (weight_path = 'model24.h5'):
    input = Input(shape=[24, 24, 3])  # change this shape to [None,None,3] to enable arbitrary shape input
    x = Conv2D(28, (3, 3), strides=1, padding='valid', name='conv1')(input)
    x = PReLU(shared_axes=[1, 2], name='prelu1')(x)
    x = MaxPool2D(pool_size=3,strides=2, padding='same')(x)

    x = Conv2D(48, (3, 3), strides=1, padding='valid', name='conv2')(x)
    x = PReLU(shared_axes=[1, 2], name='prelu2')(x)
    x = MaxPool2D(pool_size=3, strides=2)(x)

    x = Conv2D(64, (2, 2), strides=1, padding='valid', name='conv3')(x)
    x = PReLU(shared_axes=[1, 2], name='prelu3')(x)
    x = Permute((3, 2, 1))(x)
    x = Flatten()(x)
    x = Dense(128, name='conv4')(x)
    x = PReLU( name='prelu4')(x)
    classifier = Dense(2, activation='softmax', name='conv5-1')(x)
    bbox_regress = Dense(4, name='conv5-2')(x)
    model = Model([input], [classifier, bbox_regress])
    model.load_weights(weight_path, by_name=True)
    return model 
Developer: wotchin, Project: SmooFaceEngine, Lines of code: 23, Source file: mtcnn_model.py

Example 8: create_Pnet

# Required module import: from keras.layers import advanced_activations [as alias]
# Or: from keras.layers.advanced_activations import PReLU [as alias]
def create_Pnet(weight_path):
    input = Input(shape=[None, None, 3])

    x = Conv2D(10, (3, 3), strides=1, padding='valid', name='conv1')(input)
    x = PReLU(shared_axes=[1,2],name='PReLU1')(x)
    x = MaxPool2D(pool_size=2)(x)

    x = Conv2D(16, (3, 3), strides=1, padding='valid', name='conv2')(x)
    x = PReLU(shared_axes=[1,2],name='PReLU2')(x)

    x = Conv2D(32, (3, 3), strides=1, padding='valid', name='conv3')(x)
    x = PReLU(shared_axes=[1,2],name='PReLU3')(x)

    classifier = Conv2D(2, (1, 1), activation='softmax', name='conv4-1')(x)
    # No activation function (linear output).
    bbox_regress = Conv2D(4, (1, 1), name='conv4-2')(x)

    model = Model([input], [classifier, bbox_regress])
    model.load_weights(weight_path, by_name=True)
    return model

#-----------------------------#
#   Stage 2 of MTCNN
#   Refines the bounding boxes
#-----------------------------# 
Developer: bubbliiiing, Project: mtcnn-keras, Lines of code: 27, Source file: mtcnn.py

Example 9: build

# Required module import: from keras.layers import advanced_activations [as alias]
# Or: from keras.layers.advanced_activations import PReLU [as alias]
def build(inp, dropout_rate=0.01):
    enet = initial_block(inp)
    enet = BatchNormalization(momentum=0.1)(enet)  # enet_unpooling uses momentum of 0.1, keras default is 0.99
    enet = PReLU(shared_axes=[1, 2])(enet)
    enet = bottleneck(enet, 64, downsample=True, dropout_rate=dropout_rate)  # bottleneck 1.0
    for _ in range(4):
        enet = bottleneck(enet, 64, dropout_rate=dropout_rate)  # bottleneck 1.i
    
    enet = bottleneck(enet, 128, downsample=True)  # bottleneck 2.0
    # bottleneck 2.x and 3.x
    for _ in range(2):
        enet = bottleneck(enet, 128)  # bottleneck 2.1
        enet = bottleneck(enet, 128, dilated=2)  # bottleneck 2.2
        enet = bottleneck(enet, 128, asymmetric=5)  # bottleneck 2.3
        enet = bottleneck(enet, 128, dilated=4)  # bottleneck 2.4
        enet = bottleneck(enet, 128)  # bottleneck 2.5
        enet = bottleneck(enet, 128, dilated=8)  # bottleneck 2.6
        enet = bottleneck(enet, 128, asymmetric=5)  # bottleneck 2.7
        enet = bottleneck(enet, 128, dilated=16)  # bottleneck 2.8
    return enet 
Developer: PavlosMelissinos, Project: enet-keras, Lines of code: 22, Source file: encoder.py

Example 10: build

# Required module import: from keras.layers import advanced_activations [as alias]
# Or: from keras.layers.advanced_activations import PReLU [as alias]
def build(inp, dropout_rate=0.01):
    pooling_indices = []
    enet, indices_single = initial_block(inp)
    enet = BatchNormalization(momentum=0.1)(enet)  # enet_unpooling uses momentum of 0.1, keras default is 0.99
    enet = PReLU(shared_axes=[1, 2])(enet)
    pooling_indices.append(indices_single)
    enet, indices_single = bottleneck(enet, 64, downsample=True, dropout_rate=dropout_rate)  # bottleneck 1.0
    pooling_indices.append(indices_single)
    for _ in range(4):
        enet = bottleneck(enet, 64, dropout_rate=dropout_rate)  # bottleneck 1.i
    
    enet, indices_single = bottleneck(enet, 128, downsample=True)  # bottleneck 2.0
    pooling_indices.append(indices_single)
    # bottleneck 2.x and 3.x
    for _ in range(2):
        enet = bottleneck(enet, 128)  # bottleneck 2.1
        enet = bottleneck(enet, 128, dilated=2)  # bottleneck 2.2
        enet = bottleneck(enet, 128, asymmetric=5)  # bottleneck 2.3
        enet = bottleneck(enet, 128, dilated=4)  # bottleneck 2.4
        enet = bottleneck(enet, 128)  # bottleneck 2.5
        enet = bottleneck(enet, 128, dilated=8)  # bottleneck 2.6
        enet = bottleneck(enet, 128, asymmetric=5)  # bottleneck 2.7
        enet = bottleneck(enet, 128, dilated=16)  # bottleneck 2.8
    return enet, pooling_indices 
Developer: PavlosMelissinos, Project: enet-keras, Lines of code: 26, Source file: encoder.py

Example 11: nn_model

# Required module import: from keras.layers import advanced_activations [as alias]
# Or: from keras.layers.advanced_activations import PReLU [as alias]
def nn_model(dims):
    model = Sequential()

    model.add(Dense(400, input_dim=dims, kernel_initializer='he_normal'))
    model.add(PReLU())
    model.add(BatchNormalization())
    model.add(Dropout(0.4))

    model.add(Dense(200, kernel_initializer='he_normal'))
    model.add(PReLU())
    model.add(BatchNormalization())
    model.add(Dropout(0.2))

    model.add(Dense(50, kernel_initializer='he_normal'))
    model.add(PReLU())
    model.add(BatchNormalization())
    model.add(Dropout(0.2))

    model.add(Dense(1, kernel_initializer='he_normal', activation='sigmoid'))
    model.compile(loss = 'binary_crossentropy', optimizer = 'adadelta')
    return(model) 
Developer: jeongyoonlee, Project: kaggler-template, Lines of code: 23, Source file: train_predict_krs1.py

Example 12: conv_bn_prelu

# Required module import: from keras.layers import advanced_activations [as alias]
# Or: from keras.layers.advanced_activations import PReLU [as alias]
def conv_bn_prelu(bottom, w_size, name, strides=(1,1), dilation_rate=(1,1)):
    if dilation_rate == (1,1):
        conv_type = 'conv'
    else:
        conv_type = 'atrousconv'

    top = Conv2D(w_size[0], (w_size[1],w_size[2]),
        kernel_regularizer=l2(5e-5),
        padding='same',
        strides=strides,
        dilation_rate=dilation_rate,
        name=conv_type+name)(bottom)
    top = BatchNormalization(name='bn-'+name)(top)
    top = PReLU(alpha_initializer='zero', shared_axes=[1,2], name='prelu-'+name)(top)
    # top = Dropout(0.25)(top)
    return top 
Developer: luannd, Project: MinutiaeNet, Lines of code: 18, Source file: CoarseNet_model.py

Example 13: create_Pnet

# Required module import: from keras.layers import advanced_activations [as alias]
# Or: from keras.layers.advanced_activations import PReLU [as alias]
def create_Pnet(weight_path):
    # h,w
    input = Input(shape=[None, None, 3])

    # h,w,3 -> h/2,w/2,10
    x = Conv2D(10, (3, 3), strides=1, padding='valid', name='conv1')(input)
    x = PReLU(shared_axes=[1,2],name='PReLU1')(x)
    x = MaxPool2D(pool_size=2)(x)

    # h/2,w/2,10 -> h/2,w/2,16
    x = Conv2D(16, (3, 3), strides=1, padding='valid', name='conv2')(x)
    x = PReLU(shared_axes=[1,2],name='PReLU2')(x)
    # h/2,w/2,32
    x = Conv2D(32, (3, 3), strides=1, padding='valid', name='conv3')(x)
    x = PReLU(shared_axes=[1,2],name='PReLU3')(x)

    # h/2, w/2, 2
    classifier = Conv2D(2, (1, 1), activation='softmax', name='conv4-1')(x)
    # No activation function (linear output).
    # h/2, w/2, 4
    bbox_regress = Conv2D(4, (1, 1), name='conv4-2')(x)

    model = Model([input], [classifier, bbox_regress])
    model.load_weights(weight_path, by_name=True)
    return model

#-----------------------------#
#   Stage 2 of MTCNN
#   Refines the bounding boxes
#-----------------------------# 
Developer: bubbliiiing, Project: keras-face-recognition, Lines of code: 32, Source file: mtcnn.py

Example 14: create_Rnet

# Required module import: from keras.layers import advanced_activations [as alias]
# Or: from keras.layers.advanced_activations import PReLU [as alias]
def create_Rnet(weight_path):
    input = Input(shape=[24, 24, 3])
    # 24,24,3 -> 11,11,28
    x = Conv2D(28, (3, 3), strides=1, padding='valid', name='conv1')(input)
    x = PReLU(shared_axes=[1, 2], name='prelu1')(x)
    x = MaxPool2D(pool_size=3,strides=2, padding='same')(x)

    # 11,11,28 -> 4,4,48
    x = Conv2D(48, (3, 3), strides=1, padding='valid', name='conv2')(x)
    x = PReLU(shared_axes=[1, 2], name='prelu2')(x)
    x = MaxPool2D(pool_size=3, strides=2)(x)

    # 4,4,48 -> 3,3,64
    x = Conv2D(64, (2, 2), strides=1, padding='valid', name='conv3')(x)
    x = PReLU(shared_axes=[1, 2], name='prelu3')(x)
    # 3,3,64 -> 64,3,3
    x = Permute((3, 2, 1))(x)
    x = Flatten()(x)
    # 576 -> 128
    x = Dense(128, name='conv4')(x)
    x = PReLU( name='prelu4')(x)
    # 128 -> 2 128 -> 4
    classifier = Dense(2, activation='softmax', name='conv5-1')(x)
    bbox_regress = Dense(4, name='conv5-2')(x)
    model = Model([input], [classifier, bbox_regress])
    model.load_weights(weight_path, by_name=True)
    return model

#-----------------------------#
#   Stage 3 of MTCNN
#   Refines the boxes and outputs the five facial landmark points
#-----------------------------# 
Developer: bubbliiiing, Project: keras-face-recognition, Lines of code: 34, Source file: mtcnn.py


Note: The keras.layers.advanced_activations.PReLU method examples in this article were compiled by 純淨天空 from open-source code and documentation hosted on platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by many developers; copyright of the source code remains with the original authors. For distribution and use, please refer to the license of the corresponding project; do not reproduce without permission.