

Python optimizers.Adadelta Method Code Examples

This article collects typical usage examples of the Python method keras.optimizers.Adadelta. If you have been puzzling over what optimizers.Adadelta does, how to call it, or what real-world code that uses it looks like, the curated examples below should help. You can also explore other usage examples from the containing module, keras.optimizers.


Below are 15 code examples of optimizers.Adadelta, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
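Before the examples, here is a minimal orientation sketch of where Adadelta plugs into a Keras workflow. It assumes the standalone Keras 2.x API with the legacy lr argument (the convention used throughout the examples below); the two-layer model is purely hypothetical:

from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import Adadelta

# A tiny hypothetical model, just to show where the optimizer plugs in
model = Sequential([
    Dense(10, input_dim=20, activation='relu'),
    Dense(1, activation='sigmoid'),
])
# lr=1.0 and rho=0.95 match the usual Keras defaults; Adadelta then
# adapts the effective per-parameter step size during training
model.compile(optimizer=Adadelta(lr=1.0, rho=0.95, epsilon=1e-6),
              loss='binary_crossentropy')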

Example 1: fit

# Required import: from keras import optimizers [as alias]
# Alternatively: from keras.optimizers import Adadelta [as alias]
def fit(self, train_X, val_X, nb_epoch=50, batch_size=100, feature_weights=None):
        print('Training autoencoder')
        optimizer = Adadelta(lr=1.5)
        # optimizer = Adam()
        # optimizer = Adagrad()
        if feature_weights is None:
            self.autoencoder.compile(optimizer=optimizer, loss='binary_crossentropy') # kld, binary_crossentropy, mse
        else:
            print('Using weighted loss')
            self.autoencoder.compile(optimizer=optimizer, loss=weighted_binary_crossentropy(feature_weights)) # kld, binary_crossentropy, mse

        self.autoencoder.fit(train_X[0], train_X[1],
                        nb_epoch=nb_epoch,  # Keras 1 argument name; use epochs= on Keras 2
                        batch_size=batch_size,
                        shuffle=True,
                        validation_data=(val_X[0], val_X[1]),
                        callbacks=[
                                    ReduceLROnPlateau(monitor='val_loss', factor=0.2, patience=3, min_lr=0.01),
                                    EarlyStopping(monitor='val_loss', min_delta=1e-5, patience=5, verbose=1, mode='auto'),
                                    # ModelCheckpoint(self.model_save_path, monitor='val_loss', save_best_only=True, verbose=0),
                        ]
                        )

        return self 
Author: hugochan · Project: KATE · Source: deepae.py
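Here train_X and val_X are (input, target) pairs rather than plain arrays. A hypothetical call for a plain reconstruction objective (the ae instance and the toy data are assumptions for illustration, not part of the KATE source):

import numpy as np

X = (np.random.rand(1000, 200) > 0.9).astype('float32')  # toy binary bag-of-words
Xv = X[:100]
# For plain reconstruction, inputs double as targets
ae.fit(train_X=(X, X), val_X=(Xv, Xv), nb_epoch=20, batch_size=50)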

Example 2: fit

# Required import: from keras import optimizers [as alias]
# Alternatively: from keras.optimizers import Adadelta [as alias]
def fit(self, train_X, val_X, nb_epoch=50, batch_size=100):
        print('Training variational autoencoder')
        optimizer = Adadelta(lr=2.)
        self.vae.compile(optimizer=optimizer, loss=self.vae_loss)

        self.vae.fit(train_X[0], train_X[1],
                shuffle=True,
                epochs=nb_epoch,
                batch_size=batch_size,
                validation_data=(val_X[0], val_X[1]),
                callbacks=[ReduceLROnPlateau(monitor='val_loss', factor=0.2, patience=3, min_lr=0.01),
                            EarlyStopping(monitor='val_loss', min_delta=1e-5, patience=5, verbose=1, mode='auto'),
                            CustomModelCheckpoint(self.encoder, self.save_model, monitor='val_loss', save_best_only=True, mode='auto')
                        ]
                )

        return self 
Author: hugochan · Project: KATE · Source: vae.py

Example 3: fit

# Required import: from keras import optimizers [as alias]
# Alternatively: from keras.optimizers import Adadelta [as alias]
def fit(self, train_X, val_X, nb_epoch=50, batch_size=100, contractive=None):
        optimizer = Adadelta(lr=2.)
        # optimizer = Adam()
        # optimizer = Adagrad()
        if contractive:
            print('Using contractive loss, lambda: %s' % contractive)
            self.autoencoder.compile(optimizer=optimizer, loss=contractive_loss(self, contractive))
        else:
            print('Using binary crossentropy')
            self.autoencoder.compile(optimizer=optimizer, loss='binary_crossentropy') # kld, binary_crossentropy, mse

        self.autoencoder.fit(train_X[0], train_X[1],
                        epochs=nb_epoch,
                        batch_size=batch_size,
                        shuffle=True,
                        validation_data=(val_X[0], val_X[1]),
                        callbacks=[
                                    ReduceLROnPlateau(monitor='val_loss', factor=0.2, patience=3, min_lr=0.01),
                                    EarlyStopping(monitor='val_loss', min_delta=1e-5, patience=5, verbose=1, mode='auto'),
                                    CustomModelCheckpoint(self.encoder, self.save_model, monitor='val_loss', save_best_only=True, mode='auto')
                        ]
                        )

        return self 
Author: hugochan · Project: KATE · Source: ae.py

Example 4: get_optimizer

# Required import: from keras import optimizers [as alias] — imported as `opt` in this snippet
# Alternatively: from keras.optimizers import Adadelta [as alias]
def get_optimizer(args):

	clipvalue = 0   # note: 0 disables value clipping (classic Keras clips only when clipvalue > 0)
	clipnorm = 10

	if args.algorithm == 'rmsprop':
		optimizer = opt.RMSprop(lr=0.001, rho=0.9, epsilon=1e-06, clipnorm=clipnorm, clipvalue=clipvalue)
	elif args.algorithm == 'sgd':
		optimizer = opt.SGD(lr=0.01, momentum=0.0, decay=0.0, nesterov=False, clipnorm=clipnorm, clipvalue=clipvalue)
	elif args.algorithm == 'adagrad':
		optimizer = opt.Adagrad(lr=0.01, epsilon=1e-06, clipnorm=clipnorm, clipvalue=clipvalue)
	elif args.algorithm == 'adadelta':
		optimizer = opt.Adadelta(lr=1.0, rho=0.95, epsilon=1e-06, clipnorm=clipnorm, clipvalue=clipvalue)
	elif args.algorithm == 'adam':
		optimizer = opt.Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, clipnorm=clipnorm, clipvalue=clipvalue)
	elif args.algorithm == 'adamax':
		optimizer = opt.Adamax(lr=0.002, beta_1=0.9, beta_2=0.999, epsilon=1e-08, clipnorm=clipnorm, clipvalue=clipvalue)
	
	return optimizer 
Author: madrugado · Project: Attention-Based-Aspect-Extraction · Source: optimizers.py
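Note that get_optimizer only reads args.algorithm, so any object exposing that attribute works; an unrecognized name leaves optimizer unbound, and the final return then raises UnboundLocalError. A hypothetical call via argparse.Namespace:

from argparse import Namespace

optimizer = get_optimizer(Namespace(algorithm='adadelta'))
# -> Adadelta(lr=1.0, rho=0.95, epsilon=1e-06, clipnorm=10, clipvalue=0)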

Example 5: get_optimizer

# Required import: from keras import optimizers [as alias]
# Alternatively: from keras.optimizers import Adadelta [as alias]
def get_optimizer(config):
    if config.OPTIMIZER == 'SGD':
        return SGD(lr=config.LEARNING_RATE, momentum=config.LEARNING_MOMENTUM, clipnorm=config.GRADIENT_CLIP_NORM, nesterov=config.NESTEROV)
    elif config.OPTIMIZER == 'RMSprop':
        return RMSprop(lr=config.LEARNING_RATE, clipnorm=config.GRADIENT_CLIP_NORM)
    elif config.OPTIMIZER == 'Adagrad':
        return Adagrad(lr=config.LEARNING_RATE, clipnorm=config.GRADIENT_CLIP_NORM)
    elif config.OPTIMIZER == 'Adadelta':
        return Adadelta(lr=config.LEARNING_RATE, clipnorm=config.GRADIENT_CLIP_NORM)
    elif config.OPTIMIZER == 'Adam':
        return Adam(lr=config.LEARNING_RATE, clipnorm=config.GRADIENT_CLIP_NORM, amsgrad=config.AMSGRAD)
    elif config.OPTIMIZER == 'Adamax':
        return Adamax(lr=config.LEARNING_RATE, clipnorm=config.GRADIENT_CLIP_NORM)
    elif config.OPTIMIZER == 'Nadam':
        return Nadam(lr=config.LEARNING_RATE, clipnorm=config.GRADIENT_CLIP_NORM)
    else:
        raise Exception('Unrecognized optimizer: {}'.format(config.OPTIMIZER)) 
Author: nearthlab · Project: image-segmentation · Source: trainer.py

Example 6: get_optimizer

# Required import: from keras import optimizers [as alias]
# Alternatively: from keras.optimizers import Adadelta [as alias]
def get_optimizer(name='Adadelta'):
    if name == 'SGD':
        return optimizers.SGD(clipnorm=1.)
    if name == 'RMSprop':
        return optimizers.RMSprop(clipnorm=1.)
    if name == 'Adagrad':
        return optimizers.Adagrad(clipnorm=1.)
    if name == 'Adadelta':
        return optimizers.Adadelta(clipnorm=1.)
    if name == 'Adam':
        return optimizers.Adam(clipnorm=1.)
    if name == 'Adamax':
        return optimizers.Adamax(clipnorm=1.)
    if name == 'Nadam':
        return optimizers.Nadam(clipnorm=1.)

    return optimizers.Adam(clipnorm=1.) 
Author: ClimbsRocks · Project: auto_ml · Source: utils_models.py
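Every branch applies the same clipnorm=1., and unknown names silently fall back to Adam, so the factory drops straight into compile. A minimal hypothetical fragment, where model stands for any Keras model you have already built:

model.compile(optimizer=get_optimizer('Adadelta'), loss='mse')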

Example 7: get_optimizer

# Required import: from keras import optimizers [as alias] — imported as `opt` in this snippet
# Alternatively: from keras.optimizers import Adadelta [as alias]
def get_optimizer(args):

	clipvalue = 0
	clipnorm = 10

	if args.algorithm == 'rmsprop':
		optimizer = opt.RMSprop(lr=0.0001, rho=0.9, epsilon=1e-06, clipnorm=clipnorm, clipvalue=clipvalue)
	elif args.algorithm == 'sgd':
		optimizer = opt.SGD(lr=0.01, momentum=0.0, decay=0.0, nesterov=False, clipnorm=clipnorm, clipvalue=clipvalue)
	elif args.algorithm == 'adagrad':
		optimizer = opt.Adagrad(lr=0.01, epsilon=1e-06, clipnorm=clipnorm, clipvalue=clipvalue)
	elif args.algorithm == 'adadelta':
		optimizer = opt.Adadelta(lr=1.0, rho=0.95, epsilon=1e-06, clipnorm=clipnorm, clipvalue=clipvalue)
	elif args.algorithm == 'adam':
		optimizer = opt.Adam(lr=0.0001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, clipnorm=clipnorm, clipvalue=clipvalue)
	elif args.algorithm == 'adamax':
		optimizer = opt.Adamax(lr=0.002, beta_1=0.9, beta_2=0.999, epsilon=1e-08, clipnorm=clipnorm, clipvalue=clipvalue)
	
	return optimizer 
Author: ruidan · Project: IMN-E2E-ABSA · Source: optimizers.py

Example 8: get_optimizer

# Required import: from keras import optimizers [as alias] — imported as `opt` in this snippet
# Alternatively: from keras.optimizers import Adadelta [as alias]
def get_optimizer(args):

	clipvalue = 0
	clipnorm = 10

	if args.algorithm == 'rmsprop':
		optimizer = opt.RMSprop(lr=0.0005, rho=0.9, epsilon=1e-06, clipnorm=clipnorm, clipvalue=clipvalue)
	elif args.algorithm == 'sgd':
		optimizer = opt.SGD(lr=0.01, momentum=0.0, decay=0.0, nesterov=False, clipnorm=clipnorm, clipvalue=clipvalue)
	elif args.algorithm == 'adagrad':
		optimizer = opt.Adagrad(lr=0.01, epsilon=1e-06, clipnorm=clipnorm, clipvalue=clipvalue)
	elif args.algorithm == 'adadelta':
		optimizer = opt.Adadelta(lr=1.0, rho=0.95, epsilon=1e-06, clipnorm=clipnorm, clipvalue=clipvalue)
	elif args.algorithm == 'adam':
		optimizer = opt.Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, clipnorm=clipnorm, clipvalue=clipvalue)
	elif args.algorithm == 'adamax':
		optimizer = opt.Adamax(lr=0.002, beta_1=0.9, beta_2=0.999, epsilon=1e-08, clipnorm=clipnorm, clipvalue=clipvalue)
	
	return optimizer 
Author: ruidan · Project: DAS · Source: optimizers.py

Example 9: get_learning_rate

# Required import: from keras import optimizers [as alias]
# Alternatively: from keras.optimizers import Adadelta [as alias] — `K` below refers to keras.backend
def get_learning_rate(self):

        if hasattr(self.model, 'optimizer'):
            config = self.model.optimizer.get_config()

            from keras.optimizers import Adadelta, Adam, Adamax, Adagrad, RMSprop, SGD

            if isinstance(self.model.optimizer, Adadelta) or isinstance(self.model.optimizer, Adam) \
                    or isinstance(self.model.optimizer, Adamax) or isinstance(self.model.optimizer, Adagrad)\
                    or isinstance(self.model.optimizer, RMSprop) or isinstance(self.model.optimizer, SGD):
                return config['lr'] * (1. / (1. + config['decay'] * float(K.get_value(self.model.optimizer.iterations))))

            elif 'lr' in config:
                return config['lr'] 
Author: aetros · Project: aetros-cli · Source: KerasCallback.py
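The returned value reproduces Keras's built-in time-based decay schedule, lr_t = lr / (1 + decay * t), where t is the optimizer's update count. The same arithmetic as a standalone sketch (names here are illustrative):

def effective_lr(lr, decay, iterations):
    # Keras time-based decay: lr_t = lr / (1 + decay * t)
    return lr * (1. / (1. + decay * float(iterations)))

# After 1000 steps with decay=1e-3, the effective rate has halved
assert abs(effective_lr(1.0, 1e-3, 1000) - 0.5) < 1e-12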

Example 10: S_LSTM

# Required import: from keras import optimizers [as alias]
# Alternatively: from keras.optimizers import Adadelta [as alias]
def S_LSTM(dimx = 30, dimy = 30, embedding_matrix=None, LSTM_neurons = 32):
    
    inpx = Input(shape=(dimx,),dtype='int32',name='inpx')
    x = word2vec_embedding_layer(embedding_matrix,train='False')(inpx)  
    inpy = Input(shape=(dimy,),dtype='int32',name='inpy')
    y = word2vec_embedding_layer(embedding_matrix,train='False')(inpy)    
    
    #hx = LSTM(LSTM_neurons)(x)
    #hy = LSTM(LSTM_neurons)(y)
   
    shared_lstm = Bidirectional(LSTM(LSTM_neurons,return_sequences=False),merge_mode='sum')   
    #shared_lstm = LSTM(LSTM_neurons,return_sequences=True)    
    hx = shared_lstm(x)
    #hx = Dropout(0.2)(hx)
    hy = shared_lstm(y)
    #hy = Dropout(0.2)(hy)
    
    h1,h2=hx,hy

    corr1 = Exp()([h1,h2])
    adadelta = optimizers.Adadelta()
    
    model = Model( [inpx,inpy],corr1)
    model.compile( loss='binary_crossentropy',optimizer=adadelta)
    
    return model 
Author: GauravBh1010tt · Project: DeepLearn · Source: model_Siam_LSTM.py

Example 11: test_adadelta

# Required import: from keras import optimizers [as alias]
# Alternatively: from keras.optimizers import Adadelta [as alias]
def test_adadelta(self):
        print('test Adadelta')
        self.assertTrue(_test_optimizer(Adadelta())) 
Author: lllcho · Project: CAPTCHA-breaking · Source: test_optimizers.py

Example 12: get_optimizer

# Required import: from keras import optimizers [as alias]
# Alternatively: from keras.optimizers import Adadelta [as alias]
def get_optimizer(config_data):
    options = config_data['optimizer']
    name = options['name']

    if name == 'adadelta':
        return optimizers.Adadelta(lr=options['lr'], rho=options['rho'], epsilon=options['epsilon'])
    else:
        return optimizers.SGD() 
Author: spinningbytes · Project: deep-mlsa · Source: run_utils.py
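A hypothetical config_data dict that satisfies this function, with keys inferred from the lookups above (any other name falls back to a default SGD):

config_data = {
    'optimizer': {
        'name': 'adadelta',
        'lr': 1.0,
        'rho': 0.95,
        'epsilon': 1e-6,
    }
}
optimizer = get_optimizer(config_data)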

Example 13: neural_network

# Required import: from keras import optimizers [as alias]
# Alternatively: from keras.optimizers import Adadelta [as alias]
def neural_network(domain_adaptation=False):
    """
    moment alignment neural network (MANN)
    
    - Zellinger, Werner, et al. "Robust unsupervised domain adaptation for
    neural networks via moment alignment.", arXiv preprint arXiv:1711.06114, 2017
    """
    # layer definition
    input_s = Input(shape=(2,), name='source_input')
    input_t = Input(shape=(2,), name='target_input')
    encoding = Dense(N_HIDDEN_NODES,
                     activation='sigmoid',
                     name='hidden')
    prediction = Dense(N_CLASSES,
                       activation='softmax',
                       name='pred')
    # network architecture
    encoded_s = encoding(input_s)
    encoded_t = encoding(input_t)
    pred_s = prediction(encoded_s)
    pred_t = prediction(encoded_t)
    dense_s_t = merge([encoded_s,encoded_t], mode='concat', concat_axis=1)  # Keras 1 API; use layers.concatenate in Keras 2
    # input/output definition
    nn = Model(input=[input_s,input_t],
               output=[pred_s,pred_t,dense_s_t])
    # separate model for activation visualization
    visualize_model = Model(input=[input_s,input_t],
                            output=[encoded_s,encoded_t])
    # compile model
    if domain_adaptation==False:
        cmd_weight = 0.
    else:
        # Please note that the loss weight of the cmd is one per default
        # (see paper).
        cmd_weight = 1.
    nn.compile(loss=['categorical_crossentropy',
                     'categorical_crossentropy',cmd],
               loss_weights=[1.,0.,cmd_weight],
               optimizer=Adadelta(),
               metrics=['accuracy'])
    return nn, visualize_model 
Author: wzell · Project: mann · Source: artificial_example.py

Example 14: test_adadelta

# Required import: from keras import optimizers [as alias]
# Alternatively: from keras.optimizers import Adadelta [as alias]
def test_adadelta():
    _test_optimizer(optimizers.Adadelta(), target=0.6)
    _test_optimizer(optimizers.Adadelta(decay=1e-3), target=0.6) 
Author: hello-sea · Project: DeepLearning_Wavelet-LSTM · Source: optimizers_test.py

Example 15: make_deep_learning_model

# Required import: from keras import optimizers [as alias]
# Alternatively: from keras.optimizers import Adadelta [as alias]
def make_deep_learning_model(hidden_layers=None, num_cols=None, optimizer='Adadelta', dropout_rate=0.2, weight_constraint=0, feature_learning=False, kernel_initializer='normal', activation='elu'):

    if feature_learning == True and hidden_layers is None:
        hidden_layers = [1, 0.75, 0.25]

    if hidden_layers is None:
        hidden_layers = [1, 0.75, 0.25]

    # The hidden_layers passed to us simply describes a shape. It does not know the num_cols we are dealing with; it is simply values like 0.5, 1, and 2, which need to be multiplied by num_cols
    scaled_layers = []
    for layer in hidden_layers:
        scaled_layers.append(min(int(num_cols * layer), 10))

    # If we're training this model for feature_learning, our penultimate layer (our final hidden layer before the "output" layer) will always have 10 neurons, meaning that we always output 10 features from our feature_learning model
    if feature_learning == True:
        scaled_layers.append(10)

    model = Sequential()

    model.add(Dense(scaled_layers[0], input_dim=num_cols, kernel_initializer=kernel_initializer, kernel_regularizer=regularizers.l2(0.01)))
    model.add(get_activation_layer(activation))

    for layer_size in scaled_layers[1:-1]:
        model.add(Dense(layer_size, kernel_initializer=kernel_initializer, kernel_regularizer=regularizers.l2(0.01)))
        model.add(get_activation_layer(activation))

    # There are times we will want the output from our penultimate layer, not the final layer, so give it a name that makes the penultimate layer easy to find
    model.add(Dense(scaled_layers[-1], kernel_initializer=kernel_initializer, name='penultimate_layer', kernel_regularizer=regularizers.l2(0.01)))
    model.add(get_activation_layer(activation))

    # For regressors, we want an output layer with a single node
    model.add(Dense(1, kernel_initializer=kernel_initializer))


    # The final step is to compile the model
    model.compile(loss='mean_squared_error', optimizer=get_optimizer(optimizer), metrics=['mean_absolute_error', 'mean_absolute_percentage_error'])

    return model 
Author: ClimbsRocks · Project: auto_ml · Source: utils_models.py
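A hypothetical call for a 30-column regression problem; get_optimizer and get_activation_layer are helpers defined elsewhere in auto_ml's utils_models.py:

model = make_deep_learning_model(num_cols=30, optimizer='Adadelta',
                                 dropout_rate=0.2, activation='elu')
model.summary()  # scaled hidden layers plus a single-node regression output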


Note: The keras.optimizers.Adadelta method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by many developers, and copyright of the source code remains with the original authors. Follow each project's license when distributing or using the code; do not reproduce without permission.