本文整理汇总了Python中keras.optimizers.SGD属性的典型用法代码示例。如果您正苦于以下问题:Python optimizers.SGD属性的具体用法?Python optimizers.SGD怎么用?Python optimizers.SGD使用的例子?那么恭喜您, 这里精选的属性代码示例或许可以为您提供帮助。您也可以进一步了解该属性所在类keras.optimizers
的用法示例。
在下文中一共展示了optimizers.SGD属性的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: build_model
# 需要导入模块: from keras import optimizers [as 别名]
# 或者: from keras.optimizers import SGD [as 别名]
def build_model(config):
    """Build and compile the CNN described by *config*.

    Args:
        config: object exposing ``model_arch`` (architecture name, final
            activation) and ``training_params`` (optimizer, learning rate,
            loss function) dictionaries.

    Returns:
        A compiled Keras model.
    """
    params = config.model_arch
    # Resolve the builder by name: models.get_model_<architecture>.
    get_model = getattr(models, 'get_model_'+str(params['architecture']))
    model = get_model(params)
    #model = model_kenun.build_convnet_model(params)
    # Learning setup
    t_params = config.training_params
    sgd = SGD(lr=t_params["learning_rate"], decay=t_params["decay"],
        momentum=t_params["momentum"], nesterov=t_params["nesterov"])
    adam = Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08)
    # NOTE(review): eval() selects one of the locals above ('sgd' or 'adam').
    # This is fragile and unsafe if the config is untrusted — a dict lookup
    # would be safer, but would change behavior for other config values.
    optimizer = eval(t_params['optimizer'])
    metrics = ['mean_squared_error']
    if config.model_arch["final_activation"] == 'softmax':
        metrics.append('categorical_accuracy')
    # 'cosine' is resolved via eval to a loss object in scope; any other
    # value is passed to Keras as a loss-function name string.
    if t_params['loss_func'] == 'cosine':
        loss_func = eval(t_params['loss_func'])
    else:
        loss_func = t_params['loss_func']
    model.compile(loss=loss_func, optimizer=optimizer,metrics=metrics)
    return model
示例2: nn_model
# 需要导入模块: from keras import optimizers [as 别名]
# 或者: from keras.optimizers import SGD [as 别名]
def nn_model():
    """Train a small dense network on MNIST and save it to ./model.h5."""
    (x_train, y_train), _ = mnist.load_data()
    # Flatten each 28x28 image and scale pixel values into [0, 1].
    x_train = x_train.reshape(x_train.shape[0], -1) / 255.
    # One-hot encode the digit labels.
    y_train = np_utils.to_categorical(y=y_train, num_classes=10)
    # constant(value=1.) is a fixed-value initializer; constant(value=1.) == one()
    # Network: 784 inputs -> 200 -> 100 -> 10 outputs.
    model = Sequential()
    model.add(Dense(units=200, input_dim=784, bias_initializer=constant(value=1.), activation=tanh))
    model.add(Dense(units=100, bias_initializer=one(), activation=tanh))
    model.add(Dense(units=10, bias_initializer=one(), activation=softmax))
    optimizer = SGD(lr=0.2, clipnorm=1.)  # SGD with gradient-norm clipping
    model.compile(optimizer=optimizer, loss=categorical_crossentropy, metrics=['acc', 'mae'])
    model.fit(x_train, y_train, batch_size=64, epochs=20, callbacks=[RemoteMonitor()])
    model_save(model, './model.h5')
示例3: __init__
# 需要导入模块: from keras import optimizers [as 别名]
# 或者: from keras.optimizers import SGD [as 别名]
def __init__(self, architecture_file=None, weight_file=None, optimizer=None):
    """Restore a character-recognition model from JSON + weights and compile it.

    Falls back to bundled files ('char2_architecture.json',
    'char2_weights.h5') and a default SGD optimizer when arguments are None.
    """
    # Map each softmax output index to its character (digits, a-z, space).
    output_str = '0123456789abcdefghijklmnopqrstuvwxyz '
    self.output = list(output_str)
    self.L = len(self.output)
    # Load model architecture and saved weights.
    from keras.models import model_from_json
    arch_path = 'char2_architecture.json' if architecture_file is None else architecture_file
    with open(arch_path) as fh:
        self.model = model_from_json(fh.read())
    weights_path = 'char2_weights.h5' if weight_file is None else weight_file
    self.model.load_weights(weights_path)
    if optimizer is None:
        from keras.optimizers import SGD
        optimizer = SGD(lr=0.1, decay=1e-6, momentum=0.9, nesterov=True)
    self.model.compile(loss='categorical_crossentropy', optimizer=optimizer)
示例4: _build
# 需要导入模块: from keras import optimizers [as 别名]
# 或者: from keras.optimizers import SGD [as 别名]
def _build(self):
    """Create the RNN training model and its state-exposing prediction twin.

    Returns:
        [rnn, forward]: the trainable sequence model and a model that maps
        (input, h, c) -> (next_h, next_c) for step-by-step prediction.
    """
    # Training model: full (z + action) sequences in, per-step z out.
    sequence_in = Input(shape=(None, Z_DIM + ACTION_DIM))
    lstm_layer = LSTM(HIDDEN_UNITS, return_sequences=True, return_state=True)
    lstm_out, _, _ = lstm_layer(sequence_in)
    z_pred = Dense(Z_DIM)(lstm_out)
    rnn = Model(sequence_in, z_pred)
    # Prediction model: shares the LSTM weights but takes explicit h/c
    # states so the network can be stepped one frame at a time.
    h_in = Input(shape=(HIDDEN_UNITS,))
    c_in = Input(shape=(HIDDEN_UNITS,))
    initial_states = [h_in, c_in]
    _, next_h, next_c = lstm_layer(sequence_in, initial_state=initial_states)
    forward = Model([sequence_in] + initial_states, [next_h, next_c])
    optimizer = Adam(lr=0.0001)
    # optimizer = SGD(lr=0.0001, decay=1e-4, momentum=0.9, nesterov=True)
    rnn.compile(loss='mean_squared_error', optimizer=optimizer)
    return [rnn, forward]
示例5: _demo_heatmap_script
# 需要导入模块: from keras import optimizers [as 别名]
# 或者: from keras.optimizers import SGD [as 别名]
def _demo_heatmap_script():
    """
    Compute the class-activation heatmap of the dog synsets.

    The dog synset id ('n02084071') comes from the ImageNet website.
    """
    image_batch = preprocess_image_batch(['examples/dog.jpg'], color_mode='rgb')
    # Pretrained AlexNet in heatmap mode.
    sgd_opt = SGD(lr=0.1, decay=1e-6, momentum=0.9, nesterov=True)
    model = convnet('alexnet', weights_path='weights/alexnet_weights.h5', heatmap=True)
    model.compile(optimizer=sgd_opt, loss='mse')
    prediction = model.predict(image_batch)
    dog_synset = 'n02084071'
    # Most synsets are not in the subset used for the ImageNet recognition
    # task, so drop the ids that did not resolve.
    ids = np.array([i for i in synset_to_dfs_ids(dog_synset) if i is not None])
    # Sum the activation maps of every dog class into a single heatmap.
    return prediction[0, ids, :, :].sum(axis=0)
示例6: __init__
# 需要导入模块: from keras import optimizers [as 别名]
# 或者: from keras.optimizers import SGD [as 别名]
def __init__(self, model_inputs=None, model_outputs=None, lambda_cycle=10.0, lambda_id=1.0):
    """Assemble and compile the combined multi-output GAN model.

    Args:
        model_inputs: list of Keras input tensors (defaults to empty list).
        model_outputs: list of Keras output tensors (defaults to empty list).
        lambda_cycle: weight of the cycle-consistency ('mae') losses.
        lambda_id: weight of the identity-mapping ('mae') losses.
    """
    # Fix: the original used mutable default arguments ([]), which are
    # shared between calls and can leak state across instances.
    self.inputs = [] if model_inputs is None else model_inputs
    self.outputs = [] if model_outputs is None else model_outputs
    self.gan_model = Model(self.inputs, self.outputs)
    # Fix: the original assigned `self.OPTIMIZER = SGD(...)` here and then
    # unconditionally overwrote it with Adam — the SGD was dead code.
    self.OPTIMIZER = Adam(lr=2e-4, beta_1=0.5)
    self.gan_model.compile(loss=['mse', 'mse',
                                 'mae', 'mae',
                                 'mae', 'mae'],
                           loss_weights=[1, 1,
                                         lambda_cycle, lambda_cycle,
                                         lambda_id, lambda_id],
                           optimizer=self.OPTIMIZER)
    # self.save_model()
    self.summary()
示例7: test_lstm
# 需要导入模块: from keras import optimizers [as 别名]
# 或者: from keras.optimizers import SGD [as 别名]
def test_lstm(self):
    """Smoke-test: fit and evaluate a tiny LSTM classifier on random data."""
    # Random 100-step sequences with 100 features each, 10 classes.
    x_train = np.random.random((100, 100, 100))
    y_train = keras.utils.to_categorical(np.random.randint(10, size=(100, 1)), num_classes=10)
    x_test = np.random.random((20, 100, 100))
    y_test = keras.utils.to_categorical(np.random.randint(10, size=(20, 1)), num_classes=10)
    optimizer = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
    model = Sequential()
    for layer in (LSTM(32, return_sequences=True, input_shape=(100, 100)),
                  Flatten(),
                  Dense(10, activation='softmax')):
        model.add(layer)
    model.compile(loss='categorical_crossentropy', optimizer=optimizer)
    model.fit(x_train, y_train, batch_size=32, epochs=1)
    model.evaluate(x_test, y_test, batch_size=32)
示例8: __init__
# 需要导入模块: from keras import optimizers [as 别名]
# 或者: from keras.optimizers import SGD [as 别名]
def __init__(self, *args, **kwargs):
    """Build the network head on top of an optional shared trunk and compile."""
    super().__init__(*args, **kwargs)
    with graph.as_default():
        if sess is not None:
            set_session(sess)
        # Either start from a fresh input layer or extend the shared trunk.
        if self.shared_network is None:
            inp = Input((self.input_dim,))
            output = self.get_network_head(inp).output
        else:
            inp = self.shared_network.input
            output = self.shared_network.output
        # Final projection to the action/output dimension.
        head = Dense(
            self.output_dim, activation=self.activation,
            kernel_initializer='random_normal')
        output = head(output)
        self.model = Model(inp, output)
        self.model.compile(optimizer=SGD(lr=self.lr), loss=self.loss)
示例9: cnn_model
# 需要导入模块: from keras import optimizers [as 别名]
# 或者: from keras.optimizers import SGD [as 别名]
def cnn_model():
    """Build the gesture-classification CNN and its checkpoint callback list.

    Returns:
        (model, callbacks_list): the compiled model and the callbacks to
        pass to ``fit`` (a best-only ModelCheckpoint on val_acc).
    """
    num_of_classes = get_num_of_classes()
    model = Sequential()
    # Three conv/pool stages with growing filter counts and kernel sizes.
    model.add(Conv2D(16, (2, 2), input_shape=(image_x, image_y, 1), activation='relu'))
    model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2), padding='same'))
    model.add(Conv2D(32, (3, 3), activation='relu'))
    model.add(MaxPooling2D(pool_size=(3, 3), strides=(3, 3), padding='same'))
    model.add(Conv2D(64, (5, 5), activation='relu'))
    model.add(MaxPooling2D(pool_size=(5, 5), strides=(5, 5), padding='same'))
    model.add(Flatten())
    model.add(Dense(128, activation='relu'))
    model.add(Dropout(0.2))
    model.add(Dense(num_of_classes, activation='softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizers.SGD(lr=1e-2),
                  metrics=['accuracy'])
    # Keep only the best weights (highest validation accuracy) on disk.
    checkpoint = ModelCheckpoint("cnn_model_keras2.h5", monitor='val_acc',
                                 verbose=1, save_best_only=True, mode='max')
    #from keras.utils import plot_model
    #plot_model(model, to_file='model.png', show_shapes=True)
    return model, [checkpoint]
示例10: cnn
# 需要导入模块: from keras import optimizers [as 别名]
# 或者: from keras.optimizers import SGD [as 别名]
def cnn(trn_set, tst_set):
    """Build a minimal conv net and unpack the train/test arrays.

    Returns:
        (model, trn_x, trn_y, tst_x, tst_y)
    """
    train_x, train_y = trn_set
    test_x, test_y = tst_set
    # Labels arrive with a trailing singleton axis; drop it.
    train_y = np.squeeze(train_y, axis=2)
    test_y = np.squeeze(test_y, axis=2)
    # One 5x5 conv layer, pooling, then a softmax classifier over 10 classes.
    model = Sequential()
    model.add(Convolution2D(2, 5, 5, activation='sigmoid', input_shape=(1, 28, 28)))
    model.add(MaxPooling2D(pool_size=(3, 3)))
    model.add(Flatten())
    model.add(Dense(10, activation='softmax'))
    model.compile(loss='categorical_crossentropy', optimizer=SGD(lr=0.1))
    return model, train_x, train_y, test_x, test_y
################################################################################
示例11: get_optimizer
# 需要导入模块: from keras import optimizers [as 别名]
# 或者: from keras.optimizers import SGD [as 别名]
def get_optimizer(args):
    """Return the Keras optimizer selected by ``args.algorithm``.

    Args:
        args: object whose ``algorithm`` attribute is one of 'rmsprop',
            'sgd', 'adagrad', 'adadelta', 'adam', 'adamax'.

    Returns:
        A configured ``keras.optimizers`` instance with gradient clipping
        (clipnorm=10, clipvalue=0) applied.

    Raises:
        ValueError: if ``args.algorithm`` is not a recognised name.
            (The original fell through and crashed later with a confusing
            ``UnboundLocalError`` on ``optimizer``.)
    """
    clipvalue = 0
    clipnorm = 10
    if args.algorithm == 'rmsprop':
        optimizer = opt.RMSprop(lr=0.001, rho=0.9, epsilon=1e-06, clipnorm=clipnorm, clipvalue=clipvalue)
    elif args.algorithm == 'sgd':
        optimizer = opt.SGD(lr=0.01, momentum=0.0, decay=0.0, nesterov=False, clipnorm=clipnorm, clipvalue=clipvalue)
    elif args.algorithm == 'adagrad':
        optimizer = opt.Adagrad(lr=0.01, epsilon=1e-06, clipnorm=clipnorm, clipvalue=clipvalue)
    elif args.algorithm == 'adadelta':
        optimizer = opt.Adadelta(lr=1.0, rho=0.95, epsilon=1e-06, clipnorm=clipnorm, clipvalue=clipvalue)
    elif args.algorithm == 'adam':
        optimizer = opt.Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, clipnorm=clipnorm, clipvalue=clipvalue)
    elif args.algorithm == 'adamax':
        optimizer = opt.Adamax(lr=0.002, beta_1=0.9, beta_2=0.999, epsilon=1e-08, clipnorm=clipnorm, clipvalue=clipvalue)
    else:
        raise ValueError("Unknown optimizer algorithm: %r" % (args.algorithm,))
    return optimizer
示例12: _fine_tuning
# 需要导入模块: from keras import optimizers [as 别名]
# 或者: from keras.optimizers import SGD [as 别名]
def _fine_tuning(self):
    """Fine-tune the pre-trained model on the task dataset.

    Freezes the top layers, then trains with a low learning rate and data
    augmentation; checkpoints go to the fine-tuned weights path from
    ``config`` and the final model is saved to ``config.get_model_path()``.
    """
    self.freeze_top_layers()
    # Low-LR SGD so fine-tuning does not destroy the pre-trained weights.
    self.model.compile(
        loss='categorical_crossentropy',
        optimizer=SGD(lr=1e-4, decay=1e-6, momentum=0.9, nesterov=True),
        metrics=['accuracy'])
    # NOTE: samples_per_epoch / nb_epoch / nb_val_samples are Keras 1.x
    # argument names for fit_generator.
    self.model.fit_generator(
        self.get_train_datagen(rotation_range=30.,
            shear_range=0.2,
            zoom_range=0.2,
            horizontal_flip=True,
            preprocessing_function=self.preprocess_input),
        samples_per_epoch=config.nb_train_samples,
        nb_epoch=self.nb_epoch,
        validation_data=self.get_validation_datagen(preprocessing_function=self.preprocess_input),
        nb_val_samples=config.nb_validation_samples,
        callbacks=self.get_callbacks(config.get_fine_tuned_weights_path(), patience=self.fine_tuning_patience),
        class_weight=self.class_weight)
    self.model.save(config.get_model_path())
示例13: train
# 需要导入模块: from keras import optimizers [as 别名]
# 或者: from keras.optimizers import SGD [as 别名]
def train(n_labeled_data):
    """Train the pseudo-labelling model and pickle its training history.

    Args:
        n_labeled_data: number of labelled samples to use (also caps the
            batch size at 256).
    """
    model = create_cnn()
    batch_size = min(256, n_labeled_data)
    pseudo = PseudoCallback(model, n_labeled_data, batch_size)
    model.compile(SGD(1e-2, 0.9), loss=pseudo.loss_function, metrics=[pseudo.accuracy])
    out_dir = "result_pseudo_trans_mobile"
    if not os.path.exists(out_dir):
        os.mkdir(out_dir)
    history = model.fit_generator(pseudo.train_generator(),
                                  steps_per_epoch=pseudo.train_steps_per_epoch,
                                  validation_data=pseudo.test_generator(),
                                  callbacks=[pseudo],
                                  validation_steps=pseudo.test_stepes_per_epoch,
                                  epochs=100).history
    # Record per-epoch accuracy on the labelled / unlabelled splits.
    history["labeled_accuracy"] = pseudo.labeled_accuracy
    history["unlabeled_accuracy"] = pseudo.unlabeled_accuracy
    with open(f"result_pseudo_trans_mobile/history_{n_labeled_data:05}.dat", "wb") as fp:
        pickle.dump(history, fp)
示例14: __init__
# 需要导入模块: from keras import optimizers [as 别名]
# 或者: from keras.optimizers import SGD [as 别名]
def __init__(self, X_train, X_val, y_train, y_val, model_module, optimizer, load_to_memory):
    """Prepare (optionally in-memory) train/validation data and the optimizer."""
    self.model_module = model_module
    # Per-model feature mean used for input normalisation.
    mean_path = os.path.join(MODEL_MEANS_BASEPATH, "{}_mean.npy".format(model_module.BASE_NAME))
    self.dataset_mean = np.load(mean_path)
    # 'sgd' gets a concrete configured optimizer; anything else passes through.
    if optimizer == 'sgd':
        self.optimizer = SGD(lr=self.init_lr, momentum=0.9, nesterov=True)
    else:
        self.optimizer = optimizer
    self.in_memory_data = load_to_memory
    extended_x_train, extended_y_train = self._get_extended_data(X_train, y_train)
    extended_x_val, extended_y_val = self._get_extended_data(X_val, y_val)
    self.y_train = extended_y_train
    self.y_val = extended_y_val
    if self.in_memory_data:
        # Eagerly load all features into RAM.
        self.X_train = self._load_features(extended_x_train)
        self.X_val = self._load_features(extended_x_val)
    else:
        # Keep references only; features are loaded lazily later.
        self.X_train = extended_x_train
        self.X_val = extended_x_val
示例15: compile_model
# 需要导入模块: from keras import optimizers [as 别名]
# 或者: from keras.optimizers import SGD [as 别名]
def compile_model(model):
    """Compile *model* with momentum SGD for sparse-label classification."""
    optimizer = SGD(lr=0.01, momentum=0.9, decay=1e-6, nesterov=True)
    model.compile(loss='sparse_categorical_crossentropy', optimizer=optimizer)
    return model