This article collects typical usage examples of the Python method keras.optimizers.deserialize. If you are unsure what optimizers.deserialize does or how to call it in Python, the curated code examples below may help. You can also explore further usage examples of the module keras.optimizers in which this method is defined.
The following shows 7 code examples of optimizers.deserialize, sorted by popularity by default.
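Before the project-specific examples, here is a minimal sketch of the serialize/deserialize round trip that they all rely on. It assumes standalone Keras 2.x, where optimizers.serialize returns a plain dict of the form {'class_name': ..., 'config': ...} and optimizers.deserialize turns that dict back into an equivalent optimizer instance; the hyperparameter values are illustrative only.
# Minimal sketch (assuming standalone Keras 2.x) of the round trip used below.
from keras import optimizers

opt = optimizers.Adam(lr=1e-3)
config = optimizers.serialize(opt)         # dict: {'class_name': 'Adam', 'config': {...}}
restored = optimizers.deserialize(config)  # rebuild an equivalent optimizer instance
assert isinstance(restored, optimizers.Adam)
assert restored.get_config() == opt.get_config()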
Example 1: prepare_model
# Required import: from keras import optimizers [as alias]
# Or: from keras.optimizers import deserialize [as alias]
def prepare_model(self):
    """Prepares the model for training."""
    # Set the Keras directory.
    set_keras_base_directory()
    if K.backend() == 'tensorflow':
        # Set the GPU option allow_growth to False for GPU-enabled TensorFlow.
        config = tf.ConfigProto()
        config.gpu_options.allow_growth = False
        sess = tf.Session(config=config)
        K.set_session(sess)
    # Deserialize the Keras model and its optimizer.
    self.model = deserialize_keras_model(self.model)
    self.optimizer = deserialize(self.optimizer)
    # Compile the model with the specified loss and optimizer.
    self.model.compile(loss=self.loss, loss_weights=self.loss_weights,
                       optimizer=self.optimizer, metrics=self.metrics)
Example 2: _test_optimizer
# Required import: from keras import optimizers [as alias]
# Or: from keras.optimizers import deserialize [as alias]
def _test_optimizer(optimizer, target=0.75):
    x_train, y_train = get_test_data()
    model = get_model(x_train.shape[1], 10, y_train.shape[1])
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])
    history = model.fit(x_train, y_train, epochs=2, batch_size=16, verbose=0)
    assert history.history['acc'][-1] >= target
    # Check that the optimizer survives a serialize/deserialize round trip.
    config = optimizers.serialize(optimizer)
    custom_objects = {optimizer.__class__.__name__: optimizer.__class__}
    optim = optimizers.deserialize(config, custom_objects)
    new_config = optimizers.serialize(optim)
    assert config == new_config
Example 3: _test_optimizer
# Required import: from keras import optimizers [as alias]
# Or: from keras.optimizers import deserialize [as alias]
def _test_optimizer(optimizer, target=0.75):
    x_train, y_train = get_test_data()
    model = Sequential()
    model.add(Dense(10, input_shape=(x_train.shape[1],)))
    model.add(Activation('relu'))
    model.add(Dense(y_train.shape[1]))
    model.add(Activation('softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])
    history = model.fit(x_train, y_train, epochs=2, batch_size=16, verbose=0)
    assert history.history['acc'][-1] >= target

    # Check that the optimizer survives a serialize/deserialize round trip.
    config = optimizers.serialize(optimizer)
    optim = optimizers.deserialize(config)
    new_config = optimizers.serialize(optim)
    new_config['class_name'] = new_config['class_name'].lower()
    assert config == new_config

    # Test constraints.
    model = Sequential()
    dense = Dense(10,
                  input_shape=(x_train.shape[1],),
                  kernel_constraint=lambda x: 0. * x + 1.,
                  bias_constraint=lambda x: 0. * x + 2.)
    model.add(dense)
    model.add(Activation('relu'))
    model.add(Dense(y_train.shape[1]))
    model.add(Activation('softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])
    model.train_on_batch(x_train[:10], y_train[:10])
    kernel, bias = dense.get_weights()
    assert_allclose(kernel, 1.)
    assert_allclose(bias, 2.)
Example 4: _test_optimizer
# Required import: from keras import optimizers [as alias]
# Or: from keras.optimizers import deserialize [as alias]
def _test_optimizer(optimizer, target=0.75):
    x_train, y_train = get_test_data()
    model = Sequential()
    model.add(Dense(10, input_shape=(x_train.shape[1],)))
    model.add(Activation('relu'))
    model.add(Dense(y_train.shape[1]))
    model.add(Activation('softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])
    history = model.fit(x_train, y_train, epochs=2, batch_size=16, verbose=0)
    # TODO: PlaidML fails this test.
    assert history.history['acc'][-1] >= target

    # Check that the optimizer survives a serialize/deserialize round trip.
    config = k_optimizers.serialize(optimizer)
    optim = k_optimizers.deserialize(config)
    new_config = k_optimizers.serialize(optim)
    new_config['class_name'] = new_config['class_name'].lower()
    assert config == new_config

    # Test constraints.
    model = Sequential()
    dense = Dense(10,
                  input_shape=(x_train.shape[1],),
                  kernel_constraint=lambda x: 0. * x + 1.,
                  bias_constraint=lambda x: 0. * x + 2.)
    model.add(dense)
    model.add(Activation('relu'))
    model.add(Dense(y_train.shape[1]))
    model.add(Activation('softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])
    model.train_on_batch(x_train[:10], y_train[:10])
    kernel, bias = dense.get_weights()
    assert_allclose(kernel, 1.)
    assert_allclose(bias, 2.)
Example 5: clone_optimizer
# Required import: from keras import optimizers [as alias]
# Or: from keras.optimizers import deserialize [as alias]
def clone_optimizer(optimizer):
    if type(optimizer) is str:
        return optimizers.get(optimizer)
    # Requires Keras 1.0.7 or later since get_config has breaking changes.
    params = dict([(k, v) for k, v in optimizer.get_config().items()])
    config = {
        'class_name': optimizer.__class__.__name__,
        'config': params,
    }
    if hasattr(optimizers, 'optimizer_from_config'):
        # COMPATIBILITY: Keras < 2.0
        clone = optimizers.optimizer_from_config(config)
    else:
        clone = optimizers.deserialize(config)
    return clone
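As a hedged usage note (assuming standalone Keras 2.x), clone_optimizer is typically called to obtain an independent optimizer instance with the same hyperparameters as the original:
# Illustrative use of clone_optimizer from Example 5 (Keras 2.x assumed).
from keras import optimizers

opt = optimizers.Adam(lr=1e-3)
opt_clone = clone_optimizer(opt)   # a new Adam instance, not the same object
assert opt_clone is not opt
assert opt_clone.get_config() == opt.get_config()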
Example 6: build
# Required import: from keras import optimizers [as alias]
# Or: from keras.optimizers import deserialize [as alias]
def build(self):
    from keras.optimizers import deserialize
    opt_config = {'class_name': self.name, 'config': self.config}
    opt = deserialize(opt_config)
    if self.horovod_wrapper:
        import horovod.keras as hvd
        if hasattr(opt, 'lr'):
            # Scale the learning rate by the number of Horovod workers.
            opt.lr *= hvd.size()
        opt = hvd.DistributedOptimizer(opt)
    return opt
Example 7: from_config
# Required import: from keras import optimizers [as alias]
# Or: from keras.optimizers import deserialize [as alias]
def from_config(cls, config, custom_objects=None):
    optimizer_config = config.pop('optimizer')
    optimizer = deserialize(optimizer_config)
    return cls(optimizer=optimizer, **config)
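For context, Example 7 is one half of a common wrapper pattern: get_config embeds the serialized optimizer, and from_config rebuilds it. Below is a minimal sketch of such a wrapper, assuming standalone Keras 2.x; the class name OptimizerWrapper and its attributes are hypothetical illustrations, not taken from the original project.
# Hypothetical wrapper sketch showing the get_config / from_config pairing.
from keras import optimizers

class OptimizerWrapper(object):
    def __init__(self, optimizer):
        # optimizers.get accepts an optimizer instance or an identifier string.
        self.optimizer = optimizers.get(optimizer)

    def get_config(self):
        # Store the wrapped optimizer as a nested serialized dict.
        return {'optimizer': optimizers.serialize(self.optimizer)}

    @classmethod
    def from_config(cls, config, custom_objects=None):
        # Mirror image of get_config: pop the nested dict and rebuild it.
        optimizer_config = config.pop('optimizer')
        optimizer = optimizers.deserialize(optimizer_config)
        return cls(optimizer=optimizer, **config)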