This article collects typical usage examples of the keras.optimizers.deserialize method in Python. If you are unsure what optimizers.deserialize does or how to call it, the hand-picked code examples below may help. You can also explore further usage examples of the keras.optimizers module, in which this method lives.
The following presents 7 code examples of optimizers.deserialize, sorted by popularity by default.
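Before the examples, a minimal sketch of what the method does, assuming Keras 2.x: optimizers.deserialize is the inverse of optimizers.serialize, turning a plain configuration dict back into an optimizer instance (the optimizer class and hyperparameters below are just illustrative).

from keras import optimizers

opt = optimizers.SGD(lr=0.01, momentum=0.9)
config = optimizers.serialize(opt)          # e.g. {'class_name': 'SGD', 'config': {...}}
restored = optimizers.deserialize(config)   # a fresh SGD with the same hyperparameters
assert restored.get_config() == opt.get_config()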
Example 1: prepare_model
# Required module: from keras import optimizers [as alias]
# Or: from keras.optimizers import deserialize [as alias]
def prepare_model(self):
    """Prepares the model for training."""
    # Set the Keras directory.
    set_keras_base_directory()
    if K.backend() == 'tensorflow':
        # Set the GPU option allow_growth to False for GPU-enabled TensorFlow.
        config = tf.ConfigProto()
        config.gpu_options.allow_growth = False
        sess = tf.Session(config=config)
        K.set_session(sess)
    # Deserialize the Keras model and rebuild the optimizer from its config.
    self.model = deserialize_keras_model(self.model)
    self.optimizer = deserialize(self.optimizer)
    # Compile the model with the specified loss and optimizer.
    self.model.compile(loss=self.loss, loss_weights=self.loss_weights,
                       optimizer=self.optimizer, metrics=self.metrics)
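For this to work, self.optimizer has to hold an optimizer configuration rather than an optimizer instance. A minimal sketch of how such a payload might be produced on the driver side, assuming it is built with keras.optimizers.serialize (the variable names are illustrative, not part of the original trainer):

from keras import optimizers

# Hypothetical driver-side preparation: ship the optimizer as a plain dict
# so the worker can rebuild it later with keras.optimizers.deserialize().
optimizer = optimizers.Adam(lr=0.001)
serialized_optimizer = optimizers.serialize(optimizer)   # JSON-friendly dict
# trainer.optimizer = serialized_optimizer                # what prepare_model() deserializes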
Example 2: _test_optimizer
# Required module: from keras import optimizers [as alias]
# Or: from keras.optimizers import deserialize [as alias]
def _test_optimizer(optimizer, target=0.75):
    x_train, y_train = get_test_data()
    model = get_model(x_train.shape[1], 10, y_train.shape[1])
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])
    history = model.fit(x_train, y_train, epochs=2, batch_size=16, verbose=0)
    assert history.history['acc'][-1] >= target

    # Round-trip the optimizer through serialize/deserialize, passing its
    # class as a custom object, and check the configuration is preserved.
    config = optimizers.serialize(optimizer)
    custom_objects = {optimizer.__class__.__name__: optimizer.__class__}
    optim = optimizers.deserialize(config, custom_objects)
    new_config = optimizers.serialize(optim)
    assert config == new_config
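The custom_objects argument is what lets deserialize resolve optimizer classes that are not built into Keras. A minimal sketch with a hypothetical subclass, assuming Keras 2.x:

from keras import optimizers

class MySGD(optimizers.SGD):
    """Hypothetical custom optimizer; only the class name differs from SGD."""
    pass

config = optimizers.serialize(MySGD(lr=0.01))      # class_name == 'MySGD'
restored = optimizers.deserialize(config, custom_objects={'MySGD': MySGD})
assert isinstance(restored, MySGD)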
Example 3: _test_optimizer
# Required module: from keras import optimizers [as alias]
# Or: from keras.optimizers import deserialize [as alias]
def _test_optimizer(optimizer, target=0.75):
    x_train, y_train = get_test_data()
    model = Sequential()
    model.add(Dense(10, input_shape=(x_train.shape[1],)))
    model.add(Activation('relu'))
    model.add(Dense(y_train.shape[1]))
    model.add(Activation('softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])
    history = model.fit(x_train, y_train, epochs=2, batch_size=16, verbose=0)
    assert history.history['acc'][-1] >= target

    # Check the serialize -> deserialize -> serialize round trip; class names
    # are compared case-insensitively.
    config = optimizers.serialize(optimizer)
    optim = optimizers.deserialize(config)
    new_config = optimizers.serialize(optim)
    new_config['class_name'] = new_config['class_name'].lower()
    assert config == new_config

    # Test constraints.
    model = Sequential()
    dense = Dense(10,
                  input_shape=(x_train.shape[1],),
                  kernel_constraint=lambda x: 0. * x + 1.,
                  bias_constraint=lambda x: 0. * x + 2.)
    model.add(dense)
    model.add(Activation('relu'))
    model.add(Dense(y_train.shape[1]))
    model.add(Activation('softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])
    model.train_on_batch(x_train[:10], y_train[:10])
    kernel, bias = dense.get_weights()
    assert_allclose(kernel, 1.)
    assert_allclose(bias, 2.)
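These _test_optimizer variants rely on a get_test_data() helper that is not shown here. A hypothetical stand-in that returns a small, learnable classification problem (shapes and distribution chosen arbitrarily):

import numpy as np
from keras.utils import to_categorical

def get_test_data(num_samples=1000, input_dim=10, num_classes=2):
    """Hypothetical helper: each class is drawn from a different Gaussian,
    so a small dense network can actually reach the accuracy target."""
    labels = np.random.randint(num_classes, size=num_samples)
    x_train = np.random.normal(loc=labels[:, None], scale=0.5,
                               size=(num_samples, input_dim))
    y_train = to_categorical(labels, num_classes)
    return x_train, y_train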
Example 4: _test_optimizer
# Required module: from keras import optimizers [as alias]
# Or: from keras.optimizers import deserialize [as alias]
def _test_optimizer(optimizer, target=0.75):
    x_train, y_train = get_test_data()
    model = Sequential()
    model.add(Dense(10, input_shape=(x_train.shape[1],)))
    model.add(Activation('relu'))
    model.add(Dense(y_train.shape[1]))
    model.add(Activation('softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])
    history = model.fit(x_train, y_train, epochs=2, batch_size=16, verbose=0)
    # TODO PlaidML fails this test
    assert history.history['acc'][-1] >= target

    # Check the serialize -> deserialize -> serialize round trip; class names
    # are compared case-insensitively.
    config = k_optimizers.serialize(optimizer)
    optim = k_optimizers.deserialize(config)
    new_config = k_optimizers.serialize(optim)
    new_config['class_name'] = new_config['class_name'].lower()
    assert config == new_config

    # Test constraints.
    model = Sequential()
    dense = Dense(10,
                  input_shape=(x_train.shape[1],),
                  kernel_constraint=lambda x: 0. * x + 1.,
                  bias_constraint=lambda x: 0. * x + 2.)
    model.add(dense)
    model.add(Activation('relu'))
    model.add(Dense(y_train.shape[1]))
    model.add(Activation('softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])
    model.train_on_batch(x_train[:10], y_train[:10])
    kernel, bias = dense.get_weights()
    assert_allclose(kernel, 1.)
    assert_allclose(bias, 2.)
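This variant differs from Example 3 mainly in its import alias (and the PlaidML note). The imports it assumes are presumably along these lines (inferred from usage, not shown in the original):

from keras import optimizers as k_optimizers
from keras.models import Sequential
from keras.layers import Dense, Activation
from numpy.testing import assert_allclose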
Example 5: clone_optimizer
# Required module: from keras import optimizers [as alias]
# Or: from keras.optimizers import deserialize [as alias]
def clone_optimizer(optimizer):
    if type(optimizer) is str:
        return optimizers.get(optimizer)
    # Requires Keras 1.0.7 since get_config has breaking changes.
    params = dict([(k, v) for k, v in optimizer.get_config().items()])
    config = {
        'class_name': optimizer.__class__.__name__,
        'config': params,
    }
    if hasattr(optimizers, 'optimizer_from_config'):
        # COMPATIBILITY: Keras < 2.0
        clone = optimizers.optimizer_from_config(config)
    else:
        clone = optimizers.deserialize(config)
    return clone
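A quick usage sketch (the assertions are illustrative and not part of the original helper):

from keras import optimizers

original = optimizers.Adam(lr=0.001)
clone = clone_optimizer(original)
assert clone is not original
assert clone.get_config() == original.get_config()

# A plain string also works, resolved through optimizers.get():
sgd_clone = clone_optimizer('sgd')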
Example 6: build
# Required module: from keras import optimizers [as alias]
# Or: from keras.optimizers import deserialize [as alias]
def build(self):
    from keras.optimizers import deserialize
    # Rebuild the optimizer from a hand-written config dict.
    opt_config = {'class_name': self.name, 'config': self.config}
    opt = deserialize(opt_config)
    if self.horovod_wrapper:
        # Optionally scale the learning rate and wrap the optimizer for Horovod.
        import horovod.keras as hvd
        if hasattr(opt, 'lr'):
            opt.lr *= hvd.size()
        opt = hvd.DistributedOptimizer(opt)
    return opt
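The key step is building a plain config dict by hand and handing it to deserialize. A minimal sketch of just that step, with an illustrative class name and learning rate:

from keras.optimizers import deserialize

opt_config = {'class_name': 'Adam', 'config': {'lr': 0.001}}
opt = deserialize(opt_config)   # an Adam with lr=0.001; other hyperparameters keep their defaults

Scaling the learning rate by hvd.size() in the Horovod branch follows the usual linear-scaling heuristic for data-parallel training, where the effective batch size grows with the number of workers.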
Example 7: from_config
# Required module: from keras import optimizers [as alias]
# Or: from keras.optimizers import deserialize [as alias]
def from_config(cls, config, custom_objects=None):
    # Rebuild the wrapped optimizer from its serialized config, then pass the
    # remaining keys through to the constructor.
    optimizer_config = config.pop('optimizer')
    optimizer = deserialize(optimizer_config)
    return cls(optimizer=optimizer, **config)
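For this round trip to work, the matching get_config() presumably stores the wrapped optimizer with optimizers.serialize. A hypothetical counterpart (the wrapper class itself is not shown in the original):

from keras import optimizers

def get_config(self):
    # Hypothetical counterpart to from_config() above: embed the wrapped
    # optimizer as a serialized dict so deserialize() can rebuild it.
    return {'optimizer': optimizers.serialize(self.optimizer)}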