This article collects typical usage examples of the Python method chainer.serializers.save_hdf5. If you are wondering what serializers.save_hdf5 does, how to call it, or what real uses of it look like, the curated code examples below should help. You can also explore further usage examples for the module the method belongs to, chainer.serializers.
The following shows 8 code examples of serializers.save_hdf5, sorted by popularity by default.
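Before the examples, here is a minimal self-contained sketch of the save/load round trip. The MLP chain, layer sizes, and file name below are illustrative assumptions, not taken from the examples; save_hdf5 also requires h5py to be installed.

import chainer
import chainer.functions as F
import chainer.links as L
from chainer import serializers

class MLP(chainer.Chain):
    """Small illustrative chain; any chainer.Link is serialized the same way."""
    def __init__(self):
        super(MLP, self).__init__()
        with self.init_scope():
            self.l1 = L.Linear(784, 100)
            self.l2 = L.Linear(100, 10)

    def __call__(self, x):
        return self.l2(F.relu(self.l1(x)))

model = MLP()
serializers.save_hdf5('mlp.h5', model)       # write parameters to an HDF5 file

restored = MLP()                             # same architecture, fresh parameters
serializers.load_hdf5('mlp.h5', restored)    # fill it with the saved values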
Example 1: save_model
# Required import: from chainer import serializers [as alias]
# Or: from chainer.serializers import save_hdf5 [as alias]
def save_model(self, model_filename):
    """Save the network model and its optimizer state to files."""
    serializers.save_hdf5(model_filename, self.model)
    serializers.save_hdf5(model_filename + '.opt', self.optimizer)
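A sketch of the matching load step for this example. The method name load_model is hypothetical, and it assumes self.model and self.optimizer have already been constructed (and the optimizer set up) with the same structure as when they were saved.

def load_model(self, model_filename):
    """Hypothetical counterpart to save_model: restore network and optimizer state."""
    serializers.load_hdf5(model_filename, self.model)
    serializers.load_hdf5(model_filename + '.opt', self.optimizer)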
Example 2: save_and_load_hdf5
# Required import: from chainer import serializers [as alias]
# Or: from chainer.serializers import save_hdf5 [as alias]
def save_and_load_hdf5(src, dst):
    """Saves ``src`` to an HDF5 file and loads it into ``dst``.

    This is a shortcut for :func:`save_and_load` using the HDF5 de/serializers.

    Args:
        src: An object to save.
        dst: An object to load into.

    """
    save_and_load(src, dst, 'tmp.h5',
                  serializers.save_hdf5, serializers.load_hdf5)
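The save_and_load helper called here is not shown in this excerpt. A plausible minimal implementation (an assumption about its behaviour, not necessarily Chainer's actual test utility) serializes src to a temporary file with the given saver and reads it back into dst with the given loader:

import os
import shutil
import tempfile

def save_and_load(src, dst, filename, saver, loader):
    """Serialize src to a temporary file, then deserialize that file into dst."""
    tempdir = tempfile.mkdtemp()
    try:
        path = os.path.join(tempdir, filename)
        saver(path, src)     # e.g. serializers.save_hdf5
        loader(path, dst)    # e.g. serializers.load_hdf5
    finally:
        shutil.rmtree(tempdir)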
Example 3: save
# Required import: from chainer import serializers [as alias]
# Or: from chainer.serializers import save_hdf5 [as alias]
def save(self):
    serializers.save_hdf5("conv.model", self.conv)
    if self.fcl_eliminated is False:
        serializers.save_hdf5("fc.model", self.fc)
Example 4: save
# Required import: from chainer import serializers [as alias]
# Or: from chainer.serializers import save_hdf5 [as alias]
def save(self, filename):
    cs.save_hdf5(filename, self.func.copy().to_cpu())
Example 5: save
# Required import: from chainer import serializers [as alias]
# Or: from chainer.serializers import save_hdf5 [as alias]
def save(self, filename):
    cs.save_hdf5(filename, self.model.copy().to_cpu())
Example 6: save
# Required import: from chainer import serializers [as alias]
# Or: from chainer.serializers import save_hdf5 [as alias]
def save(self, filename):
    # cs.save_hdf5(filename, self.func.copy().to_cpu())
    cs.save_hdf5(filename, self.func.copy())
Example 7: save
# Required import: from chainer import serializers [as alias]
# Or: from chainer.serializers import save_hdf5 [as alias]
def save(self, filename):
    cs.save_hdf5(filename, self.func.to_cpu())
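Examples 4 through 7 differ only in how the link is moved to the CPU before serialization. The sketch below uses a stand-in Linear link (the real self.func / self.model are model-specific) to contrast the two patterns: copy().to_cpu() serializes a CPU copy and leaves the original device placement alone, while to_cpu() works in place and returns the link itself, so the live link is moved as well.

import chainer.links as L
from chainer import serializers as cs

link = L.Linear(3, 2)      # stand-in for the self.func / self.model attributes above
# link.to_gpu()            # the originals are typically GPU-resident

# Examples 4-6: save a CPU copy, keep the original link where it is.
cs.save_hdf5('link_copy.h5', link.copy().to_cpu())

# Example 7: to_cpu() moves the link in place before it is saved.
cs.save_hdf5('link_inplace.h5', link.to_cpu())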
Example 8: train
# Required import: from chainer import serializers [as alias]
# Or: from chainer.serializers import save_hdf5 [as alias]
def train(epoch_num):
    image_groups, sentence_groups = make_groups(train_image_ids, train_sentences)
    test_image_groups, test_sentence_groups = make_groups(test_image_ids, test_sentences, train=False)
    for epoch in range(epoch_num):
        batches = random_batches(image_groups, sentence_groups)
        sum_loss = 0
        sum_acc = 0
        sum_size = 0
        batch_num = len(batches)
        for i, (image_id_batch, sentence_batch) in enumerate(batches):
            loss, acc, size = forward(caption_net, images[image_id_batch], sentence_batch)
            caption_net.cleargrads()
            loss.backward()
            loss.unchain_backward()
            optimizer.update()
            sentence_length = sentence_batch.shape[1]
            sum_loss += float(loss.data) * size
            sum_acc += acc * size
            sum_size += size
            if (i + 1) % 500 == 0:
                print('{} / {} loss: {} accuracy: {}'.format(i + 1, batch_num, sum_loss / sum_size, sum_acc / sum_size))
        print('epoch: {} done'.format(epoch + 1))
        print('train loss: {} accuracy: {}'.format(sum_loss / sum_size, sum_acc / sum_size))
        sum_loss = 0
        sum_acc = 0
        sum_size = 0
        for image_ids, sentences in zip(test_image_groups, test_sentence_groups):
            if len(sentences) == 0:
                continue
            size = len(sentences)
            for i in range(0, size, batch_size):
                image_id_batch = image_ids[i:i + batch_size]
                sentence_batch = sentences[i:i + batch_size]
                loss, acc, size = forward(caption_net, images[image_id_batch], sentence_batch, train=False)
                sentence_length = sentence_batch.shape[1]
                sum_loss += float(loss.data) * size
                sum_acc += acc * size
                sum_size += size
        print('test loss: {} accuracy: {}'.format(sum_loss / sum_size, sum_acc / sum_size))
        # Snapshot the network weights and the optimizer state after every epoch.
        serializers.save_hdf5(args.output + '_{0:04d}.model'.format(epoch), caption_net)
        serializers.save_hdf5(args.output + '_{0:04d}.state'.format(epoch), optimizer)
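A hedged sketch of resuming from a snapshot written by this loop, assuming caption_net and optimizer are rebuilt and set up exactly as before training; the epoch index 0 is a placeholder for whichever snapshot you want to restore.

from chainer import serializers

# Rebuild caption_net and call optimizer.setup(caption_net) first, then:
serializers.load_hdf5(args.output + '_{0:04d}.model'.format(0), caption_net)   # network weights
serializers.load_hdf5(args.output + '_{0:04d}.state'.format(0), optimizer)     # optimizer state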