This article collects typical usage examples of the cPickle.dump method in Python: what cPickle.dump does, how it is called, and what real-world code that uses it looks like. The curated examples below may help; you can also explore other members of the cPickle module.
The following shows 15 code examples of cPickle.dump, sorted by popularity by default.
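Before the examples, here is a minimal sketch of the basic dump/load round trip (assuming Python 2, where cPickle is available; the file name data.pkl is just for illustration):

import cPickle

data = {'words': ['hello', 'world'], 'count': 2}

# Serialize to disk; protocol 2 gives a compact binary format,
# cPickle.HIGHEST_PROTOCOL selects the newest protocol available.
with open('data.pkl', 'wb') as f:
    cPickle.dump(data, f, protocol=2)

# Deserialize the object back from the same file.
with open('data.pkl', 'rb') as f:
    restored = cPickle.load(f)

assert restored == data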
Example 1: save_mappings
# Required import: import cPickle [as alias]
# Or: from cPickle import dump [as alias]
def save_mappings(self, id_to_word, id_to_char, id_to_tag):
    #{{{
    """
    We need to save the mappings if we want to use the model later.
    """
    self.id_to_word = id_to_word
    self.id_to_char = id_to_char
    self.id_to_tag = id_to_tag
    with open(self.mappings_path, 'wb') as f:
        mappings = {
            'id_to_word': self.id_to_word,
            'id_to_char': self.id_to_char,
            'id_to_tag': self.id_to_tag,
        }
        cPickle.dump(mappings, f)
    #}}}
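A possible counterpart for restoring the mappings, sketched here for illustration (reload_mappings is a hypothetical method name, not part of the original source):

def reload_mappings(self):
    """Hypothetical counterpart to save_mappings: read the pickled dict back."""
    with open(self.mappings_path, 'rb') as f:
        mappings = cPickle.load(f)
    self.id_to_word = mappings['id_to_word']
    self.id_to_char = mappings['id_to_char']
    self.id_to_tag = mappings['id_to_tag']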
Example 2: register
# Required import: import cPickle [as alias]
# Or: from cPickle import dump [as alias]
def register(self, name, serializer):
    """Register ``serializer`` object under ``name``.

    Raises :class:`AttributeError` if ``serializer`` is invalid.

    .. note::
        ``name`` will be used as the file extension of the saved files.

    :param name: Name to register ``serializer`` under
    :type name: ``unicode`` or ``str``
    :param serializer: object with ``load()`` and ``dump()`` methods
    """
    # Basic validation
    getattr(serializer, 'load')
    getattr(serializer, 'dump')
    self._serializers[name] = serializer
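A brief usage sketch, assuming some manager object exposes this register method (the SerializerManager name is invented for illustration); cPickle itself already provides the required load()/dump() interface:

import cPickle

manager = SerializerManager()          # hypothetical container exposing register()
manager.register('cpickle', cPickle)   # a module with load() and dump() passes validation

# An object without load()/dump() fails the getattr() checks:
# manager.register('bad', object())    # raises AttributeError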
Example 3: save
# Required import: import cPickle [as alias]
# Or: from cPickle import dump [as alias]
def save(self):
    """Save settings to JSON file specified in ``self._filepath``.

    If you're using this class via :attr:`Workflow.settings`, which
    you probably are, ``self._filepath`` will be ``settings.json``
    in your workflow's data directory (see :attr:`~Workflow.datadir`).
    """
    if self._nosave:
        return
    data = {}
    data.update(self)
    with LockFile(self._filepath, 0.5):
        with atomic_writer(self._filepath, 'wb') as fp:
            json.dump(data, fp, sort_keys=True, indent=2,
                      encoding='utf-8')

# dict methods
Example 4: __init__
# Required import: import cPickle [as alias]
# Or: from cPickle import dump [as alias]
def __init__(self, model_nm, cell_nm, attention_type):
    """
    :param model_nm:
    :param cell_nm:
    :param attention_type:
    """
    self.model_nm = model_nm
    self.cell_nm = cell_nm
    self.attention_type = attention_type
    self.last_ckpt = None
    self.last_id = 0
    self.step_save_location = 'steps.p'
    self.data_save_location = 'data'
    self.mapper_save_location = 'mapper.p'
    self.steps_per_ckpt = None
    self.num_steps_per_prediction = None
    self.present_checkpoints = None
    self.outfile = None
    # initialize the steps if not initialized
    if self.step_save_location not in os.listdir(self.get_checkpoint_location()):
        pickle.dump(0, open(self.get_step_file(), 'wb'))
Example 5: save
# Required import: import cPickle [as alias]
# Or: from cPickle import dump [as alias]
def save(self, file_name):
    """
    Saves state variables (weights, biases) of neural network

    Params:
        file_name (str): model is saved in folder tf_save as file_name.ckpt
    """
    with self.graph.as_default():
        saver = tf.train.Saver()
        saver.save(self.session, io.tf_save_path + file_name + '.ckpt')
    params = {'latent_size': self.latent_size,
              'input_size': self.input_size,
              'encoder_num_units': self.encoder_num_units,
              'decoder_num_units': self.decoder_num_units,
              'tot_epochs': self.tot_epochs,
              'name': self.name}
    with open(io.tf_save_path + file_name + '.pkl', 'wb') as f:
        pickle.dump(params, f)
    print "Saved network to file " + file_name
Example 6: set
# Required import: import cPickle [as alias]
# Or: from cPickle import dump [as alias]
def set(self, key, value, timeout=None):
    if timeout is None:
        timeout = self.default_timeout
    filename = self._get_filename(key)
    self._prune()
    try:
        fd, tmp = tempfile.mkstemp(suffix=self._fs_transaction_suffix,
                                   dir=self._path)
        f = os.fdopen(fd, 'wb')
        try:
            pickle.dump(int(time() + timeout), f, 1)
            pickle.dump(value, f, pickle.HIGHEST_PROTOCOL)
        finally:
            f.close()
        rename(tmp, filename)
        os.chmod(filename, self._mode)
    except (IOError, OSError):
        pass
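The matching read path is not shown above; here is a sketch of what it might look like, assuming the same two-pickle layout that set() writes (expiry timestamp first, then the value):

def get(self, key):
    """Hypothetical counterpart to set(): read back the two pickles."""
    filename = self._get_filename(key)
    try:
        with open(filename, 'rb') as f:
            expires = pickle.load(f)      # first dump(): int expiry timestamp
            if expires >= time():
                return pickle.load(f)     # second dump(): the cached value
        os.remove(filename)               # entry has expired
    except (IOError, OSError):
        pass
    return None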
Example 7: save_model
# Required import: import cPickle [as alias]
# Or: from cPickle import dump [as alias]
def save_model(model, fname):
    m0 = model
    trainer_dict = {'v_template': np.asarray(m0.v_template),
                    'J': np.asarray(m0.J),
                    'weights': np.asarray(m0.weights),
                    'kintree_table': m0.kintree_table,
                    'f': m0.f,
                    'bs_type': m0.bs_type,
                    'posedirs': np.asarray(m0.posedirs)}
    if hasattr(model, 'J_regressor'):
        trainer_dict['J_regressor'] = m0.J_regressor
    if hasattr(model, 'J_regressor_prior'):
        trainer_dict['J_regressor_prior'] = m0.J_regressor_prior
    if hasattr(model, 'weights_prior'):
        trainer_dict['weights_prior'] = m0.weights_prior
    if hasattr(model, 'shapedirs'):
        trainer_dict['shapedirs'] = m0.shapedirs
    if hasattr(model, 'vert_sym_idxs'):
        trainer_dict['vert_sym_idxs'] = m0.vert_sym_idxs
    if hasattr(model, 'bs_style'):
        trainer_dict['bs_style'] = model.bs_style
    else:
        trainer_dict['bs_style'] = 'lbs'
    pickle.dump(trainer_dict, open(fname, 'w'), -1)
Example 8: pickle_model
# Required import: import cPickle [as alias]
# Or: from cPickle import dump [as alias]
def pickle_model(
        path,
        model,
        word2index_x,
        word2index_y,
        index2word_x,
        index2word_y):
    import sys
    import cPickle as pickle
    modifier = 10
    tmp = sys.getrecursionlimit()
    sys.setrecursionlimit(tmp * modifier)
    with open(path, 'wb') as f:
        p_dict = {'model': model,
                  'word2index_x': word2index_x,
                  'word2index_y': word2index_y,
                  'index2word_x': index2word_x,
                  'index2word_y': index2word_y}
        pickle.dump(p_dict, f, protocol=2)
    sys.setrecursionlimit(tmp)
Example 9: dump
# Required import: import cPickle [as alias]
# Or: from cPickle import dump [as alias]
def dump(self, filename=None):
    """
    Save a pickle dump of the crashing object on filename.
    If filename is None, the crash dump is saved on a file created by
    the tempfile module.
    Return the filename.
    """
    if filename is None:
        # This 'temporary file' should actually stay 'forever', i.e. until
        # deleted by the user.
        (fd, filename) = _tempfile.mkstemp(suffix=".pic", prefix="MDPcrash_")
        fl = _os.fdopen(fd, 'w+b', -1)
    else:
        fl = open(filename, 'w+b', -1)
    _cPickle.dump(self.crashing_obj, fl)
    fl.close()
    return filename
Example 10: __init__
# Required import: import cPickle [as alias]
# Or: from cPickle import dump [as alias]
def __init__(self, *args):
    """Allow crash recovery.

    Arguments: (error_string, flow_instance, parent_exception)
    The triggering parent exception is kept in self.parent_exception.
    If flow_instance._crash_recovery is set, save a crash dump of
    flow_instance on the file self.filename"""
    CrashRecoveryException.__init__(self, *args)
    rec = self.crashing_obj._crash_recovery
    errstr = args[0]
    if rec:
        if isinstance(rec, str):
            name = rec
        else:
            name = None
        name = CrashRecoveryException.dump(self, name)
        dumpinfo = '\nA crash dump is available on: "%s"' % name
        self.filename = name
        errstr = errstr + dumpinfo
    Exception.__init__(self, errstr)
Example 11: set_crash_recovery
# Required import: import cPickle [as alias]
# Or: from cPickle import dump [as alias]
def set_crash_recovery(self, state=True):
    """Set crash recovery capabilities.

    When a node raises an Exception during training, execution, or
    inverse execution that the flow is unable to handle, a FlowExceptionCR
    is raised. If crash recovery is set, a crash dump of the flow
    instance is saved for later inspection. The original exception
    can be found as the 'parent_exception' attribute of the
    FlowExceptionCR instance.

    - If 'state' = False, disable crash recovery.
    - If 'state' is a string, the crash dump is saved on a file
      with that name.
    - If 'state' = True, the crash dump is saved on a file created by
      the tempfile module.
    """
    self._crash_recovery = state
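A short usage sketch, assuming the MDP library's Flow API (the node choice and training data here are made up for illustration):

import numpy as np
import mdp

flow = mdp.Flow([mdp.nodes.PCANode(output_dim=3)])
flow.set_crash_recovery('pca_crash.pic')   # dump the flow to this file on an unhandled error

try:
    flow.train(np.random.rand(100, 10))
except mdp.FlowExceptionCR as exc:
    # the pickled flow instance can be inspected later
    print 'Crash dump written to: ' + exc.filename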
Example 12: sync
# Required import: import cPickle [as alias]
# Or: from cPickle import dump [as alias]
def sync(self):
    '''Write the dict to disk'''
    if self.flag == 'r':
        return
    filename = self.filename
    tempname = filename + '.tmp'
    fileobj = open(tempname, 'wb' if self.file_format == 'pickle' else 'w')
    try:
        self.dump(fileobj)
    except Exception:
        os.remove(tempname)
        raise
    finally:
        fileobj.close()
    shutil.move(tempname, self.filename)    # atomic commit
    if self.mode is not None:
        os.chmod(self.filename, self.mode)
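The self.dump call above dispatches on self.file_format; the method itself is not part of this snippet, but a minimal sketch of what it might look like (hypothetical, shown only to make the sync logic concrete):

def dump(self, fileobj):
    """Hypothetical writer used by sync(): pickle or JSON, per self.file_format."""
    if self.file_format == 'pickle':
        pickle.dump(dict(self), fileobj, 2)
    elif self.file_format == 'json':
        json.dump(dict(self), fileobj, sort_keys=True, indent=4)
    else:
        raise NotImplementedError('Unknown file_format: %r' % self.file_format)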
Example 13: cache
# Required import: import cPickle [as alias]
# Or: from cPickle import dump [as alias]
def cache(filename):
    """
    A simple decorator to cache results to disk.
    """
    def decorator(func):
        """Note: it is the function that is finally returned"""
        def cached_function(*args):
            """Note: needed to access the returned value"""
            try:
                return pickle.load(open(filename, "r"))
            except IOError:
                value = func(*args)
                pickle.dump(value, open(filename, "w"))
                return value
        return cached_function
    return decorator
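A possible way to use the decorator (expensive_computation is invented for illustration); the first call computes and pickles the result, later calls read it straight from disk:

@cache('squares_sum.pkl')
def expensive_computation():
    """Toy stand-in for a slow function whose result is worth caching."""
    return sum(i * i for i in xrange(10 ** 6))

first = expensive_computation()    # computes, then writes squares_sum.pkl
second = expensive_computation()   # loaded from the pickle, no recomputation
assert first == second

Note that the cache key is only the file name, so calls with different arguments to the wrapped function would share the same cached value.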
Example 14: save_objects_to_file
# Required import: import cPickle [as alias]
# Or: from cPickle import dump [as alias]
def save_objects_to_file(file_name, data_dict):
    """Write the network devices out to a file."""
    # Determine whether .pkl, .yml, or .json file
    if file_name.count(".") == 1:
        _, out_format = file_name.split(".")
    else:
        raise ValueError("Invalid file name: {}".format(file_name))

    if out_format == 'pkl':
        with open(file_name, 'wb') as f:
            pickle.dump(data_dict, f)
    elif out_format == 'yml':
        with open(file_name, 'w') as f:
            f.write(yaml.dump(data_dict, default_flow_style=False))
    elif out_format == 'json':
        with open(file_name, 'w') as f:
            json.dump(data_dict, f)
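A hedged counterpart for reading the file back, dispatching on the same extensions (load_objects_from_file is a hypothetical name, not from the original source):

def load_objects_from_file(file_name):
    """Read the network devices back, based on the file extension."""
    _, in_format = file_name.split(".")
    if in_format == 'pkl':
        with open(file_name, 'rb') as f:
            return pickle.load(f)
    elif in_format == 'yml':
        with open(file_name) as f:
            return yaml.safe_load(f)
    elif in_format == 'json':
        with open(file_name) as f:
            return json.load(f)
    raise ValueError("Unsupported format: {}".format(in_format))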
Example 15: gt_segdb
# Required import: import cPickle [as alias]
# Or: from cPickle import dump [as alias]
def gt_segdb(self):
    """
    return ground truth image regions database
    :return: imdb[image_index]['', 'flipped']
    """
    print("======== Starting to get gt_segdb ========")
    cache_file = os.path.join(self.cache_path, self.name + '_gt_segdb.pkl')
    if os.path.exists(cache_file):
        with open(cache_file, 'rb') as fid:
            segdb = cPickle.load(fid)
        print '========= {} gt segdb loaded from {}'.format(self.name, cache_file)
        return segdb

    print("======== Starting to create gt_segdb ======")
    gt_segdb = []
    for index in tqdm(self.image_set_index):
        gt_segdb.append(self.load_segdb_from_index(index))
    # gt_segdb = [self.load_segdb_from_index(index) for index in self.image_set_index]
    with open(cache_file, 'wb') as fid:
        cPickle.dump(gt_segdb, fid, cPickle.HIGHEST_PROTOCOL)
    print '========= Wrote gt segdb to {}'.format(cache_file)
    return gt_segdb