This article collects typical usage examples of the pickle.load method in Python. If you are unsure what pickle.load does, how to call it, or what it looks like in practice, the curated examples below may help. You can also read more about the pickle module that the method belongs to.
The following shows 15 code examples of pickle.load, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the site recommend better Python code samples.
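Before the collected examples, here is a minimal, self-contained sketch of the basic dump/load round trip (the file name round_trip.pkl is purely illustrative):

import pickle

record = {"name": "example", "values": [1, 2, 3]}

# Serialize the object to a file opened in binary mode.
with open("round_trip.pkl", "wb") as f:
    pickle.dump(record, f, protocol=pickle.HIGHEST_PROTOCOL)

# pickle.load reads one object back from the binary stream.
with open("round_trip.pkl", "rb") as f:
    restored = pickle.load(f)

assert restored == record
# Note: only unpickle data you trust; pickle.load can execute arbitrary code.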
Example 1: _deserialize
# Required import: import pickle [as alias]
# Or: from pickle import load [as alias]
def _deserialize(self, data, type_):
    if self.compress:
        # decompress the data if needed
        data = lz4.frame.decompress(data)
    if type_ == _NUMPY:
        # deserialize numpy arrays
        buf = io.BytesIO(data)
        data = np.load(buf)
    elif type_ == _PICKLE:
        # deserialize other python objects
        data = pickle.loads(data)
    else:
        # otherwise return the data as-is (bytes)
        pass
    return data
Example 2: register
# Required import: import pickle [as alias]
# Or: from pickle import load [as alias]
def register(self, name, serializer):
    """Register ``serializer`` object under ``name``.

    Raises :class:`AttributeError` if ``serializer`` is invalid.

    .. note::

        ``name`` will be used as the file extension of the saved files.

    :param name: Name to register ``serializer`` under
    :type name: ``unicode`` or ``str``
    :param serializer: object with ``load()`` and ``dump()``
        methods

    """
    # Basic validation: the serializer must expose load() and dump()
    getattr(serializer, 'load')
    getattr(serializer, 'dump')

    self._serializers[name] = serializer
Example 3: from_snapshot
# Required import: import pickle [as alias]
# Or: from pickle import load [as alias]
def from_snapshot(self, sfile, nfile):
    print('Restoring model snapshots from {:s}'.format(sfile))
    self.net.load_state_dict(torch.load(str(sfile)))
    print('Restored.')
    # Needs to restore the other hyper-parameters/states for training; (TODO xinlei) I have
    # tried my best to find the random states so that it can be recovered exactly.
    # However, the TensorFlow state is currently not available.
    with open(nfile, 'rb') as fid:
        st0 = pickle.load(fid)
        cur = pickle.load(fid)
        perm = pickle.load(fid)
        cur_val = pickle.load(fid)
        perm_val = pickle.load(fid)
        last_snapshot_iter = pickle.load(fid)

        np.random.set_state(st0)
        self.data_layer._cur = cur
        self.data_layer._perm = perm
        self.data_layer_val._cur = cur_val
        self.data_layer_val._perm = perm_val

    return last_snapshot_iter
Developer: Sunarker | Project: Collaborative-Learning-for-Weakly-Supervised-Object-Detection | Lines: 24 | Source file: train_val.py
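This example calls pickle.load six times on the same file handle; each call returns the next object in the stream, so the snapshot file must have been written with matching sequential pickle.dump calls. A minimal sketch of what such a writer could look like (this snapshot_states helper is an assumption for illustration, not the project's actual code):

import pickle
import numpy as np

def snapshot_states(nfile, data_layer, data_layer_val, last_snapshot_iter):
    # Hypothetical counterpart to from_snapshot: dump the objects in the
    # exact order that the loader reads them back.
    with open(nfile, 'wb') as fid:
        pickle.dump(np.random.get_state(), fid, pickle.HIGHEST_PROTOCOL)
        pickle.dump(data_layer._cur, fid, pickle.HIGHEST_PROTOCOL)
        pickle.dump(data_layer._perm, fid, pickle.HIGHEST_PROTOCOL)
        pickle.dump(data_layer_val._cur, fid, pickle.HIGHEST_PROTOCOL)
        pickle.dump(data_layer_val._perm, fid, pickle.HIGHEST_PROTOCOL)
        pickle.dump(last_snapshot_iter, fid, pickle.HIGHEST_PROTOCOL)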
Example 4: gt_roidb
# Required import: import pickle [as alias]
# Or: from pickle import load [as alias]
def gt_roidb(self):
    """
    Return the database of ground-truth regions of interest.

    This function loads/saves from/to a cache file to speed up future calls.
    """
    cache_file = os.path.join(self.cache_path, self.name + '_gt_roidb.pkl')
    if os.path.exists(cache_file):
        with open(cache_file, 'rb') as fid:
            try:
                roidb = pickle.load(fid)
            except:
                # fall back for caches written under Python 2; rewind first
                fid.seek(0)
                roidb = pickle.load(fid, encoding='bytes')
        print('{} gt roidb loaded from {}'.format(self.name, cache_file))
        return roidb

    gt_roidb = [self._load_pascal_labels(index)
                for index in self.image_index]
    with open(cache_file, 'wb') as fid:
        pickle.dump(gt_roidb, fid, pickle.HIGHEST_PROTOCOL)
    print('wrote gt roidb to {}'.format(cache_file))
    return gt_roidb
Developer: Sunarker | Project: Collaborative-Learning-for-Weakly-Supervised-Object-Detection | Lines: 25 | Source file: pascal_voc.py
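The except branch handles caches written under Python 2, where pickled byte strings fail to decode with the Python 3 defaults. For pickles containing NumPy arrays or datetime objects, encoding='latin1' is the commonly recommended fallback, since it can decode any byte sequence. A small standalone helper, shown only as a sketch (load_legacy_pickle is a hypothetical name, not part of this project):

import pickle

def load_legacy_pickle(path):
    # Try the Python 3 default first, then rewind and retry with an
    # encoding that can decode any Python 2 byte string.
    with open(path, 'rb') as f:
        try:
            return pickle.load(f)
        except UnicodeDecodeError:
            f.seek(0)
            return pickle.load(f, encoding='latin1')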
Example 5: gt_roidb
# Required import: import pickle [as alias]
# Or: from pickle import load [as alias]
def gt_roidb(self):
    """
    Return the database of ground-truth regions of interest.

    This function loads/saves from/to a cache file to speed up future calls.
    """
    cache_file = osp.join(self.cache_path, self.name + '_gt_roidb.pkl')
    if osp.exists(cache_file):
        with open(cache_file, 'rb') as fid:
            roidb = pickle.load(fid)
        print('{} gt roidb loaded from {}'.format(self.name, cache_file))
        return roidb

    gt_roidb = [self._load_coco_annotation(index)
                for index in self._image_index]
    with open(cache_file, 'wb') as fid:
        pickle.dump(gt_roidb, fid, pickle.HIGHEST_PROTOCOL)
    print('wrote gt roidb to {}'.format(cache_file))
    return gt_roidb
Developer: Sunarker | Project: Collaborative-Learning-for-Weakly-Supervised-Object-Detection | Lines: 21 | Source file: coco.py
Example 6: _load
# Required import: import pickle [as alias]
# Or: from pickle import load [as alias]
def _load(self, path=None):
    assert self.locked, ('The session load without being locked. '
                         "Check your tools' priority levels.")
    if path is None:
        path = self._get_file_path()
    try:
        f = open(path, 'rb')
        try:
            return pickle.load(f)
        finally:
            f.close()
    except (IOError, EOFError):
        e = sys.exc_info()[1]
        if self.debug:
            cherrypy.log('Error loading the session pickle: %s' %
                         e, 'TOOLS.SESSIONS')
        return None
Example 7: main
# Required import: import pickle [as alias]
# Or: from pickle import load [as alias]
def main(cache_dir):
    files_list = list(os.listdir(cache_dir))
    for file in files_list:
        full_filename = os.path.join(cache_dir, file)
        if os.path.isfile(full_filename):
            print("Processing {}".format(full_filename))
            m, stored_kwargs = pickle.load(open(full_filename, 'rb'))
            updated_kwargs = util.get_compatible_kwargs(model.Model, stored_kwargs)
            model_hash = util.object_hash(updated_kwargs)
            print("New hash -> " + model_hash)
            model_filename = os.path.join(cache_dir, "model_{}.p".format(model_hash))
            sys.setrecursionlimit(100000)
            pickle.dump((m, updated_kwargs), open(model_filename, 'wb'), protocol=pickle.HIGHEST_PROTOCOL)
            os.remove(full_filename)
Example 8: assemble_batch
# Required import: import pickle [as alias]
# Or: from pickle import load [as alias]
def assemble_batch(story_fns, num_answer_words, format_spec):
    stories = []
    for sfn in story_fns:
        with gzip.open(sfn, 'rb') as f:
            cvtd_story, _, _, _ = pickle.load(f)
        stories.append(cvtd_story)
    sents, graphs, queries, answers = zip(*stories)
    cvtd_sents = np.array(sents, np.int32)
    cvtd_queries = np.array(queries, np.int32)
    max_ans_len = max(len(a) for a in answers)
    cvtd_answers = np.stack([convert_answer(answer, num_answer_words, format_spec, max_ans_len) for answer in answers])
    num_new_nodes, new_node_strengths, new_node_ids, next_edges = zip(*graphs)
    num_new_nodes = np.stack(num_new_nodes)
    new_node_strengths = np.stack(new_node_strengths)
    new_node_ids = np.stack(new_node_ids)
    next_edges = np.stack(next_edges)
    return cvtd_sents, cvtd_queries, cvtd_answers, num_new_nodes, new_node_strengths, new_node_ids, next_edges
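pickle.load only needs a binary file-like object with read() and readline() methods, which is why it works directly on the handle returned by gzip.open above. A minimal self-contained round trip through gzip (the file name and tuple contents are illustrative):

import gzip
import pickle

# Write a 4-tuple, mirroring the four values the loader above unpacks.
with gzip.open("example_story.pgz", "wb") as f:
    pickle.dump(("converted-story", None, None, None), f, protocol=pickle.HIGHEST_PROTOCOL)

with gzip.open("example_story.pgz", "rb") as f:
    cvtd_story, _, _, _ = pickle.load(f)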
Example 9: main
# Required import: import pickle [as alias]
# Or: from pickle import load [as alias]
def main():
    global HITMASKS, ITERATIONS, VERBOSE, bot

    parser = argparse.ArgumentParser("learn.py")
    parser.add_argument("--iter", type=int, default=1000, help="number of iterations to run")
    parser.add_argument(
        "--verbose", action="store_true", help="output [iteration | score] to stdout"
    )
    args = parser.parse_args()
    ITERATIONS = args.iter
    VERBOSE = args.verbose

    # load dumped HITMASKS
    with open("data/hitmasks_data.pkl", "rb") as input:
        HITMASKS = pickle.load(input)

    while True:
        movementInfo = showWelcomeAnimation()
        crashInfo = mainGame(movementInfo)
        showGameOverScreen(crashInfo)
Example 10: create_cifar100
# Required import: import pickle [as alias]
# Or: from pickle import load [as alias]
def create_cifar100(tfrecord_dir, cifar100_dir):
    print('Loading CIFAR-100 from "%s"' % cifar100_dir)
    import pickle
    with open(os.path.join(cifar100_dir, 'train'), 'rb') as file:
        data = pickle.load(file, encoding='latin1')
    images = data['data'].reshape(-1, 3, 32, 32)
    labels = np.array(data['fine_labels'])
    assert images.shape == (50000, 3, 32, 32) and images.dtype == np.uint8
    assert labels.shape == (50000,) and labels.dtype == np.int32
    assert np.min(images) == 0 and np.max(images) == 255
    assert np.min(labels) == 0 and np.max(labels) == 99
    onehot = np.zeros((labels.size, np.max(labels) + 1), dtype=np.float32)
    onehot[np.arange(labels.size), labels] = 1.0

    with TFRecordExporter(tfrecord_dir, images.shape[0]) as tfr:
        order = tfr.choose_shuffled_order()
        for idx in range(order.size):
            tfr.add_image(images[order[idx]])
        tfr.add_labels(onehot[order])

#----------------------------------------------------------------------------
Example 11: __init__
# Required import: import pickle [as alias]
# Or: from pickle import load [as alias]
def __init__(self, transform=None, target_transform=None, filename="adv_set_e_2.p", transp=False):
    """
    :param transform:
    :param target_transform:
    :param filename:
    :param transp: Set shuff=False for PGD-based attacks
    :return:
    """
    self.transform = transform
    self.target_transform = target_transform
    self.adv_dict = pickle.load(open(filename, "rb"))
    self.adv_flat = self.adv_dict["adv_input"]
    self.num_adv = np.shape(self.adv_flat)[0]
    self.shuff = transp
    self.sample_num = 0
Example 12: __init__
# Required import: import pickle [as alias]
# Or: from pickle import load [as alias]
def __init__(self, transform=None, target_transform=None, filename="adv_set_e_2.p", transp=False):
    """
    :param transform:
    :param target_transform:
    :param filename:
    :param transp: Set shuff=False for PGD-based attacks
    :return:
    """
    self.transform = transform
    self.target_transform = target_transform
    self.adv_dict = pickle.load(open(filename, "rb"))
    self.adv_flat = self.adv_dict["adv_input"]
    self.num_adv = np.shape(self.adv_flat)[0]
    self.transp = transp
    self.sample_num = 0
Example 13: __load_mean
# Required import: import pickle [as alias]
# Or: from pickle import load [as alias]
def __load_mean(self):
    mean = None
    if self.mean_image_file:
        if os.path.isfile(self.mean_image_file):
            _, ext = os.path.splitext(os.path.basename(self.mean_image_file))
            if ext.lower() == ".npy":
                mean = pickle.load(open(self.mean_image_file, "rb"))
            else:
                m_image = LabeledImage(self.mean_image_file)  # mean image is already `converted` at calculation time
                m_image.load()
                mean = m_image.to_array(np, self.color)
        else:
            raise Exception("Mean image does not exist at {0}.".format(self.mean_image_file))
    else:
        self.label_file._logger.warning("Mean image is not set, so training the model may have difficulty converging.")
    return mean
Example 14: __init__
# Required import: import pickle [as alias]
# Or: from pickle import load [as alias]
def __init__(self, pos_features, pipeline_obj_path):
    """
    Args:
        pos_features: list of positional features to use
        pipeline_obj_path: path to the serialized pipeline object
    """
    self.pos_features = pos_features
    self.pipeline_obj_path = pipeline_obj_path

    # deserialize the pickle file
    with open(self.pipeline_obj_path, "rb") as f:
        pipeline_obj = pickle.load(f)
    self.POS_FEATURES = pipeline_obj[0]
    self.minmax_scaler = pipeline_obj[1]
    self.imp = pipeline_obj[2]

    self.funct_transform = FunctionTransformer(func=sign_log_func,
                                               inverse_func=sign_log_func_inverse)
    # for simplicity, assume all current pos_features are the
    # same as before
    assert self.POS_FEATURES == self.pos_features
Example 15: __init__
# Required import: import pickle [as alias]
# Or: from pickle import load [as alias]
def __init__(self, pos_features, pipeline_obj_path):
    """
    Args:
        pos_features: list of positional features to use
        pipeline_obj_path: path to the serialized pipeline object
    """
    self.pos_features = pos_features
    self.pipeline_obj_path = pipeline_obj_path

    # deserialize the pickle file
    with open(self.pipeline_obj_path, "rb") as f:
        pipeline_obj = pickle.load(f)
    self.POS_FEATURES = pipeline_obj[0]
    self.preproc_pipeline = pipeline_obj[1]
    self.imp = pipeline_obj[2]

    # for simplicity, assume all current pos_features are the
    # same as before
    assert self.POS_FEATURES == self.pos_features
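Both of the last two examples expect the pickle file to hold a single three-element sequence: the feature names, a fitted transformer, and an imputer. A minimal sketch of a compatible writer, assuming the pipeline object is simply a list pickled in that order (save_pipeline and its arguments are illustrative, not the project's actual API):

import pickle

def save_pipeline(pipeline_obj_path, pos_features, transformer, imputer):
    # Pickle the three components as one list so a single pickle.load
    # restores all of them in the constructors shown above.
    with open(pipeline_obj_path, "wb") as f:
        pickle.dump([pos_features, transformer, imputer], f, protocol=pickle.HIGHEST_PROTOCOL)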