This article collects typical usage examples of the Python method hickle.dump. If you have been wondering what hickle.dump does or how to use it in your own code, the curated examples below may help. You can also browse further usage examples for the hickle module in which this method is defined.
The 15 code examples of hickle.dump shown below are sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
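Before the collected snippets, here is a minimal, self-contained sketch of the basic hickle.dump / hickle.load round trip that the examples below build on; the file name data.hkl is just a placeholder.
import numpy as np
import hickle as hkl

# A small dictionary of NumPy arrays, a typical hickle payload.
data = {'x': np.arange(10), 'y': np.random.rand(3, 3)}

# Dump to an HDF5-backed .hkl file; gzip compression is optional.
hkl.dump(data, 'data.hkl', mode='w', compression='gzip')

# Load it back; the round trip restores the original structure.
restored = hkl.load('data.hkl')
assert np.array_equal(restored['x'], data['x'])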
Example 1: evaluateAndSave
# Required module: import hickle [as alias]
# Or: from hickle import dump [as alias]
def evaluateAndSave():
    print('RUNNING EVALUATION...')
    eval_pairs = hyperparams.getEvalDataGen()
    pair_outs = loadOrCalculateOuts()
    get_rt = type(eval_pairs[0]) == sequences.PairWithStereo
    fps = [[system.forwardPassFromHicklable(im) for im in pair]
           for pair in pair_outs]
    pairs_fps = zip(eval_pairs, fps)
    stat_Rerr_terr = [evaluate.leastNumForKInliers(
        pair_fps[0], pair_fps[1], FLAGS.k, get_rt=get_rt)
        for pair_fps in pairs_fps]
    if get_rt:
        result = [[i[j] for i in stat_Rerr_terr] for j in range(3)]
    else:
        result = [stat_Rerr_terr, None, None]
    hkl.dump(result, open(hyperparams.evalPath(), 'w'))
Example 2: cache
# Required module: import hickle [as alias]
# Or: from hickle import dump [as alias]
def cache():
    hyperparams.announceEval()
    eval_pairs = hyperparams.getEvalDataGen()
    pair_outs = []
    if FLAGS.baseline == 'super':
        forward_pass_dict = baselines.parseSuperPointOuts(eval_pairs)
        for pair_i in range(len(eval_pairs)):
            pair = eval_pairs[pair_i]
            folder, a, b = pair.name().split(' ')
            forward_passes = [forward_pass_dict['%s%s' % (folder, i)]
                              for i in [a, b]]
            pair_outs.append([fp.hicklable() for fp in forward_passes])
    else:
        graph, sess = hyperparams.modelFromCheckpoint()
        forward_passer = hyperparams.getForwardPasser(graph, sess)
        fp_cache = system.ForwardPassCache(forward_passer)
        for pair_i in range(len(eval_pairs)):
            print('%d/%d' % (pair_i, len(eval_pairs)))
            pair = eval_pairs[pair_i]
            print(pair.name())
            fps = [fp_cache[im] for im in pair.im]
            pair_outs.append([fp.hicklable() for fp in fps])
    hkl.dump(pair_outs, open(hyperparams.cachedForwardPath(), 'w'))
Example 3: _serialize_ld_info_
# Required module: import hickle [as alias]
# Or: from hickle import dump [as alias]
def _serialize_ld_info_(local_ld_dict_file, ld_dict, verbose=False, compressed=True, use_hickle=False):
    t0 = time.time()
    if use_hickle:
        f = h5py.File(local_ld_dict_file, 'w')
        if compressed:
            print('Storing compressed LD information to hdf5 file')
            hickle.dump(ld_dict, f, compression='gzip')
        else:
            hickle.dump(ld_dict, f)
        f.close()
    else:
        if compressed:
            print('Storing LD information to compressed pickle file')
            f = gzip.open(local_ld_dict_file, 'wb')
        else:
            f = open(local_ld_dict_file, 'wb')
        pickle.dump(ld_dict, f, protocol=-1)
        f.close()
    t1 = time.time()
    t = (t1 - t0)
    if verbose:
        print('\nIt took %d minutes and %0.2f seconds to write LD information to disk.' % (t / 60, t % 60))
        print('LD information file size on disk: %0.4f Mb' % float(os.path.getsize(local_ld_dict_file) / 1000000.0))
Example 4: save_dataset
# Required module: import hickle [as alias]
# Or: from hickle import dump [as alias]
def save_dataset(origin_dataset, save_dir):
    dataset = {}
    for key in origin_dataset[0].keys():
        dataset[key] = [item[key] for item in origin_dataset]
    dataset['seq'] = [item.encode('ascii', 'ignore') for item in dataset['seq']]
    for key in origin_dataset[0].keys():
        dataset[key] = np.array(dataset[key])
    hkl.dump(dataset, save_dir, mode='w', compression='gzip')
    print('Training data generation is finished!')
# generate dataset
Example 5: model_test
# Required module: import hickle [as alias]
# Or: from hickle import dump [as alias]
def model_test(net, X_test, y_test, outputfile):
    # net.load_params_from('saved_weights_file')
    y_pred = net.predict(X_test)
    y_prob = net.predict_proba(X_test)
    print('Accuracy score is {}'.format(metrics.accuracy_score(y_test, y_pred)))
    print('ROC AUC score is {}'.format(metrics.roc_auc_score(y_test, y_prob[:, -1])))
    hkl.dump([y_prob[:, -1], y_test], outputfile)
# save model parameters
Example 6: model_test
# Required module: import hickle [as alias]
# Or: from hickle import dump [as alias]
def model_test(model, X_test, y_test, outputfile):
    # net.load_params_from('/path/to/weights_file')
    y_pred = model.predict(X_test)
    print(stats.linregress(y_test, y_pred[:, 0]))
    hkl.dump([y_pred[:, 0], y_test], outputfile)
# save model parameters
Example 7: serialize_to_file_json
# Required module: import hickle [as alias]
# Or: from hickle import dump [as alias]
def serialize_to_file_json(obj, path, protocol=pickle.HIGHEST_PROTOCOL):
    f = open(path, 'w')
    json.dump(obj, f)
    f.close()
Example 8: serialize_to_file_hdf5
# Required module: import hickle [as alias]
# Or: from hickle import dump [as alias]
def serialize_to_file_hdf5(obj, path, protocol=pickle.HIGHEST_PROTOCOL):
    f = open(path, 'w')
    hickle.dump(obj, f)
    f.close()
Example 9: serialize_to_file
# Required module: import hickle [as alias]
# Or: from hickle import dump [as alias]
def serialize_to_file(obj, path, protocol=pickle.HIGHEST_PROTOCOL):
    print('serialize to %s' % path)
    f = open(path, 'wb')
    pickle.dump(obj, f, protocol=protocol)
    f.close()
Example 10: dump
# Required module: import hickle [as alias]
# Or: from hickle import dump [as alias]
def dump(self, filename):
    hkl.dump([self.tracked_ids, self.nextid], open(filename, 'w'))
Example 11: cache
# Required module: import hickle [as alias]
# Or: from hickle import dump [as alias]
def cache(n_apparent, n_true, inl_stats, R_errs, t_errs):
    hkl.dump([n_apparent, n_true, inl_stats, R_errs, t_errs], open(path(), 'w'))
Example 12: run
# Required module: import hickle [as alias]
# Or: from hickle import dump [as alias]
def run():
    seqs = hyperparams.getEvalSequences()
    for seq in seqs:
        wrapper = sequences.Wrapper(seq)
        print(seq.name())
        spath = os.path.join(hyperparams.seqFpsPath(), seq.name())
        assert os.path.exists(spath)
        n = 0
        while os.path.exists(os.path.join(spath, '%05d.jpg' % n)):
            n = n + 1
        confmat = np.ones((n, n), dtype=int) * 500
        least_nums = []
        stereo_cache = p3p.StereoCache(n)
        for i in range(n):
            print('%d/%d' % (i, n))
            fp_i = system.forwardPassFromHicklable(hkl.load(os.path.join(
                spath, '%05d.hkl' % i)))
            assert fp_i.ip_scores.size == 500
            confmat[i, i] = 0
            for j in range(i + 1, n):
                fp_j = system.forwardPassFromHicklable(hkl.load(os.path.join(
                    spath, '%05d.hkl' % j)))
                pair = wrapper.makePair([i, j])
                if pair.imname(0) not in stereo_cache:
                    stereo_images = [pair.im[0], pair.rim_0]
                    stereo_cache[pair.imname(0)] = p3p.pointsFromStereo(
                        stereo_images, fp_i.ips_rc, pair.K, pair.baseline)
                least_num = evaluate.leastNumForKInliers(
                    pair, [fp_i, fp_j], 20, stereo_cache=stereo_cache)
                confmat[i, j] = least_num
                confmat[j, i] = least_num
                if least_num == 500:
                    break
                least_nums.append(least_num)
                print('\t%05d: %d' % (j, least_num))
        hkl.dump([confmat, least_nums], open(
            hyperparams.resultPath() + '_confmat_%s.hkl' % seq.name(), 'w'))
Example 13: process
# Required module: import hickle [as alias]
# Or: from hickle import dump [as alias]
def process(spath, irange, seq, fpasser):
    fps = fpasser.parallelForward(
        [cv2.imread(seq.images[i], cv2.IMREAD_GRAYSCALE) for i in irange])
    for i in irange:
        print('%d/%d' % (i, len(seq.images)))
        im = seq.images[i]
        path = os.path.join(spath, '%05d' % i)
        fp = fps[i - irange[0]]
        rendering = fp.render()
        if FLAGS.debug_plot:
            cv2.imshow('render', rendering)
            cv2.waitKey(1)
        cv2.imwrite(path + '.jpg', rendering)
        hkl.dump(fp.hicklable(), open(path + '.hkl', 'w'))
Example 14: dump_files
# Required module: import hickle [as alias]
# Or: from hickle import dump [as alias]
def dump_files(filename, priors=None, df=None, conf=False):
    """Dump files used to train the model and feature extraction

    Parameters
    ----------
    filename : str
        Full path of the prefix name for helper files
    priors : ndarray, optional
        2-dim array of location priors
    df : DataFrame, optional
        Table returned by compute_priors
    conf : bool, optional
        Save a file with the confidence of each prior on every segment

    Note: the files should be parsed together to get an insightful view of
    the information, because no further indexing is included in each file.
    """
    filefmt = filename + '_{}.{}'
    # HDF5 with priors
    if priors is not None:
        hkl.dump(priors.astype(np.float32),
                 filefmt.format('priors', 'hkl'), mode='w',
                 compression='gzip', compression_opts=1)
    # List of videos ready for the C3D feature extractor wrapper
    if df is not None:
        df.rename(columns={'video-frames': 'num-frame',
                           'f-init': 'i-frame'}, inplace=True)
        lst = ['video-name', 'num-frame', 'i-frame', 'duration']
        df[lst].to_csv(filefmt.format('ref', 'lst'), sep=' ', index=False)
        # Rename the columns back so df is not left modified, without
        # copying it (which would increase memory use)
        df.rename(columns={'num-frame': 'video-frames',
                           'i-frame': 'f-init'}, inplace=True)
    # HDF5 with confidences
    if conf and df is not None:
        lst = ['c_{}'.format(i) for i in range(df.columns.size - 4)]
        hkl.dump(np.array(df.loc[:, lst]).astype(np.int32),
                 filefmt.format('conf', 'hkl'),
                 mode='w', compression='gzip', compression_opts=1)
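As a rough illustration of how the helper above might be called, the sketch below builds a hypothetical priors matrix and a DataFrame with the column names the docstring implies; the prefix path, the values, and the number of confidence columns are assumptions, not part of the original example.
import numpy as np
import pandas as pd

# Hypothetical inputs shaped like those the docstring describes: a 2-dim
# priors array and a table with video metadata plus two confidence
# columns (c_0, c_1).
priors = np.random.rand(100, 2)
df = pd.DataFrame({'video-name': ['v1', 'v2'],
                   'video-frames': [300, 450],
                   'f-init': [0, 30],
                   'duration': [16, 16],
                   'c_0': [1, 0],
                   'c_1': [0, 1]})

# Writes prefix_priors.hkl, prefix_ref.lst and prefix_conf.hkl under the
# given prefix.
dump_files('prefix', priors=priors, df=df, conf=True)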
Example 15: save_hkl_file
# Required module: import hickle [as alias]
# Or: from hickle import dump [as alias]
def save_hkl_file(filename, data):
    hkl_filename = filename + '.hkl'
    try:
        hkl.dump(data, hkl_filename, mode="w")
        return True
    except Exception:
        if os.path.isfile(hkl_filename):
            os.remove(hkl_filename)
        return False