This page collects typical usage examples of the Python method dataset.load_dataset. If you have been wondering how exactly to use dataset.load_dataset, what it does, or what calling it looks like in practice, the curated code examples below may help. You can also explore further usage examples of the dataset module in which the method lives.
Seven code examples of dataset.load_dataset are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
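Note that these examples are excerpted from different open-source projects, so both the dataset module being imported and the return value of load_dataset differ from example to example: Examples 1-3, 5 and 7 index a dict of preprocessed medical-imaging records, Example 4 unpacks a graph-learning tuple, and Example 6 builds a TFRecord-backed dataset object. As a rough, hypothetical sketch, inferred only from how Examples 1-3 index the result:

# Hypothetical layout of one per-patient record as used by Examples 1-3;
# the real structure is defined by each project's own dataset module.
dataset_entry = {
    'name': 'patient_001',         # used to name the exported .nii.gz file
    'spacing': (1.0, 1.0, 1.0),    # geometry fields passed straight to SimpleITK
    'direction': (1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0),
    'origin': (0.0, 0.0, 0.0),
    # ...plus the preprocessed image arrays themselves
}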
Example 1: save_train_dataset_as_nifti
# Required import: import dataset [as alias]
# Or alternatively: from dataset import load_dataset [as alias]
import os
import numpy as np
import SimpleITK as sitk
# paths, convert_to_original_coord_system and load_dataset come from the surrounding project

def save_train_dataset_as_nifti(results_dir=os.path.join(paths.results_folder, "final"),
                                out_dir=os.path.join(paths.results_folder, "training_set_results")):
    if not os.path.isdir(out_dir):
        os.mkdir(out_dir)
    a = load_dataset()
    for fold in range(5):
        working_dir = os.path.join(results_dir, "fold%d" % fold, "validation")
        ids_in_fold = os.listdir(working_dir)
        ids_in_fold.sort()
        ids_in_fold = [i for i in ids_in_fold if os.path.isdir(os.path.join(working_dir, i))]
        ids_in_fold_as_int = [int(i) for i in ids_in_fold]
        for pat_id in ids_in_fold_as_int:
            pat_in_dataset = a[pat_id]
            seg_pred = np.load(os.path.join(working_dir, "%03.0d" % pat_id, "segs.npz"))['seg_pred']
            # map the prediction back into the original image geometry before export
            b = convert_to_original_coord_system(seg_pred, pat_in_dataset)
            sitk_img = sitk.GetImageFromArray(b)
            sitk_img.SetSpacing(pat_in_dataset['spacing'])
            sitk_img.SetDirection(pat_in_dataset['direction'])
            sitk_img.SetOrigin(pat_in_dataset['origin'])
            sitk.WriteImage(sitk_img, os.path.join(out_dir, pat_in_dataset['name'] + ".nii.gz"))
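If you want to verify the export, the written file can be read back with SimpleITK and its geometry compared against the dataset record. A minimal sketch (the file name is illustrative):

import SimpleITK as sitk

img = sitk.ReadImage("training_set_results/patient_001.nii.gz")  # illustrative path
print(img.GetSpacing(), img.GetOrigin(), img.GetDirection())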
Example 2: save_val_dataset_as_nifti
# Required import: import dataset [as alias]
# Or alternatively: from dataset import load_dataset [as alias]
import os
import numpy as np
import SimpleITK as sitk
# paths, convert_to_brats_seg, convert_to_original_coord_system and load_dataset come from the surrounding project

def save_val_dataset_as_nifti(results_dir=os.path.join(paths.results_folder, "final"),
                              out_dir=os.path.join(paths.results_folder, "val_set_results_new")):
    if not os.path.isdir(out_dir):
        os.mkdir(out_dir)
    a = load_dataset(folder=paths.preprocessed_validation_data_folder)
    for pat in a.keys():
        probs = []
        for fold in range(5):
            working_dir = os.path.join(results_dir, "fold%d" % fold, "pred_val_set")
            res = np.load(os.path.join(working_dir, "%03.0d" % pat, "segs.npz"))
            probs.append(res['softmax_ouput'][None])  # (sic) key name as saved by the prediction script
        # ensemble: average the softmax outputs of all 5 folds, then argmax over classes
        prediction = np.vstack(probs).mean(0).argmax(0)
        prediction_new = convert_to_brats_seg(prediction)
        np.savez_compressed(os.path.join(out_dir, "%03.0d.npz" % pat), seg=prediction)
        b = convert_to_original_coord_system(prediction_new, a[pat])
        sitk_img = sitk.GetImageFromArray(b)
        sitk_img.SetSpacing(a[pat]['spacing'])
        sitk_img.SetDirection(a[pat]['direction'])
        sitk_img.SetOrigin(a[pat]['origin'])
        sitk.WriteImage(sitk_img, os.path.join(out_dir, a[pat]['name'] + ".nii.gz"))
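Examples 2 and 3 share the same ensembling pattern: each fold's softmax volume gets a leading singleton axis via [None], the volumes are stacked with np.vstack, averaged over the fold axis, and reduced to a label map with argmax over the class axis. A self-contained toy version of that reduction (random stand-in arrays, not real BraTS volumes):

import numpy as np

# 5 folds, 4 classes, a tiny 2x2x2 volume
probs = [np.random.rand(4, 2, 2, 2)[None] for _ in range(5)]  # each (1, C, X, Y, Z)
prediction = np.vstack(probs).mean(0).argmax(0)               # (X, Y, Z) label map
print(prediction.shape)  # (2, 2, 2)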
Example 3: save_test_set_as_nifti
# Required import: import dataset [as alias]
# Or alternatively: from dataset import load_dataset [as alias]
import os
import numpy as np
import SimpleITK as sitk
# paths, convert_to_brats_seg, convert_to_original_coord_system and load_dataset come from the surrounding project

def save_test_set_as_nifti(results_dir=os.path.join(paths.results_folder, "final"),
                           out_dir=os.path.join(paths.results_folder, "test_set_results")):
    if not os.path.isdir(out_dir):
        os.mkdir(out_dir)
    a = load_dataset(folder=paths.preprocessed_testing_data_folder)
    for pat in a.keys():
        probs = []
        for fold in range(5):
            working_dir = os.path.join(results_dir, "fold%d" % fold, "pred_test_set")
            res = np.load(os.path.join(working_dir, "%03.0d" % pat, "segs.npz"))
            probs.append(res['softmax_ouput'][None])  # (sic) key name as saved by the prediction script
        prediction = np.vstack(probs).mean(0).argmax(0)  # ensemble: mean softmax over folds, argmax over classes
        prediction_new = convert_to_brats_seg(prediction)
        np.savez_compressed(os.path.join(out_dir, "%03.0d.npz" % pat), seg=prediction)
        b = convert_to_original_coord_system(prediction_new, a[pat])
        sitk_img = sitk.GetImageFromArray(b)
        sitk_img.SetSpacing(a[pat]['spacing'])
        sitk_img.SetDirection(a[pat]['direction'])
        sitk_img.SetOrigin(a[pat]['origin'])
        sitk.WriteImage(sitk_img, os.path.join(out_dir, a[pat]['name'] + ".nii.gz"))
Example 4: prepare_data
# Required import: import dataset [as alias]
# Or alternatively: from dataset import load_dataset [as alias]
def prepare_data(device, args):
    data = load_dataset(args.dataset)
    g, features, labels, n_classes, train_nid, val_nid, test_nid = data
    in_feats = features.shape[1]
    feats = preprocess(g, features, args, device)
    # move to device
    labels = labels.to(device)
    train_nid = train_nid.to(device)
    val_nid = val_nid.to(device)
    test_nid = test_nid.to(device)
    train_feats = [x[train_nid] for x in feats]
    train_labels = labels[train_nid]
    return feats, labels, train_feats, train_labels, in_feats, \
           n_classes, train_nid, val_nid, test_nid
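Here preprocess (project-specific, not shown) evidently returns a list of feature matrices, e.g. one per propagation hop, so the training subset is taken by indexing every matrix with the same node-id tensor. A toy PyTorch sketch of that indexing pattern, with made-up shapes:

import torch

n_nodes, in_feats, num_hops = 100, 16, 3
feats = [torch.randn(n_nodes, in_feats) for _ in range(num_hops)]  # one matrix per hop (assumed)
train_nid = torch.arange(60)                                       # first 60 nodes as a stand-in
train_feats = [x[train_nid] for x in feats]                        # list of (60, in_feats) tensors
print([tuple(t.shape) for t in train_feats])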
Example 5: run
# Required import: import dataset [as alias]
# Or alternatively: from dataset import load_dataset [as alias]
import os
import cPickle
import numpy as np
import theano
import theano.tensor as T
import lasagne
# paths, build_net, softmax_helper, preprocess and run_validation_mirroring come from the
# surrounding (Python 2 / Theano-era) project

def run(fold=0):
    print(fold)
    I_AM_FOLD = fold
    all_data = load_dataset(folder=paths.preprocessed_testing_data_folder)
    use_patients = all_data
    experiment_name = "final"
    results_folder = os.path.join(paths.results_folder, experiment_name,
                                  "fold%d" % I_AM_FOLD)
    write_images = False
    save_npy = True
    INPUT_PATCH_SIZE = (None, None, None)
    BATCH_SIZE = 2
    n_repeats = 3
    num_classes = 4

    x_sym = T.tensor5()
    net, seg_layer = build_net(x_sym, INPUT_PATCH_SIZE, num_classes, 4, 16, batch_size=BATCH_SIZE,
                               do_instance_norm=True)
    output_layer = seg_layer

    results_out_folder = os.path.join(results_folder, "pred_test_set")
    if not os.path.isdir(results_out_folder):
        os.mkdir(results_out_folder)

    with open(os.path.join(results_folder, "%s_Params.pkl" % experiment_name), 'r') as f:
        params = cPickle.load(f)
    lasagne.layers.set_all_param_values(output_layer, params)

    print("compiling theano functions")
    output = softmax_helper(lasagne.layers.get_output(output_layer, x_sym, deterministic=False,
                                                      batch_norm_update_averages=False,
                                                      batch_norm_use_averages=False))
    pred_fn = theano.function([x_sym], output)
    _ = pred_fn(np.random.random((BATCH_SIZE, 4, 176, 192, 176)).astype(np.float32))  # preallocate memory on GPU
    run_validation_mirroring(pred_fn, results_out_folder, use_patients, write_images=write_images,
                             hasBrainMask=False, BATCH_SIZE=BATCH_SIZE, num_repeats=n_repeats,
                             preprocess_fn=preprocess, save_npy=save_npy, save_proba=False)
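The dummy forward pass right after theano.function mirrors the comment in Example 7: calling the compiled function once with a correctly shaped random batch preallocates GPU memory before the real data arrives. A stripped-down illustration of the same warm-up pattern, with a trivial graph standing in for the network:

import numpy as np
import theano
import theano.tensor as T

x = T.tensor5()
fn = theano.function([x], x * 2)  # trivial stand-in for the compiled network output
_ = fn(np.zeros((2, 4, 8, 8, 8), dtype=np.float32))  # warm-up call allocates buffers for this shape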
Example 6: load_dataset_for_previous_run
# Required import: import dataset [as alias]
# Or alternatively: from dataset import load_dataset [as alias]
import os
import re
# dataset, config and locate_result_subdir come from the surrounding project

def load_dataset_for_previous_run(run_id, **kwargs):  # => dataset_obj, mirror_augment
    result_subdir = locate_result_subdir(run_id)

    # Parse config.txt.
    parsed_cfg = dict()
    with open(os.path.join(result_subdir, 'config.txt'), 'rt') as f:
        for line in f:
            if line.startswith('dataset =') or line.startswith('train ='):
                exec(line, parsed_cfg, parsed_cfg)
    dataset_cfg = parsed_cfg.get('dataset', dict())
    train_cfg = parsed_cfg.get('train', dict())
    mirror_augment = train_cfg.get('mirror_augment', False)

    # Handle legacy options.
    if 'h5_path' in dataset_cfg:
        dataset_cfg['tfrecord_dir'] = dataset_cfg.pop('h5_path').replace('.h5', '')
    if 'mirror_augment' in dataset_cfg:
        mirror_augment = dataset_cfg.pop('mirror_augment')
    if 'max_labels' in dataset_cfg:
        v = dataset_cfg.pop('max_labels')
        if v is None: v = 0
        if v == 'all': v = 'full'
        dataset_cfg['max_label_size'] = v
    if 'max_images' in dataset_cfg:
        dataset_cfg.pop('max_images')

    # Handle legacy dataset names.
    v = dataset_cfg['tfrecord_dir']
    v = v.replace('-32x32', '').replace('-32', '')
    v = v.replace('-128x128', '').replace('-128', '')
    v = v.replace('-256x256', '').replace('-256', '')
    v = v.replace('-1024x1024', '').replace('-1024', '')
    v = v.replace('celeba-hq', 'celebahq')
    v = v.replace('cifar-10', 'cifar10')
    v = v.replace('cifar-100', 'cifar100')
    v = v.replace('mnist-rgb', 'mnistrgb')
    v = re.sub('lsun-100k-([^-]*)', 'lsun-\\1-100k', v)
    v = re.sub('lsun-full-([^-]*)', 'lsun-\\1-full', v)
    dataset_cfg['tfrecord_dir'] = v

    # Load dataset.
    dataset_cfg.update(kwargs)
    dataset_obj = dataset.load_dataset(data_dir=config.data_dir, **dataset_cfg)
    return dataset_obj, mirror_augment
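The exec-based parsing above assumes config.txt stores each option as a Python assignment of a dict literal, one per line. A hypothetical illustration of a line that loop would pick up, and what it yields:

# Hypothetical config.txt line in the format the parser expects:
line = "dataset = dict(tfrecord_dir='celebahq-1024x1024', max_labels=None)\n"

parsed_cfg = dict()
exec(line, parsed_cfg, parsed_cfg)
print(parsed_cfg['dataset'])  # {'tfrecord_dir': 'celebahq-1024x1024', 'max_labels': None}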
Example 7: run
# Required import: import dataset [as alias]
# Or alternatively: from dataset import load_dataset [as alias]
import os
import cPickle
import numpy as np
import theano
import theano.tensor as T
import lasagne
from sklearn.cross_validation import KFold  # old sklearn API, as used by this (Python 2-era) project
# paths, build_net, softmax_helper, preprocess and run_validation_mirroring come from the surrounding project

def run(fold=0):
    print(fold)
    I_AM_FOLD = fold
    all_data = load_dataset()

    # reproduce the deterministic 5-fold split used during training to recover this fold's validation keys
    keys_sorted = np.sort(all_data.keys())
    crossval_folds = KFold(len(all_data.keys()), n_folds=5, shuffle=True, random_state=123456)
    ctr = 0
    for train_idx, test_idx in crossval_folds:
        print(len(train_idx), len(test_idx))
        if ctr == I_AM_FOLD:
            test_keys = [keys_sorted[i] for i in test_idx]
            break
        ctr += 1
    validation_data = {i: all_data[i] for i in test_keys}
    use_patients = validation_data

    EXPERIMENT_NAME = "final"
    results_folder = os.path.join(paths.results_folder,
                                  EXPERIMENT_NAME, "fold%d" % I_AM_FOLD)
    write_images = False
    save_npy = True
    INPUT_PATCH_SIZE = (None, None, None)
    BATCH_SIZE = 2
    n_repeats = 2
    num_classes = 4

    x_sym = T.tensor5()
    net, seg_layer = build_net(x_sym, INPUT_PATCH_SIZE, num_classes, 4, 16, batch_size=BATCH_SIZE,
                               do_instance_norm=True)
    output_layer = seg_layer

    results_out_folder = os.path.join(results_folder, "validation")
    if not os.path.isdir(results_out_folder):
        os.mkdir(results_out_folder)

    with open(os.path.join(results_folder, "%s_Params.pkl" % EXPERIMENT_NAME), 'r') as f:
        params = cPickle.load(f)
    lasagne.layers.set_all_param_values(output_layer, params)

    print("compiling theano functions")
    output = softmax_helper(lasagne.layers.get_output(output_layer, x_sym, deterministic=False,
                                                      batch_norm_update_averages=False,
                                                      batch_norm_use_averages=False))
    pred_fn = theano.function([x_sym], output)
    _ = pred_fn(np.random.random((BATCH_SIZE, 4, 176, 192, 176)).astype(np.float32))  # preallocate memory on GPU
    run_validation_mirroring(pred_fn, results_out_folder, use_patients, write_images=write_images,
                             hasBrainMask=False, BATCH_SIZE=BATCH_SIZE, num_repeats=n_repeats,
                             preprocess_fn=preprocess, save_npy=save_npy)
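The split reproduction works because KFold is deterministic for a fixed random_state: rebuilding it with the same arguments regenerates exactly the train/test indices used at training time, so the validation keys of fold I_AM_FOLD can be recovered without any saved split file. The same idea with the modern scikit-learn API (the example itself uses the older sklearn.cross_validation.KFold signature):

import numpy as np
from sklearn.model_selection import KFold

keys_sorted = np.sort(np.arange(10))  # stand-in for the sorted patient ids
kf = KFold(n_splits=5, shuffle=True, random_state=123456)
for fold, (train_idx, test_idx) in enumerate(kf.split(keys_sorted)):
    print(fold, [keys_sorted[i] for i in test_idx])  # identical on every run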