本文整理匯總了Python中scipy.io.savemat方法的典型用法代碼示例。如果您正苦於以下問題:Python io.savemat方法的具體用法?Python io.savemat怎麽用?Python io.savemat使用的例子?那麽,這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在模塊scipy.io的用法示例。
在下文中一共展示了io.savemat方法的15個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Python代碼示例。
示例1: make_mnist_data
# 需要導入模塊: from scipy import io [as 別名]
# 或者: from scipy.io import savemat [as 別名]
def make_mnist_data(path, isconv=False):
    """Write MNIST train/test splits to traindata.mat / testdata.mat.

    The min-max scaler is fitted on train and test jointly so both splits
    share one scaling; with ``isconv`` the flat vectors are reshaped to
    NCHW (N, 1, 28, 28) for convolutional models.
    """
    train_X, train_Y = load_mnist(path, True)
    train_X = train_X.astype(np.float64)
    test_X, test_Y = load_mnist(path, False)
    test_X = test_X.astype(np.float64)

    # Fit the scaler once on the concatenation of both splits.
    scaler = MinMaxScaler().fit(np.concatenate((train_X, test_X), axis=0))

    train_X = scaler.transform(train_X)
    if isconv:
        train_X = train_X.reshape((-1, 1, 28, 28))
    sio.savemat(osp.join(path, 'traindata.mat'), {'X': train_X, 'Y': train_Y})

    test_X = scaler.transform(test_X)
    if isconv:
        test_X = test_X.reshape((-1, 1, 28, 28))
    sio.savemat(osp.join(path, 'testdata.mat'), {'X': test_X, 'Y': test_Y})
示例2: doLaplacianSolveWithConstraints
# 需要導入模塊: from scipy import io [as 別名]
# 或者: from scipy.io import savemat [as 別名]
def doLaplacianSolveWithConstraints(self, evt):
    """Solve for new mesh vertex positions under soft Laplacian anchor constraints.

    Reads ``self.laplacianConstraints`` (mapping: vertex index -> target 3-D
    position), solves the soft-constrained Laplacian system with IGL cotangent
    weights, writes the result back to ``self.mesh.VPos``, dumps the solver
    inputs to ``anchors.mat`` for offline inspection, and refreshes the display.
    """
    anchorWeights = 1e8  # large weight => anchors behave as near-hard constraints
    anchorsIdx = []
    anchors = np.zeros((len(self.laplacianConstraints), 3))
    # Idiom fix: enumerate over items() instead of a manual counter plus
    # a second dict lookup per key.
    for i, (idx, pos) in enumerate(self.laplacianConstraints.items()):
        anchorsIdx.append(idx)
        anchors[i, :] = pos
    # IGL cotangent weights
    (L, M_inv, solver, deltaCoords) = makeLaplacianMatrixSolverIGLSoft(
        self.mesh.VPos, self.mesh.ITris, anchorsIdx, anchorWeights)
    self.mesh.VPos = solveLaplacianMatrixIGLSoft(
        solver, L, M_inv, deltaCoords, anchorsIdx, anchors, anchorWeights)
    # Alternative umbrella-weight solver kept for reference:
    # L = makeLaplacianMatrixUmbrellaWeights(self.mesh.VPos, self.mesh.ITris, anchorsIdx, anchorWeights)
    # deltaCoords = L.dot(self.mesh.VPos)[0:self.mesh.VPos.shape[0], :]
    # self.mesh.VPos = np.array(solveLaplacianMatrix(L, deltaCoords, anchors, anchorWeights), dtype=np.float32)
    sio.savemat("anchors.mat", {'deltaCoords': deltaCoords, 'anchors': anchors,
                                'anchorsIdx': np.array(anchorsIdx)})
    self.mesh.needsDisplayUpdate = True
    self.mesh.updateIndexDisplayList()
    self.Refresh()
示例3: cal_pca_matrix
# 需要導入模塊: from scipy import io [as 別名]
# 或者: from scipy.io import savemat [as 別名]
def cal_pca_matrix(path='PCA_matrix.mat', ksize=15, l_max=12.0, dim_pca=15, num_samples=500):
    """Sample random anisotropic Gaussian blur kernels, compute their PCA
    projection matrix, save it to `path` under key 'p', and return it.
    """
    kernel_bank = np.zeros([ksize * ksize, num_samples], dtype=np.float32)
    for col in range(num_samples):
        # Random orientation and (ordered) axis lengths for each kernel.
        theta = np.pi * np.random.rand(1)
        l1 = 0.1 + l_max * np.random.rand(1)
        l2 = 0.1 + (l1 - 0.1) * np.random.rand(1)
        kern = anisotropic_Gaussian(ksize=ksize, theta=theta[0], l1=l1[0], l2=l2[0])
        # Column-major flatten, matching MATLAB's memory layout.
        kernel_bank[:, col] = kern.flatten(order='F')
    pca_matrix = get_pca_matrix(kernel_bank, dim_pca=dim_pca)
    io.savemat(path, {'p': pca_matrix})
    return pca_matrix
示例4: precision_recall
# 需要導入模塊: from scipy import io [as 別名]
# 或者: from scipy.io import savemat [as 別名]
def precision_recall(params):
    """Rank database items for each validation query by binary-code similarity
    and save the 1-based ranking plus labels to ./data/data.mat for a
    MATLAB-side precision/recall evaluation.

    params keys (array-likes): 'database_code', 'validation_code',
    'database_labels', 'validation_labels'.
    """
    database_code = np.sign(np.array(params['database_code']))
    validation_code = np.sign(np.array(params['validation_code']))
    # Bug fix: astype() returns a *copy*; the original discarded the result,
    # so the labels were never cast.  Also use builtin int — np.int was
    # removed in NumPy 1.24, which made the original crash on modern NumPy.
    database_labels = np.array(params['database_labels']).astype(int)
    validation_labels = np.array(params['validation_labels']).astype(int)
    # sim[i, j] = <database_i, validation_j>; higher means more similar.
    sim = np.dot(database_code, validation_code.T)
    ids = np.argsort(-sim, axis=0)  # descending similarity per query column
    print(np.min(ids))
    ids = ids + 1  # shift to MATLAB's 1-based indexing
    print(np.min(ids))
    mat_ids = dict(
        ids=ids,
        LBase=database_labels,
        LTest=validation_labels
    )
    scio.savemat('./data/data.mat', mat_ids)
示例5: bench_run
# 需要導入模塊: from scipy import io [as 別名]
# 或者: from scipy.io import savemat [as 別名]
def bench_run():
    """Benchmark savemat/loadmat round-trips on synthetic struct arrays of
    increasing size, with and without compression, printing a timing table.
    """
    str_io = BytesIO()
    header = (
        '',
        'Read / writing matlab structs',
        '=' * 60,
        ' write | read | vars | fields | structs | compressed',
        '-' * 60,
        '',
    )
    for line in header:
        print(line)
    cases = ((10, 10, 20), (20, 20, 40), (30, 30, 50))
    for n_vars, n_fields, n_structs in cases:
        # NOTE: `var_dict` and `str_io` are referenced by name inside the
        # measured statement strings, so these locals must keep their names.
        var_dict = make_structarr(n_vars, n_fields, n_structs)
        for compression in (False, True):
            str_io = BytesIO()
            write_time = measure('sio.savemat(str_io, var_dict, do_compression=%r)' % compression)
            read_time = measure('sio.loadmat(str_io)')
            row = (write_time, read_time, n_vars, n_fields, n_structs, compression)
            print('%.5f | %.5f | %5d | %5d | %5d | %r' % row)
示例6: createAccount
# 需要導入模塊: from scipy import io [as 別名]
# 或者: from scipy.io import savemat [as 別名]
def createAccount(template, mask, name, exinfo):
    '''
    Description:
        Store a new account in the database from an extracted iris feature
        plus enroller metadata.  The account file is named with the next
        sequential number (count of existing .mat files + 1).

    Input:
        template - Extracted template from the iris image
        mask     - Extracted mask from the iris image
        name     - Name of the enroller
        exinfo   - Extra information of the enroller
    '''
    # Next account number = number of existing .mat accounts + 1.
    existing = [f for f in os.listdir(temp_database_path) if f.endswith(".mat")]
    filename = str(len(existing) + 1)
    # Persist template, mask and enroller info in one .mat file.
    sio.savemat(temp_database_path + filename + '.mat',
                mdict={'template': template, 'mask': mask,
                       'name': name, 'exinfo': exinfo})
示例7: predict_probs
# 需要導入模塊: from scipy import io [as 別名]
# 或者: from scipy.io import savemat [as 別名]
def predict_probs(img, net, FLAGS, DATA):
    """Run the network on one image and save its probability map as a .mat file
    (key 'p') named after the image, inside FLAGS.log.  Skips unreadable images.
    """
    cvim = cv2.imread(img, cv2.IMREAD_UNCHANGED)
    if cvim is None:
        print("No image to open for ", img)
        return
    # Predict class probabilities and report elapsed wall time.
    start = time.time()
    model_path = FLAGS.path + '/' + FLAGS.model
    probs = net.predict(cvim, path=model_path, verbose=FLAGS.verbose, as_probs=True)
    print("Prediction for img ", img, ". Elapsed: ", time.time() - start, "s")
    # Save next to the logs, keyed by the image's base name.
    stem = os.path.splitext(os.path.basename(img))[0]
    matname = FLAGS.log + "/" + stem + ".mat"
    sio.savemat(matname, {'p': probs})
    return
示例8: stitchPatch
# 需要導入模塊: from scipy import io [as 別名]
# 或者: from scipy.io import savemat [as 別名]
def stitchPatch(root_folder, dir1, imgname, featfolder, savefolder):
# stitch the features of patches to feature of full image
name = os.path.join(dir1, imgname)
print 'name:%s\n' %(name)
Im = os.path.join(featfolder, name[0:-4])
I = [None]*16
for i in range(9):
dict1 = sio.loadmat(Im+'_0'+str(i+1)+'.mat')
I[i] = dict1['feat']
for i in range(9,16):
dict2 = sio.loadmat(Im+'_'+str(i+1)+'.mat')
I[i] = dict2['feat']
A = np.zeros((4*500,4*500))
for row in range(4):
for col in range(4):
A[row*500:(row+1)*500,col*500:(col+1)*500] = I[row*4+col]
sio.savemat(savefolder+name[0:-4], {'A':np.mat(A)})
示例9: get_feature
# 需要導入模塊: from scipy import io [as 別名]
# 或者: from scipy.io import savemat [as 別名]
def get_feature():
    """Extract GoogLeNet features for every test sample in ../data/dataset.mat
    and save them together with the labels to ../data/feature.mat.
    """
    # Placeholders for network input and the batch-norm training flag.
    # assumes grayscale 64x64 inputs — shape fixed by the placeholder below.
    inputs = tf.placeholder("float", [None, 64, 64, 1])
    is_training = tf.placeholder("bool")
    _, feature = googlenet(inputs, is_training)
    # Squeeze axes 1 and 2 — presumably the feature is (N, 1, 1, C);
    # verify against the googlenet() implementation.
    feature = tf.squeeze(feature, [1, 2])
    sess = tf.Session()
    sess.run(tf.global_variables_initializer())
    saver = tf.train.Saver()
    data = sio.loadmat("../data/dataset.mat")
    # Rescale pixels to [-1, 1] (assumes raw values in [0, 255] — TODO confirm).
    testdata = data["test"] / 127.5 - 1.0
    testlabels = data["testlabels"]
    saver.restore(sess, "../save_para/.\\model.ckpt")
    nums_test = testdata.shape[0]
    # Feature buffer; 1024 matches the squeezed feature width expected here.
    FEATURE = np.zeros([nums_test, 1024])
    # Run full batches first ...
    for i in range(nums_test // BATCH_SIZE):
        FEATURE[i * BATCH_SIZE:i * BATCH_SIZE + BATCH_SIZE] = sess.run(feature, feed_dict={inputs: testdata[i * BATCH_SIZE:i * BATCH_SIZE + BATCH_SIZE], is_training: False})
    # ... then one final run for the leftover tail (empty slice if none).
    FEATURE[(nums_test // BATCH_SIZE - 1) * BATCH_SIZE + BATCH_SIZE:] = sess.run(feature, feed_dict={inputs: testdata[(nums_test // BATCH_SIZE - 1) * BATCH_SIZE + BATCH_SIZE:], is_training: False})
    sio.savemat("../data/feature.mat", {"feature": FEATURE, "testlabels": testlabels})
開發者ID:MingtaoGuo,項目名稱:Chinese-Character-and-Calligraphic-Image-Processing,代碼行數:20,代碼來源:feature_distribution(t-sne).py
示例10: tsne
# 需要導入模塊: from scipy import io [as 別名]
# 或者: from scipy.io import savemat [as 別名]
def tsne():
    """Project the saved test features to 2-D with t-SNE and store the result.

    Reads 'feature' from ../data/feature.mat and writes the embedding to
    ../data/proj.mat under key 'proj'.
    """
    feature_test = sio.loadmat("../data/feature.mat")["feature"]
    embedding = TSNE().fit_transform(feature_test)
    sio.savemat("../data/proj.mat", {"proj": embedding})
開發者ID:MingtaoGuo,項目名稱:Chinese-Character-and-Calligraphic-Image-Processing,代碼行數:7,代碼來源:feature_distribution(t-sne).py
示例11: to_file_map
# 需要導入模塊: from scipy import io [as 別名]
# 或者: from scipy.io import savemat [as 別名]
def to_file_map(self, file_map=None):
    ''' Write image to `file_map` or contained ``self.file_map``

    Extends Analyze ``to_file_map`` method by also writing the SPM99-style
    ``mat`` file carrying the affine, when an affine is set.

    Parameters
    ----------
    file_map : None or mapping, optional
        files mapping.  If None (default) use object's ``file_map``
        attribute instead
    '''
    if file_map is None:
        file_map = self.file_map
    super(Spm99AnalyzeImage, self).to_file_map(file_map)
    mat = self._affine
    if mat is None:
        # No affine to record: the Analyze part is already written; done.
        return
    import scipy.io as sio
    hdr = self._header
    if hdr.default_x_flip:
        # SPM stores M without the radiological x-flip, so undo it here.
        M = np.dot(np.diag([-1, 1, 1, 1]), mat)
    else:
        M = mat
    # Adjust for matlab 1,1,1 voxel origin (MATLAB indices start at 1).
    from_111 = np.eye(4)
    from_111[:3,3] = -1
    M = np.dot(M, from_111)
    mat = np.dot(mat, from_111)
    # use matlab 4 format to allow gzipped write without error
    mfobj = file_map['mat'].get_prepare_fileobj(mode='wb')
    sio.savemat(mfobj, {'M': M, 'mat': mat}, format='4')
    if file_map['mat'].filename is not None:
        # Close only when backed by a named file we opened; presumably a
        # caller-supplied file object should stay open — TODO confirm.
        mfobj.close()
示例12: create_random_data
# 需要導入模塊: from scipy import io [as 別名]
# 或者: from scipy.io import savemat [as 別名]
def create_random_data(ntrain=10**4, nbase=10**4, nquery=10**2):
    """
    Create random data

    Returns C-contiguous float32 train/base/query arrays plus ground-truth
    ids; also dumps the first 10 base vectors to a test fixture .mat file.
    """
    # Synthetic dataset from the project's random loader.
    vtrain, vbase, vquery, ids_gnd = load_random(ntrain, nbase, nquery)
    spio.savemat('./test-tmp/hdidx_test_vbase.mat', {'feat': vbase[:10, :]})

    def _c_single(arr):
        # Enforce float32, C-contiguous layout for downstream native code.
        return np.require(arr, np.single, requirements="C")

    return _c_single(vtrain), _c_single(vbase), _c_single(vquery), ids_gnd
示例13: save_misc_data
# 需要導入模塊: from scipy import io [as 別名]
# 或者: from scipy.io import savemat [as 別名]
def save_misc_data(path, X, Y, N):
    """Split (X, Y) 80/20 and save as traindata.mat / testdata.mat in `path`."""
    split = int(N * 4 / 5)  # first 80% -> train, remainder -> test
    train = {'X': X[:split], 'Y': Y[:split]}
    test = {'X': X[split:], 'Y': Y[split:]}
    sio.savemat(osp.join(path, 'traindata.mat'), train)
    sio.savemat(osp.join(path, 'testdata.mat'), test)
示例14: compressed_data
# 需要導入模塊: from scipy import io [as 別名]
# 或者: from scipy.io import savemat [as 別名]
def compressed_data(dataset, n_samples, k, preprocess=None, algo='mknn', isPCA=None, format='mat'):
    """Build the (mutual) k-NN edge set for a dataset and save it, together
    with features and ground-truth labels, to <datadir>/pretrained.{mat,h5}.

    `format` picks the loader and output container; `isPCA` optionally
    reduces dimensionality before graph construction; `algo` chooses plain
    kNN (euclidean) vs mutual kNN (cosine).
    """
    datadir = get_data_dir(dataset)
    # Dispatch the loader on the requested format (default: .mat).
    loaders = {'pkl': load_data, 'h5': load_data_h5py}
    loader = loaders.get(format, load_matdata)
    labels, features = load_train_and_validation(loader, datadir, n_samples)

    features = feature_transformation(features, preprocessing=preprocess)

    # PCA is computed for the Text dataset; see the RCC paper for details.
    reduced = features.copy()
    if isPCA is not None:
        pca = PCA(n_components=isPCA, svd_solver='full').fit(features)
        reduced = pca.transform(features)

    t0 = time()
    if algo == 'knn':
        weights = kNN(reduced, k=k, measure='euclidean')
    else:
        weights = mkNN(reduced, k=k, measure='cosine')
    print('The time taken for edge set computation is {}'.format(time() - t0))

    filepath = os.path.join(datadir, 'pretrained')
    if format == 'h5':
        import h5py
        with h5py.File(filepath + '.h5', 'w') as fo:
            fo.create_dataset('X', data=features)
            fo.create_dataset('w', data=weights[:, :2])
            fo.create_dataset('gtlabels', data=labels)
    else:
        sio.savemat(filepath + '.mat', mdict={'X': features, 'w': weights[:, :2], 'gtlabels': labels})
示例15: SavePredictionScores
# 需要導入模塊: from scipy import io [as 別名]
# 或者: from scipy.io import savemat [as 別名]
def SavePredictionScores(pred_scores, adv_scores, im_height, im_width, args, is_debug=False):
    """Saves the outputs of the network in a mat file.

    Softmaxes clean and adversarial score maps, derives confidence, entropy
    and confidence-ratio maps for both, and writes everything (compressed)
    to <out_dir>/<image>_scores_<model>_eps=<eps>.mat.  In debug mode the
    raw softmaxed score volumes are included too.  Returns the clean
    confidence map.
    """
    pred_scores = softmax(pred_scores)
    adv_scores = softmax(adv_scores)
    conf = pred_scores.max(axis=0)
    adv_conf = adv_scores.max(axis=0)
    # Assemble all derived maps once; debug adds the full score volumes.
    out = {
        'conf': conf,
        'adv_conf': adv_conf,
        'im_height': im_height,
        'im_width': im_width,
        'entropy': entropy(pred_scores),
        'conf_ratio': conf_ratio(pred_scores),
        'adv_entropy': entropy(adv_scores),
        'adv_conf_ratio': conf_ratio(adv_scores),
    }
    if is_debug:
        out['unary'] = pred_scores
        out['unary_adv'] = adv_scores
    image_name = os.path.basename(args.image).split('.')[0]
    save_name = os.path.join(
        args.out_dir,
        "{}_scores_{}_eps={}.mat".format(image_name, args.model_name, args.eps))
    sio.savemat(save_name, out, do_compression=True)
    return conf