This article collects typical usage examples of the scipy.io.loadmat function in Python. Wondering exactly what loadmat does and how to use it? The curated function examples below may help.
Fifteen loadmat code examples are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
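As a quick orientation before the examples: loadmat returns a dict mapping MATLAB variable names to NumPy arrays, alongside metadata keys such as __header__, __version__ and __globals__. A minimal sketch (the file name data.mat and variable name A are placeholders):

import numpy as np
import scipy.io as sio

# Hypothetical round trip: save a matrix, then load it back.
sio.savemat('data.mat', {'A': np.eye(3)})
contents = sio.loadmat('data.mat')
print(sorted(contents.keys()))  # ['A', '__globals__', '__header__', '__version__']
A = contents['A']               # a (3, 3) float64 ndarray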
Example 1: keypoint_detection

def keypoint_detection():
    try:
        data = sio.loadmat('data.mat')
    except IOError:
        load.csv()
        data = sio.loadmat('data.mat')
    train_x = data['train_x']
    train_y = data['train_y']
    test_x = data['test_x']

    # data normalization
    train_x = train_x / 256.0
    train_y = (train_y - 48) / 48.0
    test_x = test_x / 256.0

    # sklearn.utils.shuffle returns the permuted arrays, so the result must be assigned
    train_x, train_y = sklearn.utils.shuffle(train_x, train_y, random_state=0)
    train_x, valid_x = train_x[:-400], train_x[-400:]
    train_y, valid_y = train_y[:-400], train_y[-400:]

    model = Model(0.01, 0.9, 0.0005, 100, 10000)
    model.add_layer(layers.FullConnectedLayer(9216, 256, 1, layers.rectify))
    model.add_layer(layers.DropoutLayer(0.5))
    model.add_layer(layers.FullConnectedLayer(256, 100, 1, layers.rectify))
    model.add_layer(layers.DropoutLayer(0.5))
    model.add_layer(layers.FullConnectedLayer(100, 30))
    model.set_loss_function(layers.EuclideanLoss)
    model.build()
    print('build model complete')
    model.train_model(train_x, train_y, valid_x, valid_y)
    model.save_test_result(test_x)
Example 2: __init__

def __init__(self, complete_path):
    if complete_path.endswith('.mat.gz'):
        # Decompress to a temporary .mat file, load it, then clean up
        temp_filename = complete_path.split('.gz')[0]
        with open(temp_filename, "wb") as tmp, gzip.open(complete_path) as gz:
            shutil.copyfileobj(gz, tmp)
        dict_mr = sio.loadmat(temp_filename)
        os.remove(temp_filename)
    elif complete_path.endswith('.mat'):
        dict_mr = sio.loadmat(complete_path)
    else:
        raise ValueError('Unknown file extension for MountainRange file. '
                         'Should be .mat or .mat.gz')
    self.value = dict_mr['value']
    self.trigger_stamp = dict_mr['triggerStamp']
    self.SC_numb = int(np.squeeze(dict_mr['superCycleNb']))
    self.first_trigger_t_stamp_unix = dict_mr['first_trigger_t_stamp_unix']
    self.sample_interval = float(np.squeeze(dict_mr['sampleInterval']))
    self.first_sample_time = dict_mr['firstSampleTime']
    self.sensitivity = dict_mr['sensitivity']
    self.offset = dict_mr['offset']
    self.SPSuser = dict_mr['SPSuser']
    self.t_stamp_unix = dict_mr['t_stamp_unix']
    # Time axis centered on the middle of the acquisition window
    n_samples = self.value.shape[1]
    self.time_axis = (np.arange(n_samples, dtype=float) * self.sample_interval
                      - n_samples * self.sample_interval / 2.)
Example 3: load_matlab_matrix

def load_matlab_matrix(matfile, matname=None):
    """
    Wraps scipy.io.loadmat.

    If matname is provided, the entry under that key is returned as an
    np.matrix. Otherwise, the matrix is extracted from the full dict
    returned by loadmat.
    """
    out = spio.loadmat(matfile)
    mat = _extract_mat(out) if matname is None else out[matname]
    # if mat is a sparse matrix, convert it to a numpy matrix
    try:
        mat = np.matrix(mat.toarray())
    except AttributeError:
        mat = np.matrix(mat)
    return mat
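The try/except around .toarray() above exists because loadmat hands MATLAB sparse matrices back as scipy.sparse matrices rather than dense ndarrays. A minimal sketch of that behavior, with hypothetical file and variable names:

import scipy.io as sio
import scipy.sparse as sp

# Hypothetical names: save a sparse identity, then load it back.
sio.savemat('sparse_demo.mat', {'S': sp.eye(4, format='csc')})
S = sio.loadmat('sparse_demo.mat')['S']
print(sp.issparse(S))  # True: sparse MATLAB arrays stay sparse
dense = S.toarray()    # the conversion the example above attempts first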
Example 4: _loadGEval

def _loadGEval(self):
    print('Loading densereg GT..')
    prefix = os.path.dirname(__file__) + '/../../DensePoseData/eval_data/'
    print(prefix)
    SMPL_subdiv = loadmat(prefix + 'SMPL_subdiv.mat')
    self.PDIST_transform = loadmat(prefix + 'SMPL_SUBDIV_TRANSFORM.mat')
    self.PDIST_transform = self.PDIST_transform['index'].squeeze()
    UV = np.array([
        SMPL_subdiv['U_subdiv'],
        SMPL_subdiv['V_subdiv']
    ]).squeeze()
    ClosestVertInds = np.arange(UV.shape[1]) + 1
    self.Part_UVs = []
    self.Part_ClosestVertInds = []
    for i in np.arange(24):
        self.Part_UVs.append(
            UV[:, SMPL_subdiv['Part_ID_subdiv'].squeeze() == (i + 1)]
        )
        self.Part_ClosestVertInds.append(
            ClosestVertInds[SMPL_subdiv['Part_ID_subdiv'].squeeze() == (i + 1)]
        )
    arrays = {}
    # Pdist_matrix.mat is read with h5py (MAT v7.3 files are HDF5 containers)
    with h5py.File(prefix + 'Pdist_matrix.mat', 'r') as f:
        for k, v in f.items():
            arrays[k] = np.array(v)
    self.Pdist_matrix = arrays['Pdist_matrix']
    print('Loaded')
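Note why h5py appears in this example: scipy.io.loadmat cannot read MAT files saved with MATLAB's -v7.3 flag, which are HDF5 containers, so those files need an HDF5 reader. A hedged sketch of a fallback helper (load_any_mat is an illustrative name, not part of any library):

import h5py
import numpy as np
import scipy.io as sio

def load_any_mat(path):
    """Load a MAT file, falling back to h5py for v7.3 (HDF5) files."""
    try:
        return sio.loadmat(path)
    except NotImplementedError:
        # loadmat raises NotImplementedError for v7.3 files
        with h5py.File(path, 'r') as f:
            return {k: np.array(v) for k, v in f.items()}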
Example 5: contrast_session

def contrast_session(session, C_path=os.getcwd()):  # takes an integer session ID and a data path
    """
    Session ID and data path required.
    Returns a list of ST_tbc matrices, one per image in the session.
    """
    session_data = sio.loadmat(os.path.join(C_path, 'goodCh_cont'))['goodCh_cont']
    sName = session_data[session, 0][0]
    one_session = sio.loadmat(os.path.join(C_path, sName))
    trials = one_session['MUA']  # trials.shape => (32 channels, ~900 trials)
    img = np.squeeze(one_session['Cond'])
    img_trialNum = Counter(img)
    ST = []
    for p in range(len(img_trialNum)):
        st = np.zeros((img_trialNum[p + 1], 4500, trials.shape[0]))  # ST_tbc
        for channel in range(trials.shape[0]):
            img_trials = trials[channel, img == p + 1]  # list of trials
            for trial in range(len(img_trials)):  # the index is needed to place each spike
                for ap in range(img_trials[trial].shape[1]):
                    b = int(np.ceil(img_trials[trial][0][ap] * 1000))  # convert to ms
                    st[trial, b, channel] = 1  # b stands for bin ('bin' is a Python built-in)
        ST.append(st)
    print(sName)
    return ST
Example 6: test_spm_hrf_octave

def test_spm_hrf_octave():
    # Test SPM hrf against output from SPM code running in Octave
    my_path = dirname(__file__)
    hrfs_path = pjoin(my_path, 'spm_hrfs.mat')
    # mat file resulting from make_hrfs.m
    hrfs_mat = sio.loadmat(hrfs_path, squeeze_me=True)
    params = hrfs_mat['params']
    hrfs = hrfs_mat['hrfs']
    for i, pvec in enumerate(params):
        dt, ppk, upk, pdsp, udsp, rat = pvec
        t_vec = np.arange(0, 32.1, dt)
        our_hrf = spm_hrf_compat(t_vec,
                                 peak_delay=ppk,
                                 peak_disp=pdsp,
                                 under_delay=upk,
                                 under_disp=udsp,
                                 p_u_ratio=rat)
        # Normalize integral to match SPM
        assert_almost_equal(our_hrf, hrfs[i])
    # Test basis functions
    # mat file resulting from get_td_dd.m
    bases_path = pjoin(my_path, 'spm_bases.mat')
    bases_mat = sio.loadmat(bases_path, squeeze_me=True)
    dt = bases_mat['dt']
    t_vec = np.arange(0, 32 + dt, dt)
    # SPM function divides by sum of values - revert with dt
    assert_almost_equal(spmt(t_vec), bases_mat['hrf'] / dt, 4)
    assert_almost_equal(dspmt(t_vec), bases_mat['dhrf'] / dt, 4)
    assert_almost_equal(ddspmt(t_vec), bases_mat['ddhrf'] / dt, 4)
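The squeeze_me=True flag used twice above strips the singleton dimensions MATLAB adds, so a MATLAB scalar comes back as a plain value instead of a (1, 1) array. A small sketch with a hypothetical file:

import numpy as np
import scipy.io as sio

sio.savemat('squeeze_demo.mat', {'dt': 0.1})
raw = sio.loadmat('squeeze_demo.mat')['dt']
print(raw.shape)    # (1, 1): MATLAB stores scalars as 1x1 matrices
sq = sio.loadmat('squeeze_demo.mat', squeeze_me=True)['dt']
print(np.ndim(sq))  # 0: the singleton dimensions are squeezed away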
Example 7: get_images

def get_images(self, img_name):
    # Zero-pad the image number to build the fixed-width file name
    stp = str(img_name)
    if img_name < 10:
        stp = '0000' + stp
    elif img_name < 100:
        stp = '000' + stp
    elif img_name < 1000:
        stp = '00' + stp
    else:
        stp = '0' + stp
    img_path = 'data/portraitFCN_data/' + stp + '.mat'
    alpha_path = 'data/images_mask/' + stp + '_mask.mat'
    if os.path.exists(img_path) and os.path.exists(alpha_path):
        imat = sio.loadmat(img_path)['img']
        amat = sio.loadmat(alpha_path)['mask']
        nimat = np.array(imat, dtype=float)
        namat = np.array(amat, dtype=int)
        # Recover 0-255 values: add the per-channel means back and swap BGR to RGB
        org_mat = np.zeros(nimat.shape, dtype=int)
        h, w, _ = nimat.shape
        for i in range(h):
            for j in range(w):
                org_mat[i][j][0] = round(nimat[i][j][2] * 255 + 122.675)
                org_mat[i][j][1] = round(nimat[i][j][1] * 255 + 116.669)
                org_mat[i][j][2] = round(nimat[i][j][0] * 255 + 104.008)
        return nimat, namat, org_mat
    return None, None, None
Example 8: load_pertub_data_cifar

def load_pertub_data_cifar(dirs='data_imputation/', dataset='cifar10_gcn_var', pertub_type=3, pertub_prob=6):
    # perturb data
    print('Loading perturbed data...')
    if pertub_type == 4:
        zz = sio.loadmat(dirs + dataset + '_type_' + str(pertub_type) + '_params_'
                         + str(int(pertub_prob * 100)) + '_noise_rawdata.mat')
    elif pertub_type == 3:
        pertub_prob = int(pertub_prob)
        zz = sio.loadmat(dirs + dataset + '_type_' + str(pertub_type) + '_params_'
                         + str(pertub_prob) + '_noise_rawdata.mat')
    elif pertub_type == 5:
        zz = sio.loadmat(dirs + dataset + '_type_' + str(pertub_type) + '_params_noise_rawdata.mat')
    else:
        print('Error in load_pertub_data')
        print(dirs, pertub_type, pertub_prob)
        exit()
    data_train = zz['z_train'].T
    data = zz['z_test_original'].T
    data_perturbed = zz['z_test'].T
    pertub_label = zz['pertub_label'].astype(np.float32).T
    pertub_number = float(np.sum(1 - pertub_label))
    print(pertub_number, data_train.shape, data.shape, data_perturbed.shape, pertub_label.shape)
    # Wrap everything in theano shared variables
    data_train = theano.shared(np.asarray(data_train, dtype=theano.config.floatX), borrow=True)
    data = theano.shared(np.asarray(data, dtype=theano.config.floatX), borrow=True)
    data_perturbed = theano.shared(np.asarray(data_perturbed, dtype=theano.config.floatX), borrow=True)
    pertub_label = theano.shared(np.asarray(pertub_label, dtype=theano.config.floatX), borrow=True)
    return data_train, data, data_perturbed, pertub_label, pertub_number
Example 9: main

def main():
    predicted_mat = loadmat(args.predicted_mat)['labels']
    truth_mat = loadmat(args.truth_mat)['GT']
    mode = args.mode
    min_shape = np.minimum(predicted_mat.shape, truth_mat.shape)
    error = None
    if mode == 'all':
        error = compare_all_mats(predicted_mat, truth_mat, min_shape)
        print(1 - error / (min_shape[0] * min_shape[1] * min_shape[2]))
    else:
        error = compare_single_mats(predicted_mat, truth_mat, min_shape)
        error = 1 - error / (min_shape[0] * min_shape[1])
        print(error)
    should_graph = args.graph
    if should_graph == 'True':  # args.graph arrives as a string
        y_axis = error
        x_axis = np.arange(len(y_axis))
        fig = plt.figure()
        ax = fig.add_subplot(111)
        print(type(y_axis))
        # ax.plot(x_axis, y_axis)
        ax.scatter(x_axis, y_axis)
        ax.set_xlim([0, len(x_axis)])
        ax.set_ylim([0, 1])
        plt.savefig(args.output)
Example 10: loadMAT

def loadMAT(slice_filename, parameters_filename):
    '''
    Converts .mat files with the specific configuration used for ECoG data
    from Newcastle Hospitals and creates a DataObj. If you want to load a
    different kind of .mat file, use scipy.io.loadmat and create_DataObj
    directly.

    Parameters
    ----------
    slice_filename: str
        Name of the slice (.mat) file
    parameters_filename: str
        Name of the parameters (.mat) file
    '''
    mat = sio.loadmat(parameters_filename, struct_as_record=False, squeeze_me=True)
    parameters = mat['parameters']
    ch_l = parameters.channels
    ch_labels = [str(x) for x in ch_l]
    sample_rate = parameters.sr
    f = sio.loadmat(slice_filename, struct_as_record=False, squeeze_me=True)
    Data = f['Data']
    time_vec = Data.time_vec
    signal = Data.raw.T
    amp_unit = r'$\mu V$'
    Data = DataObj(signal, sample_rate, amp_unit, ch_labels, time_vec, [])
    return Data
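The struct_as_record=False flag used above controls how MATLAB structs come back: with it, struct fields become attributes on a mat_struct object (hence parameters.channels and Data.time_vec); with the default struct_as_record=True they are fields of a NumPy record array and need explicit indexing. A sketch, assuming a file params.mat holding a struct params with a field sr:

import scipy.io as sio

rec = sio.loadmat('params.mat')   # default: record arrays
sr1 = rec['params']['sr'][0, 0]   # field access via indexing
obj = sio.loadmat('params.mat', struct_as_record=False, squeeze_me=True)
sr2 = obj['params'].sr            # field access via attributes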
Example 11: load_simTB_data

def load_simTB_data(source_directory):
    """
    Load simTB data along with simulation info.
    """
    nifti_files = natural_sort(glob(path.join(source_directory, "*_DATA.nii")))
    sim_files = natural_sort(glob(path.join(source_directory, "*_SIM.mat")))
    if len(nifti_files) != len(sim_files):
        raise ValueError("Different number of DATA and SIM files found in %s"
                         % source_directory)
    assert len(nifti_files) > 0
    param_files = glob(path.join(source_directory, "*PARAMS.mat"))
    if len(param_files) != 1:
        raise ValueError("Exactly one param file needed, found %d in %s"
                         % (len(param_files), source_directory))
    params = tuple(io.loadmat(param_files[0])["sP"][0][0])
    sim_dict = {}
    for i, (nifti_file, sim_file) in enumerate(zip(nifti_files, sim_files)):
        assert "%03d" % (i + 1) in nifti_file
        assert "%03d" % (i + 1) in sim_file
        sims = io.loadmat(sim_file)
        tcs = sims["TC"].T
        sms = sims["SM"]
        sim_dict[i] = {"SM": sms, "TC": tcs}
    sim_dict["params"] = params
    data, labels, base = read_niftis(nifti_files)
    return data, labels, sim_dict
Example 12: TenTwentyDownslopeBPF

def TenTwentyDownslopeBPF(FullPath):
    TempClipData = spio.loadmat(FullPath)
    TempDataArray = TempClipData['data']
    TempDataArray = TempDataArray.transpose()
    Fsample = float(TempClipData['freq'])   # Sampling frequency
    dt = 1.0 / Fsample                      # Time between samples
    TimeValues = np.arange(0.0, 1.0, dt)    # Construct ndarray of time values
    LastChan = int(TempDataArray.shape[1])  # Last channel number
    Channels = np.arange(0, LastChan, 1)    # List of channel numbers
    FeatureOutput = np.zeros(LastChan)      # Initialize the output
    # Read in the digital filter coefficients and place them in ndarrays
    FilterInfo = spio.loadmat('FilterSetTenTwentyDownslopeBPF.mat')
    FilterCoefI = FilterInfo['FilterCoefI'].flatten()
    FilterCoefQ = FilterInfo['FilterCoefQ'].flatten()
    # Calculate the feature values for each channel
    for i in Channels:
        Iproduct = FilterCoefI * TempDataArray[:, i]
        Isum = np.sum(Iproduct)
        Qproduct = FilterCoefQ * TempDataArray[:, i]
        Qsum = np.sum(Qproduct)
        FeatureOutput[i] = np.log(np.sqrt(Isum * Isum + Qsum * Qsum))
        # Limit to the range 0 to 1; second arg is the mean, third is the span
        FeatureOutput[i] = bender(FeatureOutput[i], 4.0, 4.0)
    # Convert the ndarray to a list: the returned value will be appended to
    # other values, which would be very inefficient with an ndarray
    FeatureList = FeatureOutput.tolist()
    # Return the feature vector in the form of a list
    return FeatureList
Example 13: read_dataset

def read_dataset(stimulus_pattern='stimulus_%d.mat', data_file='data.mat'):
    from scipy.io import loadmat
    data = loadmat(data_file)
    data = data['data']
    spikes = data['spike_rate'][0][0]
    del data['spike_rate']
    data['rgc_ids'] = data['rgc_ids'][0][0][0]
    data['cone_weights'] = data['cone_weights'][0][0]
    data['cone_types'] = data['cone_types'][0][0].tolist()
    data['cone_locations'] = data['cone_locations'][0][0]
    data['rgc_locations'] = numpy.array([d[0][0] for d in data['rgc_locations'][0][0]])
    data['rgc_types'] = dict((d[0][0], d[1][0].tolist())
                             for d in filter(lambda d: len(d[0]) > 0,
                                             [d[0][0] for d in data['cell_types'][0][0][0]]))
    try:
        i = 0
        N_timebins = 0
        while True:
            # Stimulus files are numbered; stop when the next one is missing
            data['stimulus'] = loadmat(stimulus_pattern % i)['cone_input'].T
            data['spikes'] = spikes[N_timebins:N_timebins + data['stimulus'].shape[1]]
            N_timebins += data['stimulus'].shape[1]
            i += 1
            yield data
    except IOError:
        return  # ending the generator here raises StopIteration for the caller
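The layers of [0][0] indexing above follow from loadmat's defaults: MATLAB structs arrive as 1x1 record arrays and cell arrays as object arrays, so every level of nesting needs one more unwrapping step. A sketch mirroring the rgc_ids line, with the file name hypothetical:

import scipy.io as sio

# Assuming data.mat holds a MATLAB struct 'data' with a row-vector field 'rgc_ids'.
d = sio.loadmat('data.mat')['data']
rgc_ids = d['rgc_ids'][0][0][0]  # unwrap the 1x1 struct array, then the 1xN vector
# Passing squeeze_me=True to loadmat removes most of this unwrapping.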
Example 14: save_crop_images_and_joints

def save_crop_images_and_joints():
    training_indices = loadmat('data/FLIC-full/tr_plus_indices.mat')
    training_indices = training_indices['tr_plus_indices'].flatten()
    examples = loadmat('data/FLIC-full/examples.mat')
    examples = examples['examples'][0]
    joint_ids = ['lsho', 'lelb', 'lwri', 'rsho', 'relb', 'rwri', 'lhip',
                 'lkne', 'lank', 'rhip', 'rkne', 'rank', 'leye', 'reye',
                 'lear', 'rear', 'nose', 'msho', 'mhip', 'mear', 'mtorso',
                 'mluarm', 'mruarm', 'mllarm', 'mrlarm', 'mluleg', 'mruleg',
                 'mllleg', 'mrlleg']
    available = joint_ids[:8]
    available.extend(joint_ids[12:14])
    available.extend([joint_ids[16]])
    target_joints = ['lsho', 'lelb', 'lwri',
                     'leye', 'reye', 'nose',
                     'rsho', 'relb', 'rwri']
    # Open both CSVs with a context manager so they are flushed and closed
    with open('data/FLIC-full/train_joints.csv', 'w') as fp_train, \
            open('data/FLIC-full/test_joints.csv', 'w') as fp_test:
        for i, example in enumerate(examples):
            joint = example[2].T
            joint = dict(zip(joint_ids, joint))
            fname = example[3][0]
            joint = get_joint_list(joint)
            msg = '{},{}'.format(fname, ','.join([str(j) for j in joint.tolist()]))
            if i in training_indices:
                print(msg, file=fp_train)
            else:
                print(msg, file=fp_test)
Example 15: loadfile_hfreud

def loadfile_hfreud(filename, alpha, rho, n):
    filename = os.path.join(data_directory, filename)
    try:
        data = loadmat(filename)['data'].flatten()
    except IOError:
        data = np.zeros(0)
    if data.size < n + 1:
        # Run matlab to generate/populate the file
        print("Calling matlab....")
        cwd = os.path.dirname(os.path.abspath(__file__))
        command = "cd(" + "'" + cwd + "'); cd ..; "
        command += "data = load_fhfreud({:d}, {:.4f}, {:.4f}); ".format(n, alpha, rho)
        command += "data = fidistinv_hfreud_setup({:d}, {:.4f}, {:.4f}, data); ".format(n, alpha, rho)
        command += "save_fhfreud(data, {:.4f}, {:.4f}); ".format(alpha, rho)
        command += "exit"
        print(command)
        subprocess.call([matlab_binary, "-nodisplay", "-r", command])
        print("...finished")
        data = loadmat(filename)['data'].flatten()
    return data