This article collects typical usage examples of the Python method mne.io.Raw.resample. If you are unsure what Raw.resample does, how to call it, or what it is used for, the curated example code below should help. You can also explore further usage examples of the containing class, mne.io.Raw.
The following presents 11 code examples of Raw.resample, listed by popularity by default.
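Before the examples, a minimal sketch of the basic call may help orient the reader. The file name ('sample_raw.fif') and the 250 Hz target rate below are placeholders rather than values taken from the examples; preload=True and npad='auto' follow the usage shown throughout this page.

from mne.io import Raw

# 'sample_raw.fif' is a placeholder path; resampling requires the data in memory.
raw = Raw('sample_raw.fif', preload=True)
print('Original sampling rate:', raw.info['sfreq'])

# Resample in place to 250 Hz; npad='auto' picks a suitable FFT padding length.
raw.resample(250., npad='auto')
print('New sampling rate:', raw.info['sfreq'])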
Example 1: test_chpi_subtraction
# Required import: from mne.io import Raw [as alias]
# Or: from mne.io.Raw import resample [as alias]
def test_chpi_subtraction():
    """Test subtraction of cHPI signals"""
    raw = Raw(chpi_fif_fname, allow_maxshield='yes', preload=True)
    with catch_logging() as log:
        filter_chpi(raw, include_line=False, verbose=True)
    assert_true('5 cHPI' in log.getvalue())
    # MaxFilter doesn't do quite as well as our algorithm with the last bit
    raw.crop(0, 16, copy=False)
    raw_c = Raw(sss_hpisubt_fname, preload=True).crop(0, 16, copy=False)
    raw_c.pick_types(meg=True, eeg=True, eog=True, ecg=True, stim=True,
                     misc=True, copy=False)  # remove cHPI status chans
    assert_meg_snr(raw, raw_c, 143, 624)

    # Degenerate cases
    raw_nohpi = Raw(test_fif_fname, preload=True)
    assert_raises(RuntimeError, filter_chpi, raw_nohpi)

    # When MaxFilter downsamples, like::
    #     $ maxfilter -nosss -ds 2 -f test_move_anon_raw.fif \
    #           -o test_move_anon_ds2_raw.fif
    # it can strip out some values of info, which we emulate here:
    raw = Raw(chpi_fif_fname, allow_maxshield='yes')
    raw.crop(0, 1, copy=False).load_data()
    raw.resample(600., npad='auto')
    raw.info['buffer_size_sec'] = np.float64(2.)
    raw.info['lowpass'] = 200.
    del raw.info['maxshield']
    del raw.info['hpi_results'][0]['moments']
    del raw.info['hpi_subsystem']['event_channel']
    with catch_logging() as log:
        filter_chpi(raw, verbose=True)
    assert_true('2 cHPI' in log.getvalue())
Example 2: test_calculate_chpi_positions
# Required import: from mne.io import Raw [as alias]
# Or: from mne.io.Raw import resample [as alias]
def test_calculate_chpi_positions():
    """Test calculation of cHPI positions
    """
    trans, rot, t = head_pos_to_trans_rot_t(read_head_pos(pos_fname))
    raw = Raw(chpi_fif_fname, allow_maxshield="yes", preload=True)
    t -= raw.first_samp / raw.info["sfreq"]
    quats = _calculate_chpi_positions(raw, verbose="debug")
    trans_est, rot_est, t_est = head_pos_to_trans_rot_t(quats)
    _compare_positions((trans, rot, t), (trans_est, rot_est, t_est), 0.003)

    # degenerate conditions
    raw_no_chpi = Raw(test_fif_fname)
    assert_raises(RuntimeError, _calculate_chpi_positions, raw_no_chpi)
    raw_bad = raw.copy()
    for d in raw_bad.info["dig"]:
        if d["kind"] == FIFF.FIFFV_POINT_HPI:
            d["coord_frame"] = 999
            break
    assert_raises(RuntimeError, _calculate_chpi_positions, raw_bad)
    raw_bad = raw.copy()
    for d in raw_bad.info["dig"]:
        if d["kind"] == FIFF.FIFFV_POINT_HPI:
            d["r"] = np.ones(3)
    raw_bad.crop(0, 1.0, copy=False)
    with warnings.catch_warnings(record=True):  # bad pos
        with catch_logging() as log_file:
            _calculate_chpi_positions(raw_bad, verbose=True)
    # ignore HPI info header and [done] footer
    for line in log_file.getvalue().strip().split("\n")[4:-1]:
        assert_true("0/5 good" in line)

    # half the rate cuts off cHPI coils
    with warnings.catch_warnings(record=True):  # uint cast suggestion
        raw.resample(300.0, npad="auto")
    assert_raises_regex(RuntimeError, "above the", _calculate_chpi_positions, raw)
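The last two lines of this test encode a Nyquist argument: the cHPI coils emit sinusoids at fixed frequencies, so after resampling to 300 Hz (Nyquist 150 Hz) any coil above that limit can no longer be recovered, which is what triggers the "above the" RuntimeError. A small numeric sketch of that check follows; the coil frequencies are illustrative assumptions, not the values stored in this file (the real ones live in raw.info and differ between systems).

sfreq_new = 300.0  # rate used in the test above
nyquist = sfreq_new / 2.0
coil_freqs = [83., 143., 203., 263., 323.]  # hypothetical cHPI coil frequencies
lost = [f for f in coil_freqs if f >= nyquist]
print('Coils unusable after resampling:', lost)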
Example 3: test_calculate_chpi_positions
# Required import: from mne.io import Raw [as alias]
# Or: from mne.io.Raw import resample [as alias]
def test_calculate_chpi_positions():
    """Test calculation of cHPI positions
    """
    trans, rot, t = head_pos_to_trans_rot_t(read_head_pos(pos_fname))
    raw = Raw(chpi_fif_fname, allow_maxshield='yes', preload=True)
    t -= raw.first_samp / raw.info['sfreq']
    quats = _calculate_chpi_positions(raw, verbose='debug')
    trans_est, rot_est, t_est = head_pos_to_trans_rot_t(quats)
    _compare_positions((trans, rot, t), (trans_est, rot_est, t_est), 0.003)

    # degenerate conditions
    raw_no_chpi = Raw(test_fif_fname)
    assert_raises(RuntimeError, _calculate_chpi_positions, raw_no_chpi)
    raw_bad = raw.copy()
    for d in raw_bad.info['dig']:
        if d['kind'] == FIFF.FIFFV_POINT_HPI:
            d['coord_frame'] = 999
            break
    assert_raises(RuntimeError, _calculate_chpi_positions, raw_bad)
    raw_bad = raw.copy()
    for d in raw_bad.info['dig']:
        if d['kind'] == FIFF.FIFFV_POINT_HPI:
            d['r'] = np.ones(3)
    raw_bad.crop(0, 1., copy=False)
    with warnings.catch_warnings(record=True):  # bad pos
        with catch_logging() as log_file:
            _calculate_chpi_positions(raw_bad, verbose=True)
    # ignore HPI info header and [done] footer
    for line in log_file.getvalue().strip().split('\n')[4:-1]:
        assert_true('0/5 good' in line)

    # half the rate cuts off cHPI coils
    raw.resample(300., npad='auto')
    assert_raises_regex(RuntimeError, 'above the',
                        _calculate_chpi_positions, raw)
Example 4: test_resample
# Required import: from mne.io import Raw [as alias]
# Or: from mne.io.Raw import resample [as alias]
def test_resample():
    """Test resample (with I/O and multiple files)
    """
    tempdir = _TempDir()
    raw = Raw(fif_fname).crop(0, 3, False)
    raw.preload_data()
    raw_resamp = raw.copy()
    sfreq = raw.info['sfreq']
    # test parallel on upsample
    raw_resamp.resample(sfreq * 2, n_jobs=2)
    assert_equal(raw_resamp.n_times, len(raw_resamp.times))
    raw_resamp.save(op.join(tempdir, 'raw_resamp-raw.fif'))
    raw_resamp = Raw(op.join(tempdir, 'raw_resamp-raw.fif'), preload=True)
    assert_equal(sfreq, raw_resamp.info['sfreq'] / 2)
    assert_equal(raw.n_times, raw_resamp.n_times / 2)
    assert_equal(raw_resamp._data.shape[1], raw_resamp.n_times)
    assert_equal(raw._data.shape[0], raw_resamp._data.shape[0])
    # test non-parallel on downsample
    raw_resamp.resample(sfreq, n_jobs=1)
    assert_equal(raw_resamp.info['sfreq'], sfreq)
    assert_equal(raw._data.shape, raw_resamp._data.shape)
    assert_equal(raw.first_samp, raw_resamp.first_samp)
    assert_equal(raw.last_samp, raw_resamp.last_samp)
    # upsampling then downsampling doubles resampling error, but this still
    # works (hooray). Note that the stim channels had to be sub-sampled
    # without filtering to be accurately preserved
    # note we have to treat MEG and EEG+STIM channels differently (tols)
    assert_allclose(raw._data[:306, 200:-200],
                    raw_resamp._data[:306, 200:-200],
                    rtol=1e-2, atol=1e-12)
    assert_allclose(raw._data[306:, 200:-200],
                    raw_resamp._data[306:, 200:-200],
                    rtol=1e-2, atol=1e-7)
    # now check multiple file support w/resampling, as order of operations
    # (concat, resample) should not affect our data
    raw1 = raw.copy()
    raw2 = raw.copy()
    raw3 = raw.copy()
    raw4 = raw.copy()
    raw1 = concatenate_raws([raw1, raw2])
    raw1.resample(10)
    raw3.resample(10)
    raw4.resample(10)
    raw3 = concatenate_raws([raw3, raw4])
    assert_array_equal(raw1._data, raw3._data)
    assert_array_equal(raw1._first_samps, raw3._first_samps)
    assert_array_equal(raw1._last_samps, raw3._last_samps)
    assert_array_equal(raw1._raw_lengths, raw3._raw_lengths)
    assert_equal(raw1.first_samp, raw3.first_samp)
    assert_equal(raw1.last_samp, raw3.last_samp)
    assert_equal(raw1.info['sfreq'], raw3.info['sfreq'])
Example 5: test_compute_covariance_auto_reg
# Required import: from mne.io import Raw [as alias]
# Or: from mne.io.Raw import resample [as alias]
def test_compute_covariance_auto_reg():
    """Test automated regularization"""
    raw = Raw(raw_fname, preload=True)
    raw.resample(100, npad='auto')  # much faster estimation
    events = find_events(raw, stim_channel='STI 014')
    event_ids = [1, 2, 3, 4]
    reject = dict(mag=4e-12)
    # cov with merged events and keep_sample_mean=True
    events_merged = merge_events(events, event_ids, 1234)
    # we need a few channels for numerical reasons in PCA/FA
    picks = pick_types(raw.info, meg='mag', eeg=False)[:10]
    raw.pick_channels([raw.ch_names[pick] for pick in picks])
    raw.info.normalize_proj()
    epochs = Epochs(
        raw, events_merged, 1234, tmin=-0.2, tmax=0,
        baseline=(-0.2, -0.1), proj=True, reject=reject, preload=True)
    epochs = epochs.crop(None, 0)[:10]

    method_params = dict(factor_analysis=dict(iter_n_components=[3]),
                         pca=dict(iter_n_components=[3]))
    covs = compute_covariance(epochs, method='auto',
                              method_params=method_params,
                              projs=True,
                              return_estimators=True)
    logliks = [c['loglik'] for c in covs]
    assert_true(np.diff(logliks).max() <= 0)  # descending order

    methods = ['empirical',
               'factor_analysis',
               'ledoit_wolf',
               'pca']
    cov3 = compute_covariance(epochs, method=methods,
                              method_params=method_params, projs=None,
                              return_estimators=True)
    assert_equal(set([c['method'] for c in cov3]),
                 set(methods))

    # invalid prespecified method
    assert_raises(ValueError, compute_covariance, epochs, method='pizza')
    # invalid scalings
    assert_raises(ValueError, compute_covariance, epochs, method='shrunk',
                  scalings=dict(misc=123))
示例6: print
# 需要导入模块: from mne.io import Raw [as 别名]
# 或者: from mne.io.Raw import resample [as 别名]
'-o', color='red')
plt.xlabel('time (s)')
plt.legend(['original', 'downsampled'], loc='best')
plt.title('Effect of downsampling')
mne.viz.tight_layout()
###############################################################################
# When resampling epochs is unwanted or impossible, for example when the data
# doesn't fit into memory or your analysis pipeline doesn't involve epochs at
# all, the alternative approach is to resample the continous data. This
# can also be done on non-preloaded data.
# Resample to 300 Hz
raw_resampled = raw.resample(300, copy=True)
###############################################################################
# Because resampling also affects the stim channels, some trigger onsets might
# be lost in this case. While MNE attempts to downsample the stim channels in
# an intelligent manner to avoid this, the recommended approach is to find
# events on the original data before downsampling.
print('Number of events before resampling:', len(mne.find_events(raw)))
# Resample to 100 Hz (generates warning)
raw_resampled = raw.resample(100, copy=True)
print('Number of events after resampling:',
len(mne.find_events(raw_resampled)))
# To avoid losing events, jointly resample the data and event matrix
events = mne.find_events(raw)
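The excerpt stops at the event extraction; a minimal sketch of the joint step it refers to follows. It assumes that, as demonstrated in Example 7 below, resample accepts an events argument and then returns both the resampled data and the remapped event matrix, and that this can be combined with copy=True in the MNE version used here.

# Jointly resample the data and the event matrix so trigger onsets are preserved.
raw_resampled, events_resampled = raw.resample(100, events=events, copy=True)
print('Number of events after joint resampling:', len(events_resampled))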
Example 7: test_resample
# Required import: from mne.io import Raw [as alias]
# Or: from mne.io.Raw import resample [as alias]
def test_resample():
    """Test resample (with I/O and multiple files)
    """
    tempdir = _TempDir()
    raw = Raw(fif_fname).crop(0, 3, False)
    raw.load_data()
    raw_resamp = raw.copy()
    sfreq = raw.info['sfreq']
    # test parallel on upsample
    raw_resamp.resample(sfreq * 2, n_jobs=2, npad='auto')
    assert_equal(raw_resamp.n_times, len(raw_resamp.times))
    raw_resamp.save(op.join(tempdir, 'raw_resamp-raw.fif'))
    raw_resamp = Raw(op.join(tempdir, 'raw_resamp-raw.fif'), preload=True)
    assert_equal(sfreq, raw_resamp.info['sfreq'] / 2)
    assert_equal(raw.n_times, raw_resamp.n_times / 2)
    assert_equal(raw_resamp._data.shape[1], raw_resamp.n_times)
    assert_equal(raw._data.shape[0], raw_resamp._data.shape[0])
    # test non-parallel on downsample
    raw_resamp.resample(sfreq, n_jobs=1, npad='auto')
    assert_equal(raw_resamp.info['sfreq'], sfreq)
    assert_equal(raw._data.shape, raw_resamp._data.shape)
    assert_equal(raw.first_samp, raw_resamp.first_samp)
    assert_equal(raw.last_samp, raw_resamp.last_samp)
    # upsampling then downsampling doubles resampling error, but this still
    # works (hooray). Note that the stim channels had to be sub-sampled
    # without filtering to be accurately preserved
    # note we have to treat MEG and EEG+STIM channels differently (tols)
    assert_allclose(raw._data[:306, 200:-200],
                    raw_resamp._data[:306, 200:-200],
                    rtol=1e-2, atol=1e-12)
    assert_allclose(raw._data[306:, 200:-200],
                    raw_resamp._data[306:, 200:-200],
                    rtol=1e-2, atol=1e-7)
    # now check multiple file support w/resampling, as order of operations
    # (concat, resample) should not affect our data
    raw1 = raw.copy()
    raw2 = raw.copy()
    raw3 = raw.copy()
    raw4 = raw.copy()
    raw1 = concatenate_raws([raw1, raw2])
    raw1.resample(10., npad='auto')
    raw3.resample(10., npad='auto')
    raw4.resample(10., npad='auto')
    raw3 = concatenate_raws([raw3, raw4])
    assert_array_equal(raw1._data, raw3._data)
    assert_array_equal(raw1._first_samps, raw3._first_samps)
    assert_array_equal(raw1._last_samps, raw3._last_samps)
    assert_array_equal(raw1._raw_lengths, raw3._raw_lengths)
    assert_equal(raw1.first_samp, raw3.first_samp)
    assert_equal(raw1.last_samp, raw3.last_samp)
    assert_equal(raw1.info['sfreq'], raw3.info['sfreq'])

    # test resampling of stim channel
    # basic decimation
    stim = [1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0]
    raw = RawArray([stim], create_info(1, len(stim), ['stim']))
    assert_allclose(raw.resample(8., npad='auto')._data,
                    [[1, 1, 0, 0, 1, 1, 0, 0]])

    # decimation of multiple stim channels
    raw = RawArray(2 * [stim], create_info(2, len(stim), 2 * ['stim']))
    assert_allclose(raw.resample(8., npad='auto')._data,
                    [[1, 1, 0, 0, 1, 1, 0, 0],
                     [1, 1, 0, 0, 1, 1, 0, 0]])

    # decimation that could potentially drop events if the decimation is
    # done naively
    stim = [0, 0, 0, 1, 1, 0, 0, 0]
    raw = RawArray([stim], create_info(1, len(stim), ['stim']))
    assert_allclose(raw.resample(4., npad='auto')._data,
                    [[0, 1, 1, 0]])

    # two events are merged in this case (warning)
    stim = [0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0]
    raw = RawArray([stim], create_info(1, len(stim), ['stim']))
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        raw.resample(8., npad='auto')
    assert_true(len(w) == 1)

    # events are dropped in this case (warning)
    stim = [0, 1, 1, 0, 0, 1, 1, 0]
    raw = RawArray([stim], create_info(1, len(stim), ['stim']))
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        raw.resample(4., npad='auto')
    assert_true(len(w) == 1)

    # test resampling events: this should no longer give a warning
    stim = [0, 1, 1, 0, 0, 1, 1, 0]
    raw = RawArray([stim], create_info(1, len(stim), ['stim']))
    events = find_events(raw)
    raw, events = raw.resample(4., events=events, npad='auto')
    assert_equal(events, np.array([[0, 0, 1], [2, 0, 1]]))

    # test copy flag
    stim = [1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0]
    raw = RawArray([stim], create_info(1, len(stim), ['stim']))
    # ... (remainder of this example omitted) ...
Example 8: preprocess_fif_to_ts
# Required import: from mne.io import Raw [as alias]
# Or: from mne.io.Raw import resample [as alias]
def preprocess_fif_to_ts(fif_file, l_freq, h_freq, down_sfreq, is_sensor_space):
    import os
    import numpy as np

    from mne.io import Raw
    # from mne.io import RawFIF  # was working in previous versions of pyMNE

    from nipype.utils.filemanip import split_filename as split_f

    subj_path, basename, ext = split_f(fif_file)
    (data_path, sbj_name) = os.path.split(subj_path)

    print(data_path)
    print(fif_file)

    raw = Raw(fif_file, preload=True)
    print(raw)
    print(len(raw.ch_names))

    select_sensors, = np.where(np.array([ch_name[0] == 'M'
                                         for ch_name in raw.ch_names],
                                        dtype='bool') == True)

    ### save electrode locations
    sens_loc = [raw.info['chs'][i]['loc'][:3] for i in select_sensors]
    sens_loc = np.array(sens_loc)

    # AP 21032016
    # channel_coords_file = os.path.join(data_path, "correct_channel_coords.txt")
    channel_coords_file = os.path.abspath("correct_channel_coords.txt")
    np.savetxt(channel_coords_file, sens_loc, fmt='%s')
    # print(sens_loc)

    ### save electrode names
    sens_names = np.array([raw.ch_names[pos] for pos in select_sensors], dtype="str")

    # AP 21032016
    # channel_names_file = os.path.join(data_path, "correct_channel_names.txt")
    channel_names_file = os.path.abspath("correct_channel_names.txt")
    np.savetxt(channel_names_file, sens_names, fmt='%s')

    ### filtering + downsampling
    data, times = raw[select_sensors, :]
    print(data.shape)
    print(raw.info['sfreq'])
    raw.filter(l_freq=None, h_freq=h_freq, picks=select_sensors)
    raw.resample(sfreq=down_sfreq, npad=0, stim_picks=select_sensors)

    ### save data
    data, times = raw[select_sensors, :]
    print(data.shape)
    print(raw.info['sfreq'])

    ts_file = os.path.abspath(basename + ".npy")
    np.save(ts_file, data)

    if is_sensor_space:
        return ts_file, channel_coords_file, channel_names_file, raw.info['sfreq']
    else:
        return raw, channel_coords_file, channel_names_file, raw.info['sfreq']
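A hedged usage sketch for this pipeline node follows; the file path, cutoff frequency, and target rate are purely illustrative assumptions, not values taken from the original workflow.

# Hypothetical call; 'subject01_raw.fif' is a placeholder path.
ts_file, coords_file, names_file, sfreq = preprocess_fif_to_ts(
    'subject01_raw.fif', l_freq=None, h_freq=150., down_sfreq=300.,
    is_sensor_space=True)
print('Saved MEG time series to', ts_file, 'resampled to', sfreq, 'Hz')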
示例9: print
# 需要导入模块: from mne.io import Raw [as 别名]
# 或者: from mne.io.Raw import resample [as 别名]
'-o', color='red')
plt.xlabel('time (s)')
plt.legend(['original', 'downsampled'], loc='best')
plt.title('Effect of downsampling')
mne.viz.tight_layout()
###############################################################################
# When resampling epochs is unwanted or impossible, for example when the data
# doesn't fit into memory or your analysis pipeline doesn't involve epochs at
# all, the alternative approach is to resample the continous data. This
# can also be done on non-preloaded data.
# Resample to 300 Hz
raw_resampled = raw.resample(300, npad='auto', copy=True)
###############################################################################
# Because resampling also affects the stim channels, some trigger onsets might
# be lost in this case. While MNE attempts to downsample the stim channels in
# an intelligent manner to avoid this, the recommended approach is to find
# events on the original data before downsampling.
print('Number of events before resampling:', len(mne.find_events(raw)))
# Resample to 100 Hz (generates warning)
raw_resampled = raw.resample(100, npad='auto', copy=True)
print('Number of events after resampling:',
len(mne.find_events(raw_resampled)))
# To avoid losing events, jointly resample the data and event matrix
events = mne.find_events(raw)
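The comment at the top of this excerpt mentions resampling at the epoch level as the alternative route; a minimal hedged sketch of that approach follows. The event_id of 1 and the -0.2 to 0.5 s window are illustrative assumptions, and the epochs must be preloaded before they can be resampled.

# Epoch on the original, full-rate events, then resample the epochs themselves.
epochs = mne.Epochs(raw, events, event_id=1, tmin=-0.2, tmax=0.5, preload=True)
epochs_resampled = epochs.copy().resample(100, npad='auto')
print('Epoch sampling rate after resampling:', epochs_resampled.info['sfreq'])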
Example 10: preprocess_ICA_fif_to_ts
# Required import: from mne.io import Raw [as alias]
# Or: from mne.io.Raw import resample [as alias]
def preprocess_ICA_fif_to_ts(fif_file, ECG_ch_name, EoG_ch_name, l_freq, h_freq,
                             down_sfreq, variance, is_sensor_space, data_type):
    import os
    import numpy as np

    import mne
    from mne.io import Raw
    from mne.preprocessing import ICA, read_ica
    from mne.preprocessing import create_ecg_epochs, create_eog_epochs
    from mne.report import Report

    from nipype.utils.filemanip import split_filename as split_f

    report = Report()

    subj_path, basename, ext = split_f(fif_file)
    (data_path, sbj_name) = os.path.split(subj_path)
    print(data_path)

    ### Read raw
    # If None the compensation in the data is not modified. If set to n, e.g. 3,
    # apply gradient compensation of grade n as for CTF systems (compensation=3)
    raw = Raw(fif_file, preload=True)

    ### select sensors
    select_sensors = mne.pick_types(raw.info, meg=True, ref_meg=False, exclude='bads')
    picks_meeg = mne.pick_types(raw.info, meg=True, eeg=True, exclude='bads')

    ### save electrode locations
    sens_loc = [raw.info['chs'][i]['loc'][:3] for i in select_sensors]
    sens_loc = np.array(sens_loc)

    # AP 21032016
    # channel_coords_file = os.path.join(data_path, "correct_channel_coords.txt")
    channel_coords_file = os.path.abspath("correct_channel_coords.txt")
    print('*************************** ' + channel_coords_file + '*********************************')
    np.savetxt(channel_coords_file, sens_loc, fmt='%s')

    ### save electrode names
    sens_names = np.array([raw.ch_names[pos] for pos in select_sensors], dtype="str")

    # AP 21032016
    # channel_names_file = os.path.join(data_path, "correct_channel_names.txt")
    channel_names_file = os.path.abspath("correct_channel_names.txt")
    np.savetxt(channel_names_file, sens_names, fmt='%s')

    ### filtering + downsampling
    # raw.filter(l_freq=l_freq, h_freq=h_freq, picks=picks_meeg, method='iir', n_jobs=8)
    raw.filter(l_freq=l_freq, h_freq=h_freq, picks=picks_meeg, method='iir')
    raw.resample(sfreq=down_sfreq, npad=0)

    ### 1) Fit ICA model using the FastICA algorithm
    # Other available choices are `infomax` or `extended-infomax`
    # We pass a float value between 0 and 1 to select n_components based on the
    # percentage of variance explained by the PCA components.
    ICA_title = 'Sources related to %s artifacts (red)'
    is_show = False  # visualization
    reject = dict(mag=4e-12, grad=4000e-13)

    # check if we have an ICA; if yes, we load it
    ica_filename = os.path.join(subj_path, basename + "-ica.fif")
    if not os.path.exists(ica_filename):
        ica = ICA(n_components=variance, method='fastica', max_iter=500)
        ica.fit(raw, picks=select_sensors, reject=reject)  # decim = 3,
        has_ICA = False
    else:
        has_ICA = True
        print(ica_filename + ' exists!!!')
        ica = read_ica(ica_filename)
        ica.exclude = []

    ### 2) identify bad components by analyzing latent sources.
    # generate ECG epochs, use detection via phase statistics
    # if we just have exclude channels we jump these steps
    # if len(ica.exclude) == 0:
    n_max_ecg = 3
    n_max_eog = 2

    # check if ECG_ch_name is in the raw channels
    if ECG_ch_name in raw.info['ch_names']:
        ecg_epochs = create_ecg_epochs(raw, tmin=-.5, tmax=.5, picks=select_sensors,
                                       ch_name=ECG_ch_name)
    # if not, a synthetic ECG channel is created from the cross-channel average
    else:
        ecg_epochs = create_ecg_epochs(raw, tmin=-.5, tmax=.5, picks=select_sensors)

    ### ICA for ECG artifact
    # threshold=0.25 is the default
    ecg_inds, scores = ica.find_bads_ecg(ecg_epochs, method='ctps')
    print(scores)
    print('*************************** ' + str(len(ecg_inds)) + '*********************************')

    if len(ecg_inds) > 0:
        ecg_evoked = ecg_epochs.average()
        fig1 = ica.plot_scores(scores, exclude=ecg_inds, title=ICA_title % 'ecg', show=is_show)
        show_picks = np.abs(scores).argsort()[::-1][:5]  # pick the five largest scores and plot them
        # ... (remainder of this example omitted) ...
Example 11: preprocess_set_ICA_comp_fif_to_ts
# Required import: from mne.io import Raw [as alias]
# Or: from mne.io.Raw import resample [as alias]
def preprocess_set_ICA_comp_fif_to_ts(fif_file, n_comp_exclude, l_freq, h_freq,
                                      down_sfreq, is_sensor_space):
    import os
    import numpy as np
    import sys

    import mne
    from mne.io import Raw
    from mne.preprocessing import read_ica
    from mne.report import Report

    from nipype.utils.filemanip import split_filename as split_f

    report = Report()

    subj_path, basename, ext = split_f(fif_file)
    (data_path, sbj_name) = os.path.split(subj_path)
    print('********************************* ' + data_path + ' *********************************')
    print('********************************* ' + sbj_name + ' *********************************')

    n_session = int(''.join(filter(str.isdigit, basename)))
    print('********************************* n session = %d' % n_session + ' *********************************')

    ### Read raw
    raw = Raw(fif_file, preload=True)

    ### select sensors
    select_sensors = mne.pick_types(raw.info, meg=True, ref_meg=False, exclude='bads')
    picks_meeg = mne.pick_types(raw.info, meg=True, eeg=True, exclude='bads')

    ### save electrode locations
    sens_loc = [raw.info['chs'][i]['loc'][:3] for i in select_sensors]
    sens_loc = np.array(sens_loc)

    # AP 21032016
    # channel_coords_file = os.path.join(data_path, "correct_channel_coords.txt")
    channel_coords_file = os.path.abspath("correct_channel_coords.txt")
    np.savetxt(channel_coords_file, sens_loc, fmt='%s')

    ### save electrode names
    sens_names = np.array([raw.ch_names[pos] for pos in select_sensors], dtype="str")

    # AP 21032016
    # channel_names_file = os.path.join(data_path, "correct_channel_names.txt")
    channel_names_file = os.path.abspath("correct_channel_names.txt")
    np.savetxt(channel_names_file, sens_names, fmt='%s')

    ### filtering + downsampling
    # raw.filter(l_freq=l_freq, h_freq=h_freq, picks=picks_meeg, method='iir', n_jobs=8)
    raw.filter(l_freq=l_freq, h_freq=h_freq, picks=picks_meeg, method='iir')
    raw.resample(sfreq=down_sfreq, npad=0)

    ### load ICA
    is_show = False  # visualization
    ica_filename = os.path.join(subj_path, basename + "-ica.fif")
    if not os.path.exists(ica_filename):
        print("$$$$$$$$$$$$$ Warning, no %s found" % ica_filename)
        sys.exit()
    else:
        ica = read_ica(ica_filename)

    # AP 210316
    print('***** ica.exclude before set components = ', ica.exclude)
    if sbj_name in n_comp_exclude:
        print('********************************* ICA to be excluded for sbj %s ' % sbj_name +
              ' ' + str(n_comp_exclude[sbj_name]) + ' *********************************')
        matrix_c_ICA = n_comp_exclude[sbj_name]

        if not matrix_c_ICA[n_session - 1]:
            print('no ICA')
        else:
            print('********************************* ICA to be excluded for session %d ' % n_session +
                  ' ' + str(matrix_c_ICA[n_session - 1]) + ' *********************************')

        ica.exclude = matrix_c_ICA[n_session - 1]
    # ica.exclude = n_comp_exclude
    print('***** ica.exclude after set components = ', ica.exclude)

    fig1 = ica.plot_overlay(raw, show=is_show)
    report.add_figs_to_section(fig1, captions=['Signal'], section='Signal quality')
    report_filename = os.path.join(subj_path, basename + "-report_NEW.html")
    print(report_filename)
    report.save(report_filename, open_browser=False, overwrite=True)

    ### 3) apply ICA to raw data and save solution and report
    # check the amplitudes do not change
    raw_ica = ica.apply(raw, copy=True)

    ### save ICA solution
    print(ica_filename)
    ica.save(ica_filename)

    ### 4) save data
    data_noIca, times = raw[select_sensors, :]
    data, times = raw_ica[select_sensors, :]
    print(data.shape)
    print(raw.info['sfreq'])

    ts_file = os.path.abspath(basename + "_ica.npy")
    np.save(ts_file, data)
    # ... (remainder of this example omitted) ...