This article collects typical usage examples of the Python method mne.io.Raw.time_as_index. If you are wondering what Raw.time_as_index does or how to use it, the curated code examples below may help. You can also consult the documentation of the containing class, mne.io.Raw, for more detail.
The following presents 12 code examples of Raw.time_as_index, sorted by popularity by default.
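Before the numbered examples, here is a minimal, self-contained sketch of what Raw.time_as_index does. It builds a synthetic RawArray so no sample data files are required; the channel name, sampling rate, and time points below are arbitrary assumptions rather than values taken from the examples.
import numpy as np
import mne
# Tiny synthetic Raw object (1 channel, 10 s of zeros) so the sketch runs standalone
sfreq = 100.0
info = mne.create_info(ch_names=['MEG 0001'], sfreq=sfreq, ch_types='mag')
raw = mne.io.RawArray(np.zeros((1, int(10 * sfreq))), info)
# time_as_index converts times in seconds into sample indices (roughly time * sfreq)
print(raw.time_as_index([0.0, 1.0, 2.5]))  # -> [  0 100 250]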
Example 1: test_time_index
# Required import: from mne.io import Raw [as alias]
# Or: from mne.io.Raw import time_as_index [as alias]
def test_time_index():
"""Test indexing of raw times"""
raw_fname = op.join(op.dirname(__file__), '..', '..', 'io', 'tests',
'data', 'test_raw.fif')
raw = Raw(raw_fname)
# Test original (non-rounding) indexing behavior
orig_inds = raw.time_as_index(raw.times)
assert(len(set(orig_inds)) != len(orig_inds))
# Test new (rounding) indexing behavior
new_inds = raw.time_as_index(raw.times, use_rounding=True)
assert(len(set(new_inds)) == len(new_inds))
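The behaviour this test exercises can be reproduced with plain NumPy: truncation via int casting may map two neighbouring time points onto the same sample, whereas rounding (what use_rounding=True enables) keeps the mapping one-to-one. A small sketch, with an assumed Neuromag-like sampling rate rather than the rate of the test file:
import numpy as np
sfreq = 600.614990234375                       # assumed sampling rate
times = np.arange(1000) / sfreq                # exact sample times
trunc = (times * sfreq).astype(int)            # truncation (old default behaviour)
rounded = np.round(times * sfreq).astype(int)  # rounding (use_rounding=True)
print(len(set(trunc)) == len(trunc))           # may be False: duplicated indices
print(len(set(rounded)) == len(rounded))       # True: one index per time point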
Example 2: test_io_complex
# Required import: from mne.io import Raw [as alias]
# Or: from mne.io.Raw import time_as_index [as alias]
def test_io_complex():
"""Test IO with complex data types
"""
rng = np.random.RandomState(0)
tempdir = _TempDir()
dtypes = [np.complex64, np.complex128]
raw = Raw(fif_fname, preload=True)
picks = np.arange(5)
start, stop = raw.time_as_index([0, 5])
data_orig, _ = raw[picks, start:stop]
for di, dtype in enumerate(dtypes):
imag_rand = np.array(1j * rng.randn(data_orig.shape[0], data_orig.shape[1]), dtype)
raw_cp = raw.copy()
raw_cp._data = np.array(raw_cp._data, dtype)
raw_cp._data[picks, start:stop] += imag_rand
# saving complex data is expected to emit a warning (caught and counted below)
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
raw_cp.save(op.join(tempdir, "raw.fif"), picks, tmin=0, tmax=5, overwrite=True)
# warning gets thrown on every instance b/c simplefilter('always')
assert_equal(len(w), 1)
raw2 = Raw(op.join(tempdir, "raw.fif"))
raw2_data, _ = raw2[picks, :]
n_samp = raw2_data.shape[1]
assert_allclose(raw2_data[:, :n_samp], raw_cp._data[picks, :n_samp])
# with preloading
raw2 = Raw(op.join(tempdir, "raw.fif"), preload=True)
raw2_data, _ = raw2[picks, :]
n_samp = raw2_data.shape[1]
assert_allclose(raw2_data[:, :n_samp], raw_cp._data[picks, :n_samp])
Example 3: test_raw_index_as_time
# Required import: from mne.io import Raw [as alias]
# Or: from mne.io.Raw import time_as_index [as alias]
def test_raw_index_as_time():
""" Test index as time conversion"""
raw = Raw(fif_fname, preload=True)
t0 = raw.index_as_time([0], True)[0]
t1 = raw.index_as_time([100], False)[0]
t2 = raw.index_as_time([100], True)[0]
assert_true((t2 - t1) == t0)
# ensure we can go back and forth
t3 = raw.index_as_time(raw.time_as_index([0], True), True)
assert_array_almost_equal(t3, [0.0], 2)
t3 = raw.index_as_time(raw.time_as_index(raw.info['sfreq'], True), True)
assert_array_almost_equal(t3, [raw.info['sfreq']], 2)
t3 = raw.index_as_time(raw.time_as_index(raw.info['sfreq'], False), False)
assert_array_almost_equal(t3, [raw.info['sfreq']], 2)
i0 = raw.time_as_index(raw.index_as_time([0], True), True)
assert_true(i0[0] == 0)
i1 = raw.time_as_index(raw.index_as_time([100], True), True)
assert_true(i1[0] == 100)
# Have to add small amount of time because we truncate via int casting
i1 = raw.time_as_index(raw.index_as_time([100.0001], False), False)
assert_true(i1[0] == 100)
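Raw.index_as_time is no longer available in recent MNE releases, but the round trip above can still be expressed by indexing raw.times directly. A self-contained sketch on synthetic data (channel name, rate, and the two time points are assumptions):
import numpy as np
import mne
info = mne.create_info(['MEG 0001'], sfreq=1000.0, ch_types='mag')
raw = mne.io.RawArray(np.zeros((1, 5000)), info)      # 5 s of synthetic data
# seconds -> samples -> seconds: raw.times[idx] plays the role of index_as_time
idx = raw.time_as_index([1.75, 2.25], use_rounding=True)
print(raw.times[idx])                                 # approximately [1.75, 2.25]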
Example 4: test_io_raw
# Required import: from mne.io import Raw [as alias]
# Or: from mne.io.Raw import time_as_index [as alias]
def test_io_raw():
"""Test IO for raw data (Neuromag + CTF + gz)
"""
tempdir = _TempDir()
# test unicode io
for chars in [b'\xc3\xa4\xc3\xb6\xc3\xa9', b'a']:
with Raw(fif_fname) as r:
assert_true('Raw' in repr(r))
assert_true(op.basename(fif_fname) in repr(r))
desc1 = r.info['description'] = chars.decode('utf-8')
temp_file = op.join(tempdir, 'raw.fif')
r.save(temp_file, overwrite=True)
with Raw(temp_file) as r2:
desc2 = r2.info['description']
assert_equal(desc1, desc2)
# Let's construct a simple test for IO first
raw = Raw(fif_fname).crop(0, 3.5, False)
raw.load_data()
# put in some data that we know the values of
data = rng.randn(raw._data.shape[0], raw._data.shape[1])
raw._data[:, :] = data
# save it somewhere
fname = op.join(tempdir, 'test_copy_raw.fif')
raw.save(fname, buffer_size_sec=1.0)
# read it in, make sure the whole thing matches
raw = Raw(fname)
assert_allclose(data, raw[:, :][0], rtol=1e-6, atol=1e-20)
# let's read portions across the 1-sec tag boundary, too
inds = raw.time_as_index([1.75, 2.25])
sl = slice(inds[0], inds[1])
assert_allclose(data[:, sl], raw[:, sl][0], rtol=1e-6, atol=1e-20)
# now let's do some real I/O
fnames_in = [fif_fname, test_fif_gz_fname, ctf_fname]
fnames_out = ['raw.fif', 'raw.fif.gz', 'raw.fif']
for fname_in, fname_out in zip(fnames_in, fnames_out):
fname_out = op.join(tempdir, fname_out)
raw = Raw(fname_in)
nchan = raw.info['nchan']
ch_names = raw.info['ch_names']
meg_channels_idx = [k for k in range(nchan)
if ch_names[k][0] == 'M']
n_channels = 100
meg_channels_idx = meg_channels_idx[:n_channels]
start, stop = raw.time_as_index([0, 5])
data, times = raw[meg_channels_idx, start:(stop + 1)]
meg_ch_names = [ch_names[k] for k in meg_channels_idx]
# Set up pick list: MEG + STI 014 - bad channels
include = ['STI 014']
include += meg_ch_names
picks = pick_types(raw.info, meg=True, eeg=False, stim=True,
misc=True, ref_meg=True, include=include,
exclude='bads')
# Writing with drop_small_buffer True
raw.save(fname_out, picks, tmin=0, tmax=4, buffer_size_sec=3,
drop_small_buffer=True, overwrite=True)
raw2 = Raw(fname_out)
sel = pick_channels(raw2.ch_names, meg_ch_names)
data2, times2 = raw2[sel, :]
assert_true(times2.max() <= 3)
# Writing
raw.save(fname_out, picks, tmin=0, tmax=5, overwrite=True)
if fname_in == fif_fname or fname_in == fif_fname + '.gz':
assert_equal(len(raw.info['dig']), 146)
raw2 = Raw(fname_out)
sel = pick_channels(raw2.ch_names, meg_ch_names)
data2, times2 = raw2[sel, :]
assert_allclose(data, data2, rtol=1e-6, atol=1e-20)
assert_allclose(times, times2)
assert_allclose(raw.info['sfreq'], raw2.info['sfreq'], rtol=1e-5)
# check transformations
for trans in ['dev_head_t', 'dev_ctf_t', 'ctf_head_t']:
if raw.info[trans] is None:
assert_true(raw2.info[trans] is None)
else:
assert_array_equal(raw.info[trans]['trans'],
raw2.info[trans]['trans'])
# check transformation 'from' and 'to'
if trans.startswith('dev'):
from_id = FIFF.FIFFV_COORD_DEVICE
else:
from_id = FIFF.FIFFV_MNE_COORD_CTF_HEAD
if trans[4:8] == 'head':
to_id = FIFF.FIFFV_COORD_HEAD
else:
to_id = FIFF.FIFFV_MNE_COORD_CTF_HEAD
for raw_ in [raw, raw2]:
assert_equal(raw_.info[trans]['from'], from_id)
#......... (rest of the code omitted) .........
Example 5: test_raw_time_as_index
# Required import: from mne.io import Raw [as alias]
# Or: from mne.io.Raw import time_as_index [as alias]
def test_raw_time_as_index():
""" Test time as index conversion"""
raw = Raw(fif_fname, preload=True)
with warnings.catch_warnings(record=True): # deprecation
first_samp = raw.time_as_index([0], True)[0]
assert_equal(raw.first_samp, -first_samp)
Example 6: test_raw_time_as_index
# Required import: from mne.io import Raw [as alias]
# Or: from mne.io.Raw import time_as_index [as alias]
def test_raw_time_as_index():
""" Test time as index conversion"""
raw = Raw(fif_fname, preload=True)
first_samp = raw.time_as_index([0], True)[0]
assert_true(raw.first_samp == -first_samp)
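Examples 5 and 6 rely on the old use_first_samp argument, which later MNE releases removed. The same shift can be applied explicitly with raw.first_samp; a sketch on synthetic data (the nonzero first_samp is an assumption chosen only to make the shift visible):
import numpy as np
import mne
info = mne.create_info(['MEG 0001'], sfreq=1000.0, ch_types='mag')
raw = mne.io.RawArray(np.zeros((1, 1000)), info, first_samp=120)
# Old behaviour: time_as_index([0], use_first_samp=True) returned -first_samp.
# With newer releases the shift is applied by hand:
idx = raw.time_as_index([0.0])[0] - raw.first_samp
print(idx == -raw.first_samp)   # True, matching the assertions above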
Example 7: method
# Required import: from mne.io import Raw [as alias]
# Or: from mne.io.Raw import time_as_index [as alias]
# Imports required by this example (not shown in the original excerpt)
import matplotlib.pyplot as plt
import mne
from mne.datasets import sample
from mne.io import Raw
from mne.minimum_norm import apply_inverse_raw, read_inverse_operator
data_path = sample.data_path()
fname_inv = data_path + '/MEG/sample/sample_audvis-meg-oct-6-meg-inv.fif'
fname_raw = data_path + '/MEG/sample/sample_audvis_raw.fif'
label_name = 'Aud-lh'
fname_label = data_path + '/MEG/sample/labels/%s.label' % label_name
snr = 1.0 # use smaller SNR for raw data
lambda2 = 1.0 / snr ** 2
method = "sLORETA" # use sLORETA method (could also be MNE or dSPM)
# Load data
raw = Raw(fname_raw)
inverse_operator = read_inverse_operator(fname_inv)
label = mne.read_label(fname_label)
start, stop = raw.time_as_index([0, 15]) # read the first 15s of data
# Compute inverse solution
stc = apply_inverse_raw(raw, inverse_operator, lambda2, method, label,
start, stop, pick_ori=None)
# Save result in stc files
stc.save('mne_%s_raw_inverse_%s' % (method, label_name))
###############################################################################
# View activation time-series
plt.plot(1e3 * stc.times, stc.data[::100, :].T)
plt.xlabel('time (ms)')
plt.ylabel('%s value' % method)
plt.show()
Example 8: range
# Required import: from mne.io import Raw [as alias]
# Or: from mne.io.Raw import time_as_index [as alias]
title = 'Spatial patterns of ICA components for ECG+EOG (Magnetometers)'
source_idx = range(15)
ica.plot_topomap([ecg_source_idx, eog_source_idx], ch_type='mag')
plt.suptitle(title, fontsize=12)
###############################################################################
# Show MEG data before and after ICA cleaning.
# We now add the EOG artifact component to the ica.exclude list
ica.exclude += [eog_source_idx]
# Restore sensor space data and keep all PCA components
raw_ica = ica.pick_sources_raw(raw, include=None, n_pca_components=1.0)
# let's now compare the data before and after cleaning.
start_compare, stop_compare = raw.time_as_index([100, 106])
data, times = raw[picks, start_compare:stop_compare]
data_clean, _ = raw_ica[picks, start_compare:stop_compare]
# first the raw data
plt.figure()
plt.plot(times, data.T, color='r')
plt.plot(times, data_clean.T, color='k')
plt.xlabel('time (s)')
plt.xlim(100, 106)
plt.show()
# now the affected channel
affected_idx = raw.ch_names.index('MEG 1531')
plt.figure()
plt.plot(times, data[affected_idx], color='r')
Example 9: Raw
# Required import: from mne.io import Raw [as alias]
# Or: from mne.io.Raw import time_as_index [as alias]
from mne.io import Raw
from mne.datasets import sample
data_path = sample.data_path()
raw_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw.fif'
###############################################################################
# get raw data
raw = Raw(raw_fname)
# set picks
picks = mne.pick_types(raw.info, meg=True, eeg=False, eog=False,
stim=False, exclude='bads')
# pick times relative to the onset of the MEG measurement.
start, stop = raw.time_as_index([100, 115], use_first_samp=False)
# export to nitime using a copy of the data
raw_ts = raw.to_nitime(start=start, stop=stop, picks=picks, copy=True)
###############################################################################
# explore some nitime timeseries features
# get start
print(raw_ts.t0)
# get duration
print(raw_ts.duration)
# get sample duration (sampling interval)
print(raw_ts.sampling_interval)
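Raw.to_nitime appears to have been removed from later MNE releases; the underlying pattern is simply slicing the Raw object with indices from time_as_index. A minimal sketch on synthetic data (channel names, rate, and the 5 to 15 s window are assumptions):
import numpy as np
import mne
info = mne.create_info(['MEG 0001', 'MEG 0002'], sfreq=100.0, ch_types='mag')
raw = mne.io.RawArray(np.random.randn(2, 2000), info)   # 20 s synthetic recording
start, stop = raw.time_as_index([5.0, 15.0])
data, times = raw[:, start:stop]                        # data: (n_channels, n_samples)
print(data.shape, times[0], times[-1])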
Example 10: simulate_movement
# Required import: from mne.io import Raw [as alias]
# Or: from mne.io.Raw import time_as_index [as alias]
#......... (part of the code omitted) .........
if pos is None: # use pos from file
dev_head_ts = [raw.info['dev_head_t']] * 2
offsets = np.array([0, raw.n_times])
interp = 'zero'
else:
if isinstance(pos, string_types):
pos = get_chpi_positions(pos, verbose=False)
if isinstance(pos, tuple): # can be an already-loaded pos file
transs, rots, ts = pos
ts -= raw.first_samp / raw.info['sfreq'] # MF files need reref
dev_head_ts = [np.r_[np.c_[r, t[:, np.newaxis]], [[0, 0, 0, 1]]]
for r, t in zip(rots, transs)]
del transs, rots
elif isinstance(pos, dict):
ts = np.array(list(pos.keys()), float)
ts.sort()
dev_head_ts = [pos[float(tt)] for tt in ts]
else:
raise TypeError('unknown pos type %s' % type(pos))
if not (ts >= 0).all(): # pathological if not
raise RuntimeError('Cannot have t < 0 in transform file')
tend = raw.times[-1]
assert not (ts < 0).any()
assert not (ts > tend).any()
if ts[0] > 0:
ts = np.r_[[0.], ts]
dev_head_ts.insert(0, raw.info['dev_head_t']['trans'])
dev_head_ts = [{'trans': d, 'to': raw.info['dev_head_t']['to'],
'from': raw.info['dev_head_t']['from']}
for d in dev_head_ts]
if ts[-1] < tend:
dev_head_ts.append(dev_head_ts[-1])
ts = np.r_[ts, [tend]]
offsets = raw.time_as_index(ts)
offsets[-1] = raw.n_times # fix for roundoff error
assert offsets[-2] != offsets[-1]
del ts
if isinstance(cov, string_types):
assert cov == 'simple'
cov = make_ad_hoc_cov(raw.info, verbose=False)
assert np.array_equal(offsets, np.unique(offsets))
assert len(offsets) == len(dev_head_ts)
approx_events = int((raw.n_times / raw.info['sfreq']) /
(stc.times[-1] - stc.times[0]))
logger.info('Provided parameters will provide approximately %s event%s'
% (approx_events, '' if approx_events == 1 else 's'))
# get HPI freqs and reorder
hpi_freqs = np.array([x['custom_ref'][0]
for x in raw.info['hpi_meas'][0]['hpi_coils']])
n_freqs = len(hpi_freqs)
order = [x['number'] - 1 for x in raw.info['hpi_meas'][0]['hpi_coils']]
assert np.array_equal(np.unique(order), np.arange(n_freqs))
hpi_freqs = hpi_freqs[order]
hpi_order = raw.info['hpi_results'][0]['order'] - 1
assert np.array_equal(np.unique(hpi_order), np.arange(n_freqs))
hpi_freqs = hpi_freqs[hpi_order]
# extract necessary info
picks = pick_types(raw.info, meg=True, eeg=True) # for simulation
meg_picks = pick_types(raw.info, meg=True, eeg=False) # for CHPI
fwd_info = pick_info(raw.info, picks)
fwd_info['projs'] = []
logger.info('Setting up raw data simulation using %s head position%s'
% (len(dev_head_ts), 's' if len(dev_head_ts) != 1 else ''))
raw.preload_data(verbose=False)
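The head-position handling above boils down to converting a set of timestamps into sample offsets and pinning the final offset to n_times. A stripped-down sketch of that conversion on synthetic data (timestamps and rate are assumptions):
import numpy as np
import mne
info = mne.create_info(['MEG 0001'], sfreq=1000.0, ch_types='mag')
raw = mne.io.RawArray(np.zeros((1, 10000)), info)   # 10 s of synthetic data
ts = np.array([0.0, 2.5, 5.0, 10.0])                # assumed head-position timestamps (s)
offsets = raw.time_as_index(ts)
offsets[-1] = raw.n_times                           # same roundoff fix as in the example
print(offsets)                                      # e.g. [    0  2500  5000 10000]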
Example 11: split_fif_into_epochs
# Required import: from mne.io import Raw [as alias]
# Or: from mne.io.Raw import time_as_index [as alias]
def split_fif_into_epochs(fif_file, ep_length, isRemoveMuscleEpochs=False):
"""
Split fif_file into epochs of ep_length. Removes epochs
with muscle activity if isRemoveMuscleEpochs is set to True.
Args:
    fif_file: path to the raw .fif file to be split
    ep_length: epoch length in seconds
    isRemoveMuscleEpochs: if True, drop epochs contaminated by muscle activity
"""
# ---------------- Imports ---------------------------------#
import os
import numpy as np
import mne
from mne.io import Raw
from nipype.utils.filemanip import split_filename as split_f
# --------------------------------------------------------- #
ep_length = float(ep_length)
subj_path, basename, ext = split_f(fif_file)
# path = '/media/karim/79DE901F11ABED4C/Dimitriy/resting_data/MEG/K0017/'
raw = Raw(fif_file, preload=True)
sfreq = raw.info['sfreq']
half_ep_length = ep_length / 2.
first_time = half_ep_length
last_time = raw.times[-1]
event_times = np.arange(first_time, last_time, ep_length)
event_idx = raw.time_as_index(event_times) + raw.first_samp
events = np.array([event_idx, np.zeros(event_idx.shape),
np.ones(event_idx.shape)], dtype=np.int)
picks = mne.pick_types(
raw.info, meg=True, eeg=False, stim=False, eog=False)
# Remove an epoch if its amplitude goes more than stdThreshold STDs above
# baseline
bad_epochs_count = 0
if isRemoveMuscleEpochs:
from split_data import generate_figures_for_report
from mne.report import Report
report = Report()
report_filename = basename + '_muscle_' + '.html'
rawHpass = raw.copy()
rawHpass.filter(l_freq=100, h_freq=None)
rawHpassPicks = mne.pick_types(
rawHpass.info, meg='mag', eeg=False, stim=False, eog=False)
stdThreshold = 4.
maxThreshold = 6.
# print("Before epochs")
epochsHpass = mne.Epochs(rawHpass, events.T, picks=rawHpassPicks,
event_id=1, tmin=-half_ep_length, tmax=half_ep_length,
reject=None, baseline=None, flat=dict(mag=1e-14), preload=True)
# print("After epochs")
ep_data = epochsHpass.get_data()
baseline_std = 5e-14
badChThreshold_std = 15
badChThreshold_max = 50
for iEp in range(len(ep_data[:, -1, -1])):
std_ep = ep_data[iEp, :, :].std(axis=1)
# import pdb; pdb.set_trace()
max_ep = ep_data[iEp, :, :].max(axis=1)
if len(std_ep[std_ep > baseline_std * stdThreshold]) > badChThreshold_std or len(max_ep[max_ep > baseline_std * maxThreshold]) > badChThreshold_max:
# print std_ep[std_ep > baseline_std * std_ep]
bad_epochs_count += 1
events[2, iEp] = 0
# import pdb; pdb.set_trace()
# print len(std_ep[std_ep > baseline_std * std_ep])
print "Number of contaminated channels: ", len(std_ep[std_ep > baseline_std * stdThreshold]) + len(max_ep[max_ep > baseline_std * maxThreshold])
generate_figures_for_report(
report, epochsHpass, rawHpass, events, half_ep_length, ep_length, iEp, picks)
report.save(report_filename, open_browser=False, overwrite=True)
# Set events for epochs
# raw.first_samp must be added to the event samples, otherwise epochs with
# times earlier than raw.first_samp are dropped (see epochs.py: _get_epoch_from_disk)
# import pdb; pdb.set_trace()
epochs = mne.Epochs(raw, events.T, picks=picks, proj=False, event_id=1,
tmin=-half_ep_length, tmax=half_ep_length,
reject=None, baseline=None, preload=True)
np_epochs = epochs.get_data()
epochs_file = os.path.abspath(basename + '_epochs.npy')
np.save(epochs_file, np_epochs)
return epochs, epochs_file, np_epochs, bad_epochs_count
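For reference, MNE also ships mne.make_fixed_length_events, which produces the same kind of evenly spaced event array that split_fif_into_epochs assembles by hand from time_as_index. A minimal sketch on synthetic data (channel names, rate, and the 2 s epoch length are assumptions, not values from the function above):
import numpy as np
import mne
info = mne.create_info(['MEG 0001', 'MEG 0002'], sfreq=250.0, ch_types='mag')
raw = mne.io.RawArray(np.random.randn(2, int(20 * 250.0)), info)   # 20 s recording
events = mne.make_fixed_length_events(raw, id=1, duration=2.0)
epochs = mne.Epochs(raw, events, event_id=1, tmin=0.0, tmax=2.0,
                    baseline=None, reject=None, preload=True)
print(len(epochs))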
Example 12: test_io_raw
# Required import: from mne.io import Raw [as alias]
# Or: from mne.io.Raw import time_as_index [as alias]
def test_io_raw():
"""Test IO for raw data (Neuromag + CTF + gz)
"""
# test unicode io
for chars in [b"\xc3\xa4\xc3\xb6\xc3\xa9", b"a"]:
with Raw(fif_fname) as r:
desc1 = r.info["description"] = chars.decode("utf-8")
temp_file = op.join(tempdir, "raw.fif")
r.save(temp_file, overwrite=True)
with Raw(temp_file) as r2:
desc2 = r2.info["description"]
assert_equal(desc1, desc2)
# Let's construct a simple test for IO first
raw = Raw(fif_fname, preload=True)
raw.crop(0, 3.5)
# put in some data that we know the values of
data = np.random.randn(raw._data.shape[0], raw._data.shape[1])
raw._data[:, :] = data
# save it somewhere
fname = op.join(tempdir, "test_copy_raw.fif")
raw.save(fname, buffer_size_sec=1.0)
# read it in, make sure the whole thing matches
raw = Raw(fname)
assert_true(np.allclose(data, raw[:, :][0], 1e-6, 1e-20))
# let's read portions across the 1-sec tag boundary, too
inds = raw.time_as_index([1.75, 2.25])
sl = slice(inds[0], inds[1])
assert_true(np.allclose(data[:, sl], raw[:, sl][0], 1e-6, 1e-20))
# now let's do some real I/O
fnames_in = [fif_fname, fif_gz_fname, ctf_fname]
fnames_out = ["raw.fif", "raw.fif.gz", "raw.fif"]
for fname_in, fname_out in zip(fnames_in, fnames_out):
fname_out = op.join(tempdir, fname_out)
raw = Raw(fname_in)
nchan = raw.info["nchan"]
ch_names = raw.info["ch_names"]
meg_channels_idx = [k for k in range(nchan) if ch_names[k][0] == "M"]
n_channels = 100
meg_channels_idx = meg_channels_idx[:n_channels]
start, stop = raw.time_as_index([0, 5])
data, times = raw[meg_channels_idx, start : (stop + 1)]
meg_ch_names = [ch_names[k] for k in meg_channels_idx]
# Set up pick list: MEG + STI 014 - bad channels
include = ["STI 014"]
include += meg_ch_names
picks = pick_types(
raw.info, meg=True, eeg=False, stim=True, misc=True, ref_meg=True, include=include, exclude="bads"
)
# Writing with drop_small_buffer True
raw.save(fname_out, picks, tmin=0, tmax=4, buffer_size_sec=3, drop_small_buffer=True, overwrite=True)
raw2 = Raw(fname_out, preload=True)
sel = pick_channels(raw2.ch_names, meg_ch_names)
data2, times2 = raw2[sel, :]
assert_true(times2.max() <= 3)
# Writing
raw.save(fname_out, picks, tmin=0, tmax=5, overwrite=True)
if fname_in == fif_fname or fname_in == fif_fname + ".gz":
assert_true(len(raw.info["dig"]) == 146)
raw2 = Raw(fname_out)
sel = pick_channels(raw2.ch_names, meg_ch_names)
data2, times2 = raw2[sel, :]
assert_true(np.allclose(data, data2, 1e-6, 1e-20))
assert_allclose(times, times2)
assert_allclose(raw.info["sfreq"], raw2.info["sfreq"], rtol=1e-5)
# check transformations
for trans in ["dev_head_t", "dev_ctf_t", "ctf_head_t"]:
if raw.info[trans] is None:
assert_true(raw2.info[trans] is None)
else:
assert_array_equal(raw.info[trans]["trans"], raw2.info[trans]["trans"])
# check transformation 'from' and 'to'
if trans.startswith("dev"):
from_id = FIFF.FIFFV_COORD_DEVICE
else:
from_id = FIFF.FIFFV_MNE_COORD_CTF_HEAD
if trans[4:8] == "head":
to_id = FIFF.FIFFV_COORD_HEAD
else:
to_id = FIFF.FIFFV_MNE_COORD_CTF_HEAD
for raw_ in [raw, raw2]:
assert_true(raw_.info[trans]["from"] == from_id)
assert_true(raw_.info[trans]["to"] == to_id)
if fname_in == fif_fname or fname_in == fif_fname + ".gz":
assert_allclose(raw.info["dig"][0]["r"], raw2.info["dig"][0]["r"])
# test warnings on bad filenames
#......... (rest of the code omitted) .........