

Python datasets.fetch_haxby Function Code Examples

This article collects typical usage examples of the Python function nilearn.datasets.fetch_haxby. If you are wondering what fetch_haxby does, how to call it, or what working code that uses it looks like, the hand-picked examples below should help.


The following presents 15 code examples of the fetch_haxby function, ordered by popularity by default.
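
Before the examples, here is a minimal sketch of a typical call. Note that the subject-selection argument changed across nilearn versions (older releases use n_subjects=..., newer ones use subjects=[...]); both spellings appear in the examples that follow.

from nilearn import datasets

# Download the Haxby (2001) dataset, or reuse a cached copy (by default under ~/nilearn_data)
haxby = datasets.fetch_haxby()          # older releases: fetch_haxby(n_subjects=1)

print(haxby.func[0])            # path to the subject's 4D functional image
print(haxby.anat[0])            # path to the anatomical image
print(haxby.session_target[0])  # space-separated file with 'labels' and 'chunks' columns
print(haxby.mask_vt[0])         # ventral-temporal mask used in the decoding examples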

Example 1: test_fetch_haxby

def test_fetch_haxby():
    for i in range(1, 6):
        haxby = datasets.fetch_haxby(data_dir=tmpdir, n_subjects=i,
                                     verbose=0)
        assert_equal(len(url_request.urls), 1 + (i == 1))  # subject_data + md5
        assert_equal(len(haxby.func), i)
        assert_equal(len(haxby.anat), i)
        assert_equal(len(haxby.session_target), i)
        assert_equal(len(haxby.mask_vt), i)
        assert_equal(len(haxby.mask_face), i)
        assert_equal(len(haxby.mask_house), i)
        assert_equal(len(haxby.mask_face_little), i)
        assert_equal(len(haxby.mask_house_little), i)
        url_request.reset()
Developer: amadeuskanaan, Project: nilearn, Lines of code: 14, Source file: test_datasets.py

Example 2: read_data_haxby

import numpy as np
from nilearn.datasets import fetch_haxby
from nilearn.image import load_img
from nilearn.input_data import NiftiMasker


def read_data_haxby(subject, tr=2.5, return_masker=False):
    haxby_dataset = fetch_haxby(subjects=[subject])

    # Load fmri data
    fmri_filename = haxby_dataset.func[0]
    fmri = load_img(fmri_filename)
    # mask = haxby_dataset.mask_vt[0]
    masker = NiftiMasker(mask_strategy='epi', standardize=True, detrend=True,
                         high_pass=0.01, t_r=tr, smoothing_fwhm=5)
    fmri = masker.fit_transform(fmri)
    fmri = fmri.reshape(12, -1, fmri.shape[-1])

    # Load stimuli data
    classes = np.array(['rest', 'face', 'house', 'bottle', 'cat', 'chair',
                        'scissors', 'shoe', 'scrambledpix'])
    labels = np.recfromcsv(
        haxby_dataset.session_target[0], delimiter=" ")['labels'].reshape(
            12, -1)
    stimuli, onsets, conditions = (np.zeros((
        12, len(labels[0]), len(classes))), [], [])
    stimuli[:, 0, 0] = 1
    for session in range(12):
        onsets.append([])
        conditions.append([])
        for scan in range(1, len(fmri[session])):
            if (labels[session][scan - 1] == 'rest' and
                labels[session][scan] != 'rest'):
                label = labels[session][scan]
                stimuli[session, scan, np.where(classes == label)[0][0]] = 1
                conditions[session].append(label)
                onsets[session].append(scan * tr)
            else:
                stimuli[session, scan, 0] = 1

    if subject == 5:
        fmri = np.vstack((fmri[:8], fmri[9:]))
        stimuli = np.vstack((stimuli[:8], stimuli[9:]))
        onsets = np.vstack((onsets[:8], onsets[9:]))
        conditions = np.vstack((conditions[:8], conditions[9:]))

    # Optionally return the fitted masker as well (the flag must not be shadowed
    # by the NiftiMasker object created above)
    if return_masker:
        return fmri, stimuli, onsets, conditions, masker

    return fmri, stimuli, onsets, conditions
Developer: Joaoloula, Project: time-decoding, Lines of code: 44, Source file: data_reading.py

Example 3: image

"""
NeuroImaging volumes visualization
==================================

Simple example to show Nifti data visualization.
"""

##############################################################################
# Fetch data
from nilearn import datasets

# By default, the data of the 2nd subject will be fetched
haxby_dataset = datasets.fetch_haxby()

# print basic information on the dataset
print('First anatomical nifti image (3D) is located at: %s' %
      haxby_dataset.anat[0])
print('First functional nifti image (4D) is located at: %s' %
      haxby_dataset.func[0])

##############################################################################
# Visualization
from nilearn.image.image import mean_img

# Compute the mean EPI: we do the mean along the axis 3, which is time
func_filename = haxby_dataset.func[0]
mean_haxby = mean_img(func_filename)

from nilearn.plotting import plot_epi, show
plot_epi(mean_haxby)
show()
Developer: AlexandreAbraham, Project: nilearn, Lines of code: 30, Source file: plot_visualization.py

Example 4: image

----------
[1] Winkler, A. M. et al. (2014).
    Permutation inference for the general linear model. Neuroimage.

[2] Anderson, M. J. & Robinson, J. (2001).
    Permutation tests for linear models.
    Australian & New Zealand Journal of Statistics, 43(1), 75-88.
    (http://avesbiodiv.mncn.csic.es/estadistica/permut2.pdf)

"""
# Author: Virgile Fritsch, <[email protected]>, Feb. 2014

##############################################################################
# Load Haxby dataset
from nilearn import datasets
haxby_dataset = datasets.fetch_haxby(subjects=[2])

# print basic information on the dataset
print('Mask nifti image (3D) is located at: %s' % haxby_dataset.mask)
print('Functional nifti image (4D) is located at: %s' % haxby_dataset.func[0])

##############################################################################
# Mask data
mask_filename = haxby_dataset.mask
from nilearn.input_data import NiftiMasker
nifti_masker = NiftiMasker(
    smoothing_fwhm=8,
    mask_img=mask_filename,
    memory='nilearn_cache', memory_level=1)  # cache options
func_filename = haxby_dataset.func[0]
fmri_masked = nifti_masker.fit_transform(func_filename)
Developer: Joaoloula, Project: nilearn, Lines of code: 31, Source file: plot_haxby_mass_univariate.py

Example 5: _crop_mask

def _crop_mask(mask):
    """Crops input mask to produce tighter (i.e smaller) bounding box with
    the same support (active voxels)"""
    idx = np.where(mask)
    i_min = max(idx[0].min() - 1, 0)
    i_max = idx[0].max()
    j_min = max(idx[1].min() - 1, 0)
    j_max = idx[1].max()
    k_min = max(idx[2].min() - 1, 0)
    k_max = idx[2].max()
    return mask[i_min:i_max + 1, j_min:j_max + 1, k_min:k_max + 1]
    
### Load haxby dataset ########################################################
from nilearn.datasets import fetch_haxby
data_files = fetch_haxby('/home/eugene/Documents/')

### Load Target labels ########################################################
import numpy as np
labels = np.recfromcsv(data_files.session_target[0], delimiter=" ")


### split data into train and test samples ####################################
n_train=6
target = labels['labels']
condition_mask = np.logical_or(target == "scissors", target == "scrambledpix")
condition_mask_train = np.logical_and(condition_mask, labels['chunks'] <= n_train)
condition_mask_test = np.logical_and(condition_mask, labels['chunks'] > n_train)

# make X (design matrix) and y (response variable)
import nibabel
Developer: eugenium, Project: StructuredSparsityRegularization, Lines of code: 30, Source file: FMRI_Example.py
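
The excerpt above stops right after the train/test split. A possible continuation, not part of the quoted file and given here only as a sketch (the choice of the ventral-temporal mask is an assumption), would mask the fMRI data and index it with the condition masks defined above:

from nilearn.input_data import NiftiMasker

# Mask the 4D functional image (here restricted to ventral-temporal voxels)
masker = NiftiMasker(mask_img=data_files.mask_vt[0], standardize=True)
X = masker.fit_transform(data_files.func[0])
y = target

# Index with the boolean condition masks built above
X_train, y_train = X[condition_mask_train], y[condition_mask_train]
X_test, y_test = X[condition_mask_test], y[condition_mask_test]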

Example 6: image

mask, and how simple operations can improve the quality of the mask
obtained.
"""
### Coordinates of the selected slice #########################################

coronal = -24
sagittal = -33
axial = -17
cut_coords = (coronal, sagittal, axial)

### Load the data #############################################################

# Fetch the data files from Internet
from nilearn import datasets
import nibabel
haxby_dataset = datasets.fetch_haxby(n_subjects=1)

# print basic information on the dataset
print('First subject anatomical nifti image (3D) is located at: %s' %
      haxby_dataset.anat[0])
print('First subject functional nifti image (4D) is located at: %s' %
      haxby_dataset.func[0])

# Second, load the labels
import numpy as np

session_target = np.recfromcsv(haxby_dataset.session_target[0], delimiter=" ")
haxby_labels = session_target['labels']

### Visualization function ####################################################
Developer: DavidDJChen, Project: nilearn, Lines of code: 30, Source file: plot_roi_extraction.py

Example 7: sorted

"""
Show stimuli of Haxby et al. dataset
===============================================================================

In this script we plot an overview of the stimuli used in "Distributed
and Overlapping Representations of Faces and Objects in Ventral Temporal
Cortex" (Science 2001)
"""

from scipy.misc import imread
import matplotlib.pyplot as plt

from nilearn import datasets

haxby_dataset = datasets.fetch_haxby(n_subjects=0, fetch_stimuli=True)
stimulus_information = haxby_dataset.stimuli

for stim_type in sorted(stimulus_information.keys()):
    if stim_type == "controls":
        # skip control images, there are too many
        continue

    file_names = stimulus_information[stim_type]

    plt.figure()
    for i in range(48):
        plt.subplot(6, 8, i + 1)
        try:
            plt.imshow(imread(file_names[i]), cmap=plt.cm.gray)
        except:
            # just go to the next one if the file is not present
Developer: andreas-koukorinis, Project: gaelvaroquaux.github.io, Lines of code: 31, Source file: plot_haxby_stimuli.py

Example 8:

"""
NeuroImaging volumes visualization
====================================

Simple example to show Nifti data visualization.
"""

### Fetch data ################################################################

from nilearn import datasets

haxby_files = datasets.fetch_haxby(n_subjects=1)

### Load an fMRI file #########################################################

import nibabel

fmri_img = nibabel.load(haxby_files.func[0])
fmri_data = fmri_img.get_data()
fmri_affine = fmri_img.get_affine()

### Visualization #############################################################

import numpy as np
import matplotlib.pyplot as plt

# Compute the mean EPI: we do the mean along the axis 3, which is time
mean_img = np.mean(fmri_data, axis=3)
# Note that this can also be done on Nifti images using
# nilearn.image.mean_img
Developer: ainafp, Project: nilearn, Lines of code: 30, Source file: plot_visualization.py

Example 9: prior

"""
Decoding with SpaceNet: face vs house object recognition
=========================================================

Here is a simple example of decoding with a SpaceNet prior (i.e Graph-Net,
TV-l1, etc.), reproducing the Haxby 2001 study on a face vs house
discrimination task.

See also the SpaceNet documentation: :ref:`space_net`.
"""

##############################################################################
# Load the Haxby dataset
from nilearn.datasets import fetch_haxby

data_files = fetch_haxby()

# Load Target labels
import numpy as np

labels = np.recfromcsv(data_files.session_target[0], delimiter=" ")


# Restrict to face and house conditions
target = labels["labels"]
condition_mask = np.logical_or(target == b"face", target == b"house")

# Split data into train and test samples, using the chunks
condition_mask_train = np.logical_and(condition_mask, labels["chunks"] <= 6)
condition_mask_test = np.logical_and(condition_mask, labels["chunks"] > 6)
Developer: juhuntenburg, Project: nilearn, Lines of code: 30, Source file: plot_haxby_space_net.py
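
The excerpt above ends after the train/test split. The sketch below, which is not part of the quoted file, shows roughly how the SpaceNet decoder itself could then be fit (nilearn's SpaceNetClassifier with the Graph-Net penalty is assumed):

import numpy as np
from nilearn.image import index_img
from nilearn.decoding import SpaceNetClassifier

# Restrict the 4D image and the labels to the selected scans
X_train = index_img(data_files.func[0], condition_mask_train)
y_train = target[condition_mask_train]
X_test = index_img(data_files.func[0], condition_mask_test)
y_test = target[condition_mask_test]

# 'graph-net' penalty; 'tv-l1' is the other option mentioned in the docstring
decoder = SpaceNetClassifier(penalty='graph-net')
decoder.fit(X_train, y_train)

accuracy = np.mean(decoder.predict(X_test) == y_test)
print("Test accuracy: %.2f" % accuracy)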

Example 10: round

"""
Plot Haxby masks
=================

Small script to plot the masks of the Haxby dataset.
"""
from scipy import linalg
import matplotlib.pyplot as plt

from nilearn import datasets
data = datasets.fetch_haxby()

# Build the mean image because we have no anatomic data
from nilearn import image
mean_img = image.mean_img(data.func[0])

z_slice = -24
from nilearn.image.resampling import coord_transform
affine = mean_img.get_affine()
_, _, k_slice = coord_transform(0, 0, z_slice,
                                linalg.inv(affine))
k_slice = round(k_slice)

fig = plt.figure(figsize=(4, 5.4), facecolor='k')

from nilearn.plotting import plot_anat
display = plot_anat(mean_img, display_mode='z', cut_coords=[z_slice],
                    figure=fig)
display.add_contours(data.mask_vt[0], contours=1, antialiased=False,
                     linewidths=4., levels=[0], colors=['red'])
display.add_contours(data.mask_house[0], contours=1, antialiased=False,
Developer: andreas-koukorinis, Project: gaelvaroquaux.github.io, Lines of code: 31, Source file: plot_haxby_masks.py

Example 11: fetch_haxby

This is a block-design fMRI dataset from a study on face and object\
 representation in human ventral temporal cortex. It consists of 6 subjects\
 with 12 runs per subject. In each run, the subjects passively viewed \
greyscale images of eight object categories, grouped in 24s blocks separated\
 by rest periods. Each image was shown for 500ms and was followed by a 1500ms\
 inter-stimulus interval. Full-brain fMRI data were recorded with a volume \
repetition time of 2.5s, thus, a stimulus block was covered by roughly 9 \
volumes.

Get full description <a href="http://dev.pymvpa.org/datadb/haxby2001.html">\
here</a>.\
"""

# fetch HAXBY dataset
N_SUBJECTS = 2
haxby_data = fetch_haxby(n_subjects=N_SUBJECTS)

# set output dir
OUTPUT_DIR = os.path.join(os.path.dirname(haxby_data.mask),
                          "haxby_runs")
if not os.path.exists(OUTPUT_DIR):
    os.makedirs(OUTPUT_DIR)

# get subject data
subjects = []
for subject_id in set([os.path.basename(os.path.dirname(x))
                       for x in haxby_data.func]):
    # instantiate subject_data object
    subject_data = SubjectData()
    subject_data.subject_id = subject_id
    subject_data.session_id = "haxby2001"
Developer: AlexandreAbraham, Project: pypreprocess, Lines of code: 31, Source file: nipype_preproc_spm_haxby.py
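
As a quick sanity check of the timing quoted in the docstring above (illustrative only, not part of the quoted script):

TR = 2.5                    # volume repetition time in seconds
block_duration = 24.0       # length of one stimulus block in seconds
trial_duration = 0.5 + 1.5  # 500 ms stimulus + 1500 ms inter-stimulus interval

print(block_duration / TR)              # ~9.6, i.e. a block spans "roughly 9 volumes"
print(block_duration / trial_duration)  # 12 stimulus presentations would fit in one block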

Example 12: sorted

"""
Show stimuli of Haxby et al. dataset
===============================================================================

In this script we plot an overview of the stimuli used in "Distributed
and Overlapping Representations of Faces and Objects in Ventral Temporal
Cortex" (Science 2001)
"""

from scipy.misc import imread
import matplotlib.pyplot as plt

from nilearn import datasets
from nilearn.plotting import show

haxby_dataset = datasets.fetch_haxby(subjects=[], fetch_stimuli=True)
stimulus_information = haxby_dataset.stimuli

for stim_type in sorted(stimulus_information.keys()):
    if stim_type == b'controls':
        # skip control images, there are too many
        continue

    file_names = stimulus_information[stim_type]

    plt.figure()
    for i in range(48):
        plt.subplot(6, 8, i + 1)
        try:
            plt.imshow(imread(file_names[i]), cmap=plt.cm.gray)
        except:
Developer: jeromedockes, Project: nilearn, Lines of code: 31, Source file: plot_haxby_stimuli.py

Example 13: fetch_haxby

from nilearn.datasets import fetch_haxby

haxby_data = fetch_haxby(n_subjects=1, fetch_stimuli=True)
Developer: eickenberg, Project: nilearn_workshop, Lines of code: 3, Source file: fetch_data.py

Example 14: fetch_haxby

"""
Show stimuli of Haxby et al. dataset
===============================================================================

In this script we plot an overview of the stimuli used in "Distributed
and Overlapping Representations of Faces and Objects in Ventral Temporal
Cortex" (Science 2001)
"""

from scipy.misc import imread
import matplotlib.pyplot as plt

from nilearn.datasets import fetch_haxby

stimulus_information = fetch_haxby(n_subjects=0,
                                   fetch_stimuli=True).stimuli

for stim_type in sorted(stimulus_information.keys()):
    if stim_type == "controls":
        # skip control images, there are too many
        continue

    file_names = stimulus_information[stim_type]

    plt.figure()
    for i in range(48):
        plt.subplot(6, 8, i + 1)
        try:
            plt.imshow(imread(file_names[i]))

            plt.gray()
Developer: armaneshaghi, Project: nilearn, Lines of code: 31, Source file: plot_haxby_stimuli.py

Example 15: len

import numpy as np
from scipy.linalg import lstsq


def trend_coef(array, polyorder, axis=None):
    """Fit a polynomial trend of order `polyorder` along `axis` and return the
    coefficient maps. (The signature is not part of the excerpt; it is
    reconstructed from the call in the __main__ block below.)"""
    if axis is None:
        axis = 0
        array = array.ravel()

    array = np.rollaxis(array, axis)
    remaining_shape = array.shape[1:]
    length = len(array)

    array = array.reshape(length, -1)
    x = np.arange(length)
    x -= x.mean()
    x /= x.max()

    regressors = np.array([x ** order for order in range(polyorder + 1)]).T
    coef, resids, rank, s = lstsq(regressors, array)

    return coef.reshape([polyorder + 1] + list(remaining_shape))


if __name__ == "__main__":
    import nibabel as nb
    from nilearn.datasets import fetch_haxby
    h = fetch_haxby(n_subjects=1)
    data = nb.load(h.func[0]).get_data().T

    skewmap = skewness(data, axis=0)
    kurtmap = kurtosis(data, axis=0)
    trendmap = trend_coef(data, polyorder=2, axis=0)

    
Developer: AlexandreAbraham, Project: movements, Lines of code: 28, Source file: features.py


Note: The nilearn.datasets.fetch_haxby examples in this article were collected by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are taken from open-source projects contributed by their respective authors; copyright remains with the original authors, and any distribution or use should follow the license of the corresponding project. Please do not reproduce without permission.