This article collects typical usage examples of the Python function mvpa2.datasets.mri.fmri_dataset. If you are unsure how fmri_dataset is used in practice, or are looking for concrete examples of calling it, the hand-picked code samples below should help.
Twenty code examples of the fmri_dataset function are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help our system recommend better Python code samples.
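Before diving into the examples, here is a minimal usage sketch of the most common call pattern: loading a 4D NIfTI time series with a 3D brain mask and per-volume labels, then mapping data back into image space with map2nifti. The file names, target values, and chunk values are hypothetical placeholders, not taken from any of the examples that follow.

# Minimal sketch (assumes a 12-volume bold.nii.gz and a matching 3D mask.nii.gz; placeholders only).
import numpy as np
from mvpa2.datasets.mri import fmri_dataset, map2nifti

ds = fmri_dataset(samples='bold.nii.gz',          # 4D NIfTI file or nibabel image
                  mask='mask.nii.gz',             # 3D mask restricting the features
                  targets=np.repeat([0, 1], 6),   # one label per volume (placeholder)
                  chunks=np.arange(12))           # e.g. one chunk per volume (placeholder)
print(ds.shape)             # (n_volumes, n_voxels_in_mask)
print(ds.fa.voxel_indices)  # 3D coordinates of every retained voxel

# Map the samples back into image space and save as NIfTI.
img = map2nifti(ds)
img.to_filename('out.nii.gz')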
Example 1: test_nifti_dataset_from3_d

def test_nifti_dataset_from3_d():
    """Test NiftiDataset based on 3D volume(s)
    """
    tssrc = os.path.join(pymvpa_dataroot, "bold.nii.gz")
    masrc = os.path.join(pymvpa_dataroot, "mask.nii.gz")

    # Test loading of 3D volumes
    # by default we are enforcing 4D, testing here with the demo 3d mask
    ds = fmri_dataset(masrc, mask=masrc, targets=1)
    assert_equal(len(ds), 1)

    import nibabel
    plain_data = nibabel.load(masrc).get_data()
    # Lets check if mapping back works as well
    assert_array_equal(plain_data, map2nifti(ds).get_data().reshape(plain_data.shape))

    # test loading from a list of filenames
    # for now we should fail if trying to load a mix of 4D and 3D volumes
    assert_raises(ValueError, fmri_dataset, (masrc, tssrc), mask=masrc, targets=1)

    # Lets prepare some custom NiftiImage
    dsfull = fmri_dataset(tssrc, mask=masrc, targets=1)
    ds_selected = dsfull[3]
    nifti_selected = map2nifti(ds_selected)

    # Load dataset from a mix of 3D volumes
    # (given by filenames and NiftiImages)
    labels = [123, 2, 123]
    ds2 = fmri_dataset((masrc, masrc, nifti_selected), mask=masrc, targets=labels)
    assert_equal(ds2.nsamples, 3)
    assert_array_equal(ds2.samples[0], ds2.samples[1])
    assert_array_equal(ds2.samples[2], dsfull.samples[3])
    assert_array_equal(ds2.targets, labels)

Author: schoeke | Project: PyMVPA | Lines: 35 | Source: test_niftidataset.py
Example 2: loadrundata

def loadrundata(p, s, r, m=None, c=None):
    # inputs:
    # p: paths list
    # s: string representing subject ('LMVPA001')
    # r: run ID ('Run1')
    from os.path import join as pjoin
    from mvpa2.datasets import eventrelated as er
    from mvpa2.datasets.mri import fmri_dataset
    from mvpa2.datasets.sources import bids as bids

    # bfn = pjoin(p[0], 'data', s, 'func', 'extra', s+'_'+r+'_mc.nii.gz')
    # motion corrected and coregistered
    bfn = pjoin(p[0], 'data', s, 'func', s + '_' + r + '.nii.gz')
    if m is not None:
        m = pjoin(p[0], 'data', s, 'masks', s + '_' + m + '.nii.gz')
        d = fmri_dataset(bfn, chunks=int(r.split('n')[1]), mask=m)
    else:
        d = fmri_dataset(bfn, chunks=int(r.split('n')[1]))
    # This line should be different if we're doing GLM, etc.
    efn = pjoin(p[0], 'data', s, 'func', s + '_' + r + '.tsv')
    fe = bids.load_events(efn)
    if c is None:
        tmpe = events2dict(fe)
        c = tmpe.keys()
    if isinstance(c, basestring):
        # must be a list/tuple/array for the logic below
        c = [c]
    for ci in c:
        e = adjustevents(fe, ci)
        d = er.assign_conditionlabels(d, e, noinfolabel='rest', label_attr=ci)
    return d

Author: njchiang | Project: LanguageMVPA | Lines: 32 | Source: lmvpautils.py
Example 3: test_multiple_calls

def test_multiple_calls():
    """Test if doing exactly the same operation twice yields the same result
    """
    data = fmri_dataset(samples=os.path.join(pymvpa_dataroot, 'example4d.nii.gz'),
                        targets=1, sprefix='abc')
    data2 = fmri_dataset(samples=os.path.join(pymvpa_dataroot, 'example4d.nii.gz'),
                         targets=1, sprefix='abc')
    assert_array_equal(data.a.abc_eldim, data2.a.abc_eldim)

Author: Arthurkorn | Project: PyMVPA | Lines: 8 | Source: test_niftidataset.py
Example 4: test_er_nifti_dataset

def test_er_nifti_dataset():
    # setup data sources
    tssrc = os.path.join(pymvpa_dataroot, u"bold.nii.gz")
    evsrc = os.path.join(pymvpa_dataroot, "fslev3.txt")
    masrc = os.path.join(pymvpa_dataroot, "mask.nii.gz")
    evs = FslEV3(evsrc).to_events()
    # load timeseries
    ds_orig = fmri_dataset(tssrc)
    # segment into events
    ds = eventrelated_dataset(ds_orig, evs, time_attr="time_coords")

    # we ask for boxcars of 9s length, and the tr in the file header says 2.5s
    # hence we should get round(9.0/2.5) * np.prod((1, 20, 40)) == 3200 features
    assert_equal(ds.nfeatures, 3200)
    assert_equal(len(ds), len(evs))
    # the voxel indices are reflattened after boxcaring, but still 3D
    assert_equal(ds.fa.voxel_indices.shape, (ds.nfeatures, 3))
    # and they have been broadcasted through all boxcars
    assert_array_equal(ds.fa.voxel_indices[:800], ds.fa.voxel_indices[800:1600])
    # each feature got an event offset value
    assert_array_equal(ds.fa.event_offsetidx, np.repeat([0, 1, 2, 3], 800))
    # check for all event attributes
    assert_true("onset" in ds.sa)
    assert_true("duration" in ds.sa)
    assert_true("features" in ds.sa)
    # check samples
    origsamples = _load_anyimg(tssrc)[0]
    for i, onset in enumerate([value2idx(e["onset"], ds_orig.sa.time_coords, "floor")
                               for e in evs]):
        assert_array_equal(ds.samples[i], origsamples[onset : onset + 4].ravel())
        assert_array_equal(ds.sa.time_indices[i], np.arange(onset, onset + 4))
        assert_array_equal(ds.sa.time_coords[i], np.arange(onset, onset + 4) * 2.5)
        for evattr in [a for a in ds.sa
                       if a.count("event_attrs") and not a.count("event_attrs_event")]:
            assert_array_equal(evs[i]["_".join(evattr.split("_")[2:])], ds.sa[evattr].value[i])
    # check offset: only the last one exactly matches the tr
    assert_array_equal(ds.sa.orig_offset, [1, 1, 0])

    # map back into voxel space, should ignore additional features
    nim = map2nifti(ds)
    # origsamples has t,x,y,z
    assert_equal(nim.get_shape(), origsamples.shape[1:] + (len(ds) * 4,))
    # check shape of a single sample
    nim = map2nifti(ds, ds.samples[0])
    # pynifti image has [t,]z,y,x
    assert_equal(nim.get_shape(), (40, 20, 1, 4))

    # and now with masking
    ds = fmri_dataset(tssrc, mask=masrc)
    ds = eventrelated_dataset(ds, evs, time_attr="time_coords")
    nnonzero = len(_load_anyimg(masrc)[0].nonzero()[0])
    assert_equal(nnonzero, 530)
    # we ask for boxcars of 9s length, and the tr in the file header says 2.5s
    # hence we should get round(9.0/2.5) * 530 == 2120 features
    assert_equal(ds.nfeatures, 4 * 530)
    assert_equal(len(ds), len(evs))
    # and they have been broadcasted through all boxcars
    assert_array_equal(ds.fa.voxel_indices[:nnonzero], ds.fa.voxel_indices[nnonzero : 2 * nnonzero])

Author: schoeke | Project: PyMVPA | Lines: 56 | Source: test_niftidataset.py
Example 5: test_nifti_mapper

def test_nifti_mapper(filename):
    """Basic testing of map2Nifti
    """
    skip_if_no_external('scipy')

    import nibabel
    data = fmri_dataset(samples=os.path.join(pymvpa_dataroot, 'example4d.nii.gz'),
                        targets=[1, 2])

    # test mapping of ndarray
    vol = map2nifti(data, np.ones((294912,), dtype='int16'))
    if externals.versions['nibabel'] >= '1.2':
        vol_shape = vol.shape
    else:
        vol_shape = vol.get_shape()
    assert_equal(vol_shape, (128, 96, 24))
    assert_true((vol.get_data() == 1).all())
    # test mapping of the dataset
    vol = map2nifti(data)
    if externals.versions['nibabel'] >= '1.2':
        vol_shape = vol.shape
    else:
        vol_shape = vol.get_shape()
    assert_equal(vol_shape, (128, 96, 24, 2))
    ok_(isinstance(vol, data.a.imgtype))

    # test providing custom imgtypes
    vol = map2nifti(data, imgtype=nibabel.Nifti1Pair)
    if externals.versions['nibabel'] >= '1.2':
        vol_shape = vol.shape
    else:
        vol_shape = vol.get_shape()
    ok_(isinstance(vol, nibabel.Nifti1Pair))

    # Lets generate a dataset using an alternative format (MINC)
    # and see if type persists
    volminc = nibabel.MincImage(vol.get_data(),
                                vol.get_affine(),
                                vol.get_header())
    ok_(isinstance(volminc, nibabel.MincImage))
    dsminc = fmri_dataset(volminc, targets=1)
    ok_(dsminc.a.imgtype is nibabel.MincImage)
    ok_(isinstance(dsminc.a.imghdr, nibabel.minc.MincImage.header_class))

    # Lets test if we could save/load now into Analyze volume/dataset
    if externals.versions['nibabel'] < '1.1.0':
        raise SkipTest('nibabel prior 1.1.0 had an issue with types comprehension')
    volanal = map2nifti(dsminc, imgtype=nibabel.AnalyzeImage)  # MINC has no 'save' capability
    ok_(isinstance(volanal, nibabel.AnalyzeImage))
    volanal.to_filename(filename)
    dsanal = fmri_dataset(filename, targets=1)
    # this one is tricky since it might become Spm2AnalyzeImage
    ok_('AnalyzeImage' in str(dsanal.a.imgtype))
    ok_('AnalyzeHeader' in str(dsanal.a.imghdr.__class__))
    volanal_ = map2nifti(dsanal)
    ok_(isinstance(volanal_, dsanal.a.imgtype))  # type got preserved

Author: Arthurkorn | Project: PyMVPA | Lines: 56 | Source: test_niftidataset.py
Example 6: setUp

def setUp(self):
    self.tmpdir = mkdtemp()
    data_ = fmri_dataset(datafn)
    datafn_hdf5 = pjoin(self.tmpdir, 'datain.hdf5')
    h5save(datafn_hdf5, data_)
    mask_ = fmri_dataset(maskfn)
    maskfn_hdf5 = pjoin(self.tmpdir, 'maskfn.hdf5')
    h5save(maskfn_hdf5, mask_)

    self.datafn = [datafn, datafn_hdf5]
    self.outfn = [pjoin(self.tmpdir, 'output') + ext
                  for ext in ['.nii.gz', '.nii', '.hdf5', '.h5']]
    self.maskfn = ['', maskfn, maskfn_hdf5]

Author: PyMVPA | Project: PyMVPA | Lines: 15 | Source: test_cmdline_ttest.py
Example 7: test_surface_voxel_query_engine

def test_surface_voxel_query_engine(self):
    vol_shape = (10, 10, 10, 1)
    vol_affine = np.identity(4)
    vol_affine[0, 0] = vol_affine[1, 1] = vol_affine[2, 2] = 5
    vg = volgeom.VolGeom(vol_shape, vol_affine)

    # make the surfaces
    sphere_density = 10
    outer = surf.generate_sphere(sphere_density) * 25. + 15
    inner = surf.generate_sphere(sphere_density) * 20. + 15
    vs = volsurf.VolSurfMaximalMapping(vg, inner, outer)
    radius = 10

    for fallback, expected_nfeatures in ((True, 1000), (False, 183)):
        voxsel = surf_voxel_selection.voxel_selection(vs, radius)
        qe = SurfaceVoxelsQueryEngine(voxsel, fallback_euclidian_distance=fallback)
        m = _Voxel_Count_Measure()
        sl = Searchlight(m, queryengine=qe)

        data = np.random.normal(size=vol_shape)
        img = nb.Nifti1Image(data, vol_affine)
        ds = fmri_dataset(img)
        sl_map = sl(ds)

        counts = sl_map.samples
        assert_true(np.all(np.logical_and(5 <= counts, counts <= 18)))
        assert_equal(sl_map.nfeatures, expected_nfeatures)

Author: armaneshaghi | Project: PyMVPA | Lines: 34 | Source: test_surfing_voxelselection.py
Example 8: prepare_subject_for_hyperalignment

def prepare_subject_for_hyperalignment(subject_label, bold_fname, mask_fname, out_dir):
    print('Loading data %s with mask %s' % (bold_fname, mask_fname))
    ds = fmri_dataset(samples=bold_fname, mask=mask_fname)
    zscore(ds, chunks_attr=None)
    out_fname = os.path.join(out_dir, 'sub-%s_data.hdf5' % subject_label)
    print('Saving to %s' % out_fname)
    h5save(out_fname, ds)

Author: BIDS-Apps | Project: hyperalignment | Lines: 7 | Source: run.py
Example 9: load_example_fmri_dataset

def load_example_fmri_dataset(name='1slice', literal=False):
    """Load minimal fMRI dataset that is shipped with PyMVPA."""
    from mvpa2.datasets.eventrelated import events2sample_attr
    from mvpa2.datasets.sources.openfmri import OpenFMRIDataset
    from mvpa2.datasets.mri import fmri_dataset
    from mvpa2.misc.io import SampleAttributes

    basedir = os.path.join(pymvpa_dataroot, 'openfmri')
    mask = {'1slice': os.path.join(pymvpa_dataroot, 'mask.nii.gz'),
            '25mm': os.path.join(basedir, 'sub001', 'masks', '25mm',
                                 'brain.nii.gz')}[name]

    if literal:
        model = 1
        subj = 1
        openfmri = OpenFMRIDataset(basedir)
        ds = openfmri.get_model_bold_dataset(model, subj, flavor=name,
                                             mask=mask, noinfolabel='rest')
        # re-imagine the global time_coords of a concatenated time series
        # this is only for the purpose of keeping the example data in the
        # exact same shape as it has always been. in absolute terms this makes no
        # sense as there is no continuous time in this dataset
        ds.sa['run_time_coords'] = ds.sa.time_coords
        ds.sa['time_coords'] = np.arange(len(ds)) * 2.5
    else:
        if name == '25mm':
            raise ValueError("The 25mm dataset is no longer available with "
                             "numerical labels")
        attr = SampleAttributes(os.path.join(pymvpa_dataroot, 'attributes.txt'))
        ds = fmri_dataset(samples=os.path.join(pymvpa_dataroot, 'bold.nii.gz'),
                          targets=attr.targets, chunks=attr.chunks,
                          mask=mask)
    return ds

Author: Arthurkorn | Project: PyMVPA | Lines: 34 | Source: data_generators.py
Example 10: nifti_to_dataset

def nifti_to_dataset(nifti_file, attr_file=None, annot_file=None, subject_id=None, session_id=None):
    logger.info("Loading fmri dataset: {}".format(nifti_file))
    ds = fmri_dataset(samples=nifti_file)

    if attr_file is not None:
        logger.info("Loading attributes: {}".format(attr_file))
        attr = ColumnData(attr_file)
        valid = min(ds.nsamples, attr.nrows)
        valid = int(valid / 180) * 180  # FIXME: ...
        print valid
        ds = ds[:valid, :]
        for k in attr.keys():
            ds.sa[k] = attr[k][:valid]

    if annot_file is not None:
        logger.info("Loading annotation: {}".format(annot_file))
        annot = nibabel.freesurfer.io.read_annot(annot_file)
        ds.fa["annotation"] = [annot[2][i] for i in annot[0]]  # FIXME: roi cannot be a fa

    if subject_id is not None:
        ds.sa["subject_id"] = [subject_id] * ds.nsamples

    if session_id is not None:
        ds.sa["session_id"] = [session_id] * ds.nsamples

    return ds

Author: afloren | Project: neurometrics | Lines: 27 | Source: ANOVA.py
Example 11: test_fmridataset

def test_fmridataset():
    # full-blown fmri dataset testing
    import nibabel
    maskimg = nibabel.load(os.path.join(pymvpa_dataroot, 'mask.nii.gz'))
    data = maskimg.get_data().copy()
    data[data > 0] = np.arange(1, np.sum(data) + 1)
    maskimg = nibabel.Nifti1Image(data, None, maskimg.get_header())
    ds = fmri_dataset(samples=os.path.join(pymvpa_dataroot, 'bold.nii.gz'),
                      mask=maskimg,
                      sprefix='subj1',
                      add_fa={'myintmask': maskimg})
    # content
    assert_equal(len(ds), 1452)
    assert_true(ds.nfeatures, 530)
    assert_array_equal(sorted(ds.sa.keys()),
                       ['time_coords', 'time_indices'])
    assert_array_equal(sorted(ds.fa.keys()),
                       ['myintmask', 'subj1_indices'])
    assert_array_equal(sorted(ds.a.keys()),
                       ['imghdr', 'imgtype', 'mapper', 'subj1_dim', 'subj1_eldim'])
    # vol extent
    assert_equal(ds.a.subj1_dim, (40, 20, 1))
    # check time
    assert_equal(ds.sa.time_coords[-1], 3627.5)
    # non-zero mask values
    assert_array_equal(ds.fa.myintmask, np.arange(1, ds.nfeatures + 1))
    # we know that imgtype must be:
    ok_(ds.a.imgtype is nibabel.Nifti1Image)

Author: Arthurkorn | Project: PyMVPA | Lines: 28 | Source: test_niftidataset.py
Example 12: test_volgeom_masking

def test_volgeom_masking(self):
    maskstep = 5
    vg = volgeom.VolGeom((2 * maskstep, 2 * maskstep, 2 * maskstep), np.identity(4))

    mask = vg.get_empty_array()
    sh = vg.shape

    # mask a subset of the voxels
    rng = range(0, sh[0], maskstep)
    for i in rng:
        for j in rng:
            for k in rng:
                mask[i, j, k] = 1

    # make a new volgeom instance
    vg = volgeom.VolGeom(vg.shape, vg.affine, mask)

    data = vg.get_masked_nifti_image(nt=1)
    msk = vg.get_masked_nifti_image()
    dset = fmri_dataset(data, mask=msk)
    vg_dset = volgeom.from_any(dset)

    # ensure that the mask is set properly and
    assert_equal(vg.nvoxels, vg.nvoxels_mask * maskstep ** 3)
    assert_equal(vg_dset, vg)

    dilates = range(0, 8, 2)
    nvoxels_masks = []  # keep track of number of voxels for each size
    for dilate in dilates:
        covers_full_volume = dilate * 2 >= maskstep * 3 ** .5 + 1

        # constr gets values: None, Sphere(0), 2, Sphere(2), ...
        for i, constr in enumerate([Sphere, lambda x: x if x else None]):
            dilater = constr(dilate)

            img_dilated = vg.get_masked_nifti_image(dilate=dilater)
            data = img_dilated.get_data()

            assert_array_equal(data, vg.get_masked_array(dilate=dilater))
            n = np.sum(data)
            # number of voxels in mask is increasing
            assert_true(all(n >= p for p in nvoxels_masks))

            # results should be identical irrespective of constr
            if i == 0:
                # - first call with this value of dilate: has to be more
                #   voxels than very previous dilation value, unless the
                #   full volume is covered - then it can be equal too
                # - every next call: ensure size matches
                cmp = lambda x, y: (x >= y if covers_full_volume else x > y)
                assert_true(all(cmp(n, p) for p in nvoxels_masks))
                nvoxels_masks.append(n)
            else:
                # same size as previous call
                assert_equal(n, nvoxels_masks[-1])

            # if dilate is not None or zero, then it should
            # have selected all the voxels if the radius is big enough
            assert_equal(np.sum(data) == vg.nvoxels, covers_full_volume)

Author: Arthurkorn | Project: PyMVPA | Lines: 60 | Source: test_surfing.py
Example 13: test_fmri_to_cosmo

def test_fmri_to_cosmo():
    skip_if_no_external('nibabel')
    from mvpa2.datasets.mri import fmri_dataset
    # test exporting an fMRI dataset to CoSMoMVPA
    pymvpa_ds = fmri_dataset(
        samples=pathjoin(pymvpa_dataroot, 'example4d.nii.gz'),
        targets=[1, 2], sprefix='voxel')
    cosmomvpa_struct = cosmo.map2cosmo(pymvpa_ds)
    _assert_set_equal(cosmomvpa_struct.keys(), ['a', 'fa', 'sa', 'samples'])

    a_dict = dict(_obj2tup(cosmomvpa_struct['a']))
    mri_keys = ['imgaffine', 'voxel_eldim', 'voxel_dim']
    _assert_subset(mri_keys, a_dict.keys())

    for k in mri_keys:
        c_value = a_dict[k]
        p_value = pymvpa_ds.a[k].value

        if isinstance(p_value, tuple):
            c_value = c_value.ravel()
            p_value = np.asarray(p_value).ravel()

        assert_array_almost_equal(c_value, p_value)

    fa_dict = dict(_obj2tup(cosmomvpa_struct['fa']))
    fa_keys = ['voxel_indices']
    _assert_set_equal(fa_dict.keys(), fa_keys)
    for k in fa_keys:
        assert_array_almost_equal(fa_dict[k].T, pymvpa_ds.fa[k].value)

Author: Anhmike | Project: PyMVPA | Lines: 29 | Source: test_cosmo.py
Example 14: test_queryengine_io

def test_queryengine_io(self, fn):
    skip_if_no_external("h5py")
    from mvpa2.base.hdf5 import h5save, h5load

    vol_shape = (10, 10, 10, 1)
    vol_affine = np.identity(4)
    vg = volgeom.VolGeom(vol_shape, vol_affine)

    # generate some surfaces,
    # and add some noise to them
    sphere_density = 10
    outer = surf.generate_sphere(sphere_density) * 5 + 8
    inner = surf.generate_sphere(sphere_density) * 3 + 8
    radius = 5.0

    add_fa = ["center_distances", "grey_matter_position"]
    qe = disc_surface_queryengine(radius, vg, inner, outer, add_fa=add_fa)
    ds = fmri_dataset(vg.get_masked_nifti_image())

    # the following is not really a strong requirement. XXX remove?
    assert_raises(ValueError, lambda: qe[qe.ids[0]])

    # check that after training it behaves well
    qe.train(ds)
    i = qe.ids[0]
    try:
        m = qe[i]
    except ValueError, e:
        raise AssertionError(
            "Failed to query %r from %r after training on %r. "
            "Exception was: %r" % (i, qe, ds, e)
        )

Author: beausievers | Project: PyMVPA | Lines: 31 | Source: test_surfing_voxelselection.py
Example 15: load_example_fmri_dataset

def load_example_fmri_dataset(name="1slice", literal=False):
    """Load minimal fMRI dataset that is shipped with PyMVPA."""
    from mvpa2.datasets.sources.openfmri import OpenFMRIDataset
    from mvpa2.datasets.mri import fmri_dataset
    from mvpa2.misc.io import SampleAttributes

    basedir = op.join(pymvpa_dataroot, "haxby2001")
    mask = {
        "1slice": op.join(pymvpa_dataroot, "mask.nii.gz"),
        "25mm": op.join(basedir, "sub001", "masks", "25mm", "brain.nii.gz"),
    }[name]

    if literal:
        model = 1
        subj = 1
        openfmri = OpenFMRIDataset(basedir)
        ds = openfmri.get_model_bold_dataset(model, subj, flavor=name, mask=mask, noinfolabel="rest")
        # re-imagine the global time_coords of a concatenated time series
        # this is only for the purpose of keeping the example data in the
        # exact same shape as it has always been. in absolute terms this makes no
        # sense as there is no continuous time in this dataset
        ds.sa["run_time_coords"] = ds.sa.time_coords
        ds.sa["time_coords"] = np.arange(len(ds)) * 2.5
    else:
        if name == "25mm":
            raise ValueError("The 25mm dataset is no longer available with "
                             "numerical labels")
        attr = SampleAttributes(op.join(pymvpa_dataroot, "attributes.txt"))
        ds = fmri_dataset(
            samples=op.join(pymvpa_dataroot, "bold.nii.gz"), targets=attr.targets, chunks=attr.chunks, mask=mask
        )
    return ds

Author: hwd15508 | Project: nidata | Lines: 32 | Source: native.py
Example 16: test_fmridataset

def test_fmridataset():
    # full-blown fmri dataset testing
    import nibabel
    maskimg = nibabel.load(os.path.join(pymvpa_dataroot, "mask.nii.gz"))
    data = maskimg.get_data().copy()
    data[data > 0] = np.arange(1, np.sum(data) + 1)
    maskimg = nibabel.Nifti1Image(data, None, maskimg.get_header())
    attr = SampleAttributes(os.path.join(pymvpa_dataroot, "attributes.txt"))
    ds = fmri_dataset(
        samples=os.path.join(pymvpa_dataroot, "bold.nii.gz"),
        targets=attr.targets,
        chunks=attr.chunks,
        mask=maskimg,
        sprefix="subj1",
        add_fa={"myintmask": maskimg},
    )
    # content
    assert_equal(len(ds), 1452)
    assert_true(ds.nfeatures, 530)
    assert_array_equal(sorted(ds.sa.keys()), ["chunks", "targets", "time_coords", "time_indices"])
    assert_array_equal(sorted(ds.fa.keys()), ["myintmask", "subj1_indices"])
    assert_array_equal(sorted(ds.a.keys()), ["imghdr", "imgtype", "mapper", "subj1_dim", "subj1_eldim"])
    # vol extent
    assert_equal(ds.a.subj1_dim, (40, 20, 1))
    # check time
    assert_equal(ds.sa.time_coords[-1], 3627.5)
    # non-zero mask values
    assert_array_equal(ds.fa.myintmask, np.arange(1, ds.nfeatures + 1))
    # we know that imgtype must be:
    ok_(ds.a.imgtype is nibabel.Nifti1Image)

Author: schoeke | Project: PyMVPA | Lines: 31 | Source: test_niftidataset.py
Example 17: load_dot

def load_dot():
    """
    load dot file
    """
    filename = raw_input("dot>>>")
    maskfile = raw_input("mask>>>")
    print "load data:"
    data = np.loadtxt(filename)
    print data
    print "load mask:"
    seed_set = fmri_dataset(samples=maskfile, mask=maskfile)
    seed = seed_set.copy(sa=[])
    print seed
    sparse_set = csc_matrix((data[:, 2], (data[:, 0] - 1, data[:, 1] - 1)))
    seed.samples = sparse_set.T.todense()
    print seed.samples.shape
    print seed.a
    print seed.sa
    print seed.fa
    seed.save(filename.replace('.dot', '.T.hdf5'))
    return 0

Author: BloodD | Project: my-utils | Lines: 27 | Source: dot2hdf5.py
Example 18: test_4d_mask

def test_4d_mask():
    """Test masking with 4D datasets
    If the time dimension has length 1, permit, otherwise fail"""
    import nibabel
    bold = pathjoin(pymvpa_dataroot, 'bold.nii.gz')
    mask = pathjoin(pymvpa_dataroot, 'mask.nii.gz')
    # mask4d.nii.gz is simply mask.nii.gz with an extra dimension added
    mask4d = pathjoin(pymvpa_dataroot, 'mask4d.nii.gz')
    # mask4dfail.nii.gz is mask.nii.gz copied twice in the 4th dimension
    mask4df = pathjoin(pymvpa_dataroot, 'mask4dfail.nii.gz')
    assert_equal(nibabel.load(mask).shape + (1,),
                 nibabel.load(mask4d).shape)
    bold1 = fmri_dataset(bold, mask=mask)
    bold2 = fmri_dataset(bold, mask=mask4d)
    assert_equal(bold1.shape, bold2.shape)
    assert_raises(ValueError, fmri_dataset, bold, mask=mask4df)

Author: PyMVPA | Project: PyMVPA | Lines: 18 | Source: test_niftidataset.py
Example 19: load_run

def load_run(runstring):
    ds = fmri_dataset(samples=os.path.join(root, subject, 'BOLD', runstring, 'bold.nii.gz'),
                      mask=mask)
    task, run = extract_task_and_run(runstring)
    ds.sa['chunks'] = np.empty(len(ds))
    ds.sa.chunks.fill(run)
    ds.sa['task'] = np.empty(len(ds))
    ds.sa.task.fill(task)
    return ds

Author: mart1nl | Project: neuroscience | Lines: 9 | Source: openfmri.py
Example 20: test_niml_dset_voxsel

def test_niml_dset_voxsel(self, fn):
    if not externals.exists('nibabel'):
        return

    # This is actually a bit of an integration test.
    # It tests storing and retrieving searchlight results.
    # Imports are inline here so that it does not mess up the header
    # and makes the other unit tests more modular
    # XXX put this in a separate file?
    from mvpa2.misc.surfing import volgeom, surf_voxel_selection, queryengine
    from mvpa2.measures.searchlight import Searchlight
    from mvpa2.support.nibabel import surf
    from mvpa2.measures.base import Measure
    from mvpa2.datasets.mri import fmri_dataset

    class _Voxel_Count_Measure(Measure):
        # used to check voxel selection results
        is_trained = True

        def __init__(self, dtype, **kwargs):
            Measure.__init__(self, **kwargs)
            self.dtype = dtype

        def _call(self, dset):
            return self.dtype(dset.nfeatures)

    sh = (20, 20, 20)
    vg = volgeom.VolGeom(sh, np.identity(4))

    density = 20
    outer = surf.generate_sphere(density) * 10. + 5
    inner = surf.generate_sphere(density) * 5. + 5
    intermediate = outer * .5 + inner * .5
    xyz = intermediate.vertices

    radius = 50
    sel = surf_voxel_selection.run_voxel_selection(radius, vg, inner, outer)
    qe = queryengine.SurfaceVerticesQueryEngine(sel)

    for dtype in (int, float):
        sl = Searchlight(_Voxel_Count_Measure(dtype), queryengine=qe)

        ds = fmri_dataset(vg.get_empty_nifti_image(1))
        r = sl(ds)

        niml.write(fn, r)
        rr = niml.read(fn)
        os.remove(fn)

        assert_array_equal(r.samples, rr.samples)

Author: Anhmike | Project: PyMVPA | Lines: 56 | Source: test_surfing_afni.py
Note: the mvpa2.datasets.mri.fmri_dataset examples in this article were compiled by 纯净天空 from source-code and documentation hosting platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by their respective authors, and copyright of the source code remains with the original authors; consult each project's license before redistributing or reusing the code. Please do not reproduce this article without permission.