本文整理汇总了Python中mvpa.base.externals.exists函数的典型用法代码示例。如果您正苦于以下问题:Python exists函数的具体用法?Python exists怎么用?Python exists使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了exists函数的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。
示例1: plot
def plot(self, label_index=0):
    """Plot ROC curves for the given label.

    Draws every per-split ROC curve for ``label_index`` into the current
    pylab axes, together with the chance-level diagonal, and titles the
    plot with the mean AUC for that label.

    TODO: make it friendly to labels given by values?
          should we also treat labels_map?
    """
    externals.exists("pylab", raise_=True)
    import pylab as pl

    self._compute()
    # select only rocs for the given label
    rocs = self.rocs[label_index]
    # touch current figure/axes so they exist before drawing
    # (original bound these to unused locals `fig`/`ax`)
    pl.gcf()
    pl.gca()
    # chance-level performance is the dotted diagonal
    pl.plot([0, 1], [0, 1], 'k:')
    for roc in rocs:
        pl.plot(roc.fp, roc.tp, linewidth=1)
    pl.axis((0.0, 1.0, 0.0, 1.0))
    pl.axis('scaled')
    pl.title('Label %s. Mean AUC=%.2f' % (label_index, self.aucs[label_index]))
    pl.xlabel('False positive rate')
    pl.ylabel('True positive rate')
开发者ID:B-Rich,项目名称:PyMVPA,代码行数:29,代码来源:transerror.py
示例2: _postcall
def _postcall(self, dataset, result):
    """Some postprocessing on the result.

    Applies the optional transformer to ``result`` and, when a null
    distribution estimator was configured, computes the probability of
    the (transformed) result under the null hypothesis; optionally also
    converts it into z-scores stored in ``null_t``.
    """
    self.raw_result = result
    if self.__transformer is not None:
        if __debug__:
            debug("SA_", "Applying transformer %s" % self.__transformer)
        result = self.__transformer(result)

    # estimate the NULL distribution when functor is given
    if self.__null_dist is not None:
        if __debug__:
            debug("SA_", "Estimating NULL distribution using %s"
                  % self.__null_dist)
        # we need a matching datameasure instance, but we have to disable
        # the estimation of the null distribution in that child to prevent
        # infinite looping.
        measure = copy.copy(self)
        measure.__null_dist = None
        self.__null_dist.fit(measure, dataset)

        if self.states.isEnabled('null_t'):
            # get probability under NULL hyp, but also request
            # either it belong to the right tail
            null_prob, null_right_tail = \
                self.__null_dist.p(result, return_tails=True)
            self.null_prob = null_prob

            externals.exists('scipy', raiseException=True)
            from scipy.stats import norm

            # TODO: following logic should appear in NullDist,
            # not here
            # NOTE(review): reads self.null_dist (public name) while the
            # rest of this method uses the mangled self.__null_dist --
            # presumably a property exposing it; verify in the class.
            tail = self.null_dist.tail
            if tail == 'left':
                acdf = N.abs(null_prob)
            elif tail == 'right':
                acdf = 1.0 - N.abs(null_prob)
            elif tail in ['any', 'both']:
                acdf = 1.0 - N.clip(N.abs(null_prob), 0, 0.5)
            else:
                # py3-compatible raise (was py2 comma syntax)
                raise RuntimeError('Unhandled tail %s' % tail)

            # We need to clip to avoid non-informative inf's ;-)
            # that happens due to lack of precision in mantissa
            # which is 11 bits in double. We could clip values
            # around 0 at as low as 1e-100 (correspond to z~=21),
            # but for consistency lets clip at 1e-16 which leads
            # to distinguishable value around p=1 and max z=8.2.
            # Should be sufficient range of z-values ;-)
            clip = 1e-16
            null_t = norm.ppf(N.clip(acdf, clip, 1.0 - clip))
            null_t[~null_right_tail] *= -1.0  # revert sign for negatives
            self.null_t = null_t  # store
        else:
            # get probability of result under NULL hypothesis if available
            # and don't request tail information
            self.null_prob = self.__null_dist.p(result)
    return result
开发者ID:gorlins,项目名称:PyMVPA,代码行数:60,代码来源:base.py
示例3: __init__
def __init__(self, sd=0, distribution='rdist', fpp=None, nbins=400, **kwargs):
    """L2-Norm the values, convert them to p-values of a given distribution.

    Parameters
    ----------
    sd : int
      Samples dimension (if len(x.shape)>1) on which to operate
    distribution : string
      Which distribution to use. Known are: 'rdist' (later normal should
      be there as well)
    fpp : float
      At what p-value (both tails) if not None, to control for false
      positives. It would iteratively prune the tails (tentative real
      positives) until empirical p-value becomes less or equal to numerical.
    nbins : int
      Number of bins for the iterative pruning of positives

    WARNING: Highly experimental/slow/etc: no theoretical grounds have been
    presented in any paper, nor proven
    """
    # scipy backs the distribution machinery -- fail early if missing
    externals.exists('scipy', raise_=True)
    ClassWithCollections.__init__(self, **kwargs)

    self.sd = sd
    if distribution not in ['rdist']:
        # py3-compatible raise (was py2 comma syntax)
        raise ValueError("Actually only rdist supported at the moment"
                         " got %s" % distribution)
    self.distribution = distribution
    self.fpp = fpp
    self.nbins = nbins
开发者ID:esc,项目名称:PyMVPA,代码行数:30,代码来源:transformers.py
示例4: __init__
def __init__(self, source):
    """Reader MEG data from texfiles or file-like objects.

    Parameters
    ----------
    source : str or file-like
      Strings are assumed to be filenames (with `.gz` suffix
      compressed), while all other object types are treated as file-like
      objects.
    """
    self.ntimepoints = None
    self.timepoints = None
    self.nsamples = None
    self.channelids = []
    self.data = []
    self.samplingrate = None

    # open textfiles
    if isinstance(source, str):
        if source.endswith('.gz'):
            externals.exists('gzip', raise_=True)
            import gzip
            source = gzip.open(source, 'r')
        else:
            source = open(source, 'r')

    # read file
    for line in source:
        # split ID
        colon = line.find(':')
        # ignore lines without id
        if colon == -1:
            continue
        # renamed from `id` to avoid shadowing the builtin
        line_id = line[:colon]
        data = line[colon + 1:].strip()
        if line_id == 'Sample Number':
            timepoints = np.fromstring(data, dtype=int, sep='\t')
            # one more as it starts with zero
            self.ntimepoints = int(timepoints.max()) + 1
            self.nsamples = int(len(timepoints) / self.ntimepoints)
        elif line_id == 'Time':
            self.timepoints = np.fromstring(data,
                                            dtype=float,
                                            count=self.ntimepoints,
                                            sep='\t')
            self.samplingrate = self.ntimepoints \
                / (self.timepoints[-1] - self.timepoints[0])
        else:
            # load data
            self.data.append(
                np.fromstring(data, dtype=float, sep='\t').reshape(
                    self.nsamples, self.ntimepoints))
            # store id
            self.channelids.append(line_id)

    # reshape data from (channels x samples x timepoints) to
    # (samples x chanels x timepoints)
    self.data = np.swapaxes(np.array(self.data), 0, 1)
开发者ID:B-Rich,项目名称:PyMVPA,代码行数:60,代码来源:meg.py
示例5: _data2img
def _data2img(data, hdr=None, imgtype=None):
    """Convert a (t,x,y,z) data array into an MRI image instance.

    Tries the nibabel backend first, then pynifti; raises RuntimeError
    when neither backend can deal with the given header/image type.
    """
    # input data is t,x,y,z
    if externals.exists("nibabel"):
        # let's try whether we can get it done with nibabel
        import nibabel
        # NIfTI1 is the default image type
        img_cls = nibabel.Nifti1Image if imgtype is None else imgtype
        if issubclass(img_cls, nibabel.spatialimages.SpatialImage) \
           and (hdr is None or hasattr(hdr, "get_data_dtype")):
            # we can handle the desired image type and hdr with nibabel
            # use of `None` for the affine should cause to pull it from
            # the header
            return img_cls(_get_xyzt_shaped(data), None, hdr)
        # otherwise continue and see if there is hope ....
    if externals.exists("nifti"):
        # maybe pynifti can help
        import nifti
        img_cls = nifti.NiftiImage if imgtype is None else imgtype
        if issubclass(img_cls, nifti.NiftiImage) \
           and (hdr is None or isinstance(hdr, dict)):
            # pynifti wants it transposed
            return img_cls(_get_xyzt_shaped(data).T, hdr)
    raise RuntimeError(
        "Cannot convert data to an MRI image "
        "(backends: nibabel(%s), pynifti(%s). Got hdr='%s', "
        "imgtype='%s'." % (externals.exists("nibabel"),
                           externals.exists("nifti"), hdr, imgtype)
    )
开发者ID:arokem,项目名称:PyMVPA,代码行数:34,代码来源:mri.py
示例6: testExternalsCorrect2ndInvocation
def testExternalsCorrect2ndInvocation(self):
    """A checker that always fails must report False on every query."""
    # register a checker whose check unconditionally raises
    externals._KNOWN["checker2"] = "raise ImportError"
    for msg in ("Should be False on 1st invocation",
                "Should be False on 2nd invocation as well"):
        self.failUnless(not externals.exists("checker2"), msg=msg)
    # drop the fake checker from the registry again
    externals._KNOWN.pop("checker2")
开发者ID:gorlins,项目名称:PyMVPA,代码行数:9,代码来源:test_externals.py
示例7: plot
def plot(self):
    """Plot correlation coefficients of the sequence."""
    externals.exists('pylab', raise_=True)
    import pylab as pl
    # draw the auto-correlation curve
    coefficients = self['corrcoef']
    pl.plot(coefficients)
    # annotate the figure
    for annotate, text in ((pl.title, 'Auto-correlation of the sequence'),
                           (pl.xlabel, 'Offset'),
                           (pl.ylabel, 'Correlation Coefficient')):
        annotate(text)
    pl.show()
开发者ID:arokem,项目名称:PyMVPA,代码行数:10,代码来源:miscfx.py
示例8: test_externals_correct2nd_invocation
def test_externals_correct2nd_invocation(self):
    """An always-failing checker stays False across repeated queries."""
    # install a checker whose check string unconditionally raises
    externals._KNOWN['checker2'] = 'raise ImportError'
    probe = lambda: externals.exists('checker2')
    self.failUnless(not probe(),
                    msg="Should be False on 1st invocation")
    self.failUnless(not probe(),
                    msg="Should be False on 2nd invocation as well")
    # restore the registry
    externals._KNOWN.pop('checker2')
开发者ID:B-Rich,项目名称:PyMVPA,代码行数:11,代码来源:test_externals.py
示例9: testExternalsNoDoubleInvocation
def testExternalsNoDoubleInvocation(self):
    """No external should be checked twice unless force=True is passed."""
    class Checker(object):
        """Helper class to increment count of actual checks"""
        def __init__(self):
            self.checked = 0
        def check(self):
            self.checked += 1

    counter = Checker()
    # the check string is evaluated in the externals namespace, so the
    # object must be bound there under the name "checker"
    externals._KNOWN["checker"] = "checker.check()"
    externals.__dict__["checker"] = counter

    # (force flag, expected cumulative number of real checks)
    scenario = ((False, 1), (False, 1), (True, 2), (False, 2))
    for force, expected in scenario:
        externals.exists("checker", force=force)
        self.failUnlessEqual(counter.checked, expected)

    # restore original externals
    externals.__dict__.pop("checker")
    externals._KNOWN.pop("checker")
开发者ID:gorlins,项目名称:PyMVPA,代码行数:29,代码来源:test_externals.py
示例10: _reverse
def _reverse(self, data):
    """Reverse-map ``data`` from wavelet-packet space back to signal space.

    Raises
    ------
    NotImplementedError
      When no decomposition level was configured, or the available pywt
      is too old to support single-level reconstruction.
    """
    if __debug__:
        debug('MAP', "Converting signal back using DWP")

    if self.__level is None:
        raise NotImplementedError
    else:
        if not externals.exists('pywt wp reconstruct'):
            # py3-compatible raise (was py2 comma syntax)
            raise NotImplementedError(
                "Reconstruction for a single level for versions of "
                "pywt < 0.1.7 (revision 103) is not supported")
        if not externals.exists('pywt wp reconstruct fixed'):
            warning("Reconstruction using available version of pywt might "
                    "result in incorrect data in the tails of the signal")
        return self.__reverseSingleLevel(data)
开发者ID:gorlins,项目名称:PyMVPA,代码行数:15,代码来源:wavelet.py
示例11: test_chi_square_searchlight
def test_chi_square_searchlight(self):
    """Small searchlight run with a chi-square measure on the confusion."""
    # only do partial to save time
    # Can't yet do this since test_searchlight isn't yet "under nose"
    #skip_if_no_external('scipy')
    if not externals.exists('scipy'):
        return
    from mvpa.misc.stats import chisquare

    cv = CrossValidatedTransferError(
        TransferError(sample_clf_lin),
        NFoldSplitter(cvtype=1),
        enable_ca=['confusion'])

    def getconfusion(data):
        # run cross-validation and score its confusion matrix
        cv(data)
        return chisquare(cv.ca.confusion.matrix)[0]

    sl = sphere_searchlight(getconfusion, radius=0,
                            center_ids=[3, 50])
    # run searchlight
    results = sl(self.dataset)
    self.failUnless(results.nfeatures == 2)
开发者ID:arokem,项目名称:PyMVPA,代码行数:27,代码来源:test_searchlight.py
示例12: skip_if_no_external
def skip_if_no_external(dep, ver_dep=None, min_version=None, max_version=None):
    """Raise SkipTest if external is missing

    Parameters
    ----------
    dep : string
      Name of the external
    ver_dep : string, optional
      If for version checking use some different key, e.g. shogun:rev.
      If not specified, `dep` will be used.
    min_version : None or string or tuple
      Minimal required version
    max_version : None or string or tuple
      Maximal required version
    """
    if not externals.exists(dep):
        # py3-compatible raise (was py2 comma syntax)
        raise SkipTest(
            "External %s is not present thus tests battery skipped" % dep)

    if ver_dep is None:
        ver_dep = dep

    if min_version is not None and externals.versions[ver_dep] < min_version:
        raise SkipTest(
            "Minimal version %s of %s is required. Present version is %s"
            ". Test was skipped."
            % (min_version, ver_dep, externals.versions[ver_dep]))

    if max_version is not None and externals.versions[ver_dep] > max_version:
        # BUG FIX: this message previously interpolated min_version
        raise SkipTest(
            "Maximal version %s of %s is required. Present version is %s"
            ". Test was skipped."
            % (max_version, ver_dep, externals.versions[ver_dep]))
开发者ID:arokem,项目名称:PyMVPA,代码行数:34,代码来源:tools.py
示例13: testResampling
def testResampling(self):
    """Exercise EEPDataset resampling, both copying and in-place."""
    ds = EEPDataset(os.path.join(pymvpa_dataroot, 'eep.bin'),
                    labels=[1, 2], labels_map={1: 100, 2: 101})
    channelids = N.array(ds.channelids).copy()
    self.failUnless(N.round(ds.samplingrate) == 500.0)

    if not externals.exists('scipy'):
        return

    # should puke when called with nothing
    self.failUnlessRaises(ValueError, ds.resample)

    # now for real -- should divide nsamples into half
    rds = ds.resample(sr=250, inplace=False)
    # We should have not changed anything
    self.failUnless(N.round(ds.samplingrate) == 500.0)

    # by default do 'inplace' resampling
    ds.resample(sr=250)
    for d in (rds, ds):
        self.failUnless(N.round(d.samplingrate) == 250)
        self.failUnless(d.nsamples == 2)
        self.failUnless(N.abs((d.dt - 1.0 / 250) / d.dt) < 1e-5)
        self.failUnless(N.all(d.channelids == channelids))
        # lets now see if we still have a mapper
        self.failUnless(d.O.shape == (2, len(channelids), 2))
        # and labels_map
        self.failUnlessEqual(d.labels_map, {1: 100, 2: 101})
开发者ID:gorlins,项目名称:PyMVPA,代码行数:28,代码来源:test_eepdataset.py
示例14: test_dist_p_value
def test_dist_p_value(self):
    """Basic testing of DistPValue"""
    if not externals.exists('scipy'):
        return

    ndb = 200  # number of "null" (background) features
    ndu = 20   # number of injected true-positive features
    nperd = 2

    # Lets generate already normed data (on sphere) and add some nonbogus features
    datau = (np.random.normal(size=(nperd, ndb)))
    dist = np.sqrt((datau * datau).sum(axis=1))
    datas = (datau.T / dist.T).T
    datax = datas[0, :]
    dataxmax = np.max(np.abs(datax))

    # now lets add true positive features
    # (// keeps integer semantics identical under py2 and py3)
    tp = [-dataxmax * 1.1] * (ndu // 2) + [dataxmax * 1.1] * (ndu // 2)
    x = np.hstack((datax, tp))
    # lets add just pure normal to it
    x = np.vstack((x, np.random.normal(size=x.shape))).T

    for distPValue in (DistPValue(), DistPValue(fpp=0.05)):
        result = distPValue(x)
        # BUG FIX: `.all` was missing the call parentheses, which made
        # these assertions vacuously true (a bound method is always truthy)
        self.failUnless((result >= 0).all())
        self.failUnless((result <= 1).all())

        if cfg.getboolean('tests', 'labile', default='yes'):
            self.failUnless(distPValue.ca.positives_recovered[0] > 10)
            self.failUnless((np.array(distPValue.ca.positives_recovered) +
                             np.array(distPValue.ca.nulldist_number)
                             == ndb + ndu).all())
            self.failUnlessEqual(distPValue.ca.positives_recovered[1], 0)
开发者ID:esc,项目名称:PyMVPA,代码行数:34,代码来源:test_transformers.py
示例15: testChiSquareSearchlight
def testChiSquareSearchlight(self):
    """Small chi-square searchlight run (kept partial for speed)."""
    # only do partial to save time
    if not externals.exists('scipy'):
        return
    from mvpa.misc.stats import chisquare

    transerror = TransferError(sample_clf_lin)
    cv = CrossValidatedTransferError(transerror,
                                     NFoldSplitter(cvtype=1),
                                     enable_states=['confusion'])

    def getconfusion(data):
        # cross-validate, then score the resulting confusion matrix
        cv(data)
        return chisquare(cv.confusion.matrix)[0]

    # contruct radius 1 searchlight
    sl = Searchlight(getconfusion, radius=1.0, center_ids=[3, 50])
    # run searchlight
    results = sl(self.dataset)
    self.failUnless(len(results) == 2)
示例16: __init__
def __init__(self, queryengine, roi_ids=None, nproc=None, **kwargs):
    """
    Parameters
    ----------
    queryengine : QueryEngine
      Engine to use to discover the "neighborhood" of each feature.
      See :class:`~mvpa.misc.neighborhood.QueryEngine`.
    roi_ids : None or list(int) or str
      List of feature ids (not coordinates) the shall serve as ROI seeds
      (e.g. sphere centers). Alternatively, this can be the name of a
      feature attribute of the input dataset, whose non-zero values
      determine the feature ids. By default all features will be used.
    nproc : None or int
      How many processes to use for computation. Requires `pprocess`
      external module. If None -- all available cores will be used.
    **kwargs
      In addition this class supports all keyword arguments of its
      base-class :class:`~mvpa.measures.base.Measure`.
    """
    Measure.__init__(self, **kwargs)

    # explicit None guard: `None > 1` raises TypeError on py3
    # (and was False on py2, so behavior is unchanged there)
    if nproc is not None and nproc > 1 \
       and not externals.exists('pprocess'):
        raise RuntimeError("The 'pprocess' module is required for "
                           "multiprocess searchlights. Please either "
                           "install python-pprocess, or reduce `nproc` "
                           "to 1 (got nproc=%i)" % nproc)

    self._queryengine = queryengine
    if roi_ids is not None and not isinstance(roi_ids, str) \
            and not len(roi_ids):
        # py3-compatible raise (was py2 comma syntax)
        raise ValueError(
            "Cannot run searchlight on an empty list of roi_ids")
    self.__roi_ids = roi_ids
    self.nproc = nproc
示例17: save
def save(dataset, destination, name=None, compression=None):
    """Save Dataset into HDF5 file

    Parameters
    ----------
    dataset : `Dataset`
    destination : `h5py.highlevel.File` or str
    name : str, optional
    compression : None or int or {'gzip', 'szip', 'lzf'}, optional
      Level of compression for gzip, or another compression strategy.
    """
    if not externals.exists('h5py'):
        raise RuntimeError("Missing 'h5py' package -- saving is not possible.")

    import h5py
    from mvpa.base.hdf5 import obj2hdf

    # reuse an already-open HDF5 file handle; otherwise open one
    # ourselves (and remember to close it below)
    own_file = not isinstance(destination, h5py.highlevel.File)
    hdf = h5py.File(destination, 'w') if own_file else destination

    obj2hdf(hdf, dataset, name, compression=compression)

    # if we opened the file ourselves we close it now
    if own_file:
        hdf.close()
开发者ID:B-Rich,项目名称:PyMVPA,代码行数:31,代码来源:dataset.py
示例18: testBinds
def testBinds(self):
    """Dataset helpers: some mutate the dataset in place, others return."""
    ds = normalFeatureDataset()
    ds_data = ds.samples.copy()
    ds_chunks = ds.chunks.copy()
    self.failUnless(N.all(ds.samples == ds_data))  # sanity check

    funcs = ['zscore', 'coarsenChunks']
    if externals.exists('scipy'):
        funcs.append('detrend')

    for f in funcs:
        # getattr is the idiomatic, safer replacement for the original
        # eval('ds.%s()' % f)
        getattr(ds, f)()
        self.failUnless(N.any(ds.samples != ds_data) or
                        N.any(ds.chunks != ds_chunks),
                        msg="We should have modified original dataset with %s" % f)
        ds.samples = ds_data.copy()
        ds.chunks = ds_chunks.copy()

    # and some which should just return results
    for f in ['aggregateFeatures', 'removeInvariantFeatures',
              'getSamplesPerChunkLabel']:
        res = getattr(ds, f)()
        self.failUnless(res is not None,
                        msg='We should have got result from function %s' % f)
        self.failUnless(N.all(ds.samples == ds_data),
                        msg="Function %s should have not modified original dataset" % f)
开发者ID:gorlins,项目名称:PyMVPA,代码行数:26,代码来源:test_datasetfx.py
示例19: __init__
def __init__(self, normalizer_cls=None, normalizer_args=None, **kwargs):
    """
    Parameters
    ----------
    normalizer_cls : sg.Kernel.CKernelNormalizer
      Class to use as a normalizer for the kernel. Will be instantiated
      upon compute(). Only supported for shogun >= 0.6.5.
      By default (if left None) assigns IdentityKernelNormalizer to assure no
      normalization.
    normalizer_args : None or list
      If necessary, provide a list of arguments for the normalizer.
    """
    SGKernel.__init__(self, **kwargs)

    if (normalizer_cls is not None) and (versions['shogun:rev'] < 3377):
        # py3-compatible raise (was py2 comma syntax)
        raise ValueError(
            "Normalizer specification is supported only for sg >= 0.6.5. "
            "Please upgrade shogun python modular bindings.")
    if normalizer_cls is None and exists('sg ge 0.6.5'):
        # default: no normalization
        normalizer_cls = sgk.IdentityKernelNormalizer
    self._normalizer_cls = normalizer_cls

    if normalizer_args is None:
        normalizer_args = []
    self._normalizer_args = normalizer_args
示例20: _img2data
def _img2data(src):
    """Dissect an MRI image (or filename) into a data array and header.

    NOTE(review): this excerpt appears truncated by the source aggregator
    -- `excpt` is stored but never re-raised here, and the pynifti
    fallback alluded to in the comments is not visible. Uses Python 2
    `except E, name` syntax.
    """
    # break early of nothing has been given
    # XXX feels a little strange to handle this so deep inside, but well...
    if src is None:
        return None

    # placeholder for a nibabel load failure (raised later, outside view)
    excpt = None

    if externals.exists('nibabel'):
        # let's try whether we can get it done with nibabel
        import nibabel
        if isinstance(src, str):
            # filename
            try:
                img = nibabel.load(src)
            except nibabel.spatialimages.ImageFileError, excpt:
                # nibabel has some problem, but we might be lucky with
                # pynifti below. if not, we have stored the exception
                # and raise it below
                img = None
                pass
        else:
            # assume this is an image already
            img = src
        if isinstance(img, nibabel.spatialimages.SpatialImage):
            # nibabel image, dissect and return pieces
            return _get_txyz_shaped(img.get_data()), img.get_header()
注:本文中的mvpa.base.externals.exists函数示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。
请发表评论