
Python hdf5.h5save Function Code Examples


This article collects typical usage examples of the h5save function from Python's mvpa2.base.hdf5 module. If you are wondering what h5save does, how to use it, or what real-world calls look like, the curated code samples below should help.



The following presents 20 code examples of the h5save function, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
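
Before the examples themselves, here is a minimal round-trip sketch of the basic h5save/h5load pattern they all build on. It is not taken from any of the projects below; it assumes PyMVPA (mvpa2) with h5py installed, and the object and file name are arbitrary illustrations.

import tempfile
import numpy as np
from mvpa2.base.hdf5 import h5save, h5load

# save an arbitrary Python/NumPy object into an HDF5 file and load it back
f = tempfile.NamedTemporaryFile(suffix='.hdf5')
obj = {'weights': np.arange(6).reshape(2, 3), 'label': 'demo'}
h5save(f.name, obj)          # keyword arguments such as compression=9 are optional
obj_ = h5load(f.name)
assert np.array_equal(obj['weights'], obj_['weights'])
assert obj['label'] == obj_['label']
f.close()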

Example 1: test_save_load_object_dtype_ds

def test_save_load_object_dtype_ds(obj=None):
    """Test saving of custom object ndarray (GH #84)
    """
    aobjf = np.asanyarray(obj).flatten()

    if not aobjf.size and externals.versions['hdf5'] < '1.8.7':
        raise SkipTest("Versions of hdf5 before 1.8.7 have problems with empty arrays")

    # print obj, obj.shape
    f = tempfile.NamedTemporaryFile()

    # save/reload
    h5save(f.name, obj)
    obj_ = h5load(f.name)

    # and compare
    # neh -- not versatile enough
    #assert_objectarray_equal(np.asanyarray(obj), np.asanyarray(obj_))

    assert_array_equal(obj.shape, obj_.shape)
    assert_equal(type(obj), type(obj_))
    # so we could test both ds and arrays
    aobjf_ = np.asanyarray(obj_).flatten()
    # checks if having just array above
    if aobjf.size:
        assert_equal(type(aobjf[0]), type(aobjf_[0]))
        assert_array_equal(aobjf[0]['d'], aobjf_[0]['d'])
Developer ID: andreirusu, Project: PyMVPA, Lines of code: 27, Source: test_hdf5.py


Example 2: prepare_subject_for_hyperalignment

def prepare_subject_for_hyperalignment(subject_label, bold_fname, mask_fname, out_dir):
    print('Loading data %s with mask %s' % (bold_fname, mask_fname))
    ds = fmri_dataset(samples=bold_fname, mask=mask_fname)
    zscore(ds, chunks_attr=None)
    out_fname = os.path.join(out_dir, 'sub-%s_data.hdf5' % subject_label)
    print('Saving to %s' % out_fname)
    h5save(out_fname, ds)
Developer ID: BIDS-Apps, Project: hyperalignment, Lines of code: 7, Source: run.py
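
A hypothetical invocation of the function above; the BIDS-style file names are made up for illustration and are not part of the original project. It loads the masked BOLD data, z-scores it, and persists the dataset with h5save:

prepare_subject_for_hyperalignment(
    subject_label='01',
    bold_fname='sub-01_task-movie_bold.nii.gz',  # hypothetical input file
    mask_fname='sub-01_brainmask.nii.gz',        # hypothetical mask file
    out_dir='out')
# expected to write out/sub-01_data.hdf5 via h5save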


Example 3: test_save_load_python_objs

def test_save_load_python_objs(fname, obj):
    """Test saving objects of various types
    """
    # try:
    #     print type(obj), " ",
    #     print obj # , obj.shape
    # except Exception as e:
    #     print e
    # save/reload
    try:
        h5save(fname, obj)
    except Exception as e:
        raise AssertionError("Failed to h5save %s: %s" % (safe_str(obj), e))
    try:
        obj_ = h5load(fname)
    except Exception as e:
        raise AssertionError("Failed to h5load %s: %s" % (safe_str(obj), e))

    assert_equal(type(obj), type(obj_))

    if isinstance(obj, np.ndarray):
        assert_equal(obj.dtype, obj_.dtype)
        assert_array_equal(obj, obj_)
    else:
        assert_equal(obj, obj_)
Developer ID: PyMVPA, Project: PyMVPA, Lines of code: 25, Source: test_hdf5.py


Example 4: generate_testing_fmri_dataset

def generate_testing_fmri_dataset(filename=None):
    """Helper to generate a dataset for regression testing of mvpa2/nibabel

    Parameters
    ----------
    filename : str
       Filename of a dataset file to store.  If not provided, it is composed
       using :func:`get_testing_fmri_dataset_filename`

    Returns
    -------
    Dataset, string
       Generated dataset, filename to the HDF5 where it was stored
    """
    import mvpa2
    from mvpa2.base.hdf5 import h5save
    from mvpa2.datasets.sources import load_example_fmri_dataset
    # Load our sample dataset
    ds_full = load_example_fmri_dataset(name='1slice', literal=False)
    # Subselect a small "ROI"
    ds = ds_full[20:23, 10:14]
    # collect all versions/dependencies for possible need to troubleshoot later
    ds.a['wtf'] = mvpa2.wtf()
    ds.a['versions'] = mvpa2.externals.versions
    # save to a file identified by version of PyMVPA and nibabel and hash of
    # all other versions
    out_filename = filename or get_testing_fmri_dataset_filename()
    h5save(out_filename, ds, compression=9)
    # ATM it produces >700kB .hdf5 which is this large because of
    # the ds.a.mapper with both Flatten and StaticFeatureSelection occupying
    # more than 190kB each, with ds.a.mapper as a whole generating 570kB file
    # Among those .ca seems to occupy notable size, e.g. 130KB for the FlattenMapper
    # even though no heavy storage is really needed for any available value --
    # primarily all is meta-information embedded into hdf5 to describe our things
    return ds, out_filename
Developer ID: Anhmike, Project: PyMVPA, Lines of code: 35, Source: regress.py


Example 5: test_h5py_dataset_typecheck

def test_h5py_dataset_typecheck():
    ds = datasets["uni2small"]

    fd, fpath = tempfile.mkstemp("mvpa", "test")
    os.close(fd)
    fd, fpath2 = tempfile.mkstemp("mvpa", "test")
    os.close(fd)

    h5save(fpath2, [[1, 2, 3]])
    assert_raises(ValueError, AttrDataset.from_hdf5, fpath2)
    # this one just catches if there is such a group
    assert_raises(ValueError, AttrDataset.from_hdf5, fpath2, name="bogus")

    hdf = h5py.File(fpath, "w")
    ds = AttrDataset([1, 2, 3])
    obj2hdf(hdf, ds, name="non-bogus")
    obj2hdf(hdf, [1, 2, 3], name="bogus")
    hdf.close()

    assert_raises(ValueError, AttrDataset.from_hdf5, fpath, name="bogus")
    ds_loaded = AttrDataset.from_hdf5(fpath, name="non-bogus")
    assert_array_equal(ds, ds_loaded)  # just to do smth useful with ds ;)

    # cleanup and ignore stupidity
    os.remove(fpath)
    os.remove(fpath2)
Developer ID: robbisg, Project: PyMVPA, Lines of code: 26, Source: test_hdf5.py


Example 6: test_store_metaclass_types

def test_store_metaclass_types(fname):
    from mvpa2.kernels.base import Kernel
    allowedtype=Kernel
    h5save(fname, allowedtype)
    lkrn = h5load(fname)
    assert_equal(lkrn, Kernel)
    assert_equal(lkrn.__metaclass__, Kernel.__metaclass__)
Developer ID: feilong, Project: PyMVPA, Lines of code: 7, Source: test_hdf5.py


Example 7: test_product_flatten

def test_product_flatten():
    nsamples = 17
    product_name_values = [('chan', ['C1', 'C2']),
                         ('freq', np.arange(4, 20, 6)),
                         ('time', np.arange(-200, 800, 200))]

    shape = (nsamples,) + tuple(len(v) for _, v in product_name_values)

    sample_names = ['samp%d' % i for i in xrange(nsamples)]

    # generate random data in four dimensions
    data = np.random.normal(size=shape)
    ds = Dataset(data, sa=dict(sample_names=sample_names))

    # apply flattening to ds
    flattener = ProductFlattenMapper(product_name_values)

    # test I/O (only if h5py is available)
    if externals.exists('h5py'):
        from mvpa2.base.hdf5 import h5save, h5load
        import tempfile
        import os

        _, testfn = tempfile.mkstemp('mapper.h5py', 'test_product')
        h5save(testfn, flattener)
        flattener = h5load(testfn)
        os.unlink(testfn)

    mds = flattener(ds)

    prod = lambda x:reduce(operator.mul, x)

    # ensure the size is ok
    assert_equal(mds.shape, (nsamples,) + (prod(shape[1:]),))

    ndim = len(product_name_values)

    idxs = [range(len(v)) for _, v in product_name_values]
    for si in xrange(nsamples):
        for fi, p in enumerate(itertools.product(*idxs)):
            data_tup = (si,) + p

            x = mds[si, fi]

            # value should match
            assert_equal(data[data_tup], x.samples[0, 0])

            # indices should match as well
            all_idxs = tuple(x.fa['chan_freq_time_indices'].value.ravel())
            assert_equal(p, all_idxs)

            # values and indices in each dimension should match
            for i, (name, value) in enumerate(product_name_values):
                assert_equal(x.fa[name].value, value[p[i]])
                assert_equal(x.fa[name + '_indices'].value, p[i])

    product_name_values += [('foo', [1, 2, 3])]
    flattener = ProductFlattenMapper(product_name_values)
    assert_raises(ValueError, flattener, ds)
Developer ID: pckillerbrici, Project: PyMVPA, Lines of code: 59, Source: test_mapper.py


Example 8: test_store_metaclass_types

def test_store_metaclass_types():
    f = tempfile.NamedTemporaryFile()
    from mvpa2.kernels.base import Kernel
    allowedtype=Kernel
    h5save(f.name, allowedtype)
    lkrn = h5load(f.name)
    assert_equal(lkrn, Kernel)
    assert_equal(lkrn.__metaclass__, Kernel.__metaclass__)
Developer ID: PepGardiola, Project: PyMVPA, Lines of code: 8, Source: test_hdf5.py


Example 9: test_directaccess

def test_directaccess():
    f = tempfile.NamedTemporaryFile()
    h5save(f.name, "test")
    assert_equal(h5load(f.name), "test")
    f.close()
    f = tempfile.NamedTemporaryFile()
    h5save(f.name, datasets["uni4medium"])
    assert_array_equal(h5load(f.name).samples, datasets["uni4medium"].samples)
Developer ID: robbisg, Project: PyMVPA, Lines of code: 8, Source: test_hdf5.py


Example 10: test_state_cycle_with_custom_reduce

def test_state_cycle_with_custom_reduce(fname):
    # BoxcarMapper has a custom __reduce__ implementation. The 'space'
    # setting will only survive a save/load cycle if the state is correctly
    # handled for custom __reduce__ implementations.
    bm = BoxcarMapper([0], 1, space='boxy')
    h5save(fname, bm)
    bm_rl = h5load(fname)
    assert_equal(bm_rl.get_space(), 'boxy')
Developer ID: feilong, Project: PyMVPA, Lines of code: 8, Source: test_hdf5.py


Example 11: _reduce_mapper

def _reduce_mapper(
    node2volume_attributes,
    attribute_mapper,
    src_trg_indices,
    eta_step=1,
    proc_id=None,
    results_backend="native",
    tmp_prefix="tmpvoxsel",
):
    """applies voxel selection to a list of src_trg_indices
    results are added to node2volume_attributes.
    """

    if not src_trg_indices:
        return None

    if not results_backend in ("native", "hdf5"):
        raise ValueError("Illegal results backend %r" % results_backend)

    def _pat(index, xs=src_trg_indices, f=max):
        try:
            if not xs:
                y = 1
            else:
                y = f(x[index] for x in xs)
            if y < 1:
                y = 1
            p = "%%%dd" % math.ceil(math.log10(y))
        except:
            p = "%s"
        return p

    progresspat = "node %s -> %s [%%3d%%%%]" % (_pat(0), _pat(1))

    # start the clock
    tstart = time.time()
    n = len(src_trg_indices)

    for i, (src, trg) in enumerate(src_trg_indices):
        idxs, misc_attrs = attribute_mapper(trg)

        if idxs is not None:
            node2volume_attributes.add(int(src), idxs, misc_attrs)

        if _debug() and eta_step and (i % eta_step == 0 or i == n - 1):
            msg = _eta(tstart, float(i + 1) / n, progresspat % (src, trg, 100.0 * (i + 1) / n), show=False)
            if not proc_id is None:
                msg += " (#%s)" % proc_id
            debug("SVS", msg, cr=True)

    if results_backend == "hdf5":
        tmp_postfix = "__tmp__%d_%s.h5py" % (hash(time.time()), proc_id)
        tmp_fn = tmp_prefix + tmp_postfix
        h5save(tmp_fn, node2volume_attributes)
        return tmp_fn
    else:
        return node2volume_attributes
Developer ID: pckillerbrici, Project: PyMVPA, Lines of code: 57, Source: surf_voxel_selection.py
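
The 'hdf5' results backend above illustrates a hand-off pattern: a worker writes a bulky intermediate result to a temporary HDF5 file with h5save and returns only the file name, and the consumer reloads it later with h5load. A minimal, self-contained sketch of that pattern follows (the helper name and payload are illustrative, not from PyMVPA):

import os
import tempfile
from mvpa2.base.hdf5 import h5save, h5load

def save_result_to_tmp_hdf5(result, tmp_prefix='tmpvoxsel'):
    # write the (possibly large) result to a temporary .h5py file and
    # return only its path, mirroring the hdf5 branch of _reduce_mapper
    fd, tmp_fn = tempfile.mkstemp(prefix=tmp_prefix, suffix='.h5py')
    os.close(fd)
    h5save(tmp_fn, result)
    return tmp_fn

tmp_fn = save_result_to_tmp_hdf5({'node': [0, 1, 2]})
result = h5load(tmp_fn)   # consumer side: reload and continue processing
os.unlink(tmp_fn)         # clean up the temporary file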


Example 12: run

def run(args):
    """Run it"""
    verbose(1, "Loading %d result files" % len(args.data))

    filetype_in = guess_backend(args.data[0])

    if filetype_in == 'nifti':
        dss = [fmri_dataset(f) for f in args.data]
    elif filetype_in == 'hdf5':
        dss = [h5load(f) for f in args.data]
    data = np.asarray([d.samples[args.isample] for d in dss])

    if args.mask:
        filetype_mask = guess_backend(args.mask)
        if filetype_mask == 'nifti':
            mask = nib.load(args.mask).get_data()
        elif filetype_mask == 'hdf5':
            mask = h5load(args.mask).samples
        out_of_mask = mask == 0
    else:
        # just take where no voxel had a value
        out_of_mask = np.sum(data != 0, axis=0)==0

    t, p = ttest_1samp(data, popmean=args.chance_level, axis=0,
                       alternative=args.alternative)

    if args.stat == 'z':
        if args.alternative == 'two-sided':
            s = stats.norm.isf(p/2)
        else:
            s = stats.norm.isf(p)
        # take the sign of the original t
        s = np.abs(s) * np.sign(t)
    elif args.stat == 'p':
        s = p
    elif args.stat == 't':
        s = t
    else:
        raise ValueError('WTF you gave me? have no clue about %r' % (args.stat,))

    if s.shape != out_of_mask.shape:
        try:
            out_of_mask = out_of_mask.reshape(s.shape)
        except ValueError:
            raise ValueError('Cannot use mask of shape {0} with '
                             'data of shape {1}'.format(out_of_mask.shape, s.shape))
    s[out_of_mask] = 0

    verbose(1, "Saving to %s" % args.output)
    filetype_out = guess_backend(args.output)
    if filetype_out == 'nifti':
        map2nifti(dss[0], data=s).to_filename(args.output)
    else:  # filetype_out is hdf5
        s = Dataset(np.atleast_2d(s), fa=dss[0].fa, a=dss[0].a)
        h5save(args.output, s)
    return s
Developer ID: PyMVPA, Project: PyMVPA, Lines of code: 56, Source: cmd_ttest.py


Example 13: test_state_setter_getter

def test_state_setter_getter(fname):
    # make sure the presence of custom __setstate__, __getstate__ methods
    # is honored -- numpy's RNGs have it
    from numpy.random.mtrand import RandomState
    r = RandomState()
    h5save(fname, r)
    rl = h5load(fname)
    rl_state = rl.get_state()
    for i, v in enumerate(r.get_state()):
        assert_array_equal(v, rl_state[i])
Developer ID: feilong, Project: PyMVPA, Lines of code: 10, Source: test_hdf5.py


Example 14: test_recursion

def test_recursion(fname):
    obj = range(2)
    obj.append(HDFDemo())
    obj.append(obj)
    h5save(fname, obj)
    lobj = h5load(fname)
    assert_equal(obj[:2], lobj[:2])
    assert_equal(type(obj[2]), type(lobj[2]))
    ok_(obj[3] is obj)
    ok_(lobj[3] is lobj)
Developer ID: feilong, Project: PyMVPA, Lines of code: 10, Source: test_hdf5.py


Example 15: test_various_special_cases

def test_various_special_cases(fname):
    # 0d object ndarray
    a = np.array(0, dtype=object)
    h5save(fname, a)
    a_ = h5load(fname)
    ok_(a == a_)
    # slice
    h5save(fname, slice(2,5,3))
    sl = h5load(fname)
    ok_(sl == slice(2,5,3))
Developer ID: feilong, Project: PyMVPA, Lines of code: 10, Source: test_hdf5.py


Example 16: test_various_special_cases

def test_various_special_cases():
    # 0d object ndarray
    f = tempfile.NamedTemporaryFile()
    a = np.array(0, dtype=object)
    h5save(f.name, a)
    a_ = h5load(f.name)
    ok_(a == a_)
    # slice
    h5save(f.name, slice(2,5,3))
    sl = h5load(f.name)
    ok_(sl == slice(2,5,3))
Developer ID: PepGardiola, Project: PyMVPA, Lines of code: 11, Source: test_hdf5.py


Example 17: test_recursion

def test_recursion():
    obj = range(2)
    obj.append(HDFDemo())
    obj.append(obj)
    f = tempfile.NamedTemporaryFile()
    h5save(f.name, obj)
    lobj = h5load(f.name)
    assert_equal(obj[:2], lobj[:2])
    assert_equal(type(obj[2]), type(lobj[2]))
    ok_(obj[3] is obj)
    ok_(lobj[3] is lobj)
Developer ID: PepGardiola, Project: PyMVPA, Lines of code: 11, Source: test_hdf5.py


Example 18: test_save_load_python_objs

def test_save_load_python_objs(obj):
    """Test saving objects of various types
    """
    # print obj, obj.shape
    f = tempfile.NamedTemporaryFile()

    # save/reload
    try:
        h5save(f.name, obj)
    except Exception, e:
        raise AssertionError("Failed to h5save %s: %s" % (obj, e))
Developer ID: Arthurkorn, Project: PyMVPA, Lines of code: 11, Source: test_hdf5.py


Example 19: test_gifti_dataset_h5py

def test_gifti_dataset_h5py(fn, include_nodes):
    if not externals.exists('h5py'):
        raise SkipTest

    from mvpa2.base.hdf5 import h5save, h5load

    ds = _get_test_dataset(include_nodes)

    h5save(fn, ds)
    ds2 = h5load(fn)

    assert_datasets_equal(ds, ds2)
Developer ID: Anhmike, Project: PyMVPA, Lines of code: 12, Source: test_giftidataset.py


Example 20: run

def run(args):
    from mvpa2.base.hdf5 import h5save
    ds = None
    if not args.txt_data is None:
        verbose(1, "Load data from TXT file '%s'" % args.txt_data)
        samples = _load_from_txt(args.txt_data)
        ds = Dataset(samples)
    elif not args.npy_data is None:
        verbose(1, "Load data from NPY file '%s'" % args.npy_data)
        samples = _load_from_npy(args.npy_data)
        ds = Dataset(samples)
    elif not args.mri_data is None:
        verbose(1, "Load data from MRI image(s) %s" % args.mri_data)
        from mvpa2.datasets.mri import fmri_dataset
        vol_attr = dict()
        if not args.add_vol_attr is None:
            # XXX add a way to use the mapper of an existing dataset to
            # add a volume attribute without having to load the entire
            # mri data again
            vol_attr = dict(args.add_vol_attr)
            if not len(args.add_vol_attr) == len(vol_attr):
                warning("--vol-attr option with duplicate attribute name: "
                        "check arguments!")
            verbose(2, "Add volumetric feature attributes: %s" % vol_attr)
        ds = fmri_dataset(args.mri_data, mask=args.mask, add_fa=vol_attr)

    if ds is None:
        if args.data is None:
            raise RuntimeError('no data source specified')
        else:
            ds = hdf2ds(args.data)[0]
    else:
        if args.data is not None:
            verbose(1, 'ignoring dataset input in favor of other data source -- remove either one to disambiguate')

    # act on all attribute options
    ds = process_common_dsattr_opts(ds, args)

    if not args.add_fsl_mcpar is None:
        from mvpa2.misc.fsl.base import McFlirtParams
        mc_par = McFlirtParams(args.add_fsl_mcpar)
        for param in mc_par:
            verbose(2, "Add motion regressor as sample attribute '%s'"
                       % ('mc_' + param))
            ds.sa['mc_' + param] = mc_par[param]

    verbose(3, "Dataset summary %s" % (ds.summary()))
    # and store
    outfilename = args.output
    if not outfilename.endswith('.hdf5'):
        outfilename += '.hdf5'
    verbose(1, "Save dataset to '%s'" % outfilename)
    h5save(outfilename, ds, mkdir=True, compression=args.hdf5_compression)
Developer ID: liujiantong, Project: PyMVPA, Lines of code: 53, Source: cmd_mkds.py



Note: The mvpa2.base.hdf5.h5save examples in this article were compiled by 纯净天空 from source-code and documentation platforms such as GitHub/MSDocs. The code snippets were selected from open-source projects contributed by various developers, and copyright of the source code remains with the original authors. For distribution and use, please refer to the corresponding project's license; do not reproduce without permission.

