Python nipype.Node Class Code Examples


This article collects typical usage examples of the nipype.Node class in Python. If you are wondering what the Node class does, how to use it, or what real Node code looks like in practice, the curated examples below should help.



The sections below present code examples of the Node class (17 are reproduced here), ordered by popularity.
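
Before diving in, here is a minimal sketch of the pattern every example below shares: wrap an interface in a Node, set its inputs, give it a working directory, run it, and read the outputs. The interface (fsl.BET) and the file path are illustrative assumptions, not taken from any single example.

# Minimal Node usage sketch (hypothetical input path)
import os

from nipype import Node
from nipype.interfaces import fsl

strip = Node(fsl.BET(mask=True), name='skullstrip')  # wrap an interface in a Node
strip.inputs.in_file = os.path.abspath('T1.nii.gz')  # hypothetical input file
strip.base_dir = os.path.abspath('working')          # where intermediate files are written

results = strip.run()            # execute the interface
print(results.outputs.out_file)  # results.outputs holds the interface's output traits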

Example 1: run_freesurfer

def run_freesurfer(subject_id, T1_images, subjects_dir, T2_image=None):
    """Run freesurfer, convert to nidm and extract stats
    """
    import os

    from nipype import Node
    from nipype.interfaces import freesurfer as fs  # interfaces live under nipype.interfaces
    from fs_dir_to_graph import to_graph
    from query_convert_fs_stats import get_collections, process_collection

    recon = Node(fs.ReconAll(), name='recon')
    recon.inputs.T1_files = T1_images
    recon.inputs.subject_id = subject_id
    recon.inputs.subjects_dir = subjects_dir
    recon.inputs.openmp = 4
    if T2_image:
        recon.inputs.T2_file = T2_image
    recon.base_dir = os.path.abspath(os.path.join('working', subject_id))

    results = recon.run()
    provgraph = results.provenance
    newgraph = to_graph(os.path.join(results.outputs.subjects_dir,
                                     results.outputs.subject_id))
    provgraph.add_bundle(newgraph)
    provgraph.rdf().serialize('test1.ttl', format='turtle')
    results = get_collections(provgraph.rdf())
    collections = []
    for row in results:
        collections.append(str(row[0]))
    if len(collections) > 1:
        raise ValueError('More than one freesurfer directory collection found')
    provgraph, termsrdf = process_collection(provgraph, collections.pop())
    rdfgraph = provgraph.rdf() + termsrdf
    return provgraph, rdfgraph
Author: nicholsn | Project: incf_engine | Lines: 32 | Source: run_fs.py


Example 2: nipype_convert

def nipype_convert(item_dicoms, prefix, with_prov, bids, tmpdir):
    """ """
    import os.path as op

    import nipype
    if with_prov:
        from nipype import config
        config.enable_provenance()
    from nipype import Node
    from nipype.interfaces.dcm2nii import Dcm2niix

    item_dicoms = list(map(op.abspath, item_dicoms)) # absolute paths

    dicom_dir = op.dirname(item_dicoms[0]) if item_dicoms else None

    convertnode = Node(Dcm2niix(), name='convert')
    convertnode.base_dir = tmpdir
    convertnode.inputs.source_names = item_dicoms
    convertnode.inputs.out_filename = op.basename(op.dirname(prefix))

    if nipype.__version__.split('.')[0] == '0':
        # deprecated since 1.0, might be needed(?) before
        convertnode.inputs.terminal_output = 'allatonce'
    else:
        convertnode.terminal_output = 'allatonce'
    convertnode.inputs.bids_format = bids
    eg = convertnode.run()

    # prov information
    prov_file = prefix + '_prov.ttl' if with_prov else None
    if prov_file:
        safe_copyfile(op.join(convertnode.base_dir,
                              convertnode.name,
                              'provenance.ttl'),
                      prov_file)

    return eg, prov_file
Author: cni-md | Project: heudiconv | Lines: 35 | Source: convert.py


Example 3: create_surface_projection_workflow

def create_surface_projection_workflow(name="surfproj", exp_info=None):
    """Project the group mask and thresholded zstat file onto the surface."""
    if exp_info is None:
        exp_info = lyman.default_experiment_parameters()

    inputnode = Node(IdentityInterface(["zstat_file", "mask_file"]), "inputs")

    # Sample the zstat image to the surface
    hemisource = Node(IdentityInterface(["mni_hemi"]), "hemisource")
    hemisource.iterables = ("mni_hemi", ["lh", "rh"])

    zstatproj = Node(freesurfer.SampleToSurface(
        sampling_method=exp_info["sampling_method"],
        sampling_range=exp_info["sampling_range"],
        sampling_units=exp_info["sampling_units"],
        smooth_surf=exp_info["surf_smooth"],
        subject_id="fsaverage",
        mni152reg=True,
        target_subject="fsaverage"),
        "zstatproj")

    # Sample the mask to the surface
    maskproj = Node(freesurfer.SampleToSurface(
        sampling_range=exp_info["sampling_range"],
        sampling_units=exp_info["sampling_units"],
        subject_id="fsaverage",
        mni152reg=True,
        target_subject="fsaverage"),
        "maskproj")
    if exp_info["sampling_method"] == "point":
        maskproj.inputs.sampling_method = "point"
    else:
        maskproj.inputs.sampling_method = "max"

    outputnode = Node(IdentityInterface(["surf_zstat",
                                         "surf_mask"]), "outputs")

    # Define and connect the workflow
    proj = Workflow(name)
    proj.connect([
        (inputnode, zstatproj,
            [("zstat_file", "source_file")]),
        (inputnode, maskproj,
            [("mask_file", "source_file")]),
        (hemisource, zstatproj,
            [("mni_hemi", "hemi")]),
        (hemisource, maskproj,
            [("mni_hemi", "hemi")]),
        (zstatproj, outputnode,
            [("out_file", "surf_zstat")]),
        (maskproj, outputnode,
            [("out_file", "surf_mask")]),
        ])

    return proj
Author: boydmeredith | Project: lyman | Lines: 55 | Source: mixedfx.py


Example 4: rawdataChecker

def rawdataChecker(input_file):
    # If the input is a single DCM-file instead of a multi-dim-NifTI, we have to fetch all the other files in the series
    if input_file.endswith('.dcm'):
        from nipype.interfaces.io import DataFinder
        from os import path
        from nipype import Node

        # Setup a datafinder to find the paths to the specific DICOM files
        t1FinderNode = Node(DataFinder(), name='t1Finder')
        t1FinderNode.inputs.match_regex = r'.*\.dcm'  # raw string avoids invalid-escape warnings
        t1FinderNode.inputs.root_paths = path.split(input_file)[0]

        return t1FinderNode.run().outputs.out_paths
    else:
        return input_file  # If other datatype just return the same path
Author: BrainModes | Project: TVB-Pypeline | Lines: 15 | Source: TVB_pipeline.py


Example 5: run_bet

def run_bet(T1_image, workdir):
    """Run freesurfer, convert to nidm and extract stats
    """
    from nipype import fsl
    from nipype import Node
    from fs_dir_to_graph import to_graph
    from query_convert_fs_stats import get_collections, process_collection

    strip = Node(fsl.BET(), name='skullstripper')
    strip.inputs.in_file = T1_image
    strip.base_dir = workdir

    results = strip.run()
    provgraph = results.provenance
    return provgraph
Author: nicholsn | Project: incf_engine | Lines: 15 | Source: run_bet.py


Example 6: print

    # (Excerpt begins mid-file: the lines below close a preceding function.)
    # Releasing Mr Loggins...
    dangerZone.setLevel('NOTSET')
    print('Done!')

    return SC_cap_row_filename, SC_dist_row_filename

import numpy as np
debugPath = '/Users/srothmei/Desktop/charite/toronto/Adalberto/debug/'

roi = 68
subid = 'Adalberto'
tracksPath = debugPath

wmBorder_file = debugPath + 'wmborder.npy'

wmborder = np.load(wmBorder_file)

affine_matrix_file = debugPath + 'affine_matrix.npy'

affine_matrix = np.load(affine_matrix_file)

from nipype import Node
from nipype.interfaces.io import DataFinder
tckFinder = Node(DataFinder(match_regex=r'.*\.npy', root_paths=tracksPath), name='tckFinder')

res = tckFinder.run()
track_files = res.outputs.out_paths

# Compute the connectivity row directly, for debugging
compute_connectivity_row(roi, subid, affine_matrix, wmborder, tracksPath, track_files)
Author: BrainModes | Project: TVB-Pypeline | Lines: 30 | Source: debugSCRow.py


Example 7: group_multregress_openfmri

def group_multregress_openfmri(dataset_dir, model_id=None, task_id=None, l1output_dir=None, out_dir=None, 
                               no_reversal=False, plugin=None, plugin_args=None, flamemodel='flame1',
                               nonparametric=False, use_spm=False):

    meta_workflow = Workflow(name='mult_regress')
    meta_workflow.base_dir = work_dir  # `work_dir` is defined at module level in the source file
    for task in task_id:
        task_name = get_taskname(dataset_dir, task)
        cope_ids = l1_contrasts_num(model_id, task_name, dataset_dir)
        regressors_needed, contrasts, groups, subj_list = get_sub_vars(dataset_dir, task_name, model_id)
        for idx, contrast in enumerate(contrasts):
            wk = Workflow(name='model_%03d_task_%03d_contrast_%s' % (model_id, task, contrast[0][0]))

            info = Node(util.IdentityInterface(fields=['model_id', 'task_id', 'dataset_dir', 'subj_list']),
                        name='infosource')
            info.inputs.model_id = model_id
            info.inputs.task_id = task
            info.inputs.dataset_dir = dataset_dir
            
            dg = Node(DataGrabber(infields=['model_id', 'task_id', 'cope_id'],
                                  outfields=['copes', 'varcopes']), name='grabber')
            dg.inputs.template = os.path.join(l1output_dir,
                                              'model%03d/task%03d/%s/%scopes/%smni/%scope%02d.nii%s')
            if use_spm:
                dg.inputs.template_args['copes'] = [['model_id', 'task_id', subj_list, '', 'spm/',
                                                     '', 'cope_id', '']]
                dg.inputs.template_args['varcopes'] = [['model_id', 'task_id', subj_list, 'var', 'spm/',
                                                        'var', 'cope_id', '.gz']]
            else:
                dg.inputs.template_args['copes'] = [['model_id', 'task_id', subj_list, '', '', '', 
                                                     'cope_id', '.gz']]
                dg.inputs.template_args['varcopes'] = [['model_id', 'task_id', subj_list, 'var', '',
                                                        'var', 'cope_id', '.gz']]
            dg.iterables = ('cope_id', cope_ids)
            dg.inputs.sort_filelist = False

            wk.connect(info, 'model_id', dg, 'model_id')
            wk.connect(info, 'task_id', dg, 'task_id')

            model = Node(MultipleRegressDesign(), name='l2model')
            model.inputs.groups = groups
            model.inputs.contrasts = contrasts[idx]
            model.inputs.regressors = regressors_needed[idx]
            
            mergecopes = Node(Merge(dimension='t'), name='merge_copes')
            wk.connect(dg, 'copes', mergecopes, 'in_files')
            
            if flamemodel != 'ols':
                mergevarcopes = Node(Merge(dimension='t'), name='merge_varcopes')
                wk.connect(dg, 'varcopes', mergevarcopes, 'in_files')
            
            mask_file = fsl.Info.standard_image('MNI152_T1_2mm_brain_mask.nii.gz')
            flame = Node(FLAMEO(), name='flameo')
            flame.inputs.mask_file = mask_file
            flame.inputs.run_mode = flamemodel
            #flame.inputs.infer_outliers = True

            wk.connect(model, 'design_mat', flame, 'design_file')
            wk.connect(model, 'design_con', flame, 't_con_file')
            wk.connect(mergecopes, 'merged_file', flame, 'cope_file')
            if flamemodel != 'ols':
                wk.connect(mergevarcopes, 'merged_file', flame, 'var_cope_file')
            wk.connect(model, 'design_grp', flame, 'cov_split_file')
            
            if nonparametric:
                palm = Node(Function(input_names=['cope_file', 'design_file', 'contrast_file', 
                                                  'group_file', 'mask_file', 'cluster_threshold'],
                                     output_names=['palm_outputs'],
                                     function=run_palm),
                            name='palm')
                palm.inputs.cluster_threshold = 3.09
                palm.inputs.mask_file = mask_file
                palm.plugin_args = {'sbatch_args': '-p om_all_nodes -N1 -c2 --mem=10G', 'overwrite': True}
                wk.connect(model, 'design_mat', palm, 'design_file')
                wk.connect(model, 'design_con', palm, 'contrast_file')
                wk.connect(mergecopes, 'merged_file', palm, 'cope_file')
                wk.connect(model, 'design_grp', palm, 'group_file')
                
            smoothest = Node(SmoothEstimate(), name='smooth_estimate')
            wk.connect(flame, 'zstats', smoothest, 'zstat_file')
            smoothest.inputs.mask_file = mask_file
        
            cluster = Node(Cluster(), name='cluster')
            wk.connect(smoothest,'dlh', cluster, 'dlh')
            wk.connect(smoothest, 'volume', cluster, 'volume')
            cluster.inputs.connectivity = 26
            cluster.inputs.threshold = 2.3
            cluster.inputs.pthreshold = 0.05
            cluster.inputs.out_threshold_file = True
            cluster.inputs.out_index_file = True
            cluster.inputs.out_localmax_txt_file = True
            
            wk.connect(flame, 'zstats', cluster, 'in_file')
    
            ztopval = Node(ImageMaths(op_string='-ztop', suffix='_pval'),
                           name='z2pval')
            wk.connect(flame, 'zstats', ztopval,'in_file')
            
            sinker = Node(DataSink(), name='sinker')
            sinker.inputs.base_directory = os.path.join(out_dir, 'task%03d' % task, contrast[0][0])
#......... (remaining code omitted) .........
Author: rromeo2 | Project: openfmri | Lines: 101 | Source: group_multregress_bids.py


Example 8: Node

# (Excerpt truncated: the opening of the call is missing; given the closing
#  name='antsreg', it presumably begins `antsreg = Node(Registration(...`.)
                            radius_or_number_of_bins=[32, 32, 4],
                            sampling_percentage=[0.25, 0.25, 1],
                            sampling_strategy=['Regular', 'Regular', 'None'],
                            shrink_factors=[[8, 4, 2, 1]] * 3,
                            smoothing_sigmas=[[3, 2, 1, 0]] * 3,
                            transform_parameters=[(0.1,), (0.1,),
                                                  (0.1, 3.0, 0.0)],
                            use_histogram_matching=True,
                            write_composite_transform=True),
               name='antsreg')

###
# Input & Output Stream

# Infosource - a function free node to iterate over the list of subject names
infosource = Node(IdentityInterface(fields=['subject_id']),
                  name="infosource")
infosource.iterables = [('subject_id', subject_list)]

# SelectFiles - to grab the data (alternative to DataGrabber)
anat_file = opj('sub-{subject_id}', 'ses-test', 'anat', 'sub-{subject_id}_ses-test_T1w.nii.gz')
templates = {'anat': anat_file}

selectfiles = Node(SelectFiles(templates,
                               base_directory='/data/ds000114'),
                   name="selectfiles")

# Datasink - creates output folder for important outputs
datasink = Node(DataSink(base_directory=experiment_dir,
                         container=output_dir),
                name="datasink")
Author: miykael | Project: nipype_tutorial | Lines: 31 | Source: ANTS_registration.py
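
A note on the iterables pattern used above (and in several other examples): setting iterables on a node makes the workflow engine clone everything downstream of it, once per value, so each subject gets its own branch of the graph. A minimal sketch with a hypothetical two-subject list:

# Fan-out sketch: two subject_ids produce two parallel downstream branches.
from nipype import Node
from nipype.interfaces.utility import IdentityInterface

infosource = Node(IdentityInterface(fields=['subject_id']), name='infosource')
infosource.iterables = [('subject_id', ['sub-01', 'sub-02'])]  # hypothetical IDs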


Example 9: create_workflow

def create_workflow(files,
                    subject_id,
                    n_vol=0,
                    despike=True,
                    TR=None,
                    slice_times=None,
                    slice_thickness=None,
                    fieldmap_images=[],
                    norm_threshold=1,
                    num_components=6,
                    vol_fwhm=None,
                    surf_fwhm=None,
                    lowpass_freq=-1,
                    highpass_freq=-1,
                    sink_directory=os.getcwd(),
                    FM_TEdiff=2.46,
                    FM_sigma=2,
                    FM_echo_spacing=.7,
                    target_subject=['fsaverage3', 'fsaverage4'],
                    name='resting'):

    wf = Workflow(name=name)

    # Skip starting volumes
    remove_vol = MapNode(fsl.ExtractROI(t_min=n_vol, t_size=-1),
                         iterfield=['in_file'],
                         name="remove_volumes")
    remove_vol.inputs.in_file = files

    # Run AFNI's despike. This node always runs; whether its output is fed to
    # realignment depends on the input configuration.
    despiker = MapNode(afni.Despike(outputtype='NIFTI_GZ'),
                       iterfield=['in_file'],
                       name='despike')
    #despiker.plugin_args = {'qsub_args': '-l nodes=1:ppn='}

    wf.connect(remove_vol, 'roi_file', despiker, 'in_file')

    # Run Nipy joint slice timing and realignment algorithm
    realign = Node(nipy.SpaceTimeRealigner(), name='realign')
    realign.inputs.tr = TR
    realign.inputs.slice_times = slice_times
    realign.inputs.slice_info = 2

    if despike:
        wf.connect(despiker, 'out_file', realign, 'in_file')
    else:
        wf.connect(remove_vol, 'roi_file', realign, 'in_file')

    # Compute TSNR on realigned data, regressing out polynomials up to order 2
    tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr')
    wf.connect(realign, 'out_file', tsnr, 'in_file')

    # Compute the median image across runs
    calc_median = Node(Function(input_names=['in_files'],
                                output_names=['median_file'],
                                function=median,
                                imports=imports),
                       name='median')
    wf.connect(tsnr, 'detrended_file', calc_median, 'in_files')

    # Coregister the median to the surface
    register = Node(freesurfer.BBRegister(),
                    name='bbregister')
    register.inputs.subject_id = subject_id
    register.inputs.init = 'fsl'
    register.inputs.contrast_type = 't2'
    register.inputs.out_fsl_file = True
    register.inputs.epi_mask = True

    # Compute fieldmaps and unwarp using them
    if fieldmap_images:
        fieldmap = Node(interface=EPIDeWarp(), name='fieldmap_unwarp')
        fieldmap.inputs.tediff = FM_TEdiff
        fieldmap.inputs.esp = FM_echo_spacing
        fieldmap.inputs.sigma = FM_sigma
        fieldmap.inputs.mag_file = fieldmap_images[0]
        fieldmap.inputs.dph_file = fieldmap_images[1]
        wf.connect(calc_median, 'median_file', fieldmap, 'exf_file')

        dewarper = MapNode(interface=fsl.FUGUE(), iterfield=['in_file'],
                           name='dewarper')
        wf.connect(tsnr, 'detrended_file', dewarper, 'in_file')
        wf.connect(fieldmap, 'exf_mask', dewarper, 'mask_file')
        wf.connect(fieldmap, 'vsm_file', dewarper, 'shift_in_file')
        wf.connect(fieldmap, 'exfdw', register, 'source_file')
    else:
        wf.connect(calc_median, 'median_file', register, 'source_file')

    # Get the subject's freesurfer source directory
    fssource = Node(FreeSurferSource(),
                    name='fssource')
    fssource.inputs.subject_id = subject_id
    fssource.inputs.subjects_dir = os.environ['SUBJECTS_DIR']

    # Extract wm+csf and brain masks by eroding freesurfer labels, then
    # transform the masks into the space of the median
    wmcsf = Node(freesurfer.Binarize(), name='wmcsfmask')
    mask = wmcsf.clone('anatmask')
    wmcsftransform = Node(freesurfer.ApplyVolTransform(inverse=True,
#......... (remaining code omitted) .........
Author: adamatus | Project: nipype | Lines: 101 | Source: rsfmri_preprocessing.py


Example 10: create_workflow

def create_workflow(files,
                    anat_file,
                    subject_id,
                    TR,
                    num_slices,
                    norm_threshold=1,
                    num_components=5,
                    vol_fwhm=None,
                    lowpass_freq=-1,
                    highpass_freq=-1,
                    sink_directory=os.getcwd(),
                    name='resting'):

    wf = Workflow(name=name)

    # Rename files in case they are named identically
    name_unique = MapNode(Rename(format_string='rest_%(run)02d'),
                          iterfield=['in_file', 'run'],
                          name='rename')
    name_unique.inputs.keep_ext = True
    name_unique.inputs.run = list(range(1, len(files) + 1))  # list() for Python 3
    name_unique.inputs.in_file = files

    realign = Node(interface=spm.Realign(), name="realign")
    realign.inputs.jobtype = 'estwrite'

    slice_timing = Node(interface=spm.SliceTiming(), name="slice_timing")
    slice_timing.inputs.num_slices = num_slices
    slice_timing.inputs.time_repetition = TR
    slice_timing.inputs.time_acquisition = TR - TR/float(num_slices)
    slice_timing.inputs.slice_order = (list(range(1, num_slices + 1, 2)) +
                                       list(range(2, num_slices + 1, 2)))  # list() for Python 3
    slice_timing.inputs.ref_slice = int(num_slices/2)

    """Use :class:`nipype.interfaces.spm.Coregister` to perform a rigid
    body registration of the functional data to the structural data.
    """

    coregister = Node(interface=spm.Coregister(), name="coregister")
    coregister.inputs.jobtype = 'estimate'
    coregister.inputs.target = anat_file

    """Use :class:`nipype.algorithms.rapidart` to determine which of the
    images in the functional series are outliers based on deviations in
    intensity or movement.
    """

    art = Node(interface=ArtifactDetect(), name="art")
    art.inputs.use_differences = [True, False]
    art.inputs.use_norm = True
    art.inputs.norm_threshold = norm_threshold
    art.inputs.zintensity_threshold = 3
    art.inputs.mask_type = 'spm_global'
    art.inputs.parameter_source = 'SPM'

    segment = Node(interface=spm.Segment(), name="segment")
    segment.inputs.save_bias_corrected = True
    segment.inputs.data = anat_file

    """Uncomment the following line for faster execution
    """

    #segment.inputs.gaussians_per_class = [1, 1, 1, 4]

    """Warp functional and structural data to SPM's T1 template using
    :class:`nipype.interfaces.spm.Normalize`.  The tutorial data set
    includes the template image, T1.nii.
    """

    normalize_func = Node(interface=spm.Normalize(), name="normalize_func")
    normalize_func.inputs.jobtype = "write"
    normalize_func.inputs.write_voxel_sizes = [2., 2., 2.]

    """Smooth the functional data using
    :class:`nipype.interfaces.spm.Smooth`.
    """

    smooth = Node(interface=spm.Smooth(), name="smooth")
    smooth.inputs.fwhm = vol_fwhm

    """Here we are connecting all the nodes together. Notice that we add the merge node only if you choose
    to use 4D. Also `get_vox_dims` function is passed along the input volume of normalise to set the optimal
    voxel sizes.
    """

    wf.connect([(name_unique, realign, [('out_file', 'in_files')]),
                (realign, coregister, [('mean_image', 'source')]),
                (segment, normalize_func, [('transformation_mat', 'parameter_file')]),
                (realign, slice_timing, [('realigned_files', 'in_files')]),
                (slice_timing, normalize_func, [('timecorrected_files', 'apply_to_files')]),
                (normalize_func, smooth, [('normalized_files', 'in_files')]),
                (realign, art, [('realignment_parameters', 'realignment_parameters')]),
                (smooth, art, [('smoothed_files', 'realigned_files')]),
                ])

    def selectN(files, N=1):
        from nipype.utils.filemanip import filename_to_list, list_to_filename
        return list_to_filename(filename_to_list(files)[:N])

    mask = Node(fsl.BET(), name='getmask')
    mask.inputs.mask = True
#......... (remaining code omitted) .........
Author: satra | Project: rscourse | Lines: 101 | Source: rsfmri_conn_spm_preprocessing.py


Example 11: dict

import os
from glob import glob

from nipype import Node, Function, Workflow, IdentityInterface
from nipype.interfaces.freesurfer import ReconAll
from nipype.interfaces.io import DataGrabber

#curr_dir_age = 'cmind_age00_raw'
#data_dir = '/home/data/madlab/data/mri/cmind/raw_data'

#sids = os.listdir('%s/%s' % (data_dir, curr_dir_age))
#sids = sids[:-1]  # removes the .tar file
sids = ['783125', '783126', '783127', '783128', '783129', '783131', '783132', '783133']

info = dict(T1=[['subject_id']])

infosource = Node(IdentityInterface(fields=['subject_id']), name='infosource')
infosource.iterables = ('subject_id', sids)

# Create a datasource node to get the T1 file
datasource = Node(DataGrabber(infields=['subject_id'], outfields=list(info.keys())),
                  name='datasource')
datasource.inputs.template = '%s/%s'
datasource.inputs.base_directory = os.path.abspath('/home/data/madlab/data/mri/seqtrd/')
datasource.inputs.field_template = dict(T1='%s/anatomy/T1_*.nii.gz')
datasource.inputs.template_args = info
datasource.inputs.sort_filelist = True

reconall_node = Node(ReconAll(), name='reconall_node')
reconall_node.inputs.openmp = 2
reconall_node.inputs.subjects_dir = os.environ['SUBJECTS_DIR']
reconall_node.inputs.terminal_output = 'allatonce'
reconall_node.plugin_args = {'bsub_args': '-q PQ_madlab -n 2', 'overwrite': True}
Author: mattfeld | Project: mri_misc | Lines: 31 | Source: run_recon.py
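
The plugin_args set above (and in example 7) only take effect when the graph is executed through a matching execution plugin. A hedged sketch of how these nodes would typically be wired up and submitted, assuming an LSF cluster; the workflow name and connections are illustrative:

# Hypothetical continuation: connect the nodes and submit via the LSF plugin,
# which honors the per-node 'bsub_args'.
from nipype import Workflow

wf = Workflow(name='reconflow')
wf.connect(infosource, 'subject_id', datasource, 'subject_id')
wf.connect(datasource, 'T1', reconall_node, 'T1_files')
wf.run(plugin='LSF')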


Example 12: cpu_count

# (Excerpt begins mid-script: `reconallNode` is created earlier in the source
#  file; `cpu_count` presumably comes from `from multiprocessing import cpu_count`.)
# reconallNode.inputs.T1_files = firstFile
# reconallNode.inputs.subjects_dir = subPath
reconallNode.inputs.subject_id = reconallFolderName
reconallNode.inputs.directive = 'all'
reconallNode.inputs.openmp = cpu_count()
# reconallNode.inputs.args = '-notal-check'

# OAR Workaround
# reconallNode.plugin_args = {'overwrite': True, 'oarsub_args': '-l nodes=1,walltime=16:00:00'}

# Convert the T1 mgz image to nifti format for later usage
# mriConverter = Node(freesurfer.preprocess.MRIConvert(), name = 'convertAparcAseg')
# mriConverter.inputs.out_type = 'niigz'
# mriConverter.inputs.out_orientation = 'RAS'
mriConverter = Node(Function(input_names=['in_file', 'out_file'],
                             output_names=['out_file'],
                             function=mri_convert_bm),
                    name='convertAparcAseg')

# Convert the Brainmask file
# brainmaskConv = Node(freesurfer.preprocess.MRIConvert(), name = 'convertBrainmask')
# brainmaskConv.inputs.out_type = 'niigz'
# brainmaskConv.inputs.out_orientation = 'RAS'
brainmaskConv = mriConverter.clone('convertBrainmask')


# ### Diffusion Data (dwMRI) preprocessing
# First extract the diffusion vectors and the pulse intensity (bvec and bval)
# Use dcm2nii for this task
dcm2niiNode = Node(Dcm2nii(), name='dcm2niiAndBvecs')
dcm2niiNode.inputs.gzip_output = True
dcm2niiNode.inputs.date_in_filename = False
Author: BrainModes | Project: TVB-Pypeline | Lines: 32 | Source: preprocSub.py


Example 13: create_reg_workflow

def create_reg_workflow(name='registration'):
    """Create a FEAT preprocessing workflow together with freesurfer

    Parameters
    ----------

        name : name of workflow (default: 'registration')

    Inputs::

        inputspec.source_files : files (filename or list of filenames to register)
        inputspec.mean_image : reference image to use
        inputspec.anatomical_image : anatomical image to coregister to
        inputspec.target_image : registration target

    Outputs::

        outputspec.func2anat_transform : FLIRT transform
        outputspec.anat2target_transform : FLIRT+FNIRT transform
        outputspec.transformed_files : transformed files in target space
        outputspec.transformed_mean : mean image in target space
    """

    register = Workflow(name=name)

    inputnode = Node(interface=IdentityInterface(fields=['source_files',
                                                         'mean_image',
                                                         'subject_id',
                                                         'subjects_dir',
                                                         'target_image']),
                     name='inputspec')

    outputnode = Node(interface=IdentityInterface(fields=['func2anat_transform',
                                                          'out_reg_file',
                                                          'anat2target_transform',
                                                          'transforms',
                                                          'transformed_mean',
                                                          'segmentation_files',
                                                          'anat2target',
                                                          'aparc',
                                                          'min_cost_file'
                                                          ]),
                      name='outputspec')

    # Get the subject's freesurfer source directory
    fssource = Node(FreeSurferSource(),
                    name='fssource')
    fssource.run_without_submitting = True
    register.connect(inputnode, 'subject_id', fssource, 'subject_id')
    register.connect(inputnode, 'subjects_dir', fssource, 'subjects_dir')

    convert = Node(freesurfer.MRIConvert(out_type='nii'),
                   name="convert")
    register.connect(fssource, 'T1', convert, 'in_file')

    # Coregister the median to the surface
    bbregister = Node(freesurfer.BBRegister(),
                      name='bbregister')
    bbregister.inputs.init = 'fsl'
    bbregister.inputs.contrast_type = 't2'
    bbregister.inputs.out_fsl_file = True
    bbregister.inputs.epi_mask = True
    register.connect(inputnode, 'subject_id', bbregister, 'subject_id')
    register.connect(inputnode, 'mean_image', bbregister, 'source_file')
    register.connect(inputnode, 'subjects_dir', bbregister, 'subjects_dir')

    """
    Estimate the tissue classes from the anatomical image. But use aparc+aseg's brain mask
    """

    binarize = Node(fs.Binarize(min=0.5, out_type="nii.gz", dilate=1), name="binarize_aparc")
    register.connect(fssource, ("aparc_aseg", get_aparc_aseg), binarize, "in_file")
    stripper = Node(fsl.ApplyMask(), name='stripper')
    register.connect(binarize, "binary_file", stripper, "mask_file")
    register.connect(convert, 'out_file', stripper, 'in_file')

    fast = Node(fsl.FAST(), name='fast')
    register.connect(stripper, 'out_file', fast, 'in_files')

    """
    Binarize the segmentation
    """

    binarize = MapNode(fsl.ImageMaths(op_string='-nan -thr 0.9 -ero -bin'),
                       iterfield=['in_file'],
                       name='binarize')
    register.connect(fast, 'partial_volume_files', binarize, 'in_file')

    """
    Apply inverse transform to take segmentations to functional space
    """

    applyxfm = MapNode(freesurfer.ApplyVolTransform(inverse=True,
                                                    interp='nearest'),
                       iterfield=['target_file'],
                       name='inverse_transform')
    register.connect(inputnode, 'subjects_dir', applyxfm, 'subjects_dir')
    register.connect(bbregister, 'out_reg_file', applyxfm, 'reg_file')
    register.connect(binarize, 'out_file', applyxfm, 'target_file')
    register.connect(inputnode, 'mean_image', applyxfm, 'source_file')
#......... (remaining code omitted) .........
Author: Conxz | Project: nipype | Lines: 101 | Source: rsfmri_vol_surface_preprocessing_nipy.py


Example 14: create_fs_reg_workflow

def create_fs_reg_workflow(name="registration"):
    """Create a FEAT preprocessing workflow together with freesurfer

    Parameters
    ----------

    ::

        name : name of workflow (default: 'registration')

    Inputs::

        inputspec.source_files : files (filename or list of filenames to register)
        inputspec.mean_image : reference image to use
        inputspec.target_image : registration target

    Outputs::

        outputspec.func2anat_transform : FLIRT transform
        outputspec.anat2target_transform : FLIRT+FNIRT transform
        outputspec.transformed_files : transformed files in target space
        outputspec.transformed_mean : mean image in target space

    Example
    -------

    """

    register = Workflow(name=name)

    inputnode = Node(
        interface=IdentityInterface(
            fields=["source_files", "mean_image", "subject_id", "subjects_dir", "target_image"]
        ),
        name="inputspec",
    )

    outputnode = Node(
        interface=IdentityInterface(
            fields=[
                "func2anat_transform",
                "out_reg_file",
                "anat2target_transform",
                "transforms",
                "transformed_mean",
                "transformed_files",
                "min_cost_file",
                "anat2target",
                "aparc",
                "mean2anat_mask",
            ]
        ),
        name="outputspec",
    )

    # Get the subject's freesurfer source directory
    fssource = Node(FreeSurferSource(), name="fssource")
    fssource.run_without_submitting = True
    register.connect(inputnode, "subject_id", fssource, "subject_id")
    register.connect(inputnode, "subjects_dir", fssource, "subjects_dir")

    convert = Node(freesurfer.MRIConvert(out_type="nii"), name="convert")
    register.connect(fssource, "T1", convert, "in_file")

    # Coregister the median to the surface
    bbregister = Node(freesurfer.BBRegister(registered_file=True), name="bbregister")
    bbregister.inputs.init = "fsl"
    bbregister.inputs.contrast_type = "t2"
    bbregister.inputs.out_fsl_file = True
    bbregister.inputs.epi_mask = True
    register.connect(inputnode, "subject_id", bbregister, "subject_id")
    register.connect(inputnode, "mean_image", bbregister, "source_file")
    register.connect(inputnode, "subjects_dir", bbregister, "subjects_dir")

    # Create a mask of the median coregistered to the anatomical image
    mean2anat_mask = Node(fsl.BET(mask=True), name="mean2anat_mask")
    register.connect(bbregister, "registered_file", mean2anat_mask, "in_file")

    """
    use aparc+aseg's brain mask
    """

    binarize = Node(fs.Binarize(min=0.5, out_type="nii.gz", dilate=1), name="binarize_aparc")
    register.connect(fssource, ("aparc_aseg", get_aparc_aseg), binarize, "in_file")

    stripper = Node(fsl.ApplyMask(), name="stripper")
    register.connect(binarize, "binary_file", stripper, "mask_file")
    register.connect(convert, "out_file", stripper, "in_file")

    """
    Apply inverse transform to aparc file
    """
    aparcxfm = Node(freesurfer.ApplyVolTransform(inverse=True, interp="nearest"), name="aparc_inverse_transform")
    register.connect(inputnode, "subjects_dir", aparcxfm, "subjects_dir")
    register.connect(bbregister, "out_reg_file", aparcxfm, "reg_file")
    register.connect(fssource, ("aparc_aseg", get_aparc_aseg), aparcxfm, "target_file")
    register.connect(inputnode, "mean_image", aparcxfm, "source_file")

    """
    Convert the BBRegister transformation to ANTS ITK format
#......... (remaining code omitted) .........
Author: jpellman | Project: nipype | Lines: 101 | Source: fmri_ants_openfmri.py


Example 15: embed_metadata_from_dicoms

def embed_metadata_from_dicoms(bids, item_dicoms, outname, outname_bids,
                               prov_file, scaninfo, tempdirs, with_prov,
                               min_meta):
    """
    Enhance sidecar information file with more information from DICOMs

    Parameters
    ----------
    bids
    item_dicoms
    outname
    outname_bids
    prov_file
    scaninfo
    tempdirs
    with_prov
    min_meta

    Returns
    -------

    """
    from nipype import Node, Function
    # `op` (os.path), `lgr`, `load_json`, `set_readonly` and `embed_nifti` are
    # module-level names in the source file (heudiconv).
    tmpdir = tempdirs(prefix='embedmeta')

    # We need to assure that paths are absolute if they are relative
    item_dicoms = list(map(op.abspath, item_dicoms))

    embedfunc = Node(Function(input_names=['dcmfiles', 'niftifile', 'infofile',
                                           'bids_info', 'force', 'min_meta'],
                              output_names=['outfile', 'meta'],
                              function=embed_nifti),
                     name='embedder')
    embedfunc.inputs.dcmfiles = item_dicoms
    embedfunc.inputs.niftifile = op.abspath(outname)
    embedfunc.inputs.infofile = op.abspath(scaninfo)
    embedfunc.inputs.min_meta = min_meta
    if bids:
        embedfunc.inputs.bids_info = load_json(op.abspath(outname_bids))
    else:
        embedfunc.inputs.bids_info = None
    embedfunc.inputs.force = True
    embedfunc.base_dir = tmpdir
    cwd = os.getcwd()
    lgr.debug("Embedding into %s based on dicoms[0]=%s for nifti %s",
              scaninfo, item_dicoms[0], outname)
    try:
        if op.lexists(scaninfo):
            # TODO: handle annexed file case
            if not op.islink(scaninfo):
                set_readonly(scaninfo, False)
        res = embedfunc.run()
        set_readonly(scaninfo)
        if with_prov:
            g = res.provenance.rdf()
            g.parse(prov_file,
                    format='turtle')
            g.serialize(prov_file, format='turtle')
            set_readonly(prov_file)
    except Exception as exc:
        lgr.error("Embedding failed: %s", str(exc))
        os.chdir(cwd)
Author: cni-md | Project: heudiconv | Lines: 62 | Source: dicoms.py
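
Several examples (7, 9, 12, 15, 16) wrap plain Python functions with the Function interface. The wrapped function is shipped to the execution node by source, so it must be self-contained: anything it needs is imported inside its body or passed via the imports argument (as example 9's calc_median does). A minimal sketch with a hypothetical function:

# Minimal Function-node sketch: the function body must be self-contained.
from nipype import Node, Function

def double(x):
    return 2 * x

doubler = Node(Function(input_names=['x'], output_names=['y'],
                        function=double),
               name='doubler')
doubler.inputs.x = 21
res = doubler.run()
print(res.outputs.y)  # -> 42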


Example 16: create_surfdist_workflow

def create_surfdist_workflow(subjects_dir,
                             subject_list,
                             sources,
                             target,
                             hemi,
                             atlas,
                             labs,
                             name):

  sd = Workflow(name=name)
    
  # Run a separate tree for each template, hemisphere and source structure
  infosource = Node(IdentityInterface(fields=['template','hemi','source']), name="infosource")
  infosource.iterables = [('template', target),('hemi', hemi),('source',sources)]

  # Get template files
  fsst = Node(FreeSurferSource(),name='FS_Source_template')
  fsst.inputs.subjects_dir = subjects_dir

  sd.connect(infosource,'template',fsst,'subject_id')
  sd.connect(infosource,'hemi',fsst,'hemi')

  # Generate folder name for output
  genfoldname = Node(Function(input_names=['hemi','source','target'],
                      output_names=['cname'], function=genfname),
                      name='genfoldname')
  sd.connect(infosource,'hemi',genfoldname,'hemi')
  sd.connect(infosource,'source',genfoldname,'source')
  sd.connect(infosource,'template',genfoldname,'target')

  # Get subjects
  fss = Node(FreeSurferSource(),name='FS_Source')
  fss.iterables = ('subject_id', subject_list)
  fss.inputs.subjects_dir = subjects_dir
  fss.inputs.subject_id = subject_list

  sd.connect(infosource,'hemi',fss,'hemi')

  # Trim labels
  tlab = Node(Function(input_names=['itemz','phrase'],
                        output_names=['item'], function=trimming),
                        name='tlab')
  tlab.inputs.phrase = labs
  sd.connect(fss,'label',tlab,'itemz')

  # Trim annotations
  tannot = Node(Function(input_names=['itemz','phrase'],
                        output_names=['item'], function=trimming),
                        name='tannot')
  tannot.inputs.phrase = atlas
  sd.connect(fss,'annot',tannot,'itemz')

  # Calculate distances for each hemi
  sdist = Node(Function(input_names=['surface','labels','annot','reg','origin','target'],
                        output_names=['distances'], function=calc_surfdist), 
                        name='sdist')
  sd.connect(infosource,'source',sdist,'origin')
  sd.connect(fss,'pial',sdist,'surface')
  sd.connect(tlab,'item',sdist,'labels')
  sd.connect(tannot,'item',sdist,'annot')
  sd.connect(fss,'sphere_reg',sdist,'reg')
  sd.connect(fsst,'sphere_reg',sdist,'target')
  
  # Gather data for each hemi from all subjects
  bucket = JoinNode(Function(input_names=['files', 'hemi', 'source', 'target'],
                             output_names=['group_dist'], function=stack_files),
                    joinsource=fss, joinfield='files', name='bucket')
  sd.connect(infosource,'source',bucket,'source')
  sd.connect(infosource,'template',bucket,'target')
  sd.connect(infosource,'hemi',bucket,'hemi')
  sd.connect(sdist,'distances',bucket,'files')

  # Sink the data
  datasink = Node(DataSink(), name='sinker')
  datasink.inputs.parameterization = False
  datasink.inputs.base_directory = os.path.abspath(args.sink)  # `args` is parsed at module level in the source script
  sd.connect(genfoldname,'cname',datasink,'container')
  sd.connect(bucket,'group_dist',datasink,'group_distances')

  return sd
Author: BmBaczkowski | Project: surfdist | Lines: 79 | Source: surfdist_nipype.py
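
Example 16 is the only one that uses JoinNode, the inverse of iterables: where fss.iterables fans the graph out into one branch per subject, the JoinNode collapses those branches back into a single list on its joinfield. A minimal fan-out/fan-in sketch with hypothetical names and values:

# iterables fan the graph out; JoinNode gathers the branches into one list.
from nipype import Node, JoinNode, Workflow, Function
from nipype.interfaces.utility import IdentityInterface

def collect(values):
    # receives the joined list, one entry per branch
    return sorted(values)

splitter = Node(IdentityInterface(fields=['x']), name='splitter')
splitter.iterables = [('x', [3, 1, 2])]

joiner = JoinNode(Function(input_names=['values'], output_names=['out'],
                           function=collect),
                  joinsource='splitter', joinfield='values', name='joiner')

wf = Workflow(name='join_demo')
wf.connect(splitter, 'x', joiner, 'values')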


Example 17: Node

bbregNode = Node(freesurfer.preprocess.BBRegister(), name="BBRegister")
bbregNode.inputs.init = "fsl"
bbregNode.inputs.contrast_type = "t2"
bbregNode.inputs.epi_mask = True
bbregNode.inputs.out_fsl_file = True
bbregNode.inputs.args = "--tol1d 1e-3"
bbregNode.inputs.subject_id = reconallFolderName


# ### Surface2Vol

# In[ ]:

# Transform Left Hemisphere
lhWhiteFilename = "lh_white.nii.gz"
surf2volNode_lh = Node(freesurfer.utils.Surface2VolTransform(), name="surf2vol_lh")
surf2volNode_lh.inputs.hemi = "lh"
surf2volNode_lh.inputs.mkmask = True
surf2volNode_lh.inputs.subject_id = reconallFolderName

# Transform right hemisphere
surf2volNode_rh = surf2volNode_lh.clone("surf2vol_rh")
surf2volNode_rh.inputs.hemi = "rh"

# Merge the hemispheres
mergeHemisNode = Node(fsl.BinaryMaths(), name="mergeHemis")
mergeHemisNode.inputs.operation = "add"
mergeHemisNode.inputs.output_type = "NIFTI_GZ"


# ### Registration
Author: gitter-badger | Project: TVB-Pypeline | Lines: 31 | Source: preprocSub
