How to use the nipype.interfaces.utility function in nipype

To help you get started, we’ve selected a few nipype examples based on popular ways nipype.interfaces.utility is used in public projects.
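Before the excerpts, here is a minimal, self-contained sketch of the utility interfaces that show up throughout them (IdentityInterface, Function and Select). The node names and the double helper below are illustrative only, not taken from the projects listed.

import nipype.pipeline.engine as pe
import nipype.interfaces.utility as niu

# IdentityInterface: a passthrough node, typically used as a workflow's
# input/output boundary.
inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']), name='inputnode')

# Function: wrap a plain Python function as a nipype interface.
def double(x):
    return 2 * x

double_node = pe.Node(
    niu.Function(input_names=['x'], output_names=['y'], function=double),
    name='double')

# Select: pick elements out of a list input by index.
pick_first = pe.Node(niu.Select(index=[0]), name='pick_first')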


github aramis-lab / clinica / clinica / pipelines / dwi_preprocessing_phase_difference_fieldmap3 / dwi_preprocessing_phase_difference_fieldmap3_utils.py
    .. admonition:: References

      .. [Jeurissen2014] Jeurissen B. et al., Multi-tissue constrained
        spherical deconvolution for improved analysis of multi-shell diffusion
        MRI data. NeuroImage (2014). doi: 10.1016/j.neuroimage.2014.07.061

    Example
    -------
    >>> from nipype.workflows.dmri.fsl.artifacts import remove_bias
    >>> bias = remove_bias()
    >>> bias.inputs.inputnode.in_file = 'epi.nii'
    >>> bias.inputs.inputnode.in_bval = 'diffusion.bval'
    >>> bias.inputs.inputnode.in_mask = 'mask.nii'
    >>> bias.run() # doctest: +SKIP
    """
    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_file']), name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=['out_file','b0_mask']),
                         name='outputnode')

    getb0 = pe.Node(fsl.ExtractROI(t_min=0, t_size=1), name='get_b0')

    mask_b0 = pe.Node(fsl.BET(frac=0.3, mask=True, robust=True), name='mask_b0')

    n4 = pe.Node(ants.N4BiasFieldCorrection(
        dimension=3, save_bias=True, bspline_fitting_distance=600),
        name='Bias_b0')
    split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs')
    mult = pe.MapNode(fsl.MultiImageMaths(op_string='-div %s'),
                      iterfield=['in_file'], name='RemoveBiasOfDWIs')
    thres = pe.MapNode(fsl.Threshold(thresh=0.0), iterfield=['in_file'],
                       name='RemoveNegative')  # name assumed; the source excerpt is cut off here
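The excerpt stops before these nodes are wired together. The sketch below shows one plausible wiring, modeled on nipype's own remove_bias workflow; the merge node, the workflow name and the exact connections are assumptions, not part of the Clinica source.

import nipype.pipeline.engine as pe
from nipype.interfaces import fsl

# Sketch only: wiring for nodes like the ones defined above.
merge = pe.Node(fsl.Merge(dimension='t'), name='MergeDWIs')  # assumed, not shown in the excerpt
wf = pe.Workflow(name='remove_bias')
wf.connect([
    (inputnode, getb0, [('in_file', 'in_file')]),
    (inputnode, split, [('in_file', 'in_file')]),
    (getb0, mask_b0, [('roi_file', 'in_file')]),
    (getb0, n4, [('roi_file', 'input_image')]),
    (mask_b0, n4, [('mask_file', 'mask_image')]),
    (n4, mult, [('bias_image', 'operand_files')]),
    (split, mult, [('out_files', 'in_file')]),
    (mult, thres, [('out_file', 'in_file')]),
    (thres, merge, [('out_file', 'in_files')]),
    (merge, outputnode, [('merged_file', 'out_file')]),
    (mask_b0, outputnode, [('mask_file', 'b0_mask')]),
])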

github nipy / nipype / nipype / workflows / dmri / fsl / epi.py
        inputnode.in_file
        inputnode.ref_num

    Outputs::

        outputnode.eddy_corrected
    """

    warnings.warn(
        ('This workflow is deprecated from v.1.0.0, use '
         'nipype.workflows.dmri.preprocess.epi.ecc_pipeline instead'),
        DeprecationWarning)

    inputnode = pe.Node(
        niu.IdentityInterface(fields=['in_file', 'ref_num']), name='inputnode')

    pipeline = pe.Workflow(name=name)

    split = pe.Node(fsl.Split(dimension='t'), name='split')
    pick_ref = pe.Node(niu.Select(), name='pick_ref')
    coregistration = pe.MapNode(
        fsl.FLIRT(no_search=True, padding_size=1, interp='trilinear'),
        name='coregistration',
        iterfield=['in_file'])
    merge = pe.Node(fsl.Merge(dimension='t'), name='merge')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['eddy_corrected']), name='outputnode')

    pipeline.connect([(inputnode, split, [('in_file', 'in_file')]),
                      (split, pick_ref,
                       [('out_files', 'inlist')]), (inputnode, pick_ref,
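The workflow above is deprecated, and the warning points at an ecc_pipeline replacement. A hedged usage sketch follows; the import path is an assumption worth checking against your installed nipype, since the string quoted in the warning and the actual module layout have differed between releases.

# Assumed import path -- verify against your nipype version.
from nipype.workflows.dmri.fsl.artifacts import ecc_pipeline

ecc = ecc_pipeline()
ecc.inputs.inputnode.in_file = 'diffusion.nii'
ecc.inputs.inputnode.in_bval = 'diffusion.bval'
ecc.inputs.inputnode.in_mask = 'mask.nii'
ecc.run()  # doctest: +SKIP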

github aramis-lab / clinica / clinica / pipelines / fmri_preprocessing / fmri_preprocessing_pipeline.py
        norm_node = npe.MapNode(interface=spm.Normalize12(),
                                iterfield=['image_to_align', 'apply_to_files'],
                                name='Normalization')

        # Smoothing
        # =========
        smooth_node = npe.MapNode(interface=spm.Smooth(),
                                  iterfield=['in_files'],
                                  name='Smoothing')
        smooth_node.inputs.fwhm = self.parameters['full_width_at_half_maximum']

        # Zipping
        # =======
        zip_node = npe.MapNode(name='Zipping',
                               iterfield=['in_file'],
                               interface=nutil.Function(input_names=['in_file'],
                                                        output_names=[
                                                            'out_file'],
                                                        function=zip_nii))

        zip_bet_node = zip_node.clone('ZippingBET')
        zip_mc_node = zip_node.clone('ZippingMC')
        zip_reg_node = zip_node.clone('ZippingRegistration')
        zip_norm_node = zip_node.clone('ZippingNormalization')
        zip_smooth_node = zip_node.clone('ZippingSmoothing')

        # Connections
        # ===========

        if self.parameters['freesurfer_brain_mask']:
            self.connect([
                # Brain extraction
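A Node or MapNode may appear only once in a workflow, which is why the zipping node above is cloned once per step instead of being reused directly. A minimal hedged illustration outside the Clinica pipeline (the zip_nii stand-in below is a placeholder, not Clinica's helper):

import nipype.pipeline.engine as npe
import nipype.interfaces.utility as nutil

def zip_nii(in_file):
    # placeholder for Clinica's zip_nii helper, for illustration only
    return in_file

zip_node = npe.MapNode(interface=nutil.Function(input_names=['in_file'],
                                                output_names=['out_file'],
                                                function=zip_nii),
                       iterfield=['in_file'],
                       name='Zipping')

# clone() copies the configured interface under a new, unique node name.
zip_smooth_node = zip_node.clone('ZippingSmoothing')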

github FCP-INDI / C-PAC / CPAC / generate_motion_statistics / generate_motion_statistics.py
function=gen_motion_parameters),
                                     name='calc_motion_parameters')
    pm.connect(inputNode, 'subject_id',
               calc_motion_parameters, 'subject_id')
    pm.connect(inputNode, 'scan_id',
               calc_motion_parameters, 'scan_id')
    pm.connect(inputNode, 'movement_parameters',
                calc_motion_parameters, 'movement_parameters')
    pm.connect(inputNode, 'max_displacement',
               calc_motion_parameters, 'max_displacement')
    
    pm.connect(calc_motion_parameters, 'out_file', 
               outputNode, 'motion_params')


    calc_power_parameters = pe.Node(util.Function(input_names=["subject_id", 
                                                                "scan_id", 
                                                                "FD_1D",
                                                                "FDJ_1D", 
                                                                "threshold",
                                                                "DVARS"],
                                                   output_names=['out_file'],
                                                   function=gen_power_parameters),
                                     name='calc_power_parameters')
    pm.connect(inputNode, 'subject_id',
               calc_power_parameters, 'subject_id')
    pm.connect(inputNode, 'scan_id',
               calc_power_parameters, 'scan_id')
    pm.connect(cal_DVARS, 'out_file',
               calc_power_parameters, 'DVARS')
    pm.connect(calculate_FD, 'out_file',
               calc_power_parameters, 'FD_1D')
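Each input above is wired with a separate pm.connect call; Workflow.connect also accepts a list of (source, destination, [(output, input), ...]) tuples, the style used in several other excerpts on this page. A hedged, equivalent grouping for the power-parameters node, reusing the excerpt's node variables:

# Equivalent list-form connections (sketch; node variables as in the excerpt).
pm.connect([
    (inputNode, calc_power_parameters, [('subject_id', 'subject_id'),
                                        ('scan_id', 'scan_id')]),
    (cal_DVARS, calc_power_parameters, [('out_file', 'DVARS')]),
    (calculate_FD, calc_power_parameters, [('out_file', 'FD_1D')]),
])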

github nipy / nipype / examples / workshop_dartmouth_2010.py
datasource4.inputs.sort_filelist = True
datasource4.inputs.run = [3, 7]
datasource4.inputs.subject_id = ['s1', 's3']
results = datasource4.run()
print(results.outputs)

"""
Iterables
---------

Iterables is a special field of the Node class that makes it possible to iterate all workflows/nodes connected to it over
some parameters. Here we'll use it to iterate over two subjects; a short variation using multiple iterables follows the example below.
"""

import nipype.interfaces.utility as util
infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_id']),
                     name="infosource")
infosource.iterables = ('subject_id', ['s1', 's3'])

datasource = pe.Node(nio.DataGrabber(infields=['subject_id'], outfields=['func', 'struct']), name="datasource")
datasource.inputs.template = '%s/%s.nii'
datasource.inputs.base_directory = os.path.abspath('data')
datasource.inputs.template_args = dict(func=[['subject_id','f3']], struct=[['subject_id','struct']])
datasource.inputs.sort_filelist = True

my_workflow = pe.Workflow(name="my_workflow")
my_workflow.base_dir = os.path.abspath('.')

my_workflow.connect([(infosource, datasource, [('subject_id', 'subject_id')]),
                     (datasource, preproc, [('func', 'inputspec.func'),
                                          ('struct', 'inputspec.struct')])])
my_workflow.run()
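As noted above, iterables can also take a list of tuples, in which case nipype expands the connected nodes over the Cartesian product of the values. A hedged variation on the infosource node (the fwhm field and its values are purely illustrative):

import nipype.pipeline.engine as pe
import nipype.interfaces.utility as util

# Sketch: iterate over subjects *and* a hypothetical smoothing width.
# Downstream nodes run once per (subject_id, fwhm) combination.
infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_id', 'fwhm']),
                     name="infosource")
infosource.iterables = [('subject_id', ['s1', 's3']),
                        ('fwhm', [4, 8])]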

github nipy / nipype / nipype / workflows / dmri / camino / connectivity_mapping.py
        inputnode.resolution_network_file

    Outputs::

        outputnode.connectome
        outputnode.cmatrix
        outputnode.gpickled_network
        outputnode.fa
        outputnode.struct
        outputnode.trace
        outputnode.tracts
        outputnode.tensors

    """

    inputnode_within = pe.Node(interface=util.IdentityInterface(fields=["subject_id",
                                                                        "dwi",
                                                                        "bvecs",
                                                                        "bvals",
                                                                        "subjects_dir",
                                                                        "resolution_network_file",
                                                                        ]),
                               name="inputnode_within")

    FreeSurferSource = pe.Node(interface=nio.FreeSurferSource(), name='fssource')

    FreeSurferSourceLH = pe.Node(interface=nio.FreeSurferSource(), name='fssourceLH')
    FreeSurferSourceLH.inputs.hemi = 'lh'

    FreeSurferSourceRH = pe.Node(interface=nio.FreeSurferSource(), name='fssourceRH')
    FreeSurferSourceRH.inputs.hemi = 'rh'

github FCP-INDI / C-PAC / CPAC / utils / datasource.py
    inputnode.iterables = [('scan', rest_dict.keys())]

    selectrest = pe.Node(util.Function(input_names=['scan', 'rest_dict'],
                                       output_names=['rest'],
                        function=get_rest),
                         name='selectrest')
    selectrest.inputs.rest_dict = rest_dict

    check_s3_node = pe.Node(util.Function(input_names=['file_path', 'creds_path'],
                                          output_names=['local_path'],
                                          function=check_for_s3),
                            name='check_for_s3')
    wf.connect(selectrest, 'rest', check_s3_node, 'file_path')
    wf.connect(inputnode, 'creds_path', check_s3_node, 'creds_path')

    outputnode = pe.Node(util.IdentityInterface(fields=['subject',
                                                     'rest',
                                                     'scan' ]),
                         name='outputspec')

    wf.connect(inputnode, 'scan', selectrest, 'scan')

    wf.connect(inputnode, 'subject', outputnode, 'subject')
    wf.connect(check_s3_node, 'local_path', outputnode, 'rest')
    wf.connect(inputnode, 'scan', outputnode, 'scan')

    return wf

github FCP-INDI / C-PAC / CPAC / seg_preproc / seg_preproc.py
        if use_erosion:
            # mask erosion 
            eroded_mask = pe.Node(util.Function(input_names = ['roi_mask', 'skullstrip_mask', 'mask_erosion_prop'], 
                                                output_names = ['output_roi_mask', 'eroded_skullstrip_mask'], 
                                                function = mask_erosion,
                                                imports = ero_imports),                                    
                                                name='erode_skullstrip_mask_%s' % (wf_name))
            eroded_mask.inputs.mask_erosion_prop =  erosion_prop**3 
            preproc.connect(inputNode, 'brain_mask', eroded_mask, 'skullstrip_mask')
            preproc.connect(input_1, value_1, eroded_mask, 'roi_mask')
            
            input_1, value_1 = (eroded_mask, 'output_roi_mask')

            # erosion 
            erosion_segmentmap = pe.Node(util.Function(input_names = ['roi_mask', 'erosion_prop'], 
                                                output_names = ['eroded_roi_mask'], 
                                                function = erosion,
                                                imports = ero_imports),                                    
                                                name='erosion_segmentmap_%s' % (wf_name))

            erosion_segmentmap.inputs.erosion_prop =  erosion_prop   
            preproc.connect(input_1, value_1, erosion_segmentmap, 'roi_mask')
            input_1, value_1 = (erosion_segmentmap, 'eroded_roi_mask')

        #connect to output nodes
        # preproc.connect(tissueprior_mni_to_t1, 'output_image', outputNode, 'tissueprior_mni2t1')
        
        # preproc.connect(overlap_segmentmap_with_prior, 'out_file', outputNode, 'segment_combo')

        # preproc.connect(segmentmap_threshold, 'out_file', outputNode, 'segment_thresh')
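The Function nodes above pass an imports argument: a list of import statements that nipype prepends to the wrapped function's source, so names like nibabel or numpy resolve when the function runs in its own namespace. A minimal hedged illustration (the helper and its import list are made up for the example, not C-PAC's code):

import nipype.pipeline.engine as pe
import nipype.interfaces.utility as util

def count_voxels(roi_mask):
    # nb and np come from the 'imports' list below, not from this module
    img = nb.load(roi_mask)
    return int(np.count_nonzero(img.get_fdata()))

count_node = pe.Node(util.Function(input_names=['roi_mask'],
                                   output_names=['n_voxels'],
                                   function=count_voxels,
                                   imports=['import nibabel as nb',
                                            'import numpy as np']),
                     name='count_voxels')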

github FCP-INDI / C-PAC / CPAC / EPI_DistCorr / warp_nipype.py
    -- PrepareFieldMap : Prepares the fieldmap.
                           For more details, check: https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FUGUE/Guide
                           in_file = phase_file, magnitude_file
                           deltaTE = echo time difference between the two echoes
                                     (default == 2.46 ms)
                           scanner = SIEMENS, default.
    -- FUGUE           : One of the steps in the EPI-DistCorrection toolbox; it unwarps the EPI images using the fieldmap
                            in_file = fieldmap, a 4D image (containing 2 unwrapped images)
                            mask_file = epi_mask.nii
                            dwell_to_asym_ratio = (0.77e-3 * 3)/(2.46e-3)
    """

inputNode = pe.Node(util.IdentityInterface(fields=['distcorr']),
                        name='inputspec')

outputNode = pe.Node(util.IdentityInterface(fields=['roi_file','fieldmap','epireg']),name='outputspec')
preproc = pe.Workflow(name='preprocflow')
## Specify commands to be run

# Extract first three volumes from fmri
fslroi = pe.Node(interface=fsl.ExtractROI(),name='fslroi')
fslroi.inputs.t_min=0
fslroi.inputs.t_size=3

preproc.connect(inputNode, 'distcorr', fslroi, 'in_file')

# 'roi_file' is the matching field declared on the outputspec node above
preproc.connect(fslroi, 'roi_file', outputNode, 'roi_file')
# Skullstrip

skullstrip = pe.Node(interface=afni.preprocess.SkullStrip(),name='skullstrip')
skullstrip.inputs.outputtype='NIFTI_GZ'
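The docstring above describes the PrepareFieldMap and FUGUE steps, but the excerpt ends before they appear. Below is a hedged sketch of how those two nodes are commonly built with nipype's FSL interfaces; the node names, file names and dwell/asymmetry values are placeholders, not C-PAC's actual settings.

from nipype.interfaces import fsl
import nipype.pipeline.engine as pe

# fsl_prepare_fieldmap: combine phase + magnitude images into a fieldmap (rad/s).
prepare = pe.Node(fsl.PrepareFieldmap(), name='prepare_fieldmap')
prepare.inputs.in_phase = 'phase.nii.gz'            # placeholder file names
prepare.inputs.in_magnitude = 'magnitude_brain.nii.gz'
prepare.inputs.delta_TE = 2.46                      # ms, the SIEMENS default noted above

# FUGUE: unwarp the EPI using the prepared fieldmap.
fugue = pe.Node(fsl.FUGUE(), name='fugue')
fugue.inputs.in_file = 'epi.nii.gz'
fugue.inputs.mask_file = 'epi_mask.nii.gz'
fugue.inputs.dwell_to_asym_ratio = (0.77e-3 * 3) / 2.46e-3

wf = pe.Workflow(name='distcorr_sketch')
wf.connect(prepare, 'out_fieldmap', fugue, 'fmap_in_file')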

github nipy / nipype / nipype / workflows / dmri / dipy / denoise.py
    Example
    -------

    >>> from nipype.workflows.dmri.dipy.denoise import nlmeans_pipeline
    >>> denoise = nlmeans_pipeline()
    >>> denoise.inputs.inputnode.in_file = 'diffusion.nii'
    >>> denoise.inputs.inputnode.in_mask = 'mask.nii'
    >>> denoise.run() # doctest: +SKIP


    """
    inputnode = pe.Node(
        niu.IdentityInterface(fields=['in_file', 'in_mask']), name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['out_file']), name='outputnode')

    nmask = pe.Node(
        niu.Function(
            input_names=['in_file', 'in_mask'],
            output_names=['out_file'],
            function=bg_mask),
        name='NoiseMsk')
    nlmeans = pe.Node(dipy.Denoise(**params), name='NLMeans')

    wf = pe.Workflow(name=name)
    wf.connect([(inputnode, nmask, [
        ('in_file', 'in_file'), ('in_mask', 'in_mask')
    ]), (inputnode, nlmeans, [('in_file', 'in_file'), ('in_mask', 'in_mask')]),
                (nmask, nlmeans, [('out_file', 'noise_mask')]),
                (nlmeans, outputnode, [('out_file', 'out_file')])])
    return wf