How to use the nipype.pipeline.engine.MapNode class in nipype

To help you get started, we’ve selected a few nipype examples based on popular ways MapNode is used in public projects.
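
Before the project snippets below, here is a minimal sketch of the basic pattern, using a placeholder FSL interface and made-up filenames: a MapNode wraps a single interface and runs it once per element of every input named in iterfield, collecting the results into matching output lists.

import nipype.pipeline.engine as pe
import nipype.interfaces.fsl as fsl

# The interface runs once per entry of each field listed in ``iterfield``;
# outputs are gathered back into lists of the same length.
bet = pe.MapNode(fsl.BET(frac=0.5),
                 iterfield=['in_file'],
                 name='bet')

# Hypothetical inputs: each file gets its own BET run.
bet.inputs.in_file = ['sub-01_T1w.nii.gz', 'sub-02_T1w.nii.gz']
# result = bet.run()   # result.outputs.out_file is then a list of two files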

nipy/nipype: nipype/workflows/dmri/fsl/utils.py (view on GitHub)

        iterfield=['premat', 'postmat'],
        name='ConvertWarp')

    selref = pe.Node(niu.Select(index=[0]), name='Reference')

    split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs')
    unwarp = pe.MapNode(
        fsl.ApplyWarp(),
        iterfield=['in_file', 'field_file'],
        name='UnwarpDWIs')

    coeffs = pe.MapNode(
        fsl.WarpUtils(out_format='spline'),
        iterfield=['in_file'],
        name='CoeffComp')
    jacobian = pe.MapNode(
        fsl.WarpUtils(write_jacobian=True),
        iterfield=['in_file'],
        name='JacobianComp')
    jacmult = pe.MapNode(
        fsl.MultiImageMaths(op_string='-mul %s'),
        iterfield=['in_file', 'operand_files'],
        name='ModulateDWIs')

    thres = pe.MapNode(
        fsl.Threshold(thresh=0.0),
        iterfield=['in_file'],
        name='RemoveNegative')
    merge = pe.Node(fsl.Merge(dimension='t'), name='MergeDWIs')

    wf = pe.Workflow(name=name)
    wf.connect([(inputnode, warps, [
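
One thing the UnwarpDWIs and ModulateDWIs nodes above illustrate: when iterfield names more than one input, the lists are paired element by element, so they must be the same length, while inputs not listed in iterfield are shared across all iterations. A small standalone sketch of that behaviour, with placeholder filenames:

import nipype.pipeline.engine as pe
import nipype.interfaces.fsl as fsl

unwarp = pe.MapNode(
    fsl.ApplyWarp(),
    iterfield=['in_file', 'field_file'],   # zipped: i-th file uses i-th field
    name='UnwarpDWIs')

unwarp.inputs.in_file = ['dwi_0000.nii.gz', 'dwi_0001.nii.gz']        # placeholders
unwarp.inputs.field_file = ['fmap_0000.nii.gz', 'fmap_0001.nii.gz']   # same length
unwarp.inputs.ref_file = 'b0_reference.nii.gz'   # not in iterfield: shared by all runs
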
poldracklab/smriprep: smriprep/workflows/surfaces.py (view on GitHub)

name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(['surfaces']), name='outputnode')

    get_surfaces = pe.Node(nio.FreeSurferSource(), name='get_surfaces')

    midthickness = pe.MapNode(
        MakeMidthickness(thickness=True, distance=0.5, out_name='midthickness'),
        iterfield='in_file',
        name='midthickness')

    save_midthickness = pe.Node(nio.DataSink(parameterization=False),
                                name='save_midthickness')

    surface_list = pe.Node(niu.Merge(4, ravel_inputs=True),
                           name='surface_list', run_without_submitting=True)
    fs2gii = pe.MapNode(fs.MRIsConvert(out_datatype='gii'),
                        iterfield='in_file', name='fs2gii')
    fix_surfs = pe.MapNode(NormalizeSurf(), iterfield='in_file', name='fix_surfs')

    workflow.connect([
        (inputnode, get_surfaces, [('subjects_dir', 'subjects_dir'),
                                   ('subject_id', 'subject_id')]),
        (inputnode, save_midthickness, [('subjects_dir', 'base_directory'),
                                        ('subject_id', 'container')]),
        # Generate midthickness surfaces and save to FreeSurfer derivatives
        (get_surfaces, midthickness, [('smoothwm', 'in_file'),
                                      ('graymid', 'graymid')]),
        (midthickness, save_midthickness, [('out_file', 'surf.@graymid')]),
        # Produce valid GIFTI surface files (dense mesh)
        (get_surfaces, surface_list, [('smoothwm', 'in1'),
                                      ('pial', 'in2'),
                                      ('inflated', 'in3')]),
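
Two details from this snippet are easy to miss: iterfield also accepts a bare field name (iterfield='in_file') as shorthand for a one-element list, and niu.Merge(..., ravel_inputs=True) flattens nested lists, which is useful when MapNode outputs (themselves lists) have to be combined before feeding another MapNode. A rough sketch with made-up surface files:

import nipype.pipeline.engine as pe
import nipype.interfaces.utility as niu
import nipype.interfaces.freesurfer as fs

# iterfield given as a plain string behaves like ['in_file']
fs2gii = pe.MapNode(fs.MRIsConvert(out_datatype='gii'),
                    iterfield='in_file', name='fs2gii')

# Merge(..., ravel_inputs=True) concatenates its inputs into one flat list,
# so a downstream MapNode sees a single list of surfaces.
surface_list = pe.Node(niu.Merge(2, ravel_inputs=True), name='surface_list')
surface_list.inputs.in1 = ['lh.pial', 'rh.pial']      # placeholder surfaces
surface_list.inputs.in2 = ['lh.white', 'rh.white']
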
nipy/nipype: 0.11.0/_downloads/smoothing_comparison.py (view on GitHub)

                      "inputnode.fwhm")
preprocessing.connect(compute_mask, "brain_mask", anisotropic_voxel_smooth,
                      'inputnode.mask_file')



recon_all = pe.Node(interface=fs.ReconAll(), name = "recon_all")

surfregister = pe.Node(interface=fs.BBRegister(),name='surfregister')
surfregister.inputs.init = 'fsl'
surfregister.inputs.contrast_type = 't2'
preprocessing.connect(realign, 'mean_image', surfregister, 'source_file')
preprocessing.connect(recon_all, 'subject_id', surfregister, 'subject_id')
preprocessing.connect(recon_all, 'subjects_dir', surfregister, 'subjects_dir')

isotropic_surface_smooth = pe.MapNode(interface=fs.Smooth(proj_frac_avg=(0,1,0.1)),
                                      iterfield=['in_file'],
                                      name="isotropic_surface_smooth")
preprocessing.connect(surfregister, 'out_reg_file', isotropic_surface_smooth,
                      'reg_file')
preprocessing.connect(realign, "realigned_files", isotropic_surface_smooth,
                      "in_file")
preprocessing.connect(iter_fwhm, "fwhm", isotropic_surface_smooth,
                      "surface_fwhm")
preprocessing.connect(iter_fwhm, "fwhm", isotropic_surface_smooth, "vol_fwhm")
preprocessing.connect(recon_all, 'subjects_dir', isotropic_surface_smooth,
                      'subjects_dir')

merge_smoothed_files = pe.Node(interface=util.Merge(3),
                               name='merge_smoothed_files')
preprocessing.connect(isotropic_voxel_smooth, 'smoothed_files',
                      merge_smoothed_files, 'in1')
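
As the script above shows, a MapNode is wired into a workflow exactly like a plain Node; the only difference is that the connected input is expected to be a list and the outputs come back as lists. A minimal sketch of that wiring (the interface and field names here are illustrative, not taken from the script):

import nipype.pipeline.engine as pe
import nipype.interfaces.utility as niu
import nipype.interfaces.fsl as fsl

inputnode = pe.Node(niu.IdentityInterface(fields=['in_files']), name='inputnode')
smooth = pe.MapNode(fsl.IsotropicSmooth(fwhm=6.0),
                    iterfield=['in_file'], name='smooth')

wf = pe.Workflow(name='smooth_all')
# Whatever list arrives on inputnode.in_files is fanned out over the MapNode.
wf.connect(inputnode, 'in_files', smooth, 'in_file')
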
poldracklab/niworkflows: niworkflows/anat/ants.py (view on GitHub)

    thr_brainmask = pe.Node(
        ThresholdImage(
            dimension=3, th_low=0.5, th_high=1.0, inside_value=1, outside_value=0
        ),
        name="thr_brainmask",
    )

    # Morphological dilation, radius=2
    dil_brainmask = pe.Node(ImageMath(operation="MD", op2="2"), name="dil_brainmask")
    # Get largest connected component
    get_brainmask = pe.Node(
        ImageMath(operation="GetLargestComponent"), name="get_brainmask"
    )

    # Refine INU correction
    inu_n4_final = pe.MapNode(
        N4BiasFieldCorrection(
            dimension=3,
            save_bias=True,
            copy_header=True,
            n_iterations=[50] * 5,
            convergence_threshold=1e-7,
            shrink_factor=4,
            bspline_fitting_distance=bspline_fitting_distance,
        ),
        n_procs=omp_nthreads,
        name="inu_n4_final",
        iterfield=["input_image"],
    )
    if _ants_version and parseversion(_ants_version) >= Version("2.1.0"):
        inu_n4_final.inputs.rescale_intensities = True
    else:
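
Besides iterfield, the inu_n4_final node above uses two practical knobs: n_procs caps how many of the mapped interface runs may execute in parallel for this node (tied to omp_nthreads here), and inputs can still be set after construction, as the version-dependent rescale_intensities toggle shows. A reduced sketch with placeholder values:

import nipype.pipeline.engine as pe
from nipype.interfaces.ants import N4BiasFieldCorrection

inu_n4 = pe.MapNode(
    N4BiasFieldCorrection(dimension=3, save_bias=True, copy_header=True),
    iterfield=['input_image'],
    n_procs=4,              # placeholder: at most 4 parallel runs of this node
    name='inu_n4')

# Inputs can be toggled after the node exists, e.g. behind a version check.
inu_n4.inputs.rescale_intensities = True
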
mwaskom/lyman: lyman/workflows/archive/restingstate.py (view on GitHub)

                           iterfield=["in_file", "image_edges"],
                           name="meanslice")
    meanslice.inputs.sample_axial = 2

    # Rename the outputs
    meanname = pe.MapNode(util.Rename(format_string="mean_func",
                                      keep_ext=True),
                          iterfield=["in_file"],
                          name="meanname")

    maskname = pe.MapNode(util.Rename(format_string="functional_mask",
                                      keep_ext=True),
                          iterfield=["in_file"],
                          name="maskname")

    pngname = pe.MapNode(util.Rename(format_string="mean_func.png"),
                         iterfield=["in_file"],
                         name="pngname")

    # Define the workflow outputs
    outputnode = pe.Node(util.IdentityInterface(fields=["timeseries",
                                                        "mean_func",
                                                        "mask_file",
                                                        "report_png"]),
                         name="outputs")

    # Define and connect the workflow
    skullstrip = pe.Workflow(name=name)

    skullstrip.connect([
        (inputnode,  meanfunc1,     [("timeseries", "in_file")]),
        (meanfunc1,  stripmean,     [("out_file", "in_file")]),
FCP-INDI/C-PAC: CPAC/registration.py (view on GitHub)

.. image:: 
        :width: 500
       
    """

    import nipype.interfaces.ants as ants

    apply_ants_warp_wf = pe.Workflow(name=name)

    inputspec = pe.Node(util.IdentityInterface(fields=['input_image', 
            'reference_image', 'transforms', 'dimension', 'input_image_type', 
            'interpolation']), name='inputspec')

    if map_node:
        apply_ants_warp = pe.MapNode(interface=ants.ApplyTransforms(),
                name='apply_ants_warp_mapnode', iterfield=['input_image', \
                'transforms'])

    else:
        apply_ants_warp = pe.Node(interface=ants.ApplyTransforms(),
                name='apply_ants_warp')

    apply_ants_warp.inputs.out_postfix = '_antswarp'
    apply_ants_warp.interface.num_threads = ants_threads
    apply_ants_warp.interface.estimated_memory_gb = 1.5

    outputspec = pe.Node(util.IdentityInterface(fields=['output_image']),
            name='outputspec')


    # connections from inputspec
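
The construction above, choosing pe.Node or pe.MapNode for the same interface depending on a map_node flag, is a common way to write one workflow that handles either a single image or a list of images, since everything downstream connects identically. A condensed sketch of the idea (the function name and values are placeholders):

import nipype.pipeline.engine as pe
import nipype.interfaces.ants as ants

def make_apply_warp(map_node=False):
    """Return an ApplyTransforms node, mapped over its inputs if requested."""
    if map_node:
        node = pe.MapNode(ants.ApplyTransforms(),
                          iterfield=['input_image', 'transforms'],
                          name='apply_ants_warp_mapnode')
    else:
        node = pe.Node(ants.ApplyTransforms(), name='apply_ants_warp')
    node.inputs.out_postfix = '_antswarp'
    return node
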
FCP-INDI/C-PAC: CPAC/registration/output_func_to_standard.py (view on GitHub)

    if isinstance(func_key, str):
        if func_key == "leaf":
            func_node, func_file = strat.get_leaf_properties()
        else:
            func_node, func_file = strat[func_key]
    elif isinstance(func_key, tuple):
        func_node, func_file = func_key

    if isinstance(ref_key, str):
        ref_node, ref_out_file = strat[ref_key]
    elif isinstance(ref_key, tuple):
        ref_node, ref_out_file = ref_key

    if map_node == True:
        # func_mni_warp
        func_mni_warp = pe.MapNode(interface=fsl.ApplyWarp(),
                name='func_mni_fsl_warp_{0}_{1:d}'.format(output_name, num_strat),
                iterfield=['in_file'],
                mem_gb=1.5)
    else:
        # func_mni_warp
        func_mni_warp = pe.Node(interface=fsl.ApplyWarp(),
                name='func_mni_fsl_warp_{0}_{1:d}'.format(output_name, num_strat))

        
    func_mni_warp.inputs.interp = interpolation_method

    workflow.connect(func_node, func_file,
                     func_mni_warp, 'in_file')

    workflow.connect(ref_node, ref_out_file,
                     func_mni_warp, 'ref_file')
github mwaskom / lyman / workflows / registration.py View on Github external
def create_mni_reg_workflow(name="mni_reg", interp="spline"):
    """Set up a workflow to register an epi into FSL's MNI space."""
    inputnode = Node(IdentityInterface(fields=["source_image",
                                               "warpfield",
                                               "fsl_affine"]),
                     name="inputnode")

    target = fsl.Info.standard_image("avg152T1_brain.nii.gz")

    getinterp = MapNode(Function(input_names=["source_file",
                                              "default_interp"],
                                 output_names="interp",
                                 function=get_interp),
                        iterfield=["source_file"],
                        name="getinterp")
    getinterp.inputs.default_interp=interp

    applywarp = MapNode(fsl.ApplyWarp(ref_file=target),
                        iterfield=["in_file", "premat", "interp"],
                        name="applywarp")

    outputnode = Node(IdentityInterface(fields=["out_file"]),
                      name="outputnode")

    warpflow = Workflow(name=name)
    warpflow.connect([
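
The getinterp node above is a reminder that MapNode is not limited to command-line interfaces: a utility Function can be mapped over inputs in the same way. A self-contained sketch with a toy function standing in for lyman's get_interp (the body below is illustrative only):

from nipype.pipeline.engine import MapNode
from nipype.interfaces.utility import Function

def get_interp(source_file, default_interp='spline'):
    """Toy stand-in: use nearest-neighbour interpolation for mask files."""
    return 'nn' if 'mask' in source_file else default_interp

getinterp = MapNode(Function(input_names=['source_file', 'default_interp'],
                             output_names=['interp'],
                             function=get_interp),
                    iterfield=['source_file'],
                    name='getinterp')
getinterp.inputs.default_interp = 'spline'
getinterp.inputs.source_file = ['epi_run1.nii.gz', 'brain_mask.nii.gz']  # placeholders
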
mwaskom/lyman: lyman/workflows/archive/restingstate.py (view on GitHub)

    # Convert to Nifti for FSL tools
    convertwm = pe.MapNode(fs.MRIConvert(out_type="niigz"),
                           iterfield=["in_file"],
                           name="convertwm")

    convertcsf = pe.MapNode(fs.MRIConvert(out_type="niigz"),
                            iterfield=["in_file"],
                            name="convertcsf")
    
    convertbrain= pe.MapNode(fs.MRIConvert(out_type="niigz"),
                            iterfield=["in_file"],
                            name="convertbrain")
    
    # Add the mask images together for a report image
    addconfmasks = pe.MapNode(fsl.ImageMaths(suffix="conf", 
                                             op_string="-mul 2 -add",
                                             out_data_type="char"),
                              iterfield=["in_file", "in_file2"],
                              name="addconfmasks")

    # Overlay and slice the confound mask overlaied on mean func for reporting
    confoverlay = pe.MapNode(fsl.Overlay(auto_thresh_bg=True,
                                         stat_thresh=(.7, 2)),
                             iterfield=["background_image", "stat_image"],
                             name="confoverlay")

    confslice = pe.MapNode(fsl.Slicer(image_width = 800,
                                      label_slices = False),
                           iterfield=["in_file"],
                           name="confslice")
    confslice.inputs.sample_axial = 2