How to use the nipype.pipeline.engine module in nipype

To help you get started, we've selected a few examples of nipype.pipeline.engine (imported as pe throughout), based on popular ways it is used in public projects.
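Before diving in, here is a minimal self-contained workflow showing the core pattern all the examples below build on (the node names and the double function are ours, for illustration):

import nipype.pipeline.engine as pe
import nipype.interfaces.utility as niu

def double(x):
    return 2 * x

# Wrap a plain Python function as a Node, then chain two copies in a Workflow
node_a = pe.Node(niu.Function(input_names=['x'], output_names=['out'],
                              function=double), name='node_a')
node_a.inputs.x = 21
node_b = pe.Node(niu.Function(input_names=['x'], output_names=['out'],
                              function=double), name='node_b')

wf = pe.Workflow(name='demo')
wf.connect(node_a, 'out', node_b, 'x')  # feed node_a's output into node_b
wf.run()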


From nipy/nipype: examples/bedpostx.py (view on GitHub)
def create_bedpostx_pipeline(name="bedpostx"):
    inputnode = pe.Node(interface=util.IdentityInterface(fields=["dwi", "mask"]),
                        name="inputnode")

    # Mask the DWI, then split the masked DWI and the mask along z
    mask_dwi = pe.Node(interface=fsl.ImageMaths(op_string="-mas"),
                       name="mask_dwi")
    slice_dwi = pe.Node(interface=fsl.Split(dimension="z"), name="slice_dwi")
    slice_mask = pe.Node(interface=fsl.Split(dimension="z"),
                         name="slice_mask")

    preproc = pe.Workflow(name="preproc")

    preproc.connect([(inputnode, mask_dwi, [('dwi', 'in_file')]),
                     (inputnode, mask_dwi, [('mask', 'in_file2')]),
                     (mask_dwi, slice_dwi, [('out_file', 'in_file')]),
                     (inputnode, slice_mask, [('mask', 'in_file')])
                     ])
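The excerpt stops before the function returns; assuming it ultimately returns the assembled workflow, driving it from a script would look roughly like this (file names hypothetical):

preproc = create_bedpostx_pipeline(name="bedpostx")
preproc.inputs.inputnode.dwi = "dwi.nii.gz"    # hypothetical input file
preproc.inputs.inputnode.mask = "mask.nii.gz"  # hypothetical input file
preproc.run()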
From nipy/nipype: nipype/workflows/dmri/camino/group_connectivity.py (view on GitHub)
def create_group_cff_pipeline_part3(group_list, data_dir, subjects_dir, output_dir, title='group'):
    """
    Next, the groups are combined in the third-level pipeline.
    """
    l3infosource = pe.Node(interface=util.IdentityInterface(fields=['group_id']), name='l3infosource')
    # list() keeps this working on Python 3, where .keys() returns a view
    l3infosource.inputs.group_id = list(group_list.keys())

    l3source = pe.Node(nio.DataGrabber(infields=['group_id'], outfields=['CFFfiles']), name='l3source')
    l3source.inputs.template_args = dict(CFFfiles=[['group_id', 'group_id']])
    l3source.inputs.template = op.join(output_dir, '%s/%s.cff')

    l3inputnode = pe.Node(interface=util.IdentityInterface(fields=['Group_CFFs']), name='l3inputnode')

    MergeCNetworks_grp = pe.Node(interface=cmtk.MergeCNetworks(), name="MergeCNetworks_grp")
    MergeCNetworks_grp.inputs.out_file = title

    l3datasink = pe.Node(interface=nio.DataSink(), name="l3datasink")
    l3datasink.inputs.base_directory = output_dir

    l3pipeline = pe.Workflow(name="l3output")
    l3pipeline.base_dir = output_dir
    l3pipeline.connect([
        (l3infosource, l3source, [('group_id', 'group_id')]),
        (l3source, l3inputnode, [('CFFfiles', 'Group_CFFs')]),
    ])

    l3pipeline.connect([(l3inputnode, MergeCNetworks_grp, [('Group_CFFs', 'in_files')])])
    l3pipeline.connect([(MergeCNetworks_grp, l3datasink, [('connectome_file', '@l3output')])])
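Once a workflow like this is assembled, the usual next steps are to render its graph and execute it; both are standard pe.Workflow methods:

l3pipeline.write_graph(graph2use='flat', format='png')  # save a dependency-graph image
l3pipeline.run()  # execute with the default serial plugin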
From FCP-INDI/C-PAC: CPAC/image_utils/spatial_smoothing.py (view on GitHub)
def spatial_smooth(workflow, func_key, mask_key, output_name,
                   strat, num_strat, pipeline_config_object,
                   input_image_type='func_derivative'):

    image_types = ['func_derivative', 'func_derivative_multi',
                   'func_4d', 'func_mask']

    if input_image_type not in image_types:
        raise ValueError('Input image type {0} should be one of {1}'.format(
            input_image_type, ', '.join(image_types)))

    # Multi-file derivatives are smoothed with a MapNode (one run per file);
    # everything else uses a plain Node
    if input_image_type == 'func_derivative_multi':
        output_smooth = pe.MapNode(interface=fsl.MultiImageMaths(),
                                   name='{0}_multi_{1}'.format(output_name, num_strat),
                                   iterfield=['in_file'])
    else:
        output_smooth = pe.Node(interface=fsl.MultiImageMaths(),
                                name='{0}_{1}'.format(output_name, num_strat))

    if isinstance(func_key, str):

        if func_key == 'leaf':
            func_node, func_file = strat.get_leaf_properties()
        else:
            try:
                func_node, func_file = strat[func_key]
            except KeyError:
                # Re-raise so func_node/func_file are never used unbound
                raise KeyError('Could not find func_key {0} in resource pool'.format(func_key))

    elif isinstance(func_key, tuple):
        func_node, func_file = func_key
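The Node/MapNode split above is the key pattern here: a MapNode runs its interface once per element of each iterfield input, while a plain Node runs once. A minimal sketch of the MapNode case (the op_string and file names are illustrative, not C-PAC's actual settings):

# One MultiImageMaths run per in_file; outputs come back as a matched list
smooth_many = pe.MapNode(interface=fsl.MultiImageMaths(op_string='-s 3.4 -mas %s'),
                         name='smooth_many', iterfield=['in_file'])
smooth_many.inputs.in_file = ['a.nii.gz', 'b.nii.gz']  # hypothetical files
smooth_many.inputs.operand_files = ['mask.nii.gz']     # fills the %s in op_string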
From poldracklab/niworkflows: niworkflows/anat/ants.py (view on GitHub)
        Calculated brain mask
    bias_corrected : str
        The ``in_files`` input images, after :abbr:`INU (intensity non-uniformity)`
        correction, before skull-stripping.
    bias_image : str
        The :abbr:`INU (intensity non-uniformity)` field estimated for each
        input in ``in_files``
    out_segm : str
        Output segmentation by ATROPOS
    out_tpms : str
        Output :abbr:`TPMs (tissue probability maps)` by ATROPOS

    """
    from templateflow.api import get as get_template

    wf = pe.Workflow(name)

    template_spec = template_spec or {}

    # suffix passed via spec takes precedence
    template_spec["suffix"] = template_spec.get("suffix", bids_suffix)

    tpl_target_path, common_spec = get_template_specs(
        in_template, template_spec=template_spec
    )

    # Get probabilistic brain mask if available
    tpl_mask_path = get_template(
        in_template, label="brain", suffix="probseg", **common_spec
    ) or get_template(in_template, desc="brain", suffix="mask", **common_spec)

    if omp_nthreads is None or omp_nthreads < 1:
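        # The excerpt stops inside this guard; a plausible completion (an
        # assumption, not the verbatim upstream code) falls back to using
        # every available CPU core:
        from multiprocessing import cpu_count
        omp_nthreads = cpu_count()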
From APPIAN-PET/APPIAN: nipype.bk/interfaces/minc/conversion.py (view on GitHub)
def ecattomincWorkflow(name):
    workflow = pe.Workflow(name=name)
    # Define input node that will receive input from outside of the workflow
    inputNode = pe.Node(niu.IdentityInterface(fields=["in_file", "header"]), name='inputNode')
    conversionNode = pe.Node(interface=ecattomincCommand(), name="conversionNode")
    # mincConversionNode, paramNode and resampleNode are instantiated but not
    # connected in the active wiring; the commented-out connections below show
    # the alternative resampling pipeline they belong to
    mincConversionNode = pe.Node(interface=mincconvertCommand(), name="mincConversionNode")
    fixHeaderNode = pe.Node(interface=FixHeaderCommand(), name="fixHeaderNode")
    paramNode = pe.Node(interface=param2xfmCommand(), name="param2xfmNode")
    paramNode.inputs.rotation = "0 180 0"
    resampleNode = pe.Node(interface=ResampleCommand(), name="resampleNode")
    resampleNode.inputs.use_input_sampling = True
    outputNode = pe.Node(niu.IdentityInterface(fields=["out_file"]), name='outputNode')

    # Active wiring: ECAT -> MINC conversion, then header fix
    workflow.connect(inputNode, 'in_file', conversionNode, 'in_file')
    workflow.connect(conversionNode, 'out_file', fixHeaderNode, 'in_file')
    workflow.connect(inputNode, 'header', fixHeaderNode, 'header')
    workflow.connect(fixHeaderNode, 'out_file', outputNode, 'out_file')

    # Alternative wiring, kept for reference:
    # workflow.connect(inputNode, 'in_file', conversionNode, 'in_file')
    # workflow.connect(conversionNode, 'out_file', outputNode, 'out_file')
    # workflow.connect(inputNode, 'header', fixHeaderNode, 'header')
    # workflow.connect(fixHeaderNode, 'out_file', resampleNode, 'in_file')
    # workflow.connect(paramNode, 'out_file', resampleNode, 'transformation')
    # workflow.connect(resampleNode, 'out_file', outputNode, 'out_file')

    return workflow
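Using the factory is then a matter of wiring the exposed input node and running (a sketch; the file names are hypothetical and the header format depends on APPIAN's conventions):

wf = ecattomincWorkflow("ecat2minc")
wf.inputs.inputNode.in_file = "scan.v"      # hypothetical ECAT file
wf.inputs.inputNode.header = "scan.header"  # hypothetical header file
wf.run()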
From nipy/nipype: examples/smri_freesurfer.py (view on GitHub)
"""
from __future__ import unicode_literals

import os

import nipype.pipeline.engine as pe
import nipype.interfaces.io as nio
from nipype.interfaces.freesurfer.preprocess import ReconAll
from nipype.interfaces.freesurfer.utils import MakeAverageSubject


subject_list = ['s1', 's3']
data_dir = os.path.abspath('data')
subjects_dir = os.path.abspath('amri_freesurfer_tutorial/subjects_dir')

wf = pe.Workflow(name="l1workflow")
wf.base_dir = os.path.abspath('amri_freesurfer_tutorial/workdir')

"""
Grab data
"""

datasource = pe.MapNode(interface=nio.DataGrabber(infields=['subject_id'],
                                                  outfields=['struct']),
                        name='datasource',
                        iterfield=['subject_id'])
datasource.inputs.base_directory = data_dir
datasource.inputs.template = '%s/%s.nii'
datasource.inputs.template_args = dict(struct=[['subject_id', 'struct']])
datasource.inputs.subject_id = subject_list
datasource.inputs.sort_filelist = True
"""
From dPys/PyNets: pynets/workflows.py (view on GitHub)
def functional_connectometry(func_file, ID, atlas_select, network, node_size, roi, thr, uatlas_select, conn_model,
                             dens_thresh, conf, plot_switch, parc, ref_txt, procmem, multi_thr,
                             multi_atlas, max_thr, min_thr, step_thr, k, clust_mask, k_min, k_max, k_step,
                             k_clustering, user_atlas_list, clust_mask_list, node_size_list, conn_model_list,
                             min_span_tree, use_AAL_naming, smooth, smooth_list, disp_filt, prune, multi_nets,
                             clust_type, clust_type_list, plugin_type, c_boot, block_size, mask):
    import os
    from nipype.pipeline import engine as pe
    from nipype.interfaces import utility as niu
    from pynets import nodemaker, utils, estimation, plotting, thresholding, clustools

    import_list = ["import sys", "import os", "import numpy as np", "import networkx as nx", "import nibabel as nib"]
    functional_connectometry_wf = pe.Workflow(name="%s%s" % ('functional_connectometry_', ID))
    # Create input/output nodes
    inputnode = pe.Node(niu.IdentityInterface(fields=['func_file', 'ID', 'atlas_select', 'network',
                                                      'node_size', 'roi', 'thr',
                                                      'uatlas_select', 'multi_nets',
                                                      'conn_model', 'dens_thresh',
                                                      'conf', 'plot_switch', 'parc', 'ref_txt',
                                                      'procmem', 'k', 'clust_mask', 'k_min', 'k_max',
                                                      'k_step', 'k_clustering', 'user_atlas_list',
                                                      'min_span_tree', 'use_AAL_naming', 'smooth',
                                                      'disp_filt', 'prune', 'clust_type',
                                                      'c_boot', 'block_size', 'mask']), name='inputnode')

    inputnode.inputs.func_file = func_file
    inputnode.inputs.ID = ID
    inputnode.inputs.atlas_select = atlas_select
    inputnode.inputs.network = network
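The plugin_type parameter hints at how the assembled workflow is eventually executed; a one-line sketch:

functional_connectometry_wf.run(plugin=plugin_type)  # e.g. plugin_type='MultiProc'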
From nipy/nipype: nipype/workflows/smri/freesurfer/bem.py (view on GitHub)
'subjects_dir']),
                        name='inputspec')

    """
    Define all the nodes of the workflow:

      fssource: used to retrieve aseg.mgz
      mri_convert : converts aseg.mgz to aseg.nii
      tessellate : tessellates regions in aseg.mgz
      surfconvert : converts regions to stereolithographic (.stl) format

    """

    watershed_bem = pe.Node(interface=mne.WatershedBEM(), name='WatershedBEM')

    surfconvert = pe.MapNode(fs.MRIsConvert(out_datatype=out_format),
                             iterfield=['in_file'],
                             name='surfconvert')

    """
    Connect the nodes
    """

    bemflow.connect([
        (inputnode, watershed_bem, [('subject_id', 'subject_id'),
                                    ('subjects_dir', 'subjects_dir')]),
        (watershed_bem, surfconvert, [('mesh_files', 'in_file')]),
    ])

    """
    Set up an outputnode that exposes the relevant outputs of the workflow.
    """
From nipreps/dmriprep: dmriprep/workflows/dwi/outputs.py (view on GitHub)
def init_reportlets_wf(output_dir, name='reportlets_wf'):
    """Set up a battery of datasinks to store reports in the right location."""
    from niworkflows.interfaces.masks import SimpleShowMaskRPT
    workflow = Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['source_file', 'dwi_ref', 'dwi_mask',
                'validation_report']),
        name='inputnode')
    mask_reportlet = pe.Node(SimpleShowMaskRPT(), name='mask_reportlet')

    ds_report_mask = pe.Node(
        DerivativesDataSink(base_directory=output_dir, desc='brain', suffix='mask',
                            datatype="figures"),
        name='ds_report_mask', run_without_submitting=True)
    ds_report_validation = pe.Node(
        DerivativesDataSink(base_directory=output_dir, desc='validation', datatype="figures"),
        name='ds_report_validation', run_without_submitting=True)

    workflow.connect([
        (inputnode, mask_reportlet, [('dwi_ref', 'background_file'),
                                     ('dwi_mask', 'mask_file')]),
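        # The excerpt ends mid-statement; the remaining connections presumably
        # wire the reportlets into their datasinks, roughly as follows
        # (a sketch, not the verbatim upstream code):
        (inputnode, ds_report_mask, [('source_file', 'source_file')]),
        (mask_reportlet, ds_report_mask, [('out_report', 'in_file')]),
        (inputnode, ds_report_validation, [('source_file', 'source_file'),
                                           ('validation_report', 'in_file')]),
    ])
    return workflow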
From FCP-INDI/C-PAC: CPAC/timeseries/timeseries_analysis.py (view on GitHub)
>>> wf.run()
    
    """

    wflow = pe.Workflow(name=wf_name)

    inputNode = pe.Node(util.IdentityInterface(fields=['rest',
                                                       'output_type']),
                        name='inputspec')
    inputNode_mask = pe.Node(util.IdentityInterface(fields=['mask']),
                             name='input_mask')

    outputNode = pe.Node(util.IdentityInterface(fields=['mask_outputs']),
                         name='outputspec')

    # Function node wrapping gen_voxel_timeseries
    timeseries_voxel = pe.Node(util.Function(input_names=['data_file',
                                                          'template',
                                                          'output_type'],
                                             output_names=['out_file'],
                                             function=gen_voxel_timeseries),
                               name='timeseries_voxel')

    wflow.connect(inputNode, 'rest',
                  timeseries_voxel, 'data_file')
    wflow.connect(inputNode, 'output_type',
                  timeseries_voxel, 'output_type')
    wflow.connect(inputNode_mask, 'mask',
                  timeseries_voxel, 'template')

    wflow.connect(timeseries_voxel, 'out_file',
                  outputNode, 'mask_outputs')
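Assuming the truncated factory ends with return wflow, calling code would drive it roughly like this (the factory name get_voxel_timeseries and all file paths are hypothetical):

ts_wf = get_voxel_timeseries(wf_name='voxel_timeseries')
ts_wf.inputs.inputspec.rest = 'rest.nii.gz'   # hypothetical 4D functional file
ts_wf.inputs.input_mask.mask = 'mask.nii.gz'  # hypothetical mask/template
ts_wf.run()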