How to use the nipype.interfaces.fsl module in nipype

To help you get started, we’ve selected a few nipype.interfaces.fsl examples, based on popular ways the module is used in public projects.

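Before the project snippets below, here is a minimal, self-contained sketch of the pattern they all share: wrap an FSL command as a Nipype interface, put it in a pe.Node, and connect nodes inside a pe.Workflow. The input file name, node names, and base_dir are illustrative assumptions, not taken from any of the projects.

import nipype.pipeline.engine as pe
from nipype.interfaces import fsl

# Write compressed NIfTI outputs by default
fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

# Brain extraction: runs FSL's `bet` with a fractional intensity threshold of 0.5
skullstrip = pe.Node(fsl.BET(frac=0.5, mask=True), name='skullstrip')
skullstrip.inputs.in_file = 'sub-01_T1w.nii.gz'  # hypothetical input file

# Isotropic spatial smoothing with a 6 mm FWHM kernel
smooth = pe.Node(fsl.IsotropicSmooth(fwhm=6.0), name='smooth')

# Wire the two nodes together and execute the workflow
wf = pe.Workflow(name='minimal_fsl_preproc', base_dir='/tmp/nipype_work')
wf.connect(skullstrip, 'out_file', smooth, 'in_file')
wf.run()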

github aramis-lab / clinica / clinica / workflows / dwi_preprocessing.py
    from nipype.workflows.dmri.fsl.utils import _checkinitxfm

    from nipype.workflows.dmri.fsl.utils import enhance

    inputnode = pe.Node(niu.IdentityInterface(fields=['reference',
                        'in_file', 'ref_mask', 'in_xfms', 'in_bval']),
                        name='inputnode')

    initmat = pe.Node(niu.Function(input_names=['in_bval', 'in_xfms',
                      'excl_nodiff'], output_names=['init_xfms'],
                                   function=_checkinitxfm), name='InitXforms')
    initmat.inputs.excl_nodiff = excl_nodiff
    dilate = pe.Node(fsl.maths.MathsCommand(nan2zeros=True,
                     args='-kernel sphere 5 -dilM'), name='MskDilate')
    split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs')
    n4 = pe.Node(ants.N4BiasFieldCorrection(dimension=3), name='Bias')
    flirt = pe.MapNode(fsl.FLIRT(**flirt_param), name='CoRegistration',
                       iterfield=['in_file', 'in_matrix_file'])
    thres = pe.MapNode(fsl.Threshold(thresh=0.0), iterfield=['in_file'],
                       name='RemoveNegative')
    merge = pe.Node(fsl.Merge(dimension='t'), name='MergeDWIs')
    outputnode = pe.Node(niu.IdentityInterface(fields=['out_file',
                         'out_xfms', 'out_ref']), name='outputnode')
    enhb0 = pe.Node(niu.Function(
        input_names=['in_file', 'in_mask', 'clip_limit'],
        output_names=['out_file'], function=enhance), name='B0Equalize')
    enhb0.inputs.clip_limit = 0.015
    enhdw = pe.MapNode(niu.Function(
        input_names=['in_file', 'in_mask'], output_names=['out_file'],
        function=enhance), name='DWEqualize', iterfield=['in_file'])
    # enhb0.inputs.clip_limit = clip_limit
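
The snippet above stops at node construction. As a hedged illustration of how such nodes are typically assembled, the wiring below shows one plausible set of connections; it is a sketch, not the original dwi_preprocessing.py graph.

    wf = pe.Workflow(name='dwi_coregistration')
    wf.connect([
        (inputnode, split,   [('in_file', 'in_file')]),        # split the 4D DWI into 3D volumes
        (inputnode, initmat, [('in_bval', 'in_bval'),
                              ('in_xfms', 'in_xfms')]),         # seed FLIRT with initial matrices
        (inputnode, flirt,   [('reference', 'reference')]),
        (split,     flirt,   [('out_files', 'in_file')]),
        (initmat,   flirt,   [('init_xfms', 'in_matrix_file')]),
        (flirt,     thres,   [('out_file', 'in_file')]),        # clip negative interpolation values
        (thres,     merge,   [('out_file', 'in_files')]),       # reassemble a 4D series
        (merge,     outputnode, [('merged_file', 'out_file')]),
    ])
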
github nipy / nipype / nipype / workflows / dmri / fsl / epi.py
[('merged_file',
                                                         'epi_corrected')])])

    if fieldmap_registration:
        """ Register magfw to example epi. There are some parameters here that may need to be tweaked. Should probably strip the mag
            Pre-condition: forward warp the mag in order to reg with func. What does mask do here?
        """
        # Select reference volume from EPI (B0 in dMRI and a middle frame in
        # fMRI)
        select_epi = pe.Node(fsl.utils.ExtractROI(t_size=1), name='select_epi')

        # fugue -i %s -w %s --loadshift=%s --mask=%s % ( mag_name, magfw_name,
        # vsmmag_name, mask_name ), log ) # Forward Map
        vsm_fwd = pe.Node(fsl.FUGUE(forward_warping=True), name='vsm_fwd')
        vsm_reg = pe.Node(
            fsl.FLIRT(
                bins=256,
                cost='corratio',
                dof=6,
                interp='spline',
                searchr_x=[-10, 10],
                searchr_y=[-10, 10],
                searchr_z=[-10, 10]),
            name='vsm_registration')
        # 'flirt -in %s -ref %s -out %s -init %s -applyxfm' % ( vsmmag_name, ref_epi, vsmmag_name, magfw_mat_out )
        vsm_applyxfm = pe.Node(
            fsl.ApplyXfm(interp='spline'), name='vsm_apply_xfm')
        # 'flirt -in %s -ref %s -out %s -init %s -applyxfm' % ( mask_name, ref_epi, mask_name, magfw_mat_out )
        msk_applyxfm = pe.Node(
            fsl.ApplyXfm(interp='nearestneighbour'), name='msk_apply_xfm')

        pipeline.connect(
github nipy / nipype / _downloads / fmri_ants_openfmri.py
import nipype.interfaces.utility as niu
from nipype.workflows.fmri.fsl import (create_featreg_preproc,
                                       create_modelfit_workflow,
                                       create_fixed_effects_flow)

from nipype import LooseVersion
from nipype import Workflow, Node, MapNode
from nipype.interfaces import (fsl, Function, ants, freesurfer)

from nipype.interfaces.utility import Rename, Merge, IdentityInterface
from nipype.utils.filemanip import filename_to_list
from nipype.interfaces.io import DataSink, FreeSurferSource
import nipype.interfaces.freesurfer as fs

version = 0
if fsl.Info.version() and \
    LooseVersion(fsl.Info.version()) > LooseVersion('5.0.6'):
    version = 507

fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

imports = ['import os',
           'import nibabel as nb',
           'import numpy as np',
           'import scipy as sp',
           'from nipype.utils.filemanip import filename_to_list, list_to_filename, split_filename',
           'from scipy.special import legendre'
           ]
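
The strings above are handed to nipype Function nodes through their imports argument (as done later in this script), where each import statement is executed before the wrapped function runs. A hedged sketch of that pattern, reusing the Node and Function names imported above; the compute_tsnr function and node name are illustrative, not from this script.

def compute_tsnr(in_file):
    # `os`, `nb`, and `np` are provided at runtime by the `imports` list,
    # not by this module's namespace
    img = nb.load(in_file)
    data = img.get_fdata()
    tsnr = np.nan_to_num(data.mean(axis=-1) / data.std(axis=-1))
    out_file = os.path.abspath('tsnr.nii.gz')
    nb.Nifti1Image(tsnr, img.affine, img.header).to_filename(out_file)
    return out_file

tsnr = Node(Function(input_names=['in_file'],
                     output_names=['out_file'],
                     function=compute_tsnr,
                     imports=imports),
            name='tsnr')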

def median(in_files):
    """Computes an average of the median of each realigned timeseries
github nipy / nipype / old-docs / 0.10.0 / _downloads / rsfmri_vol_surface_preprocessing.py
'num_components',
                                                  'extra_regressors'],
                                     output_names=['out_files'],
                                     function=extract_noise_components,
                                     imports=imports),
                            iterfield=['realigned_file', 'extra_regressors'],
                            name='makecompcorrfilter')
    createfilter2.inputs.num_components = num_components

    wf.connect(createfilter1, 'out_files', createfilter2, 'extra_regressors')
    wf.connect(filter1, 'out_res', createfilter2, 'realigned_file')
    wf.connect(registration, ('outputspec.segmentation_files', selectindex, [0, 2]),
               createfilter2, 'mask_file')


    filter2 = MapNode(fsl.GLM(out_f_name='F.nii',
                              out_pf_name='pF.nii',
                              demean=True),
                      iterfield=['in_file', 'design', 'out_res_name'],
                      name='filter_noise_nosmooth')
    wf.connect(filter1, 'out_res', filter2, 'in_file')
    wf.connect(filter1, ('out_res', rename, '_cleaned'),
               filter2, 'out_res_name')
    wf.connect(createfilter2, 'out_files', filter2, 'design')
    wf.connect(mask, 'mask_file', filter2, 'mask')

    bandpass = Node(Function(input_names=['files', 'lowpass_freq',
                                           'highpass_freq', 'fs'],
                              output_names=['out_files'],
                              function=bandpass_filter,
                              imports=imports),
                     name='bandpass_unsmooth')
github mwaskom / lyman / lyman / workflows / archive / restingstate.py
    # Extract the mean signal from the whole brain
    globaltcourse = pe.MapNode(fs.SegStats(exclude_id=0, avgwf_txt_file=True),
                               iterfield=["segmentation_file", "in_file"],
                               name="globaltcourse")

    # Build the confound design matrix
    conf_inputs = ["motion_params", "global_waveform", "wm_waveform", "csf_waveform"]
    confmatrix = pe.MapNode(util.Function(input_names=conf_inputs,
                                          output_names=["confound_matrix"],
                                          function=make_confound_matrix),
                           iterfield=conf_inputs,
                           name="confmatrix")

    # Regress the confounds out of the timeseries
    confregress = pe.MapNode(fsl.FilterRegressor(filter_all=True),
                             iterfield=["in_file", "design_file", "mask"],
                             name="confregress")

    # Rename the confound mask png
    renamepng = pe.MapNode(util.Rename(format_string="confound_sources.png"),
                           iterfield=["in_file"],
                           name="renamepng")

    # Define the outputs
    outputnode = pe.Node(util.IdentityInterface(fields=["timeseries",
                                                        "confound_sources"]),
                         name="outputs")

    # Define and connect the confound workflow
    confound = pe.Workflow(name=workflow_name)
github aramis-lab / clinica / clinica / pipelines / dwi_connectome / dwi_connectome_pipeline.py
dof=6, interp='spline', cost='normmi',
                                      cost_func='normmi',
                                  ))
        t12b0_reg_node.inputs.output_type = "NIFTI_GZ"

        # MGZ File Conversion (only if space=b0)
        # -------------------
        t1_brain_conv_node = npe.Node(name="Reg-0-T1-T1BrainConvertion",
                                      interface=fs.MRIConvert())
        wm_mask_conv_node = npe.Node(name="Reg-0-T1-WMMaskConvertion",
                                     interface=fs.MRIConvert())

        # WM Transformation (only if space=b0)
        # -----------------
        wm_transform_node = npe.Node(name="Reg-2-WMTransformation",
                                     interface=fsl.ApplyXFM())
        wm_transform_node.inputs.apply_xfm = True

        # Nodes Generation
        # ----------------
        label_convert_node = npe.MapNode(name="0-LabelsConversion",
                                         iterfield=['in_file', 'in_config',
                                                    'in_lut', 'out_file'],
                                         interface=mrtrix3.LabelConvert())
        label_convert_node.inputs.in_config = utils.get_conversion_luts()
        label_convert_node.inputs.in_lut = utils.get_luts()

        # FSL flirt matrix to MRtrix matrix Conversion (only if space=b0)
        # --------------------------------------------
        fsl2mrtrix_conv_node = npe.Node(
            name='Reg-2-FSL2MrtrixConversion',
            interface=niu.Function(
github IBT-FMI / SAMRI / samri / pipelines / preprocessing.py
from os import path

import nipype.interfaces.utility as util		# utility
import nipype.pipeline.engine as pe				# pypeline engine
import pandas as pd
from nipype.interfaces import afni, bru2nii, fsl, nipy

from nodes import *
from utils import ss_to_path, sss_filename, fslmaths_invert_values
from utils import STIM_PROTOCOL_DICTIONARY
from samri.utilities import N_PROCS

DUMMY_SCANS=10
N_PROCS=max(N_PROCS-4, 2)

#set all outputs to compressed NIfTI
afni.base.AFNICommand.set_default_output_type('NIFTI_GZ')
fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

#relative paths
thisscriptspath = path.dirname(path.realpath(__file__))
scan_classification_file_path = path.join(thisscriptspath,"scan_type_classification.csv")

def bruker(measurements_base,
	functional_scan_types=[],
	structural_scan_types=[],
	sessions=[],
	subjects=[],
	measurements=[],
	exclude_subjects=[],
	exclude_measurements=[],
	actual_size=False,
	functional_blur_xy=False,
	functional_registration_method="structural",
github nipy / nipype / 0.11.0 / _downloads / fmri_openfmri.py
'flameo.mask_file')]),
                (modelfit, fixed_fx, [(('outputspec.copes', sort_copes),
                                       'inputspec.copes'),
                                       ('outputspec.dof_file',
                                        'inputspec.dof_files'),
                                       (('outputspec.varcopes',
                                         sort_copes),
                                        'inputspec.varcopes'),
                                       (('outputspec.copes', num_copes),
                                        'l2model.num_copes'),
                                       ])
                ])

    wf.connect(preproc, 'outputspec.mean', registration, 'inputspec.mean_image')
    wf.connect(datasource, 'anat', registration, 'inputspec.anatomical_image')
    registration.inputs.inputspec.target_image = fsl.Info.standard_image('MNI152_T1_2mm.nii.gz')
    registration.inputs.inputspec.target_image_brain = fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')
    registration.inputs.inputspec.config_file = 'T1_2_MNI152_2mm'

    def merge_files(copes, varcopes, zstats):
        out_files = []
        splits = []
        out_files.extend(copes)
        splits.append(len(copes))
        out_files.extend(varcopes)
        splits.append(len(varcopes))
        out_files.extend(zstats)
        splits.append(len(zstats))
        return out_files, splits

    mergefunc = pe.Node(niu.Function(input_names=['copes', 'varcopes',
                                                  'zstats'],
github mwaskom / lyman / lyman / workflows / preproc.py
"in_file", "combine_premats")

    combine_postmats = Node(fsl.ConvertXFM(concat_xfm=True),
                            "combine_postmats")

    # Transform Jacobian images into the template space
    transform_jacobian = Node(fsl.ApplyWarp(relwarp=True),
                              "transform_jacobian")

    # Apply rigid transforms and nonlinear warpfield to time series frames
    restore_timeseries = MapNode(fsl.ApplyWarp(interp="spline", relwarp=True),
                                 ["in_file", "premat"],
                                 "restore_timeseries")

    # Apply rigid transforms and nonlinear warpfield to template frames
    restore_template = MapNode(fsl.ApplyWarp(interp="spline", relwarp=True),
                               ["in_file", "premat", "field_file"],
                               "restore_template")

    # Perform final preprocessing operations on timeseries
    finalize_timeseries = Node(FinalizeTimeseries(experiment=experiment),
                               "finalize_timeseries")

    # Perform final preprocessing operations on template
    finalize_template = JoinNode(FinalizeTemplate(experiment=experiment),
                                 name="finalize_template",
                                 joinsource="run_source",
                                 joinfield=["mean_files", "tsnr_files",
                                            "mask_files", "noise_files"])

    # --- Workflow output
github nipy / nipype / examples / fmri_spm_auditory.py
"""

preproc = pe.Workflow(name='preproc')
"""We strongly encourage to use 4D files insteead of series of 3D for fMRI analyses
for many reasons (cleanness and saving and filesystem inodes are among them). However,
the the workflow presented in the SPM8 manual which this tutorial is based on
uses 3D files. Therefore we leave converting to 4D as an option. We are using `merge_to_4d`
variable, because switching between 3d and 4d requires some additional steps (explauned later on).
Use :class:`nipype.interfaces.fsl.Merge` to merge a series of 3D files along the time
dimension creating a 4d file.
"""

merge_to_4d = True

if merge_to_4d:
    merge = pe.Node(interface=fsl.Merge(), name="merge")
    merge.inputs.dimension = "t"
"""Use :class:`nipype.interfaces.spm.Realign` for motion correction
and register all images to the mean image.
"""

realign = pe.Node(interface=spm.Realign(), name="realign")
"""Use :class:`nipype.interfaces.spm.Coregister` to perform a rigid
body registration of the functional data to the structural data.
"""

coregister = pe.Node(interface=spm.Coregister(), name="coregister")
coregister.inputs.jobtype = 'estimate'

segment = pe.Node(interface=spm.Segment(), name="segment")
"""Uncomment the following line for faster execution
"""