"""
Remove the plotting connection so that plot iterables don't propagate
to the model stage
"""
preproc.disconnect(preproc.get_node('plot_motion'), 'out_file',
                   preproc.get_node('outputspec'), 'motion_plots')
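# ``Workflow.disconnect`` takes the same arguments as the ``connect`` call it
# undoes, so the line above presumably mirrors an earlier connection (not shown
# in this excerpt) of the form:
#
#     preproc.connect(preproc.get_node('plot_motion'), 'out_file',
#                     preproc.get_node('outputspec'), 'motion_plots')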
"""
Set up openfmri data specific components
"""
subjects = sorted([path.split(os.path.sep)[-1] for path in
                   glob(os.path.join(data_dir, subj_prefix))])
infosource = pe.Node(niu.IdentityInterface(fields=['subject_id',
                                                   'model_id',
                                                   'task_id']),
                     name='infosource')
if len(subject) == 0:
    infosource.iterables = [('subject_id', subjects),
                            ('model_id', [model_id]),
                            ('task_id', task_id)]
else:
    infosource.iterables = [('subject_id',
                             [subjects[subjects.index(subj)] for subj in subject]),
                            ('model_id', [model_id]),
                            ('task_id', task_id)]
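# ``iterables`` makes nipype clone everything downstream of infosource once per
# parameter combination; for instance (illustrative values only),
#
#     infosource.iterables = [('subject_id', ['sub001', 'sub002']),
#                             ('model_id', [1]),
#                             ('task_id', [1])]
#
# expands the graph into one branch per subject.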
subjinfo = pe.Node(niu.Function(input_names=['subject_id', 'base_dir',
                                             'task_id', 'model_id'],
                                output_names=['run_id', 'conds', 'TR'],
                                function=get_subjectinfo),
                   name='subjectinfo')
# (the excerpt was cut here; ``get_subjectinfo`` is the helper defined earlier
# in the openfmri example that this Function node wraps)
"""
You can get the data from:
http://fcon_1000.projects.nitrc.org/indi/pro/eNKI_RS_TRT/FrontPage.html
"""
dataDir = os.path.abspath('nki_rs_data')
workingdir = './tbss_example'
subjects_list = [
    '2475376', '3313349', '3808535', '3893245', '8735778', '9630905'
]
gen_fa = pe.Workflow(name="gen_fa")
gen_fa.base_dir = os.path.join(os.path.abspath(workingdir), 'l1')
subject_id_infosource = pe.Node(
    util.IdentityInterface(fields=['subject_id']),
    name='subject_id_infosource')
subject_id_infosource.iterables = ('subject_id', subjects_list)
datasource = pe.Node(
    interface=nio.DataGrabber(
        infields=['subject_id'], outfields=['dwi', 'bvec', 'bval']),
    name='datasource')
datasource.inputs.base_directory = os.path.abspath(dataDir)
datasource.inputs.template = '%s/session2/DTI_mx_137/dti.%s'
datasource.inputs.template_args = dict(
    dwi=[['subject_id', 'nii.gz']],
    bvec=[['subject_id', 'bvec']],
    bval=[['subject_id', 'bval']])
datasource.inputs.sort_filelist = True
gen_fa.connect(subject_id_infosource, 'subject_id', datasource, 'subject_id')
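# Illustration: DataGrabber fills ``template`` with each outfield's
# ``template_args`` (plain %-formatting) and resolves the result under
# ``base_directory``, so for the first subject the dwi lookup is:
example_dwi = datasource.inputs.template % ('2475376', 'nii.gz')
# -> '2475376/session2/DTI_mx_137/dti.nii.gz', searched inside nki_rs_data/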
"""
subject_list = ['s1', 's3']
data_dir = os.path.abspath('data')
subjects_dir = os.path.join(tutorial_dir, 'subjects_dir')
if not os.path.exists(subjects_dir):
    os.mkdir(subjects_dir)
wf = pe.Workflow(name="l1workflow")
wf.base_dir = os.path.join(tutorial_dir, 'workdir')
"""
Create inputspec
"""
inputspec = pe.Node(
    interface=IdentityInterface(['subject_id']), name="inputspec")
inputspec.iterables = ("subject_id", subject_list)
"""
Grab data
"""
datasource = pe.Node(
    interface=nio.DataGrabber(infields=['subject_id'], outfields=['struct']),
    name='datasource')
datasource.inputs.base_directory = data_dir
datasource.inputs.template = '%s/%s.nii'
datasource.inputs.template_args = dict(struct=[['subject_id', 'struct']])
datasource.inputs.subject_id = subject_list
datasource.inputs.sort_filelist = True
wf.connect(inputspec, 'subject_id', datasource, 'subject_id')
"""
dataset = config.workflow.inputs.get("T1w", []) \
    + config.workflow.inputs.get("T2w", [])
config.loggers.workflow.info(f"""\
Building anatomical MRIQC workflow for files: {', '.join(dataset)}.""")
# Initialize workflow
workflow = pe.Workflow(name=name)
# Define workflow, inputs and outputs
# 0. Get data
inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']), name='inputnode')
inputnode.iterables = [('in_file', dataset)]
outputnode = pe.Node(niu.IdentityInterface(fields=['out_json']), name='outputnode')
# 1. Reorient anatomical image
to_ras = pe.Node(ConformImage(check_dtype=False), name='conform')
# 2. Skull-stripping (afni)
asw = skullstrip_wf(n4_nthreads=config.nipype.omp_nthreads, unifize=False)
# 3. Head mask
hmsk = headmsk_wf()
# 4. Spatial Normalization, using ANTs
norm = spatial_normalization()
# 5. Air mask (with and without artifacts)
amw = airmsk_wf()
# 6. Brain tissue segmentation
segment = pe.Node(fsl.FAST(segments=True, out_basename='segment'),
                  name='segmentation', mem_gb=5)
# 7. Compute IQMs
iqmswf = compute_iqms()
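# The excerpt stops before the node wiring. As a minimal sketch (only the first
# hop, with field names assumed rather than taken from MRIQC's full graph), the
# iterated input file would feed the reorientation node like so:
workflow.connect([(inputnode, to_ras, [('in_file', 'in_file')])])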
def build_input_node(self):
    """Build and connect an input node to the pipeline."""
    import nipype.interfaces.utility as nutil
    import nipype.pipeline.engine as npe
    read_parameters_node = npe.Node(name="LoadingCLIArguments",
                                    interface=nutil.IdentityInterface(
                                        fields=self.get_input_fields(),
                                        mandatory_inputs=True))
    read_parameters_node.inputs.design_matrix = self.parameters['design_matrix']
    read_parameters_node.inputs.contrast = self.parameters['contrast']
    read_parameters_node.inputs.str_format = self.parameters['str_format']
    read_parameters_node.inputs.group_label = self.parameters['group_label']
    read_parameters_node.inputs.glm_type = self.parameters['glm_type']
    read_parameters_node.inputs.surface_file = self.parameters['custom_file']
    read_parameters_node.inputs.full_width_at_half_maximum = self.parameters['full_width_at_half_maximum']
    read_parameters_node.inputs.threshold_uncorrected_pvalue = self.parameters['threshold_uncorrected_pvalue']
    read_parameters_node.inputs.threshold_corrected_pvalue = self.parameters['threshold_corrected_pvalue']
    read_parameters_node.inputs.cluster_threshold = self.parameters['cluster_threshold']
    read_parameters_node.inputs.feature_label = self.parameters['feature_label']
    self.connect([
        (read_parameters_node, self.input_node, [('design_matrix', 'design_matrix')]),  # noqa
        # ... the remaining parameter connections are elided in this excerpt
    ])
"""
Inputs::

    inputspec.source_files : files (filename or list of filenames to register)
    inputspec.mean_image : reference image to use
    inputspec.anatomical_image : anatomical image to coregister to
    inputspec.target_image : registration target

Outputs::

    outputspec.func2anat_transform : FLIRT transform
    outputspec.anat2target_transform : FLIRT+FNIRT transform
    outputspec.transformed_files : transformed files in target space
    outputspec.transformed_mean : mean image in target space
"""
register = Workflow(name=name)
inputnode = Node(interface=IdentityInterface(fields=['source_files',
                                                     'mean_image',
                                                     'subject_id',
                                                     'subjects_dir',
                                                     'target_image']),
                 name='inputspec')
outputnode = Node(interface=IdentityInterface(fields=['func2anat_transform',
                                                      'out_reg_file',
                                                      'anat2target_transform',
                                                      'transforms',
                                                      'transformed_mean',
                                                      'segmentation_files',
                                                      'anat2target',
                                                      'aparc']),
                  name='outputspec')
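# Callers drive this workflow through the connectors documented in the
# docstring above, e.g. (a sketch; the filenames are placeholders):
#
#     register.inputs.inputspec.source_files = ['run1.nii.gz']
#     register.inputs.inputspec.target_image = 'MNI152_T1_2mm.nii.gz'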
# (the start of this error message was truncated in the excerpt; the first
# assignment below is a paraphrase)
error_message = 'The requested group does not exist, ' \
    + 'please choose another one. Groups that exist ' \
    + 'in your CAPS directory are:\n'
list_groups = listdir(join(abspath(self.caps_directory), 'groups'))
has_one_group = False
for e in list_groups:
    if e.startswith('group-'):
        error_message += e + '\n'
        has_one_group = True
if not has_one_group:
    error_message = error_message + 'No group found! ' \
        + 'Use the t1-volume pipeline if you do not ' \
        + 'have a template yet!'
raise ValueError(error_message)
read_parameters_node = npe.Node(name="LoadingCLIArguments",
                                interface=nutil.IdentityInterface(fields=self.get_input_fields(),
                                                                  mandatory_inputs=True))
image_type = self.parameters['image_type']
pet_type = self.parameters['pet_type']
no_pvc = self.parameters['no_pvc']
all_errors = []
if image_type == 't1':
    try:
        input_image = clinica_file_reader(self.subjects,
                                          self.sessions,
                                          self.caps_directory,
                                          {'pattern': 't1/spm/dartel/group-' + self.parameters['group_id']
                                                      + '/*_T1w_segm-graymatter_space-Ixi549Space_modulated-on_probability.nii.gz',
                                           'description': 'graymatter tissue segmented in T1w MRI in Ixi549 space',
                                           'needed_pipeline': 't1-volume-tissue-segmentation'})
    except ClinicaException as e:
        all_errors.append(e)
def get_workflow(name, preinfosource, datasink, opts):
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=["t1", "pet2mri", "template"]), name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=["t1_mni", "t1mni_tfm", "brainmask"]), name='outputnode')
    # The interface arguments of the next three nodes were lost in this excerpt
    # (pe.Node always needs an interface plus a name). The infosource is rebuilt
    # as the usual IdentityInterface iterable source; ``mnc2nii_interface`` is a
    # hypothetical stand-in for whichever MINC-to-NIfTI converter the original
    # pipeline used.
    infosource = pe.Node(niu.IdentityInterface(fields=['args']), name='infosource')
    infosource.iterables = ('args', valid_args)
    t1_mnc2nii = pe.Node(mnc2nii_interface(), name='t1_mnc2nii')
    workflow.connect(inputnode, 't1', t1_mnc2nii, 'in_file')
    template_mnc2nii = pe.Node(mnc2nii_interface(), name='template_mnc2nii')
    workflow.connect(inputnode, 'template', template_mnc2nii, 'in_file')
reg = Node(Registration(args='--float',
                        collapse_output_transforms=True),
           name='antsRegister')
# (excerpt truncated: the remaining Registration parameters are elided and the
# node name above is an assumption)
mapping.connect([(inputnode_within, cmats_to_csv, [("subject_id", "inputnode.extra_field")])])
mapping.connect([(creatematrix, cmats_to_csv, [("matlab_matrix_files", "inputnode.matlab_matrix_files")])])
mapping.connect([(creatematrix, nfibs_to_csv, [("stats_file", "in_file")])])
mapping.connect([(nfibs_to_csv, merge_nfib_csvs, [("csv_files", "in_files")])])
mapping.connect([(inputnode_within, merge_nfib_csvs, [("subject_id", "extra_field")])])
"""
Create a higher-level workflow
--------------------------------------
Finally, we create another higher-level workflow to connect our mapping workflow with the info and datagrabbing nodes
declared at the beginning. Our tutorial can is now extensible to any arbitrary number of subjects by simply adding
their names to the subject list and their data to the proper folders.
"""
inputnode = pe.Node(interface=util.IdentityInterface(fields=["subject_id", "dwi", "bvecs", "bvals", "subjects_dir"]), name="inputnode")
outputnode = pe.Node(interface=util.IdentityInterface(fields=["fa",
                                                              "struct",
                                                              "tracts",
                                                              "tracks2prob",
                                                              "connectome",
                                                              "nxstatscff",
                                                              "nxmatlab",
                                                              "nxcsv",
                                                              "fiber_csv",
                                                              "cmatrices_csv",
                                                              "nxmergedcsv",
                                                              "cmatrix",
                                                              "networks",
                                                              "filtered_tracts",
                                                              "rois",
                                                              # further output fields elided in this excerpt
                                                              ]),
                     name="outputnode")
try:
    final_template = clinica_group_reader(self.caps_directory,
                                          {'pattern': 'group-' + g_id + '_template.nii*',
                                           'description': 'template file of group ' + g_id,
                                           'needed_pipeline': 't1-volume-create-dartel'})
except ClinicaException as e:
    all_errors.append(e)
if len(all_errors) > 0:
    error_message = 'Clinica faced errors while trying to read files in your BIDS or CAPS directories.\n'
    error_message += 'Please note that you need to provide a template to use this pipeline.\n'
    for msg in all_errors:
        error_message += str(msg)
    raise RuntimeError(error_message)
read_node = npe.Node(name="read_node",
                     interface=nutil.IdentityInterface(fields=['t1w',
                                                               'templates_iter',
                                                               'final_template'],
                                                       mandatory_inputs=True),
                     iterables=[('t1w', t1w_images)],
                     synchronize=True)
read_node.inputs.templates_iter = dartel_iter_templates
read_node.inputs.final_template = final_template
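# ``synchronize=True`` matters once several fields are iterated together: the
# value lists are then consumed pairwise instead of as a Cartesian product,
# e.g. iterables=[('a', [1, 2]), ('b', [10, 20])] yields (1, 10) and (2, 20)
# rather than all four combinations.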
check_volume_location_in_world_coordinate_system(t1w_images, self.bids_directory)
self.connect([
    (read_node, self.input_node, [('t1w', 'input_images')]),
    (read_node, self.input_node, [('templates_iter', 'dartel_iteration_templates')]),
    (read_node, self.input_node, [('final_template', 'dartel_final_template')])
])