How to use mriqc.config.nipype in mriqc

To help you get started, we've selected a few mriqc examples based on popular ways it is used in public projects.
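
config.nipype is the section of MRIQC's configuration module that gathers the Nipype-related settings (execution plugin, number of processes, per-process threads). A minimal sketch of initializing and reading it, using attribute names taken from the snippets below (the printed values depend on your settings):

from mriqc import config

config.nipype.init()                  # apply the Nipype-related settings
print(config.nipype.plugin)           # execution plugin, e.g. "MultiProc"
print(config.nipype.nprocs)           # processes available to the plugin
print(config.nipype.omp_nthreads)     # threads per process
print(config.nipype.get_plugin())     # dict ready to unpack into Workflow.run()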


Example from poldracklab/mriqc: mriqc/testing.py
def mock_config():
    """Create a mock config for documentation and testing purposes."""
    from . import config

    filename = Path(pkgrf("mriqc", "data/config-example.toml"))
    settings = loads(filename.read_text())
    for sectionname, configs in settings.items():
        if sectionname != "environment":
            section = getattr(config, sectionname)
            section.load(configs, init=False)
    config.nipype.init()
    config.loggers.init()

    config.execution.work_dir = Path(mkdtemp())
    config.execution.bids_dir = Path(pkgrf("mriqc", "data/tests/ds000005")).absolute()
    config.execution.init()

    yield
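
The later snippets on this page use mock_config as a context manager; a hedged usage sketch showing that config.nipype is populated from the bundled data/config-example.toml inside the context:

from mriqc import config
from mriqc.testing import mock_config

with mock_config():
    # config.nipype was initialized from data/config-example.toml
    print(config.nipype.nprocs, config.nipype.omp_nthreads)
    print(config.nipype.get_plugin())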
Example from poldracklab/mriqc: mriqc/cli/parser.py
    opts = parser.parse_args(args, namespace)
    config.execution.log_level = int(max(25 - 5 * opts.verbose_count, DEBUG))
    config.from_dict(vars(opts))
    config.loggers.init()

    # Load base plugin_settings from file if --use-plugin
    if opts.use_plugin is not None:
        from yaml import load as loadyml

        with open(opts.use_plugin) as f:
            plugin_settings = loadyml(f)
        _plugin = plugin_settings.get("plugin")
        if _plugin:
            config.nipype.plugin = _plugin
            config.nipype.plugin_args = plugin_settings.get("plugin_args", {})
            config.nipype.nprocs = config.nipype.plugin_args.get(
                "nprocs", config.nipype.nprocs
            )

    # Resource management options
    # Note that we're making strong assumptions about valid plugin args
    # This may need to be revisited if people try to use batch plugins
    if 1 < config.nipype.nprocs < config.nipype.omp_nthreads:
        config.loggers.cli.warning(
            "Per-process threads (--omp-nthreads=%d) exceed total "
            "threads (--nthreads/--n_cpus=%d)",
            config.nipype.omp_nthreads,
            config.nipype.nprocs,
        )

    bids_dir = config.execution.bids_dir
    output_dir = config.execution.output_dir
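
For reference, the dictionary loaded from the --use-plugin YAML file is expected to have the shape sketched below (keys taken from the snippet above; the values are purely illustrative):

# Result of yaml.load() on a --use-plugin file (illustrative values)
plugin_settings = {
    "plugin": "MultiProc",
    "plugin_args": {"nprocs": 8},
}
# The parser then maps it onto the configuration:
#   config.nipype.plugin      <- "MultiProc"
#   config.nipype.plugin_args <- {"nprocs": 8}
#   config.nipype.nprocs      <- 8 (kept at its previous value if "nprocs" is absent)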
Example from poldracklab/mriqc: mriqc/cli/run.py
        if retcode != 0:
            sys.exit(retcode)

        if mriqc_wf and config.execution.write_graph:
            mriqc_wf.write_graph(graph2use="colored", format="svg", simple_form=True)

        # Clean up master process before running workflow, which may create forks
        gc.collect()

        if not config.execution.dry_run:
            # Warn about submitting measures BEFORE
            if not config.execution.no_sub:
                config.loggers.cli.warning(config.DSA_MESSAGE)

            # run MRIQC
            mriqc_wf.run(**config.nipype.get_plugin())

            # Warn about submitting measures AFTER
            if not config.execution.no_sub:
                config.loggers.cli.warning(config.DSA_MESSAGE)
        config.loggers.cli.log(25, "Participant level finished successfully.")

    # Set up group level
    if "group" in config.workflow.analysis_level:
        from ..utils.bids import DEFAULT_TYPES
        from ..reports import group_html
        from ..utils.misc import generate_tsv  # , generate_pred

        config.loggers.cli.info("Group level started...")

        # Generate reports
        mod_group_reports = []
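
In this run.py snippet, the key call for this page is mriqc_wf.run(**config.nipype.get_plugin()). Spelled out, it is roughly equivalent to the sketch below (get_plugin() may additionally fold resource settings such as nprocs into plugin_args):

# Roughly equivalent to mriqc_wf.run(**config.nipype.get_plugin())
mriqc_wf.run(
    plugin=config.nipype.plugin,            # e.g. "MultiProc"
    plugin_args=config.nipype.plugin_args,  # plugin-specific options
)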
Example from poldracklab/mriqc: mriqc/config.py
def from_dict(settings):
    """Read settings from a flat dictionary."""
    nipype.load(settings)
    execution.load(settings)
    workflow.load(settings)
    loggers.init()
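
A hedged sketch of calling from_dict directly: it takes a flat dictionary (such as vars(opts) in the parser snippet above) and each section picks up the keys it recognizes. The keys shown correspond to attributes used elsewhere on this page; the values are illustrative:

from mriqc import config

config.from_dict({
    "nprocs": 4,              # picked up by config.nipype
    "omp_nthreads": 2,        # picked up by config.nipype
    "work_dir": "/tmp/work",  # picked up by config.execution
})
print(config.nipype.nprocs)   # expected: 4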
Example from poldracklab/mriqc: mriqc/workflows/functional.py
    .. workflow::

        from mriqc.workflows.functional import epi_mni_align
        from mriqc.testing import mock_config
        with mock_config():
            wf = epi_mni_align()

    """
    from templateflow.api import get as get_template
    from niworkflows.interfaces.registration import (
        RobustMNINormalizationRPT as RobustMNINormalization
    )

    # Get settings
    testing = config.execution.debug
    n_procs = config.nipype.nprocs
    ants_nthreads = config.nipype.omp_nthreads

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=['epi_mean', 'epi_mask']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['epi_mni', 'epi_parc', 'report']), name='outputnode')

    n4itk = pe.Node(ants.N4BiasFieldCorrection(dimension=3, copy_header=True),
                    name='SharpenEPI')

    norm = pe.Node(RobustMNINormalization(
        explicit_masking=False,
        flavor='testing' if testing else 'precise',
        float=config.execution.ants_float,
        generate_report=True,
        moving='boldref',
Example from poldracklab/mriqc: mriqc/workflows/anatomical.py
)

    # Have the template id handy
    tpl_id = config.workflow.template_id

    # Define workflow interface
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'moving_image', 'moving_mask', 'modality']), name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'inverse_composite_transform', 'out_report']), name='outputnode')

    # Spatial normalization
    norm = pe.Node(RobustMNINormalization(
        flavor=['testing', 'fast'][config.execution.debug],
        num_threads=config.nipype.omp_nthreads,
        float=config.execution.ants_float,
        template=tpl_id,
        template_resolution=resolution,
        generate_report=True,),
        name='SpatialNormalization',
        # Request all MultiProc processes when ants_nthreads > n_procs
        num_threads=config.nipype.omp_nthreads,
        mem_gb=3)
    norm.inputs.reference_mask = str(
        get_template(tpl_id, resolution=resolution, desc='brain', suffix='mask'))

    workflow.connect([
        (inputnode, norm, [('moving_image', 'moving_image'),
                           ('moving_mask', 'moving_mask'),
                           ('modality', 'reference')]),
        (norm, outputnode, [('inverse_composite_transform', 'inverse_composite_transform'),
Example from poldracklab/mriqc: mriqc/config.py
def get(flat=False):
    """Get config as a dict."""
    settings = {
        "environment": environment.get(),
        "execution": execution.get(),
        "workflow": workflow.get(),
        "nipype": nipype.get(),
    }
    if not flat:
        return settings

    return {
        ".".join((section, k)): v
        for section, configs in settings.items()
        for k, v in configs.items()
    }
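
The flat form prefixes each key with its section name, which is convenient for serializing or logging the configuration; a brief sketch of both shapes (values are illustrative):

nested = config.get()         # {"nipype": {"nprocs": 8, ...}, "execution": {...}, ...}
flat = config.get(flat=True)  # {"nipype.nprocs": 8, "execution.work_dir": "...", ...}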
Example from poldracklab/mriqc: mriqc/workflows/anatomical.py
Building anatomical MRIQC workflow for files: {', '.join(dataset)}.""")

    # Initialize workflow
    workflow = pe.Workflow(name=name)

    # Define workflow, inputs and outputs
    # 0. Get data
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']), name='inputnode')
    inputnode.iterables = [('in_file', dataset)]

    outputnode = pe.Node(niu.IdentityInterface(fields=['out_json']), name='outputnode')

    # 1. Reorient anatomical image
    to_ras = pe.Node(ConformImage(check_dtype=False), name='conform')
    # 2. Skull-stripping (afni)
    asw = skullstrip_wf(n4_nthreads=config.nipype.omp_nthreads, unifize=False)
    # 3. Head mask
    hmsk = headmsk_wf()
    # 4. Spatial Normalization, using ANTs
    norm = spatial_normalization()
    # 5. Air mask (with and without artifacts)
    amw = airmsk_wf()
    # 6. Brain tissue segmentation
    segment = pe.Node(fsl.FAST(segments=True, out_basename='segment'),
                      name='segmentation', mem_gb=5)
    # 7. Compute IQMs
    iqmswf = compute_iqms()
    # Reports
    repwf = individual_reports()

    # Connect all nodes
    workflow.connect([
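
Putting the pieces together, a hedged end-to-end sketch that mirrors the snippets above: build a workflow under mock_config and hand the config.nipype plugin settings to Nipype. This is illustrative only; actually executing epi_mni_align requires connected inputs plus ANTs and TemplateFlow resources.

from mriqc import config
from mriqc.testing import mock_config
from mriqc.workflows.functional import epi_mni_align

with mock_config():
    wf = epi_mni_align()                    # resource knobs are read from config.nipype
    wf.run(**config.nipype.get_plugin())    # plugin and plugin_args come from the config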