References
----------
.. [1] Kang, Jian, et al. "Meta analysis of functional neuroimaging data
via Bayesian spatial point processes." Journal of the American
Statistical Association 106.493 (2011): 124-134.
https://doi.org/10.1198/jasa.2011.ap09735
"""
def __init__(self):
pass
def _fit(self, dataset):
pass
@due.dcite(references.HPGRF, description='Introduces the HPGRF model.')
class HPGRF(CBMAEstimator):
"""
Hierarchical Poisson/Gamma random field model [1]_.
Warnings
--------
This method is not yet implemented.
References
----------
.. [1] Kang, Jian, et al. "A Bayesian hierarchical spatial point process
model for multi-type neuroimaging meta-analysis." The Annals of Applied
Statistics 8.3 (2014): 1800.
"""
def __init__(self):
pass
@due.dcite(references.PEAKS2MAPS,
description='Transforms coordinates of peaks to unthresholded maps using a deep '
'convolutional neural net.')
def peaks2maps(contrasts_coordinates, skip_out_of_bounds=True,
tf_verbosity_level=None):
"""
Generate modeled activation (MA) maps using the deep ConvNet model peaks2maps.
Parameters
----------
contrasts_coordinates : list of lists of (x, y, z) coordinates
List of contrasts, where each contrast is a list of length-3 peak coordinates.
skip_out_of_bounds : bool, optional
Remove coordinates outside of the bounding box of the peaks2maps model. Default is True.
tf_verbosity_level : int, optional
TensorFlow verbosity logging level.
"""
GloVe model-based annotation.
"""
from ..base import AnnotationModel
from ...due import due
from ... import references
@due.dcite(references.WORD2BRAIN)
class Word2BrainModel(AnnotationModel):
"""
Generate a Word2Brain vector model [1]_.
Warnings
--------
This method is not yet implemented.
References
----------
.. [1] Nunes, Abraham. "word2brain." bioRxiv (2018): 299024.
https://doi.org/10.1101/299024
"""
def __init__(self, text_df, coordinates_df):
pass
@due.dcite(references.META_CLUSTER,
description='Introduces meta-analytic clustering analysis; '
'hierarchically clustering face paradigms.')
@due.dcite(references.META_CLUSTER2,
description='Performs the specific meta-analytic clustering '
'approach implemented here.')
def meta_cluster_workflow(dataset_file, output_dir=None, output_prefix=None,
kernel='ALEKernel', coord=True, algorithm='kmeans',
clust_range=(2, 10)):
"""
Perform a meta-analytic clustering analysis on a dataset file.
Warnings
--------
This method is not yet implemented.
"""
def VI(X, Y):
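    # Minimal sketch of a variation-of-information helper (the original body is
    # not shown in this snippet). X and Y are assumed to be clusterings given as
    # lists of sets of item indices; VI(X, Y) = H(X) + H(Y) - 2 * I(X, Y).
    from math import log
    n = float(sum(len(x) for x in X))
    sigma = 0.0
    for x in X:
        p = len(x) / n
        for y in Y:
            q = len(y) / n
            r = len(set(x) & set(y)) / n
            if r > 0.0:
                # r * [log(r/p) + log(r/q)] accumulates -VI; abs() recovers VI.
                sigma += r * (log(r / p, 2) + log(r / q, 2))
    return abs(sigma)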
@due.dcite(references.GCLDA_DECODING, description='Citation for GCLDA decoding.')
def gclda_decode_roi(model, roi, topic_priors=None, prior_weight=1.):
r"""
Perform image-to-text decoding for discrete image inputs (e.g., regions
of interest, significant clusters) according to the method described in
[1]_.
Parameters
----------
model : :obj:`nimare.annotate.topic.GCLDAModel`
Model object needed for decoding.
roi : :obj:`nibabel.nifti1.Nifti1Image` or :obj:`str`
Binary image to decode into text. If string, path to a file with
the binary image.
topic_priors : :obj:`numpy.ndarray` of :obj:`float`, optional
A 1d array of size (n_topics) with values for topic weighting.
If None, no weighting is done. Default is None.
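# Conceptual sketch of the decoding steps described above (a hypothetical
# helper, not NiMARE's gclda_decode_roi): sum the model's voxel-by-topic
# spatial weights within the ROI, optionally scale by topic priors, then
# project through the topic-by-word distribution to rank terms.
import numpy as np

def _roi_decode_sketch(p_voxel_g_topic, p_word_g_topic, roi_mask, topic_priors=None):
    """p_voxel_g_topic: (n_voxels, n_topics); p_word_g_topic: (n_words, n_topics);
    roi_mask: boolean array of shape (n_voxels,)."""
    topic_weights = p_voxel_g_topic[roi_mask, :].sum(axis=0)
    if topic_priors is not None:
        topic_weights = topic_weights * topic_priors
    word_weights = p_word_g_topic.dot(topic_weights)
    return word_weights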
import logging
import numpy as np
import nibabel as nib
from scipy import ndimage
from nilearn.masking import apply_mask, unmask
from .kernel import ALEKernel, KernelTransformer
from ...results import MetaResult
from .base import CBMAEstimator
from ...due import due
from ... import references
from ...stats import null_to_p, p_to_z
from ...utils import round2
LGR = logging.getLogger(__name__)
@due.dcite(references.ALE1, description='Introduces ALE.')
@due.dcite(references.ALE2,
description='Modifies ALE algorithm to eliminate within-experiment '
'effects and generate MA maps based on subject group '
'instead of experiment.')
@due.dcite(references.ALE3,
description='Modifies ALE algorithm to allow FWE correction and to '
'more quickly and accurately generate the null '
'distribution for significance testing.')
class ALE(CBMAEstimator):
r"""
Activation likelihood estimation
Parameters
----------
kernel_estimator : :obj:`nimare.meta.cbma.base.KernelTransformer`, optional
Kernel with which to convolve coordinates from dataset. Default is ALEKernel.
@due.dcite(references.T2Z_IMPLEMENTATION,
description='Python implementation of T-to-Z transform.')
def t_to_z(t_values, dof):
"""
From Vanessa Sochat's TtoZ package.
"""
# Select just the nonzero voxels
nonzero = t_values[t_values != 0]
# We will store our results here
z_values = np.zeros(len(nonzero))
# Select values less than or == 0, and greater than zero
c = np.zeros(len(nonzero))
k1 = (nonzero <= c)
k2 = (nonzero > c)
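    # Continuation sketch based on the TtoZ approach (assumes scipy is
    # available): convert t-values to p-values under the t distribution with
    # `dof` degrees of freedom, then to z-values through the standard normal
    # inverse CDF. Negative and positive values are handled separately to
    # preserve precision in the tails.
    from scipy.stats import norm, t
    z_values[k1] = norm.ppf(t.cdf(nonzero[k1], df=dof))
    z_values[k2] = -norm.ppf(t.cdf(-nonzero[k2], df=dof))
    # Rebuild a full-size output array, leaving originally-zero voxels at zero.
    out = np.zeros(t_values.shape)
    out[t_values != 0] = z_values
    return out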
# Voxel-level FWE
vfwe_map = apply_mask(of_map, self.mask)
for i_vox, val in enumerate(vfwe_map):
vfwe_map[i_vox] = -np.log(null_to_p(val, perm_max_values, 'upper'))
vfwe_map[np.isinf(vfwe_map)] = -np.log(np.finfo(float).eps)
vthresh_of_map = apply_mask(nib.Nifti1Image(vthresh_of_map,
of_map.affine),
self.mask)
images = {'vthresh': vthresh_of_map,
'logp_level-cluster': cfwe_map,
'logp_level-voxel': vfwe_map}
return images
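# For reference, a hedged sketch of the empirical p-value used above (the real
# helper is nimare.stats.null_to_p; this is not its actual implementation):
# with an 'upper' tail, p is the proportion of the permutation null
# distribution that is at least as large as the observed value.
def _null_to_p_sketch(value, null_distribution, tail='upper'):
    null = np.asarray(null_distribution)
    if tail == 'upper':
        return np.mean(null >= value)
    return np.mean(null <= value)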
@due.dcite(references.MKDA, description='Introduces MKDA.')
class MKDAChi2(CBMAEstimator):
r"""
Multilevel kernel density analysis (MKDA): chi-square analysis [1]_.
Parameters
----------
prior : float, optional
Uniform prior probability of each feature being active in a map in
the absence of evidence from the map. Default: 0.5
kernel_estimator : :obj:`nimare.meta.cbma.base.KernelTransformer`, optional
Kernel with which to convolve coordinates from dataset. Default is
MKDAKernel.
**kwargs
Keyword arguments. Arguments for the kernel_estimator can be assigned
here, with the prefix 'kernel__' in the variable name.
"""
Meta-analytic activation modeling-based parcellation (MAMP).
"""
import numpy as np
import pandas as pd
from sklearn.cluster import k_means
import scipy.ndimage.measurements as meas
from nilearn.masking import apply_mask, unmask
from .base import Parcellator
from ..meta.cbma.kernel import ALEKernel
from ..due import due
from .. import references
@due.dcite(references.MAMP, description='Introduces the MAMP algorithm.')
class MAMP(Parcellator):
"""
Meta-analytic activation modeling-based parcellation (MAMP) [1]_.
Parameters
----------
text : :obj:`list` of :obj:`str`
List of texts to use for parcellation.
mask : :obj:`str` or :obj:`nibabel.nifti1.Nifti1Image`
Mask file or image.
Notes
-----
MAMP works similarly to CBP, but skips the step of performing a MACM for
each voxel. Here are the steps:
1. Create an MA map for each study in the dataset.