def is_categorical(data, c=None):
    from pandas.api.types import is_categorical as cat

    if c is None:
        return cat(data)  # if data is categorical/array
    if not is_view(data):  # if data is anndata view
        strings_to_categoricals(data)
    return isinstance(c, str) and c in data.obs.keys() and cat(data.obs[c])
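# Usage sketch for is_categorical (illustrative, not part of the library code above):
# assumes the module-level helpers `is_view` and `strings_to_categoricals` are in scope,
# and builds a toy AnnData with a hypothetical 'clusters' column.
import numpy as np
import pandas as pd
from anndata import AnnData

adata = AnnData(np.random.rand(5, 3))
adata.obs['clusters'] = pd.Categorical(['a', 'b', 'a', 'b', 'a'])

is_categorical(adata.obs['clusters'])    # True: the Series itself is categorical
is_categorical(adata, c='clusters')      # True: .obs['clusters'] is categorical
is_categorical(adata, c='not_a_column')  # False: key missing from .obs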
Returns
-------
Returns or updates `adata` with the attributes
velocity: `.layers`
    velocity vectors for each individual cell
variance_velocity: `.layers`
    velocity vectors for the cell variances
velocity_offset, velocity_beta, velocity_gamma, velocity_r2: `.var`
    parameters
"""
adata = data.copy() if copy else data
if not use_raw and 'Ms' not in adata.layers.keys():
    moments(adata)

logg.info('computing velocities', r=True)
strings_to_categoricals(adata)

if mode is None or (mode == 'dynamical' and 'fit_alpha' not in adata.var.keys()):
    mode = 'stochastic'
    logg.warn('Falling back to stochastic model. '
              'For the dynamical model run tl.recover_dynamics first.')

if mode in {'dynamical', 'dynamical_residuals'}:
    from .dynamical_model_utils import mRNA, vectorize, get_reads, get_vars, get_divergence

    gene_subset = ~np.isnan(adata.var['fit_alpha'].values)
    vdata = adata[:, gene_subset]
    alpha, beta, gamma, scaling, t_ = get_vars(vdata)

    connect = not adata.uns['recover_dynamics']['use_raw']
    kwargs_ = {'kernel_width': None, 'normalized': True, 'var_scale': True,
               'reg_par': None, 'min_confidence': 1e-2, 'constraint_time_increments': False,
               'fit_steady_states': True, 'fit_basal_transcription': None,
               'use_connectivities': connect, 'time_connectivities': connect,
               'use_latent_time': use_latent_time}
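# Typical call through the public scVelo API (a sketch; dataset, gene/PC counts, and
# mode are illustrative choices, assuming a standard scVelo installation):
import scvelo as scv

adata = scv.datasets.pancreas()
scv.pp.filter_and_normalize(adata, n_top_genes=2000)
scv.pp.moments(adata, n_pcs=30, n_neighbors=30)

scv.tl.velocity(adata, mode='stochastic')  # falls back to 'stochastic' unless tl.recover_dynamics was run
print(adata.layers['velocity'].shape)      # one velocity vector per cell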
_adata.uns["velocity_graph"] = vgraph.graph
_adata.uns["velocity_graph_neg"] = vgraph.graph_neg
T = transition_matrix(_adata, self_transitions=self_transitions)
I = np.eye(_adata.n_obs)
fate = np.linalg.inv(I - T)
if issparse(T):
fate = fate.A
cell_fates = np.array(_adata.obs[groupby][fate.argmax(1)])
if disconnected_groups is not None:
idx = _adata.obs[groupby].isin(disconnected_groups)
cell_fates[idx] = _adata.obs[groupby][idx]
adata.obs["cell_fate"] = cell_fates
adata.obs["cell_fate_confidence"] = fate.max(1) / fate.sum(1)
strings_to_categoricals(adata)
logg.info(" finished", time=True, end=" " if settings.verbosity > 2 else "\n")
logg.hint(
    "added\n"
    " 'cell_fate', most likely cell fate (adata.obs)\n"
    " 'cell_fate_confidence', confidence of fate transition (adata.obs)"
)
return adata if copy else None
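# Usage sketch for the fate computation above via the public API (assumes velocities
# are already computed; 'clusters' is an illustrative .obs key). The fate matrix
# inv(I - T) sums transition probabilities over all path lengths, so the argmax per
# row picks the group a cell most likely ends up in.
import scvelo as scv

scv.tl.velocity_graph(adata)
scv.tl.cell_fate(adata, groupby='clusters')

print(adata.obs[['cell_fate', 'cell_fate_confidence']].head())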
def select_groups(adata, groups='all', key='louvain'):
    """Get subset of groups in adata.obs[key]."""
    strings_to_categoricals(adata)
    if isinstance(groups, list) and isinstance(groups[0], int):
        groups = [f"{n}" for n in groups]
    categories = adata.obs[key].cat.categories
    groups_masks = np.array(
        [categories[i] == adata.obs[key].values for i, name in enumerate(categories)]
    )
    if groups == 'all':
        groups = categories.values
    else:
        groups_ids = [np.where(categories.values == name)[0][0] for name in groups]
        groups_masks = groups_masks[groups_ids]
        groups = categories[groups_ids].values
    return groups, groups_masks
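# Usage sketch for select_groups (illustrative; assumes `strings_to_categoricals` is
# in scope and builds a toy AnnData with a hypothetical 'louvain' column):
import numpy as np
import pandas as pd
from anndata import AnnData

adata = AnnData(np.random.rand(6, 4))
adata.obs['louvain'] = pd.Categorical(['0', '0', '1', '1', '2', '2'])

groups, masks = select_groups(adata, groups=['0', '2'], key='louvain')
print(groups)       # ['0' '2']
print(masks.shape)  # (2, 6): one boolean mask per selected group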