if diff_kinetics in adata.var.keys():
    if diff_kinetics in adata.uns['recover_dynamics']:
        groupby = adata.uns['recover_dynamics']['fit_diff_kinetics']
    else:
        groupby = 'clusters'
    clusters = adata.obs[groupby]
    # zero out velocities of genes in clusters assigned an independent kinetic
    # regime (comma-separated cluster names stored per gene in adata.var)
    for i, v in enumerate(np.array(adata.var[diff_kinetics].values, dtype=str)):
        if len(v) > 0 and v != 'nan':
            idx = 1 - clusters.isin([a.strip() for a in v.split(',')])
            adata.layers[vkey][:, i] *= idx
            if mode == 'dynamical':
                adata.layers[f'{vkey}_u'][:, i] *= idx
adata.uns[f'{vkey}_params'] = {'mode': mode, 'fit_offset': fit_offset, 'perc': perc}
logg.info('    finished', time=True, end=' ' if settings.verbosity > 2 else '\n')
logg.hint(
    'added \n'
    f"    '{vkey}', velocity vectors for each individual cell (adata.layers)"
)
return adata if copy else None
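
# A minimal usage sketch for the velocity fragment above, assuming scvelo's
# public API (scv.tl.recover_dynamics / scv.tl.velocity) and a preprocessed
# AnnData object `adata`; parameter names mirror the variables in the fragment.
import scvelo as scv

scv.tl.recover_dynamics(adata)  # fits the dynamical model, needed for mode="dynamical"
scv.tl.velocity(adata, vkey="velocity", mode="dynamical", diff_kinetics=True)
velocities = adata.layers["velocity"]  # per-cell velocity vectors written above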
adata.uns[f"{vkey}_graph_neg"] = vgraph.graph_neg
if vgraph.uncertainties is not None:
adata.uns[f"{vkey}_graph_uncertainties"] = vgraph.uncertainties
adata.obs[f"{vkey}_self_transition"] = vgraph.self_prob
if f"{vkey}_params" in adata.uns.keys():
if "embeddings" in adata.uns[f"{vkey}_params"]:
del adata.uns[f"{vkey}_params"]["embeddings"]
else:
adata.uns[f"{vkey}_params"] = {}
adata.uns[f"{vkey}_params"]["mode_neighbors"] = mode_neighbors
adata.uns[f"{vkey}_params"]["n_recurse_neighbors"] = vgraph.n_recurse_neighbors
logg.info(" finished", time=True, end=" " if settings.verbosity > 2 else "\n")
logg.hint(
"added \n"
f" '{vkey}_graph', sparse matrix with cosine correlations (adata.uns)"
)
return adata if copy else None
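
# Usage sketch for the velocity-graph fragment, assuming scvelo's public
# scv.tl.velocity_graph on an AnnData `adata` that already has velocities.
import scvelo as scv

scv.tl.velocity_graph(adata, vkey="velocity", mode_neighbors="distances")
graph = adata.uns["velocity_graph"]               # cosine correlations (sparse)
self_prob = adata.obs["velocity_self_transition"]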
def _settings_verbosity_greater_or_equal_than(v):
    if isinstance(settings.verbosity, str):
        settings_v = _VERBOSITY_LEVELS_FROM_STRINGS[settings.verbosity]
    else:
        settings_v = settings.verbosity
    return settings_v >= v
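
# The helper above relies on a module-level map from verbosity names to integer
# levels; a plausible sketch (key names and values are illustrative assumptions,
# not taken from this file):
_VERBOSITY_LEVELS_FROM_STRINGS = {"error": 0, "warn": 1, "info": 2, "hint": 3}

# e.g. with settings.verbosity = "hint", hints (level 3) would be emitted:
# _settings_verbosity_greater_or_equal_than(3)  -> True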
T = transition_matrix(_adata, self_transitions=self_transitions)
I = np.eye(_adata.n_obs)
# fundamental matrix (I - T)^-1: expected number of visits to each cell
fate = np.linalg.inv(I - T)
if issparse(T):
    fate = fate.A
cell_fates = np.array(_adata.obs[groupby][fate.argmax(1)])
if disconnected_groups is not None:
    idx = _adata.obs[groupby].isin(disconnected_groups)
    cell_fates[idx] = _adata.obs[groupby][idx]
adata.obs["cell_fate"] = cell_fates
adata.obs["cell_fate_confidence"] = fate.max(1) / fate.sum(1)
strings_to_categoricals(adata)
logg.info("    finished", time=True, end=" " if settings.verbosity > 2 else "\n")
logg.hint(
    "added\n"
    "    'cell_fate', most likely cell fate (adata.obs)\n"
    "    'cell_fate_confidence', confidence of fate transition (adata.obs)"
)
return adata if copy else None
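
# Usage sketch, assuming scvelo's public scv.tl.cell_fate. The fragment's
# (I - T)^-1 accumulates expected visits under the transition matrix T, so the
# row-wise argmax picks each cell's most-visited (most likely) terminal group.
import scvelo as scv

scv.tl.cell_fate(adata, groupby="clusters", self_transitions=False)
print(adata.obs[["cell_fate", "cell_fate_confidence"]].head())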
# the 'else' below pairs with a guard truncated from this fragment; a plausible
# form, judging from how match_with is used:
if match_with is not None and match_with in adata.obs.keys():
    from .utils import most_common_in_list

    # rename velocity clusters after the best-matching category in `match_with`
    vc = vdata.obs['louvain']
    cats_nums = {cat: 0 for cat in adata.obs[match_with].cat.categories}
    for i, cat in enumerate(vc.cat.categories):
        cells_in_cat = np.where(vc == cat)[0]
        new_cat = most_common_in_list(adata.obs[match_with][cells_in_cat])
        cats_nums[new_cat] += 1
        vc = vc.cat.rename_categories({cat: f"{new_cat} ({cats_nums[new_cat]})"})
    vdata.obs['louvain'] = vc
else:
    vdata.obs['louvain'].cat.categories = np.arange(len(vdata.obs['louvain'].cat.categories))
adata.obs[f'{vkey}_clusters'] = vdata.obs['louvain'].copy()
del vdata
logg.info('    finished', time=True, end=' ' if settings.verbosity > 2 else '\n')
logg.hint(
    'added \n'
    f"    '{vkey}_clusters', clusters based on modularity on velocity field (adata.obs)"
)
return adata if copy else None
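
# Usage sketch, assuming scvelo's public scv.tl.velocity_clusters; match_with
# relabels the velocity-based Louvain clusters after their best-matching
# category in an existing annotation, as the fragment above does.
import scvelo as scv

scv.tl.velocity_clusters(adata, match_with="clusters")
print(adata.obs["velocity_clusters"].value_counts())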
if "paga" not in adata.uns:
adata.uns["paga"] = {}
paga.compute_connectivities()
adata.uns["paga"]["connectivities"] = paga.connectivities
adata.uns["paga"]["connectivities_tree"] = paga.connectivities_tree
adata.uns[f"{groups}_sizes"] = np.array(paga.ns)
paga.compute_transitions()
adata.uns["paga"]["transitions_confidence"] = paga.transitions_confidence
adata.uns["paga"]["threshold"] = paga.threshold
adata.uns["paga"]["groups"] = groups
logg.info(" finished", time=True, end=" " if settings.verbosity > 2 else "\n")
logg.hint(
"added\n" + " 'paga/connectivities', connectivities adjacency (adata.uns)\n"
" 'paga/connectivities_tree', connectivities subtree (adata.uns)\n"
" 'paga/transitions_confidence', velocity transitions (adata.uns)"
)
return adata if copy else None
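
# Usage sketch for the PAGA fragment, assuming scvelo's public scv.tl.paga
# (velocity-directed PAGA); it requires a velocity graph to be present.
import scvelo as scv

scv.tl.paga(adata, groups="clusters")
trans = adata.uns["paga"]["transitions_confidence"]  # directed transitions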
# the 'else' below pairs with a guard truncated from this fragment; presumably
# a check on weight_diffusion, given the assignment that follows
if weight_diffusion is not None:
    w = weight_diffusion
    latent_time = (1 - w) * latent_time + w * vpt
    latent_time[idx_low_confidence] = vpt[idx_low_confidence]
else:
    # smooth latent time over the neighbor graph, excluding low-confidence cells
    conn_new = conn.copy()
    conn_new[:, idx_low_confidence] = 0
    conn_new.eliminate_zeros()
    latent_time = conn_new.dot(latent_time)
latent_time = scale(latent_time)
if t_max is not None:
    latent_time *= t_max
adata.obs["latent_time"] = latent_time
logg.info("    finished", time=True, end=" " if settings.verbosity > 2 else "\n")
logg.hint("added \n    'latent_time', shared time (adata.obs)")
return adata if copy else None
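
# Usage sketch, assuming scvelo's public scv.tl.latent_time; weight_diffusion
# and t_max select between the branches in the fragment above.
import scvelo as scv

scv.tl.latent_time(adata, weight_diffusion=None, t_max=None)
lt = adata.obs["latent_time"]  # shared latent time, rescaled (by t_max if given)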
adata.uns["neighbors"]["connectivities_key"] = "connectivities"
adata.uns["neighbors"]["distances_key"] = "distances"
except:
adata.uns["neighbors"]["distances"] = neighbors.distances
adata.uns["neighbors"]["connectivities"] = neighbors.connectivities
if hasattr(neighbors, "knn_indices"):
adata.uns["neighbors"]["indices"] = neighbors.knn_indices
adata.uns["neighbors"]["params"] = {
"n_neighbors": n_neighbors,
"method": method,
"metric": metric,
"n_pcs": n_pcs,
}
logg.info(" finished", time=True, end=" " if settings.verbosity > 2 else "\n")
logg.hint(
"added \n"
" 'distances' and 'connectivities', weighted adjacency matrices (adata.obsp)"
)
return adata if copy else None
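
# Usage sketch for the neighbors fragment, assuming scvelo's public
# scv.pp.neighbors; the parameters mirror those recorded in .uns above.
import scvelo as scv

scv.pp.neighbors(adata, n_neighbors=30, n_pcs=30, method="umap", metric="euclidean")
dist = adata.obsp["distances"]  # weighted adjacency matrix written above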
if autoscale:
    V_emb /= 3 * quiver_autoscale(X_emb, V_emb)
if f"{vkey}_params" in adata.uns.keys():
    adata.uns[f"{vkey}_params"]["embeddings"] = (
        []
        if "embeddings" not in adata.uns[f"{vkey}_params"]
        else list(adata.uns[f"{vkey}_params"]["embeddings"])
    )
    adata.uns[f"{vkey}_params"]["embeddings"].extend([basis])
vkey += f"_{basis}"
adata.obsm[vkey] = V_emb
logg.info("    finished", time=True, end=" " if settings.verbosity > 2 else "\n")
logg.hint("added\n" f"    '{vkey}', embedded velocity vectors (adata.obsm)")
return adata if copy else None
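
# Usage sketch, assuming scvelo's public scv.tl.velocity_embedding; the
# projected vectors land in adata.obsm under '<vkey>_<basis>'.
import scvelo as scv

scv.tl.velocity_embedding(adata, basis="umap", vkey="velocity", autoscale=True)
V_umap = adata.obsm["velocity_umap"]  # embedded velocity vectors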