# NOTE(review): leftover web-page banner, not code — commented out so it cannot be
# mistaken for a statement: "Secure your code as it's written. Use Snyk Code to scan
# source code in minutes - no build needed - and fix issues immediately."
# NOTE(review): fragment — the enclosing function's `def` line is outside this chunk
# and the original indentation has been stripped; code is kept byte-identical.
# Purpose (from the visible code): pick the grouping used for differential kinetics,
# then zero out per-gene velocities in the clusters flagged for that gene.
# NOTE(review): membership is tested for `diff_kinetics` but the value read back is the
# fixed key 'fit_diff_kinetics' — confirm this asymmetry is intended.
if diff_kinetics in adata.uns['recover_dynamics']:
groupby = adata.uns['recover_dynamics']['fit_diff_kinetics']
else:
# fall back to a default cluster annotation when no recovered grouping is stored
groupby = 'clusters'
clusters = adata.obs[groupby]
# adata.var[diff_kinetics] holds, per gene, a comma-separated list of cluster names;
# cast to str so missing values become the literal 'nan' checked below.
for i, v in enumerate(np.array(adata.var[diff_kinetics].values, dtype=str)):
if len(v) > 0 and v != 'nan':
# 1 - isin(...) builds a 0/1 mask: 0 for cells in the flagged clusters,
# so multiplying zeroes that gene's velocity exactly there.
idx = 1 - clusters.isin([a.strip() for a in v.split(',')])
adata.layers[vkey][:, i] *= idx
if mode == 'dynamical':
# dynamical mode also stores an unspliced-velocity layer; mask it too
adata.layers[f'{vkey}_u'][:, i] *= idx
# record the parameters this velocity run was computed with
adata.uns[f'{vkey}_params'] = {'mode': mode, 'fit_offset': fit_offset, 'perc': perc}
logg.info('    finished', time=True, end=' ' if settings.verbosity > 2 else '\n')
logg.hint('added \n'
f'    \'{vkey}\', velocity vectors for each individual cell (adata.layers)')
return adata if copy else None
# NOTE(review): fragment — enclosing `def`, and the construction of tmp_filter/V/X,
# are outside this chunk; indentation stripped; code kept byte-identical.
# Purpose: score each cell by how well its velocity V agrees with the displacement
# implied by the transition matrix.
tmp_filter &= adata.var["spearmans_score"].values > 0.1
# restrict both velocity and expression to the genes passing the filter
V = V[:, tmp_filter]
X = X[:, tmp_filter]
T = transition_matrix(adata, vkey=vkey, scale=scale)
# expected displacement under the transition matrix, minus current state
dX = T.dot(X) - X
# center per cell (row-wise) before comparing directions
dX -= dX.mean(1)[:, None]
V -= V.mean(1)[:, None]
norms = norm(dX) * norm(V)
# guard against division by zero: add 1 exactly where the product of norms is 0
norms += norms == 0
# cosine-like agreement between predicted displacement and velocity, per cell
adata.obs[f"{vkey}_confidence_transition"] = prod_sum_var(dX, V) / norms
logg.hint(f"added '{vkey}_confidence_transition' (adata.obs)")
return adata if copy else None
key = "rank_dynamical_genes"
if key not in adata.uns.keys():
adata.uns[key] = {}
adata.uns[key] = {
"names": np.rec.fromarrays(
[n for n in rankings_gene_names], dtype=[(f"{rn}", "U50") for rn in groups],
),
"scores": np.rec.fromarrays(
[n.round(2) for n in rankings_gene_scores],
dtype=[(f"{rn}", "float32") for rn in groups],
),
}
logg.info(" finished", time=True, end=" " if settings.verbosity > 2 else "\n")
logg.hint("added \n" f" '{key}', sorted scores by group ids (adata.uns)")
return adata if copy else None
# NOTE(review): fragment — enclosing `def` is outside this chunk and the final
# logg.hint(...) call is cut off mid-argument at the end; indentation stripped;
# code kept byte-identical.
# Purpose: infer each cell's most likely origin group by inverting the backward
# transition matrix (fundamental-matrix style: sum over all backward walks).
T = transition_matrix(_adata, self_transitions=self_transitions, backward=True)
I = np.eye(_adata.n_obs)
# (I - T)^-1 accumulates expected visits; row-argmax picks the dominant source
fate = np.linalg.inv(I - T)
if issparse(T):
# sparse T makes the result a np.matrix; .A densifies to a plain ndarray
fate = fate.A
cell_fates = np.array(_adata.obs[groupby][fate.argmax(1)])
if disconnected_groups is not None:
# cells in disconnected groups keep their own label instead of an inferred one
idx = _adata.obs[groupby].isin(disconnected_groups)
cell_fates[idx] = _adata.obs[groupby][idx]
adata.obs["cell_origin"] = cell_fates
# confidence: mass of the winning group relative to total accumulated mass
adata.obs["cell_origin_confidence"] = fate.max(1) / fate.sum(1)
strings_to_categoricals(adata)
logg.info("    finished", time=True, end=" " if settings.verbosity > 2 else "\n")
logg.hint(
"added\n"
"    'cell_origin', most likely cell origin (adata.obs)\n"
# NOTE(review): fragment — enclosing `def`, the construction of vdata, and the `if`
# matching the `else:` below are outside this chunk; indentation stripped; code kept
# byte-identical.
# Purpose: relabel louvain clusters of the velocity graph by the best-matching
# annotation in adata.obs[match_with], numbering duplicates "name (1)", "name (2)", ...
vc = vdata.obs['louvain']
# per-annotation counter used to disambiguate repeated matches
cats_nums = {cat: 0 for cat in adata.obs[match_with].cat.categories}
# NOTE(review): `i` from enumerate is unused — kept as-is in this doc-only pass
for i, cat in enumerate(vc.cat.categories):
cells_in_cat = np.where(vc == cat)[0]
# majority annotation among the cells of this velocity cluster
new_cat = most_common_in_list(adata.obs[match_with][cells_in_cat])
cats_nums[new_cat] += 1
vc = vc.cat.rename_categories({cat: f"{new_cat} ({cats_nums[new_cat]})"})
vdata.obs['louvain'] = vc
else:
# no annotation to match against: fall back to plain integer category labels
vdata.obs['louvain'].cat.categories = np.arange(len(vdata.obs['louvain'].cat.categories))
adata.obs[f'{vkey}_clusters'] = vdata.obs['louvain'].copy()
# vdata was only a scratch object for clustering; free it
del vdata
logg.info('    finished', time=True, end=' ' if settings.verbosity > 2 else '\n')
logg.hint(
'added \n'
f'    \'{vkey}_clusters\', clusters based on modularity on velocity field (adata.obs)')
return adata if copy else None
if "paga" not in adata.uns:
adata.uns["paga"] = {}
paga.compute_connectivities()
adata.uns["paga"]["connectivities"] = paga.connectivities
adata.uns["paga"]["connectivities_tree"] = paga.connectivities_tree
adata.uns[f"{groups}_sizes"] = np.array(paga.ns)
paga.compute_transitions()
adata.uns["paga"]["transitions_confidence"] = paga.transitions_confidence
adata.uns["paga"]["threshold"] = paga.threshold
adata.uns["paga"]["groups"] = groups
logg.info(" finished", time=True, end=" " if settings.verbosity > 2 else "\n")
logg.hint(
"added\n" + " 'paga/connectivities', connectivities adjacency (adata.uns)\n"
" 'paga/connectivities_tree', connectivities subtree (adata.uns)\n"
" 'paga/transitions_confidence', velocity transitions (adata.uns)"
)
return adata if copy else None
# NOTE(review): fragment — enclosing `def`, the fitting loop that produced
# alpha/beta/gamma/t_/scaling/T/L/P/idx/m, and the tail of the plotting loop are all
# outside this chunk (the plotting section is cut off mid-loop at the end);
# indentation stripped; code kept byte-identical.
# Purpose: persist fitted splicing-dynamics parameters, pad per-gene loss curves to
# a common length with NaN, and (optionally) plot parameter traces.
write_pars(adata, [alpha, beta, gamma, t_, scaling])
adata.layers['fit_t'] = T
# widest loss history already stored, if any; minimum width 2
cur_len = adata.varm['loss'].shape[1] if 'loss' in adata.varm.keys() else 2
max_len = max(np.max([len(l) for l in L]), cur_len)
# NaN-padded matrix so genes with shorter loss histories align column-wise
loss = np.ones((adata.n_vars, max_len)) * np.nan
if 'loss' in adata.varm.keys():
# keep previously stored losses in the leading columns
loss[:, :cur_len] = adata.varm['loss']
loss[idx] = np.vstack([np.concatenate([l, np.ones(max_len-len(l)) * np.nan]) for l in L])
adata.varm['loss'] = loss
logg.info('    finished', time=True, end=' ' if settings.verbosity > 2 else '\n')
logg.hint('added \n'
f'    \'{add_key}_pars\', fitted parameters for splicing dynamics (adata.var)')
if plot_results:  # Plot Parameter Stats
# at most 4 genes, one row each; 6 columns (5 parameters + loss)
n_rows, n_cols = len(var_names[:4]), 6
figsize = [2 * n_cols, 1.5 * n_rows]  # rcParams['figure.figsize']
fontsize = rcParams['font.size']
fig, axes = pl.subplots(nrows=n_rows, ncols=6, figsize=figsize)
pl.subplots_adjust(wspace=0.7, hspace=0.5)
for i, gene in enumerate(var_names[:4]):
# rescale rate parameters by m[idx[i]] — presumably an inverse scaling factor
# applied during fitting; TODO confirm against the (unseen) fitting code
P[i] *= np.array([1 / m[idx[i]], 1 / m[idx[i]], 1 / m[idx[i]], m[idx[i]], 1])[:, None]
ax = axes[i] if n_rows > 1 else axes
for j, pij in enumerate(P[i]):
ax[j].plot(pij)
# last column shows the loss trajectory
ax[len(P[i])].plot(L[i])
if i == 0:
for j, name in enumerate(['alpha', 'beta', 'gamma', 't_', 'scaling', 'loss']):
# NOTE(review): fragment — enclosing `def` is outside this chunk; indentation
# stripped; code kept byte-identical. Mirror image of the cell-origin fragment
# earlier in this file, but with the forward transition matrix (no backward=True).
# Purpose: infer each cell's most likely terminal group from accumulated forward
# transition mass.
T = transition_matrix(_adata, self_transitions=self_transitions)
I = np.eye(_adata.n_obs)
# (I - T)^-1 sums expected visits over all forward walks; row-argmax = likely fate
fate = np.linalg.inv(I - T)
if issparse(T):
# sparse T makes the result a np.matrix; .A densifies to a plain ndarray
fate = fate.A
cell_fates = np.array(_adata.obs[groupby][fate.argmax(1)])
if disconnected_groups is not None:
# cells in disconnected groups keep their own label instead of an inferred one
idx = _adata.obs[groupby].isin(disconnected_groups)
cell_fates[idx] = _adata.obs[groupby][idx]
adata.obs["cell_fate"] = cell_fates
# confidence: mass of the winning group relative to total accumulated mass
adata.obs["cell_fate_confidence"] = fate.max(1) / fate.sum(1)
strings_to_categoricals(adata)
logg.info("    finished", time=True, end=" " if settings.verbosity > 2 else "\n")
logg.hint(
"added\n"
"    'cell_fate', most likely cell fate (adata.obs)\n"
"    'cell_fate_confidence', confidence of fate transition (adata.obs)"
)
return adata if copy else None
adata.uns["neighbors"]["distances_key"] = "distances"
except:
adata.uns["neighbors"]["distances"] = neighbors.distances
adata.uns["neighbors"]["connectivities"] = neighbors.connectivities
if hasattr(neighbors, "knn_indices"):
adata.uns["neighbors"]["indices"] = neighbors.knn_indices
adata.uns["neighbors"]["params"] = {
"n_neighbors": n_neighbors,
"method": method,
"metric": metric,
"n_pcs": n_pcs,
}
logg.info(" finished", time=True, end=" " if settings.verbosity > 2 else "\n")
logg.hint(
"added \n"
" 'distances' and 'connectivities', weighted adjacency matrices (adata.obsp)"
)
return adata if copy else None