# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def get_calculator(model):
    """Return an incremental log-likelihood calculator for *model*.

    Parameters
    ----------
    model : hmmlearn model instance
        Must be an instance of (a subclass of) ``hmmlearn.hmm._BaseHMM``.

    Returns
    -------
    HmmlearnModelIncrementalLoglikCalculator

    Raises
    ------
    ValueError
        If the model type is not supported.
    """
    # Imported lazily so this module can be loaded even when hmmlearn
    # is not installed; NOTE(review): _BaseHMM is private hmmlearn API
    # and was renamed in newer releases -- confirm the pinned version.
    import hmmlearn.hmm
    # isinstance() is the idiomatic, subclass-aware form of
    # issubclass(type(model), ...).
    if isinstance(model, hmmlearn.hmm._BaseHMM):
        return HmmlearnModelIncrementalLoglikCalculator(model)
    else:
        # ValueError subclasses Exception, so callers catching the old
        # bare Exception still work.
        raise ValueError('model of type %s is not supported by fast_log_curve_calculation.'%(type(model),))
def get_init_parameters(s1, s2, **info):
    """Derive initial HMM emission parameters from two observation sequences.

    Parameters
    ----------
    s1, s2 : sequence of indexable observations
        ``s1`` is read at component 0, ``s2`` at component 1.
    info : keyword arguments
        Must contain ``count``, used as the normalization constant.

    Returns
    -------
    (n_, p_) : (numpy.ndarray, list of list of float)
        ``n_`` holds the two counts; ``p_`` is the 2x3 table of initial
        probabilities (rows: 1st/2nd emission, columns: state).
    """
    n_ = np.array([info['count'], info['count']])
    # Most frequent observation component in each sequence.
    # BUG FIX: on Python 3, map() returns a lazy iterator which
    # np.bincount cannot consume -- use list comprehensions instead
    # (matching the other copy of this function in this file).
    m_ = [float(np.argmax(np.bincount([x[0] for x in s1]))),
          float(np.argmax(np.bincount([x[1] for x in s2])))]
    p_ = [[-1, -1, -1], [-1, -1, -1]]  # first: 1. or 2. emission, second: state
    p_[0][0] = 1. / n_[0]
    p_[1][0] = 1. / n_[1]
    p_[0][1] = m_[0] / n_[0]
    p_[1][1] = p_[1][0]
    p_[0][2] = p_[0][0]
    p_[1][2] = m_[1] / n_[1]
    return n_, p_
class BinomialHMM2d3s(_BaseHMM):
    """HMM with binomial emissions over 2 dimensions and (by default 2)
    components, built on hmmlearn's private ``_BaseHMM`` base class.

    NOTE(review): the class name suggests 3 states, but the default
    ``n_components`` is 2 -- confirm which is intended.
    """

    def __init__(self, n, init_state_seq=None, p = [[0.4, 0.2, 0.3], [0.6, 0.8, 0.7]], n_components=2, covariance_type='diag', startprob=None,
                 transmat=None, startprob_prior=None, transmat_prior=None,
                 algorithm="viterbi", means_prior=None, means_weight=0,
                 covars_prior=1e-2, covars_weight=1,
                 random_state=None, n_iter=10, thresh=1e-2,
                 params=string.ascii_letters,
                 init_params=string.ascii_letters):
        # NOTE(review): `p` is a mutable default argument (shared across
        # calls); it is not stored in the visible body, but verify no
        # caller relies on mutating it.
        # The positional startprob/transmat arguments and the `thresh=`
        # keyword match an OLD hmmlearn API (newer versions take `tol=`,
        # as the other classes in this file do) -- confirm the pinned
        # hmmlearn version before changing anything here.
        _BaseHMM.__init__(self, n_components, startprob, transmat,
                          startprob_prior=startprob_prior,
                          transmat_prior=transmat_prior, algorithm=algorithm,
                          random_state=random_state, n_iter=n_iter,
                          thresh=thresh, params=params,
                          init_params=init_params)
        # Stored verbatim; presumably the binomial number-of-trials
        # parameter, consumed by emission code not visible in this chunk.
        self.n = n
def get_init_parameters(s1, s2, **info):
    """Build initial count and emission-probability parameters from two
    observation sequences.

    ``s1`` is read at component 0 and ``s2`` at component 1; ``info``
    must supply ``count``, the normalization constant. Returns a pair of
    numpy arrays: the two counts, and the 2x3 probability table
    (rows: 1st/2nd emission, columns: state).
    """
    counts = np.array([info['count'], info['count']])
    # Modal value of each sequence's relevant component.
    mode1 = float(np.argmax(np.bincount([obs[0] for obs in s1])))
    mode2 = float(np.argmax(np.bincount([obs[1] for obs in s2])))
    # Baseline probability of a single observation under each count.
    base1 = 1. / counts[0]
    base2 = 1. / counts[1]
    probs = [[base1, mode1 / counts[0], base1],
             [base2, base2, mode2 / counts[1]]]
    return np.asarray(counts), np.asarray(probs)
class BinomialHMM(_BaseHMM):
    """Binomial-emission HMM over two experimental conditions, built on
    hmmlearn's private ``_BaseHMM`` base class."""

    def __init__(self, n, p, dim_cond_1, dim_cond_2, init_state_seq=None, n_components=2, covariance_type='diag', startprob_prior=None, transmat_prior=None,
                 algorithm="viterbi", means_prior=None, means_weight=0,
                 covars_prior=1e-2, covars_weight=1,
                 random_state=None, n_iter=10, thresh=1e-2,
                 params=string.ascii_letters,
                 init_params=string.ascii_letters):
        # `tol=thresh` targets the NEWER hmmlearn API, whereas
        # BinomialHMM2d3s in this same file passes `thresh=` -- the two
        # cannot both match one hmmlearn version; verify which is right.
        # NOTE(review): `p`, `init_state_seq`, `covariance_type`,
        # `means_*` and `covars_*` are accepted but never used in the
        # visible body -- possibly stored in lines lost from this chunk.
        _BaseHMM.__init__(self, n_components,
                          startprob_prior=startprob_prior,
                          transmat_prior=transmat_prior, algorithm=algorithm,
                          random_state=random_state, n_iter=n_iter,
                          tol=thresh, params=params,
                          init_params=init_params)
        self.dim = [dim_cond_1, dim_cond_2] #dimension of one emission
        # Stored verbatim; consumed by emission code not visible here.
        self.n = n
# NOTE(review): the lines below are the TAIL of a function whose `def`
# header is missing from this chunk (it returns `alpha, mu` -- presumably
# an initial-parameter estimator for NegBinRepHMM below). Restore the
# header before this file can run.
# Clamp any negative entries to a small positive value.
alpha[alpha < 0] = 0.001
for el in [mu, alpha]:
    # Symmetrize each 2x3 parameter matrix across its two rows.
    # min(a, b) + 0.5*|a - b| is just the midpoint (a + b) / 2:
    high = min(el[0,1], el[1,2]) + 0.5 * fabs(el[0,1] - el[1,2])
    low = min(el[1,1], el[0,2]) + 0.5 * fabs(el[1,1] - el[0,2])
    med = np.mean([el[0,0], el[1,0]])
    # Columns 1 and 2 swap roles between the two rows; column 0 is
    # shared, so after this loop the two rows are mirror images.
    el[0,1] = high
    el[1,2] = high
    el[1,1] = low
    el[0,2] = low
    el[0,0] = med
    el[1,0] = med
return alpha, mu
class NegBinRepHMM(_BaseHMM):
def __init__(self, alpha, mu, dim_cond_1, dim_cond_2, init_state_seq=None, n_components=3, covariance_type='diag',
startprob_prior=1.0, transmat_prior=1.0, func=None,
algorithm="viterbi", means_prior=None, means_weight=0,
covars_prior=1e-2, covars_weight=1,
random_state=None, n_iter=30, thresh=1e-2,
params=string.ascii_letters,
init_params=string.ascii_letters):
_BaseHMM.__init__(self, n_components,
startprob_prior=startprob_prior,
transmat_prior=transmat_prior, algorithm=algorithm,
random_state=random_state, n_iter=n_iter,
tol=thresh, params=params,
init_params=init_params)
self.dim = [dim_cond_1, dim_cond_2] #dimension of one emission