How to use the pyglm.inference.kayak_gibbs.MetropolisHastingsUpdate class in PyGLM

To help you get started, we've selected a few PyGLM examples based on popular ways it is used in public projects. All of the snippets below come from pyglm/inference/kayak_gibbs.py in the slinderman/theano_pyglm repository.
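These snippets all revolve around subclassing MetropolisHastingsUpdate: each update implements preprocess(population) to cache whatever it needs from the model, and update(x) to take one Markov chain step over the state dictionary x. A minimal sketch of that contract (RandomWalkUpdate, its constructor, and the 'theta' state key are hypothetical; only the import path comes from the library):

import numpy as np
from pyglm.inference.kayak_gibbs import MetropolisHastingsUpdate

class RandomWalkUpdate(MetropolisHastingsUpdate):
    """Hypothetical update: symmetric random-walk MH on a scalar 'theta'."""
    def __init__(self, log_p):
        self.log_p = log_p  # target log density, assumed callable

    def preprocess(self, population):
        # Real updates cache population variables here; this toy needs nothing.
        pass

    def update(self, x):
        theta_curr = x['theta']
        theta_prop = theta_curr + 0.1 * np.random.randn()
        # Symmetric proposal, so the MH test is just a log-probability ratio.
        if np.log(np.random.rand()) < self.log_p(theta_prop) - self.log_p(theta_curr):
            x['theta'] = theta_prop
        return x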


From slinderman/theano_pyglm, pyglm/inference/kayak_gibbs.py:
                    update = _DiscreteLocalGibbsLatentLocationUpdate(latent_component)
                else:
                    update = _ContinuousLatentLocationUpdate(latent_component)
                update.preprocess(population)
                self.location_updates.append(update)

    def update(self, x):
        """
        Update each location update in turn
        """
        for update in self.location_updates:
            x = update.update(x)

        return x
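Since every update consumes and returns the state dictionary, running a sampler is just threading x through the updates repeatedly. A hypothetical driver loop (the names updates and population are assumptions; pyglm's actual Gibbs driver lives elsewhere):

import copy

x = population.sample()          # initial state
for update in updates:
    update.preprocess(population)

samples = []
for it in range(1000):
    for update in updates:
        x = update.update(x)     # each update returns the modified state
    samples.append(copy.deepcopy(x))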

class _ContinuousLatentLocationUpdate(MetropolisHastingsUpdate):
    """
    A special subclass to sample continuous latent locations
    """
    def __init__(self, latent_location_component):
        self.location = latent_location_component

    def preprocess(self, population):
        self.syms = population.get_variables()

        # Get the shape of L
        # TODO: Fix this hack!
        self.L = self.location.L
        self.L_shape = population.sample()['latent'][self.location.name]['L'].shape

        # Compute the log probability and its gradients, taking into
        # account the prior and the likelihood of any consumers of the
        # location.
From slinderman/theano_pyglm, pyglm/inference/kayak_gibbs.py:
                i,j = np.unravel_index(ij, (d1,d2), order='C')
                L[n,0] = prior.min0 + i
                L[n,1] = prior.min1 + j

            else:
                raise Exception('Only supporting Categorical and JointCategorical location priors')
        # Update current L
        if not self._check_bounds(L):
            import pdb; pdb.set_trace()
        x['latent'][self.location.name]['L'] = L.ravel()


        return x


class SharedTuningCurveUpdate(MetropolisHastingsUpdate):
    """
    A special subclass to sample shared tuning curves
    """
    def __init__(self):
        self.n_steps = 2
        self.avg_accept_rate = 0.9
        self.step_sz = 0.1

    def preprocess(self, population):
        self.population = population
        self.glm = self.population.glm
        self.N = population.N

        # Get the shared tuning curve component
        from pyglm.components.latent import LatentTypeWithTuningCurve
        self.tc_model = None
From slinderman/theano_pyglm, pyglm/inference/kayak_gibbs.py:
            if np.log(np.random.rand()) < lp_prop - lp_curr:
                L[n,:] = L_prop
                # print "%d: [%d,%d]->[%d,%d]" % (n, L_curr[0], L_curr[1], L_prop[0],L_prop[1])
            else:
                L[n,:] = L_curr
                # print "%d: [%d,%d]->[%d,%d]" % (n, L_curr[0], L_curr[1], L_curr[0],L_curr[1])

        # Update current L
        if not self._check_bounds(L):
            import pdb; pdb.set_trace()
        x['latent'][self.location.name]['L'] = L.ravel()

        return x
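The acceptance test above is the standard Metropolis rule for a symmetric proposal: accept with probability min(1, exp(lp_prop - lp_curr)), computed in log space so large ratios never overflow. The same decision as a standalone helper (illustrative; not part of pyglm):

import numpy as np

def mh_accept(lp_prop, lp_curr):
    # Accept with probability min(1, exp(lp_prop - lp_curr)); comparing
    # log(u) to the log ratio avoids exponentiating large numbers.
    return np.log(np.random.rand()) < lp_prop - lp_curr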


class _DiscreteGibbsLatentLocationUpdate(MetropolisHastingsUpdate):
    """
    A special subclass to sample discrete latent locations on a grid
    """
    def __init__(self, latent_location_component):
        self.location = latent_location_component

    def preprocess(self, population):
        self.N = population.N
        self.population = population
        self.syms = population.get_variables()
        self.L = self.location.Lmatrix

        # Compute the log probability and its gradients, taking into
        # account the prior and the likelihood of any consumers of the
        # location.
        self.log_p = self.location.log_p
From slinderman/theano_pyglm, pyglm/inference/kayak_gibbs.py:
#                                  debug=False)
        return ars.sample()

    def update(self, x, n):
        """ Collapsed Gibbs sample a column of A and W
        """
        N = self.population.N
        order = np.arange(N)
        np.random.shuffle(order)
        for n_pre in order:
            self._collapsed_sample_AW(n_pre, n, x)

        return x


class LatentLocationUpdate(MetropolisHastingsUpdate):
    """
    Gibbs sample the parameters of a latent distance model, namely the
    latent locations (if they are not given) and the distance scale.
    """
    def __init__(self):
        super(LatentLocationUpdate, self).__init__()

        # Use HMC if the locations are continuous
        # Otherwise, use a Metropolis-Hastings update
        self.avg_accept_rate = 0.9
        self.step_sz = 0.001

    def preprocess(self, population):
        self.N = population.model['N']

        # Get the location model(s)
From slinderman/theano_pyglm, pyglm/inference/kayak_gibbs.py:
            x['latent'][latent_type.name]['Y'] = Y

            # Update alpha with the conjugate dirichlet prior
            from pyglm.components.priors import Dirichlet
            if isinstance(latent_type.alpha_prior, Dirichlet):
                suffstats = latent_type.alpha_prior.alpha0.get_value()
                suffstats += np.bincount(Y, minlength=R)
                alpha = np.random.dirichlet(suffstats)
                x['latent'][latent_type.name]['alpha'] = alpha
            else:
                raise Warning('Cannot update alpha prior!')

        return x
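The alpha step above uses Dirichlet-categorical conjugacy: adding the per-type counts of Y to the prior concentration alpha0 gives the posterior Dirichlet, which can be sampled directly. Stripped of the pyglm plumbing, the resampling is just (example values for R and Y; the real ones come from the model):

import numpy as np

R = 3                                  # number of latent types (example)
Y = np.array([0, 2, 2, 1, 0])          # type assignments (example)
alpha0 = np.ones(R)                    # prior concentration
counts = np.bincount(Y, minlength=R)   # per-type counts: [2, 1, 2]
alpha = np.random.dirichlet(alpha0 + counts)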

class LatentLocationAndTypeUpdate(MetropolisHastingsUpdate):
    """
    A special subclass to sample discrete latent locations on a grid
    along with the type of the neuron
    """
    def __init__(self):
        raise NotImplementedError('Joint update of location and type has not yet been implemented!')

    def preprocess(self, population):
        self.N = population.N
        self.population = population
        self.syms = population.get_variables()

        # Get the latent type components
        from pyglm.components.latent import LatentType
        self.latent_types = []
        for latent_component in population.latent.latentlist:
From slinderman/theano_pyglm, pyglm/inference/kayak_gibbs.py:
            if np.log(np.random.rand()) < lnp_accept:
                L[n,:] = np.array(prop_loc)
            else:
                # Reject and stay in current loc
                L[n,:] = np.array(curr_loc)

        # Update current L
        if not self._check_bounds(L):
            import pdb; pdb.set_trace()
        x['latent'][self.location.name]['L'] = L.ravel()

        return x


class LatentTypeUpdate(MetropolisHastingsUpdate):
    """
    A special subclass to sample discrete latent types
    """
    def __init__(self):
        pass

    def preprocess(self, population):
        self.N = population.N
        self.population = population
        self.syms = population.get_variables()

        # Get the latent type components
        from pyglm.components.latent import LatentType
        self.latent_types = []
        for latent_component in population.latent.latentlist:
            if isinstance(latent_component, LatentType):
From slinderman/theano_pyglm, pyglm/inference/kayak_gibbs.py:
    def target_variables(self):
        # Return a list of variables that this update applies to
        return []


    def preprocess(self, population):
        """ Do any req'd preprocessing
        """
        pass

    def update(self, x_curr):
        """ Take a MH step
        """
        return x_curr

class ParallelMetropolisHastingsUpdate(MetropolisHastingsUpdate):
    """ Extending this class indicates that the updates can be
        performed in parallel over n, the index of the neuron.
    """
    def update(self, x_curr, n):
        """ Take a MH step for the n-th neuron. This can be performed in parallel 
            over other n' \in [N]
        """
        pass
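The contract here is that update(x, n) only touches neuron n's variables, so the per-neuron steps are independent and a driver is free to visit them in any order or hand them to separate workers. The serial form of such a driver (a hypothetical sketch; a parallel version would distribute the loop body):

def sweep(update, x, N):
    # Independent across neurons by the class contract, so this loop
    # is the natural unit to parallelize.
    for n in range(N):
        x = update.update(x, n)
    return x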


class HmcBiasUpdate(ParallelMetropolisHastingsUpdate):
    """
    Update the continuous and unconstrained bias parameters using Hamiltonian
    Monte Carlo. Stochastically follow the gradient of the parameters using
    Hamiltonian dynamics.
    """
From slinderman/theano_pyglm, pyglm/inference/kayak_gibbs.py:
                # Gibbs sample from the 2d distribution
                ij = log_sum_exp_sample(lnp.ravel(order='C'))
                i,j = np.unravel_index(ij, (d1,d2), order='C')
                L[n,0] = prior.min0 + i
                L[n,1] = prior.min1 + j

            else:
                raise Exception('Only supporting Categorical and JointCategorical location priors')
        # Update current L
        if not self._check_bounds(L):
            import pdb; pdb.set_trace()
        x['latent'][self.location.name]['L'] = L.ravel()

        return x
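log_sum_exp_sample draws a flat index in proportion to a vector of unnormalized log probabilities, and np.unravel_index then maps that index back onto the (d1, d2) grid. A numerically stable stand-in consistent with how it is called above (the real helper lives in pyglm's utilities and may differ):

import numpy as np

def log_sum_exp_sample(lnp):
    # Subtract the max so the largest term exponentiates to 1, then
    # normalize and draw one categorical sample.
    p = np.exp(lnp - lnp.max())
    p /= p.sum()
    return np.random.choice(len(p), p=p)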

class _DiscreteLocalGibbsLatentLocationUpdate(MetropolisHastingsUpdate):
    """
    A special subclass to sample discrete latent locations on a grid
    This is a Metropolis-Hastings update that takes local steps proportional
    to their relative probability.
    """
    def __init__(self, latent_location_component):
        self.location = latent_location_component

    def preprocess(self, population):
        self.N = population.N
        self.population = population
        self.glm = self.population.glm
        self.syms = population.get_variables()
        self.L = self.location.Lmatrix
        self.Lflat = self.location.Lflat
From slinderman/theano_pyglm, pyglm/inference/kayak_gibbs.py:
                                                adaptive_step_sz=True,
                                                avg_accept_rate=self.avg_accept_rate)

        # Update step size and accept rate
        self.step_sz = new_step_sz
        # print "Step: ", self.step_sz
        self.avg_accept_rate = new_accept_rate
        # print "Accept: ", self.avg_accept_rate

        # Update current L
        x['latent'][self.location.name]['L'] = L.reshape(self.L_shape)

        return x
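Here the HMC helper hands back an adapted step size along with a running acceptance-rate estimate, so the sampler tunes itself toward the initial avg_accept_rate of 0.9 used by these classes. A toy version of that feedback rule, purely to show the shape of the adaptation (pyglm's actual rule inside its hmc helper may differ):

def adapt_step_size(step_sz, accept_rate, target=0.9, gamma=0.05):
    # Grow the step when proposals are accepted more often than the target,
    # shrink it when they are rejected too often.
    if accept_rate > target:
        return step_sz * (1.0 + gamma)
    return step_sz * (1.0 - gamma)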


class _DiscreteLatentLocationUpdate(MetropolisHastingsUpdate):
    """
    A special subclass to sample discrete latent locations on a grid
    """
    def __init__(self, latent_location_component):
        self.location = latent_location_component

    def preprocess(self, population):
        self.N = population.N
        self.population = population
        self.syms = population.get_variables()
        self.L = self.location.Lmatrix

        # Compute the log probability and its gradients, taking into
        # account the prior and the likelihood of any consumers of the
        # location.
        self.log_p = self.location.log_p