How to use the pyglm.models.Population class in PyGLM

To help you get started, we’ve selected a few PyGLM examples based on popular ways it is used in public projects.

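The snippets below come from the slinderman/pyglm repository and use Python 2 idioms (print statements, xrange, cPickle). As a quick orientation before the excerpts, here is a minimal sketch of constructing a Population and sampling from it; all values are illustrative rather than taken from any one example:

import numpy as np
from pyglm.models import Population

# Illustrative sizes (not taken from the examples below)
N = 10           # Number of neurons
T = 1000         # Number of time bins
dt = 1.0         # Bin width
dt_max = 10.0    # Maximum time lag of the weight filters
B = 2            # Number of basis functions for the weights

# Build a population GLM with default hyperparameters and sample
# a T x N array of spike counts from it
model = Population(N=N, dt=dt, dt_max=dt_max, B=B)
S = model.generate(T=T, keep=True)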

github slinderman/pyglm/examples/generate_synthetic_data.py (view on GitHub)
    ###########################################################
    #   Network hyperparameters
    ###########################################################
    # c = np.arange(C).repeat((N // C))                       # Neuron to cluster assignments
    # p = 0.5 * np.ones((C,C))                                      # Probability of connection for each pair of clusters
    # # p = 0.9 * np.eye(C) + 0.05 * (1-np.eye(C))              # Probability of connection for each pair of clusters
    # mu = np.zeros((C,C,B))                                  # Mean weight for each pair of clusters
    # Sigma = np.tile( 3**2 * np.eye(B)[None,None,:,:], (C,C,1,1))    # Covariance of weight for each pair of clusters
    # network_hypers = {'C': C, 'c': c, 'p': p, 'mu': mu, 'Sigma': Sigma}

    ###########################################################
    # Create the model with these parameters
    ###########################################################
    network_hypers = {"Sigma_0": 10*np.eye(B)}
    true_model = Population(N=N, dt=dt, dt_max=dt_max, B=B,
                            bias_hypers=bias_hypers,
                            network_hypers=network_hypers)

    ###########################################################
    # Sample from the true model
    ###########################################################
    S, Psi_hat = true_model.generate(T=T, keep=True, return_Psi=True)
    print "Number of generated spikes:\t", S.sum(0)

    ###########################################################
    #  Plot the network, the spike train and mean rate
    ###########################################################
    plt.figure()
    plt.imshow(true_model.weight_model.W_effective.sum(2), vmin=-1.0, vmax=1.0, interpolation="none", cmap="RdGy")

    plt.figure()
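The excerpt breaks off at the second plt.figure() call; per the section comment, that figure shows the spike train and mean rate. A minimal stand-in using only the sampled S from above (the plotting choices here are ours, not the file's):

    # Stand-in for the spike-train figure the excerpt cuts off:
    # render the T x N spike-count array as an image
    plt.imshow(S.T, aspect="auto", interpolation="none", cmap="Greys")
    plt.xlabel("Time bin")
    plt.ylabel("Neuron")
    plt.show()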
github slinderman/pyglm/test/test_ais.py (view on GitHub)
B = 2           # Number of basis functions for the weights

#   Bias hyperparameters
bias_hypers = {"mu_0": -1.0, "sigma_0": 0.25}

p = 0.5                 # Probability of a connection between any pair of neurons
mu = np.zeros((B,))     # Mean of the B-dimensional weight vector for a connection
sigma = 1.0 * np.eye(B) # Covariance of the weight vector for a connection

# Define the true network model for the GLM
true_network = FactorizedNetworkDistribution(
    N,
    BernoulliAdjacencyDistribution, {"p": p},
    FixedGaussianWeightDistribution, {"B": B, "mu": mu, "sigma": sigma})

true_model = Population(N=N, dt=dt, dt_max=dt_max, B=B,
                        bias_hypers=bias_hypers,
                        network=true_network)

# Create another copy of the model with the true network model
test_network = FactorizedNetworkDistribution(
    N,
    BernoulliAdjacencyDistribution, {"p": p},
    FixedGaussianWeightDistribution, {"B": B, "mu": mu, "sigma": sigma})

test_model = Population(N=N, dt=dt, dt_max=dt_max, B=B,
                        bias_hypers=bias_hypers,
                        network=test_network)


# Sample some synthetic data from the true model
S = true_model.generate(T=T, keep=True, verbose=False)
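As with the previous excerpt, the sizes and imports are defined earlier in the file; FactorizedNetworkDistribution and the adjacency/weight classes come from pyglm's network-modeling dependencies, whose import paths are omitted here rather than guessed. The sizes presumably look something like:

# Assumed earlier in the test script (values illustrative)
N = 10          # Number of neurons
T = 10000       # Number of time bins
dt = 1.0        # Bin width
dt_max = 10.0   # Maximum time lag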
github slinderman/pyglm/test/test_collapsed_geweke.py (view on GitHub)
    # Test the model at a particular temperature
    temperature = 0.25

    #   Bias hyperparameters
    bias_hypers = {"mu_0": -1.0, "sigma_0": 0.25}

    p = 0.5                 # Probability of a connection between any pair of neurons
    mu = np.zeros((B,))     # Mean of the B-dimensional weight vector for a connection
    sigma = 1.0 * np.eye(B) # Covariance of the weight vector for a connection

    ###########################################################
    # Create the model with these parameters
    ###########################################################
    network_hypers = {'p': p, 'mu': mu, 'sigma': sigma}
    # Copy the network hypers.
    test_model = Population(N=N, dt=dt, dt_max=dt_max, B=B,
                            bias_hypers=bias_hypers,
                            network_hypers=network_hypers)

    # Sample some initial data
    test_model.generate(T=T, keep=True, verbose=False)

    ###########################################################
    # Run the Geweke test
    ###########################################################
    N_samples = 10000
    samples = []
    lps = []
    # import ipdb; ipdb.set_trace()  # debugging breakpoint left in the upstream test
    for itr in progprint_xrange(N_samples):
        lps.append(test_model.log_probability())
        samples.append(test_model.copy_sample())
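        # The excerpt cuts off here, inside the loop. Schematically, each
        # Geweke iteration would continue by updating the parameters and
        # then regenerating the data. resample_model follows the
        # pybasicbayes convention this codebase builds on and is an
        # assumption, not the file's code:
        test_model.resample_model()
        # ... then discard the old spike train and regenerate it,
        #     e.g. via test_model.generate(T=T, keep=True) ...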
github slinderman/pyglm/test/test_ais.py (view on GitHub)
# plt.plot(Bs, mls, '+', linestyle="")
#
# plt.subplot(122)
# plt.boxplot(np.array(lws).T, positions=Bs, widths=5)
# plt.xlim(-5, Bs[-1]+10)
#
# plt.show(block=True)


# Define an alternative model for comparison
alt_network = FactorizedNetworkDistribution(
    N,
    BetaBernoulliAdjacencyDistribution, {},
    FixedGaussianWeightDistribution, {"B": B, "mu": mu, "sigma": sigma})

alt_model = Population(N=N, dt=dt, dt_max=dt_max, B=B,
                       bias_hypers=bias_hypers,
                       network=alt_network)

# Add training data in chunks
chunksz = 1024
for offset in xrange(0, T, chunksz):
    alt_model.add_data(S[offset:min(offset+chunksz,T)])

# Estimate the marginal likelihood under the true and alternative models
test_ml, test_ml_std = test_model.ais(N_samples=20, B=1000, verbose=True)
alt_ml, alt_ml_std  = alt_model.ais(N_samples=20, B=1000, verbose=True)
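With both estimates in hand, the natural summary is the log Bayes factor between the two network models; a minimal follow-up (the comparison and formatting are ours, not the file's):

# A positive log Bayes factor favors the true (Bernoulli) network model
log_bf = test_ml - alt_ml
print "True model ML:\t%.2f +/- %.2f" % (test_ml, test_ml_std)
print "Alt model ML:\t%.2f +/- %.2f" % (alt_ml, alt_ml_std)
print "Log Bayes factor:\t%.2f" % log_bf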
github slinderman/pyglm/test/test_ais.py (view on GitHub)
# Add training data in chunks
chunksz = 1024
for offset in xrange(0, T, chunksz):
    test_model.add_data(S[offset:min(offset+chunksz,T)])

# Plot estimated marginal likelihood as a function of number of steps, B
# N_samples = 20
# Bs = [100]
# mls = []
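The commented-out tail sketches a sweep of the marginal-likelihood estimate over the number of annealing steps B. Filled in using only the ais signature shown above (the grid of step counts is illustrative):

N_samples = 20
Bs = [100, 500, 1000]   # illustrative grid of annealing-step counts
mls = []
for B_steps in Bs:
    ml, ml_std = test_model.ais(N_samples=N_samples, B=B_steps, verbose=False)
    mls.append(ml)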
github slinderman/pyglm/test/test_model_comparison.py (view on GitHub)
seed = 1234
# seed = np.random.randint(2**32)

# Create a latent distance model with N nodes and D-dimensional locations
N = 20      # Number of training neurons
T = 1000    # Number of training time bins
B = 1       # Dimensionality of the weights
D = 2       # Dimensionality of the feature space

# Define the true network model for the GLM
true_net_model = FactorizedNetworkDistribution(
    N+1,
    LatentDistanceAdjacencyDistribution, {},
    NIWGaussianWeightDistribution, {})

true_model = Population(N+1, B=B, network=true_net_model)

# Generate synthetic data from the model
Sfull = true_model.generate(keep=False, T=T)
Strain = Sfull[:,:N]
Stest = Sfull[:,N:].ravel()

# Define a list of network models to use for comparison
adj_models = [
    LatentDistanceAdjacencyDistribution,
    SBMAdjacencyDistribution,
    BetaBernoulliAdjacencyDistribution,
]

weight_models = [
    NIWGaussianWeightDistribution,
    SBMGaussianWeightDistribution
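The excerpt cuts off inside the weight_models list, but the intent is clear: cross each adjacency model with each weight model and score every combination. A sketch of such a loop, reusing the constructor pattern and the ais call from the earlier snippets (this is our sketch, not the file's actual comparison, and it omits scoring the held-out neuron):

from itertools import product

results = {}
for adj_class, weight_class in product(adj_models, weight_models):
    # Build a network model and a population for this combination
    net = FactorizedNetworkDistribution(N, adj_class, {}, weight_class, {})
    model = Population(N, B=B, network=net)
    model.add_data(Strain)

    # Score with AIS, as in test_ais.py (an assumption for this file)
    ml, ml_std = model.ais(N_samples=20, B=1000, verbose=False)
    results[(adj_class.__name__, weight_class.__name__)] = ml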
github slinderman/pyglm/examples/gibbs_demo.py (view on GitHub)
    test_path = os.path.join("data", "synthetic", "synthetic_er_K5_T10000_test.pkl.gz")
    with gzip.open(test_path, 'r') as f:
        S_test, _ = cPickle.load(f)

    T      = S.shape[0]
    N      = true_model.N
    B      = true_model.B
    dt     = true_model.dt
    dt_max = true_model.dt_max

    ###########################################################
    # Create a test spike-and-slab model
    ###########################################################

    # Copy the network hypers.
    test_model = Population(N=N, dt=dt, dt_max=dt_max, B=B,
                            basis_hypers=true_model.basis_hypers,
                            observation_hypers=true_model.observation_hypers,
                            activation_hypers=true_model.activation_hypers,
                            weight_hypers=true_model.weight_hypers,
                            network_hypers=true_model.network_hypers)
    test_model.add_data(S)

    # Convolve the test data for fast heldout likelihood calculations
    F_test = test_model.basis.convolve_with_basis(S_test)

    # Initialize plots
    ln, im_net = initialize_plots(true_model, test_model, S)

    ###########################################################
    # Fit the test model with Gibbs sampling
    ###########################################################
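The Gibbs loop itself falls outside the excerpt. Schematically, each iteration performs one full sweep over the model's conditionals and then scores the held-out data; the method names below follow conventions visible elsewhere in these snippets but are assumptions, not the file's code:

    N_samples = 1000
    lps, plls = [], []
    for itr in xrange(N_samples):
        test_model.resample_model()              # one Gibbs sweep (assumed name)
        lps.append(test_model.log_probability())

        # Held-out predictive likelihood on the pre-convolved test data
        # (method name and the F= keyword are assumptions)
        plls.append(test_model.heldout_log_likelihood(S_test, F=F_test))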
github slinderman/pyglm/pyglm/distributions.py (view on GitHub)
        # The marker size denotes the number of spikes
        artists = []
        for n in xrange(N):
            t = np.where(Sclip[:,n] > 0)[0]
            artist = ax.scatter(indices[t], (N-n) * np.ones_like(t),
                     color=color,
                     marker='s',
                     facecolors=color, edgecolor=color,
                     s=10)

            artists.append(artist)

        return artists


class PopulationDistribution(_PopulationDistributionBase, Population):
    _population_class = Population

class NegativeBinomialPopulationDistribution(_PopulationDistributionBase):
    _population_class = NegativeBinomialPopulation


class NegativeBinomialEmptyPopulationDistribution(_PopulationDistributionBase):
    _population_class = NegativeBinomialEmptyPopulation
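These thin wrappers presumably adapt each population model to a common distribution interface: _PopulationDistributionBase supplies the distribution-style behavior, and _population_class tells it which concrete population class to instantiate, so a population GLM can be dropped in wherever a distribution object is expected.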