B = true_model.B
dt = true_model.dt
dt_max = true_model.dt_max
###########################################################
# Create and fit a standard model for initialization
###########################################################
with gzip.open(init_path, 'r') as f:
init_model = cPickle.load(f)
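# Hedged aside: the complementary save step, if init_model still needs to be
# written to init_path, would use the same gzip + cPickle pairing (shown
# commented out, since it is not part of this snippet's flow):
# with gzip.open(init_path, 'w') as f:
#     cPickle.dump(init_model, f, protocol=-1)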
###########################################################
# Create a test spike-and-slab model
###########################################################
# Copy the network hypers.
test_model = NegativeBinomialEigenmodelPopulation(N=N, dt=dt, dt_max=dt_max, B=B,
basis_hypers=true_model.basis_hypers,
observation_hypers=true_model.observation_hypers,
activation_hypers=true_model.activation_hypers,
weight_hypers=true_model.weight_hypers,
bias_hypers=true_model.bias_hypers,
network_hypers=true_model.network_hypers)
test_model.add_data(S)
# Initialize with the standard model
test_model.initialize_with_standard_model(init_model)
# Convolve the test data for fast heldout likelihood calculations
F_test = test_model.basis.convolve_with_basis(S_test)
###########################################################
# Fit the test model with Gibbs sampling
###########################################################
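# A minimal Gibbs sampling sketch. Hedged assumptions: the population model
# exposes resample_model(), log_likelihood(), and heldout_log_likelihood()
# (the F= keyword reuses the precomputed basis convolution); N_samples is an
# illustrative setting, not a value from the original script.
N_samples = 100
lls, plls = [], []
for itr in xrange(N_samples):
    test_model.resample_model()
    lls.append(test_model.log_likelihood())
    plls.append(test_model.heldout_log_likelihood(S_test, F=F_test))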
algorithms=["bfgs"])
T = train.shape[0]
N = true_model.N
B = true_model.B
dt = true_model.dt
dt_max = true_model.dt_max
# Create and fit a standard model for initialization
init_model = standard_results["bfgs"]
###########################################################
# Create a test spike-and-slab model
###########################################################
# Copy the network hypers.
test_model = NegativeBinomialEigenmodelPopulation(N=N, dt=dt, dt_max=dt_max, B=B,
basis_hypers=true_model.basis_hypers,
observation_hypers=true_model.observation_hypers,
activation_hypers=true_model.activation_hypers,
weight_hypers=true_model.weight_hypers,
bias_hypers=true_model.bias_hypers,
network_hypers=true_model.network_hypers)
test_model.add_data(train)
# Initialize with the standard model
test_model.initialize_with_standard_model(init_model)
# Convolve the test data for fast heldout likelihood calculations
F_test = test_model.basis.convolve_with_basis(test)
###########################################################
# Fit the test model with Gibbs sampling
###########################################################
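# A sketch of keeping posterior samples while Gibbs sampling. Hedged
# assumptions: resample_model() and copy_sample() behave as in
# pybasicbayes-style models; N_samples is illustrative. Posterior summaries
# (e.g. the mean network) can then be averaged over `samples`.
N_samples = 100
samples = []
for itr in xrange(N_samples):
    test_model.resample_model()
    samples.append(test_model.copy_sample())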
# Create and fit a standard model for initialization
init_model = standard_results["bfgs"]
###########################################################
# Create a test spike-and-slab model
###########################################################
# Use the initial model to set hypers
observation_hypers = {"xi": init_model.xi}
bias_hypers = {"mu_0": init_model.bias.mean(),
"sigma_0": init_model.bias.var()}
network_hypers = {"mu_0": init_model.W.mean(axis=(0,1)),
"Sigma_0": np.diag(init_model.W.var(axis=(0,1)))}
# Copy the network hypers.
test_model = NegativeBinomialEigenmodelPopulation(
N=N, dt=dt, dt_max=dt_max, B=B,
basis_hypers=init_model.basis_hypers,
observation_hypers=observation_hypers,
bias_hypers=bias_hypers,
network_hypers=network_hypers)
test_model.add_data(train)
# Initialize the test model parameters with the
# parameters of the L1-regularized model
test_model.initialize_with_standard_model(init_model)
# Convolve the test data for fast heldout likelihood calculations
F_test = test_model.basis.convolve_with_basis(test)
###########################################################
algorithms=["bfgs"])
T = train.shape[0]
N = true_model.N
B = true_model.B
dt = true_model.dt
dt_max = true_model.dt_max
# Create and fit a standard model for initialization
init_model = standard_results["bfgs"]
###########################################################
# Create a test spike-and-slab model
###########################################################
# Copy the network hypers.
test_model = NegativeBinomialEigenmodelPopulation(N=N, dt=dt, dt_max=dt_max, B=B,
basis_hypers=true_model.basis_hypers,
observation_hypers=true_model.observation_hypers,
activation_hypers=true_model.activation_hypers,
weight_hypers=true_model.weight_hypers,
bias_hypers=true_model.bias_hypers,
network_hypers=true_model.network_hypers)
test_model.add_data(train)
# Convolve the test data for fast heldout likelihood calculations
F_test = test_model.basis.convolve_with_basis(test)
# Initialize with the standard model
test_model.initialize_with_standard_model(init_model)
print "Init PLL: ", init_model.heldout_log_likelihood(test)
# Create and fit a standard model for initialization
init_model = standard_results["bfgs"]
###########################################################
# Create a test spike-and-slab model
###########################################################
# Use the initial model to set hypers
observation_hypers = {"xi": init_model.xi}
bias_hypers = {"mu_0": init_model.bias.mean(),
"sigma_0": init_model.bias.var()}
network_hypers = {"mu_0": init_model.W.mean(axis=(0,1)),
"Sigma_0": np.diag(init_model.W.var(axis=(0,1)))}
# Copy the network hypers.
test_model = NegativeBinomialEigenmodelPopulation(
N=N, dt=dt, dt_max=dt_max, B=B,
basis_hypers=init_model.basis_hypers,
observation_hypers=observation_hypers,
bias_hypers=bias_hypers,
network_hypers=network_hypers)
# Add the data in minibatches
test_model.add_data(train, minibatchsize=1000)
# Initialize the test model parameters with the
# parameters of the L1-regularized model
test_model.initialize_with_standard_model(init_model)
# Convolve the test data for fast heldout likelihood calculations
F_test = test_model.basis.convolve_with_basis(test)
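# A stochastic variational inference (SVI) loop sketch. Hedged assumptions:
# the model exposes an SVI update, called svi_step(stepsize) here, that visits
# one of the minibatches added above; the Robbins-Monro step-size schedule and
# the iteration count are illustrative choices.
N_iters = 500
delay, forgetting_rate = 10.0, 0.75
for itr in xrange(N_iters):
    stepsize = (itr + delay) ** (-forgetting_rate)
    test_model.svi_step(stepsize)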
dt_max = 10.0 # Max time of synaptic influence
B = 1 # Number of basis functions for the weights
# Bias hyperparameters
bias_hypers = {"mu_0": -4.0, "sigma_0": 0.25}
###########################################################
# Network hyperparameters
###########################################################
network_hypers = {"p": 0.01, "mu_0": 0.*np.ones(B), "Sigma_0": 1**2*np.eye(B),
"sigma_F": 1.0}
###########################################################
# Create the model with these parameters
###########################################################
true_model = NegativeBinomialEigenmodelPopulation(N=N, dt=dt, dt_max=dt_max, B=B,
bias_hypers=bias_hypers,
network_hypers=network_hypers)
###########################################################
# Override the sample with some serious structure
###########################################################
eigenmodel = true_model.network.adjacency_dist
M = 4
th = np.linspace(0,2*np.pi, M, endpoint=False)
centers = np.hstack((np.cos(th)[:,None], np.sin(th)[:,None]))
# centers = [[1,1], [1,-1], [-1,-1], [-1,1]]
for m, center in enumerate(centers):
start = m*N//M
end = min((m+1)*N//M, N)
eigenmodel.F[start:end, :] = \
center + 0.1 * np.random.randn(end-start,2)
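# Optional visualization sketch (matplotlib assumed available): scatter the
# latent feature locations to confirm the four clusters imposed above.
import matplotlib.pyplot as plt
plt.figure()
for m in xrange(M):
    start, end = m*N//M, min((m+1)*N//M, N)
    plt.scatter(eigenmodel.F[start:end, 0], eigenmodel.F[start:end, 1],
                label="block %d" % m)
plt.legend(loc="best")
plt.xlabel("latent dimension 1")
plt.ylabel("latent dimension 2")
plt.title("Eigenmodel feature locations")
plt.show()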
algorithms=["bfgs"])
T = train.shape[0]
N = true_model.N
B = true_model.B
dt = true_model.dt
dt_max = true_model.dt_max
# Create and fit a standard model for initialization
init_model = standard_results["bfgs"]
###########################################################
# Create a test spike-and-slab model
###########################################################
# Copy the network hypers.
test_model = NegativeBinomialEigenmodelPopulation(N=N, dt=dt, dt_max=dt_max, B=B,
basis_hypers=true_model.basis_hypers,
observation_hypers=true_model.observation_hypers,
activation_hypers=true_model.activation_hypers,
weight_hypers=true_model.weight_hypers,
bias_hypers=true_model.bias_hypers,
network_hypers={'p': 0.19})
# network_hypers=true_model.network_hypers)
# Add the data in minibatches of 1000 time bins
minibatchsize = 1000
test_model.add_data(train, minibatchsize=minibatchsize)
# Initialize with the standard model
test_model.initialize_with_standard_model(init_model)
test_model.resample_from_mf()
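# Hedged evaluation sketch: after resampling concrete parameters from the
# mean-field posterior, score them on the held-out split (here called `test`,
# as in the other snippets); heldout_log_likelihood and its optional F=
# argument are assumed to match the usage in the snippets above.
F_test = test_model.basis.convolve_with_basis(test)
print "MF PLL: ", test_model.heldout_log_likelihood(test, F=F_test)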
B = true_model.B
dt = true_model.dt
dt_max = true_model.dt_max
###########################################################
# Create and fit a standard model for initialization
###########################################################
with gzip.open(init_path, 'r') as f:
init_model = cPickle.load(f)
###########################################################
# Create a test spike-and-slab model
###########################################################
# Copy the network hypers.
test_model = NegativeBinomialEigenmodelPopulation(N=N, dt=dt, dt_max=dt_max, B=B,
basis_hypers=true_model.basis_hypers,
observation_hypers=true_model.observation_hypers,
activation_hypers=true_model.activation_hypers,
weight_hypers=true_model.weight_hypers,
bias_hypers=true_model.bias_hypers,
network_hypers=true_model.network_hypers)
test_model.add_data(S)
# Initialize with the standard model
test_model.initialize_with_standard_model(init_model)
test_model.resample_from_mf()
# Convolve the test data for fast heldout likelihood calculations
F_test = test_model.basis.convolve_with_basis(S_test)
# Initialize plots
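# A minimal plotting sketch (matplotlib assumed available): show the held-out
# spike counts and turn on interactive mode so later iterations can update
# the figure in place.
import matplotlib.pyplot as plt
plt.ion()
fig, ax = plt.subplots(figsize=(8, 4))
ax.imshow(S_test.T, aspect="auto", interpolation="nearest", cmap="Greys")
ax.set_xlabel("time bin")
ax.set_ylabel("neuron")
ax.set_title("Held-out spike counts")
plt.show()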