How to use the pyglm.models.model_factory.make_model function in PyGLM

To help you get started, we’ve selected a few PyGLM examples drawn from public projects, showing popular ways make_model is used.

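Before the individual examples, here is a minimal sketch of the basic pattern they all share, based only on the calls visible on this page: make_model takes a model template name plus keyword arguments such as N (network size) and dt (time step), and returns a nested dict of model parameters. The import path is taken from the title above; the template name and sizes here are illustrative.

from pyglm.models.model_factory import make_model

# Build a model specification from a named template.
# 'standard_glm' appears in the examples below; N and dt are illustrative.
model = make_model('standard_glm', N=10, dt=0.001)

# The result is a nested dict of parameters; e.g. the distance-weighted
# examples below read model['network']['graph']['N_dims'].
print(model.keys())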

From slinderman/theano_pyglm, test/synth_map_with_xv.py:
def run_synth_test():
    """ Run a test with synthetic data and MAP inference with cross validation
    """
    options, popn, data, popn_true, x_true = initialize_test_harness()
    
    # Get the list of models for cross validation
    base_model = make_model(options.model, N=data['N'], dt=0.001)
    models = get_xv_models(base_model)

    # Segment data into training and cross validation sets
    train_frac = 0.75
    T_split = data['T'] * train_frac
    train_data = segment_data(data, (0, T_split))
    xv_data = segment_data(data, (T_split, data['T']))

    # Preprocess the data sequences
    train_data = popn.preprocess_data(train_data)
    xv_data = popn.preprocess_data(xv_data)

    # Sample random initial state
    x0 = popn.sample()

    # Track the best model and parameters
From slinderman/theano_pyglm, test/generate_synth_data_kayak.py:
def run_gen_synth_data():
    """ Run a test with synthetic data and MCMC inference
    """
    options, args = parse_cmd_line_args()
    
    # Create the model
    dt = 0.001
    model = make_model(options.model, N=options.N, dt=dt)
    # Set the sparsity level to minimize the risk of unstable networks
    stabilize_sparsity(model)

    print "Creating master population object"
    popn = KayakPopulation(model)

    # Sample random parameters from the model
    x_true = popn.sample()

    # Check stability of matrix
    assert check_stability(model, x_true, options.N), "ERROR: Sampled network is unstable!"

    # Save the model so it can be loaded alongside the data
    fname_model = os.path.join(options.resultsDir, 'model.pkl')
    print "Saving population to %s" % fname_model
    with open(fname_model,'w') as f:
From slinderman/theano_pyglm, test/fit_latent_network.py:
def test_latent_distance_network_sampler(N, N_samples=10000):
    """
    Generate a bunch of latent distance networks, run the sampler
    on them to see how well we mix over latent locations.

    :param N: Number of neurons in the network
    """
    true_model_type = 'latent_distance'
    if true_model_type == 'erdos_renyi':
        true_model = make_model('sparse_weighted_model', N)
    elif true_model_type == 'latent_distance':
        true_model = make_model('distance_weighted_model', N)

    distmodel = make_model('distance_weighted_model', N)
    D = distmodel['network']['graph']['N_dims']
    trials = 1
    for t in range(trials):
        # Generate a true random network
        popn_true, x_true, A_true = sample_network_from_prior(true_model)
        dist_popn, x_inf, _ = sample_network_from_prior(distmodel)

        # Seed the inference population with the true network
        x_inf['net']['graph']['A'] = A_true

        # Create a location sampler
        print "Initializing latent location sampler"
        loc_sampler = LatentLocationUpdate()
        loc_sampler.preprocess(dist_popn)

        # Run the sampler
From slinderman/theano_pyglm, test/parallel_map_with_xv.py:
def run_parallel_map():
    """ Run a test with synthetic data and MCMC inference
    """
    options, popn, data, client, popn_true, x_true = initialize_parallel_test_harness()

    # Get the list of models for cross validation
    base_model = make_model(options.model, N=data['N'])
    models = get_xv_models(base_model)

    # Segment data into training and cross validation sets
    train_frac = 0.75
    T_split = data['T'] * train_frac
    train_data = segment_data(data, (0, T_split))
    xv_data = segment_data(data, (T_split, data['T']))

    # Sample random initial state
    x0 = popn.sample(None)

    # Track the best model and parameters
    best_ind = -1
    best_xv_ll = -np.inf
    best_x = x0
    best_model = None
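Both cross-validation snippets break off right after these initializations. The following is a hedged sketch of the selection loop they lead into; fit_model and xv_log_likelihood are hypothetical stand-ins for the repo's MAP-fitting and held-out scoring routines, not theano_pyglm APIs.

import numpy as np

def select_best_model(models, fit_model, xv_log_likelihood, x0):
    # Track the best model and parameters, as in the snippets above.
    best_ind, best_xv_ll = -1, -np.inf
    best_x, best_model = x0, None
    for ind, model in enumerate(models):
        x_fit = fit_model(model, x0)               # fit on the training split
        xv_ll = xv_log_likelihood(model, x_fit)    # score on the held-out split
        if xv_ll > best_xv_ll:
            best_ind, best_xv_ll = ind, xv_ll
            best_x, best_model = x_fit, model
    return best_ind, best_model, best_x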
From slinderman/theano_pyglm, test/generate_synth_data.py:
def run_gen_synth_data():
    """ Run a test with synthetic data and MCMC inference
    """
    options, args = parse_cmd_line_args()
    
    # Create the model
    dt = 0.001
    model = make_model(options.model, N=options.N, dt=dt)
    # Set the sparsity level to minimize the risk of unstable networks
    stabilize_sparsity(model)

    print "Creating master population object"
    popn = Population(model)

    # Sample random parameters from the model
    x_true = popn.sample()

    # Check stability of matrix
    assert check_stability(model, x_true, options.N), "ERROR: Sampled network is unstable!"


    # Save the model so it can be loaded alongside the data
    fname_model = os.path.join(options.resultsDir, 'model.pkl')
    print "Saving data to %s" % fname_model
From slinderman/theano_pyglm, test/fit_latent_network.py:
def fit_latent_network_to_mle():
    """ Run a test with synthetic data and MCMC inference
    """
    options, popn, data, popn_true, x_true = initialize_test_harness()

    # Load MLE parameters from command line
    mle_x = None
    if options.x0_file is not None:
        with open(options.x0_file, 'r') as f:
            print "Initializing with state from: %s" % options.x0_file
            mle_x = cPickle.load(f)

            mle_model = make_model('standard_glm', N=data['N'])
            mle_popn = Population(mle_model)
            mle_popn.set_data(data)

    # Create a location sampler
    print "Initializing latent location sampler"
    loc_sampler = LatentLocationUpdate()
    loc_sampler.preprocess(popn)

    # Convert the MLE results into a weighted adjacency matrix
    # (assumes options.x0_file was provided, so mle_popn, mle_model,
    # and mle_x were set above)
    x_aw = popn.sample(None)
    x_aw = convert_model(mle_popn, mle_model, mle_x, popn, popn.model, x_aw)

    # Get rid of unnecessary keys
    del x_aw['glms']

    # Fit the latent distance network to a thresholded adjacency matrix
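For clarity, a thresholded adjacency matrix here just means the weighted matrix binarized at a cutoff; a tiny illustrative sketch follows (the matrix and cutoff are arbitrary, not values from the repo).

import numpy as np

W = np.abs(np.random.randn(5, 5))   # stand-in for a weighted adjacency matrix
threshold = 0.1                     # arbitrary cutoff
A = (W > threshold).astype(int)     # thresholded (binary) adjacency matrix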
From slinderman/theano_pyglm, test/parallel_map.py:
def run_parallel_map():
    """ Run a test with synthetic data and MCMC inference
    """
    # Parse command line args
    (options, args) = parse_cmd_line_args()

    # Load the data
    data = load_data(options)
    # Get a model for the data
    model_type = 'standard_glm'
    model = make_model(model_type, N=data['N'])

    # Get parallel clients
    rc = Client(profile="sge")
    dview = rc[:]
    # dview = get_engines(n_workers=8)

    # Load imports on the client
    load_imports_on_client(dview)

    # Initialize population objects on the clients
    dview.apply_sync(initialize_client, (model_type, data['N'], data))
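For context, the parallel plumbing above is the standard IPython parallel pattern: connect a Client to a running cluster, take a DirectView over the engines, and apply_sync functions on them. A minimal self-contained sketch using the modern ipyparallel package (the worker function is illustrative; it assumes an ipcluster is already running):

from ipyparallel import Client

def square(x):
    return x * x

rc = Client()                           # connect to the running cluster
dview = rc[:]                           # a DirectView over all engines
results = dview.apply_sync(square, 4)   # run synchronously on every engine
print(results)                          # one result per engine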
From slinderman/theano_pyglm, test/parallel_ais.py:
"""
    options, popn, data, client, popn_true, x_true = initialize_parallel_test_harness()

    # If x0 specified, load x0 from file
    x0 = None
    if options.x0_file is not None:
        with open(options.x0_file, 'r') as f:
            print "Initializing with state from: %s" % options.x0_file
            prev_x0 = cPickle.load(f)
            if isinstance(prev_x0, list):
                x0 = prev_x0[-1]
            else:
                mle_x0 = prev_x0
                # HACK: We're assuming x0 came from a standard GLM
                mle_model = make_model('standard_glm', N=data['N'])
                mle_popn = Population(mle_model)
                mle_popn.set_data(data)

                x0 = popn.sample(None)
                x0 = convert_model(mle_popn, mle_model, mle_x0, popn, popn.model, x0)

    use_existing = False

    fname = os.path.join(options.resultsDir, '%s_marginal_lkhd.pkl' % options.model)
    if use_existing and os.path.exists(fname):
        print("Found existing results")
        with open(fname) as f:
            marg_lkhd = cPickle.load(f)
    else: