How to use the pyglm.inference.coord_descent.coord_descent function in PyGLM

To help you get started, we’ve selected a few PyGLM examples based on popular ways this function is used in public projects.

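Before the full examples, here is a minimal usage sketch distilled from the snippets below. The calls to sample, compute_log_p, and coord_descent mirror those snippets; build_population is a hypothetical placeholder for whatever code constructs the Population object in your project, not a PyGLM API.

# Minimal sketch of the common pattern, assuming a Population object
# has already been constructed. build_population is a hypothetical
# placeholder, not a real PyGLM function.
from pyglm.inference.coord_descent import coord_descent

popn = build_population()            # hypothetical setup helper
x0 = popn.sample()                   # random initial state
print "LL0: %f" % popn.compute_log_p(x0)

# MAP estimate via coordinate descent; maxiter bounds the outer iterations
x_inf = coord_descent(popn, x0=x0, maxiter=1)
print "LL_inf: %f" % popn.compute_log_p(x_inf)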

github slinderman / theano_pyglm / test / synth_map.py
import os
import cPickle

from pyglm.inference.coord_descent import coord_descent

# initialize_test_harness and plot_results are helpers defined in the
# surrounding test module.

def run_synth_test():
    """ Run a test with synthetic data and MAP inference via coordinate descent.
    """
    options, popn, data, popn_true, x_true = initialize_test_harness()

    # Sample random initial state
    x0 = popn.sample()
    ll0 = popn.compute_log_p(x0)
    print "LL0: %f" % ll0

    # Perform inference: MAP estimation by coordinate descent
    x_inf = coord_descent(popn, x0=x0, maxiter=1)
    ll_inf = popn.compute_log_p(x_inf)
    print "LL_inf: %f" % ll_inf

    # Save results
    results_file = os.path.join(options.resultsDir, 'results.pkl')
    print "Saving results to %s" % results_file
    with open(results_file, 'wb') as f:  # binary mode for pickle protocol -1
        cPickle.dump(x_inf, f, protocol=-1)

    # Plot results
    plot_results(popn, x_inf, popn_true, x_true, resdir=options.resultsDir)

github slinderman / theano_pyglm / test / synth_map_with_xv.py
print "Total LL: %f" % ll_total
        total_lls[i] = ll_total

        # Update best model
        if ll_xv > best_xv_ll:
            best_ind = i
            best_xv_ll = ll_xv
            best_x = copy.deepcopy(x_inf)
            best_model = copy.deepcopy(model)
        
    # Create a population with the best model
    popn.set_hyperparameters(best_model)
    popn.set_data(data)

    # Fit the best model on the full training data
    best_x = coord_descent(popn, data, x0=x0, maxiter=1,
                           use_hessian=False,
                           use_rop=False)

    # Print results summary
    for i in np.arange(len(models)):
        print "Model %d:\tTrain LL: %.1f\tXV LL: %.1f\tTotal LL: %.1f" % (i, train_lls[i], xv_lls[i], total_lls[i])
    print "Best model: %d" % best_ind
    print "Best Total LL: %f" % popn.compute_ll(best_x)
    print "True LL: %f" % popn_true.compute_ll(x_true)

    # Save results
    results_file = os.path.join(options.resultsDir, 'results.pkl')
    print "Saving results to %s" % results_file
    with open(results_file, 'wb') as f:  # binary mode for pickle
        cPickle.dump(best_x, f)

github slinderman / theano_pyglm / test / synth_map_with_xv.py
    best_model = None

    # Fit each model using the optimum of the previous models
    train_lls = np.zeros(len(models))
    xv_lls = np.zeros(len(models))
    total_lls = np.zeros(len(models))
    for (i,model) in enumerate(models):
        print "Training model %d" % i
        x0 = copy.deepcopy(best_x)
        popn.set_hyperparameters(model)
        popn.set_data(train_data)
        ll0 = popn.compute_log_p(x0)
        print "Training LL0: %f" % ll0

        # Perform inference
        x_inf = coord_descent(popn, x0=x0, maxiter=1)
        ll_train = popn.compute_log_p(x_inf)
        print "Training LP_inf: %f" % ll_train
        train_lls[i] = ll_train

        # Compute log likelihood on the cross-validation (xv) data
        popn.set_data(xv_data)
        ll_xv = popn.compute_ll(x_inf)
        print "Cross Validation LL: %f" % ll_xv
        xv_lls[i] = ll_xv

        # Compute log likelihood on the full dataset
        popn.set_data(data)
        ll_total = popn.compute_ll(x_inf)
        print "Total LL: %f" % ll_total
        total_lls[i] = ll_total

github slinderman / theano_pyglm / pyglm / inference / gibbs.py
    if x0 is None:
        x0 = population.sample()
        
        if init_from_mle:
            print "Initializing with coordinate descent"
            from pyglm.models.model_factory import make_model, convert_model
            from pyglm.population import Population
            mle_model = make_model('standard_glm', N=N, dt=dt)
            mle_popn = Population(mle_model)

            for data in population.data_sequences:
                mle_popn.add_data(data)
            mle_x0 = mle_popn.sample()

            # Initialize with MLE under standard GLM
            mle_x0 = coord_descent(mle_popn, x0=mle_x0, maxiter=1)

            # Convert between inferred parameters of the standard GLM
            # and the parameters of this model. E.g., convert unweighted
            # networks to weighted networks with normalized impulse responses.
            x0 = convert_model(mle_popn, mle_model, mle_x0, population, population.model, x0)

    # # TODO: Move this to a better place
    # from pyglm.inference.smart_init import initialize_locations_by_correlation
    # initialize_locations_by_correlation(population, x0)

    # Create updates for this population
    serial_updates, parallel_updates = initialize_updates(population)

    # DEBUG Profile the Gibbs sampling loop
    import cProfile, pstats, StringIO
    pr = cProfile.Profile()