How to use the deephyper.benchmarks.keras_cmdline.return_optimizer function in deephyper

To help you get started, we've selected a few deephyper examples based on popular ways this function is used in public projects.


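Every project example below follows the same pattern: merge the search-supplied hyperparameters with the benchmark's defaults via keras_cmdline.fill_missing_defaults, then turn the completed dictionary into a Keras optimizer with keras_cmdline.return_optimizer. The minimal sketch below shows that pattern in isolation; the augment_parser body and the dictionary keys ('optimizer', 'lr', 'epochs') are illustrative assumptions, since each benchmark defines its own argument names.

from pprint import pprint
from deephyper.benchmarks import keras_cmdline

def augment_parser(parser):
    # Sketch only: each benchmark adds its own hyperparameter arguments here.
    parser.add_argument('--epochs', type=int, default=10)
    return parser

def run(param_dict):
    # Fill in defaults for any keys the search point did not include,
    # then build the Keras optimizer described by the completed dictionary.
    param_dict = keras_cmdline.fill_missing_defaults(augment_parser, param_dict)
    optimizer = keras_cmdline.return_optimizer(param_dict)
    pprint(param_dict)
    return optimizer

# Hypothetical search point; 'optimizer' and 'lr' are assumed key names.
run({'optimizer': 'adam', 'lr': 1e-3})
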
github deephyper / deephyper / deephyper / benchmarks / gcn / gcn.py View on Github
def run(param_dict):
    param_dict = keras_cmdline.fill_missing_defaults(augment_parser, param_dict)
    optimizer = keras_cmdline.return_optimizer(param_dict)
    pprint(param_dict)

    EPOCHS = param_dict['epochs']
    FILTER = param_dict['filter']
    MAX_DEGREE = param_dict['max_degree']
    SYM_NORM = param_dict['sys_norm']
    DROPOUT = param_dict['dropout']
    NUNITS = param_dict['nunits']
    ACTIVATION = param_dict['activation']
    BATCH_SIZE = param_dict['batch_size']
    TIMEOUT = param_dict['timeout']

    #SHARE_WEIGHTS = param_dict['share_weights']
    # Define parameters
    DATASET = 'cora'
    #FILTER = 'localpool'  # 'chebyshev'
github deephyper / deephyper / benchmarks / mnistmlp / mnist_mlp.py View on Github
def run(param_dict):
    param_dict = keras_cmdline.fill_missing_defaults(augment_parser, param_dict)
    optimizer = keras_cmdline.return_optimizer(param_dict)
    pprint(param_dict)
    
    timer.start('stage in')
    if param_dict['data_source']:
        data_source = param_dict['data_source']
    else:
        data_source = os.path.dirname(os.path.abspath(__file__))
        data_source = os.path.join(data_source, 'data')

    (x_train, y_train), (x_test, y_test) = load_data(
        origin=os.path.join(data_source, 'mnist.npz'),
        dest=param_dict['stage_in_destination']
    )
    
    timer.end()
github deephyper / deephyper / benchmarks / b2 / babi_memnn.py View on Github
def run(param_dict):
    param_dict = keras_cmdline.fill_missing_defaults(augment_parser, param_dict)
    optimizer = keras_cmdline.return_optimizer(param_dict)
    pprint(param_dict)
    start_time = time.time()
    challenges = {
        # QA1 with 10,000 samples
        'single_supporting_fact_10k': 'tasks_1-20_v1-2/en-10k/qa1_single-supporting-fact_{}.txt',
        # QA2 with 10,000 samples
        'two_supporting_facts_10k': 'tasks_1-20_v1-2/en-10k/qa2_two-supporting-facts_{}.txt',
    }
    challenge_type = 'single_supporting_fact_10k'
    challenge = challenges[challenge_type]
    
    timer.start('stage in')
    if param_dict['data_source']:
        data_source = param_dict['data_source']
    else:
        data_source = os.path.dirname(os.path.abspath(__file__))
github deephyper / deephyper / deephyper / benchmarks / capsule / capsule.py View on Github
def run(param_dict):
    param_dict = keras_cmdline.fill_missing_defaults(augment_parser, param_dict)
    optimizer = keras_cmdline.return_optimizer(param_dict)
    pprint(param_dict)

    BATCH_SIZE = param_dict['batch_size']
    EPOCHS = param_dict['epochs']
    DROPOUT = param_dict['dropout']
    DATA_AUG = param_dict['data_aug']
    NUM_CONV = param_dict['num_conv']
    DIM_CAPS = param_dict['dim_capsule']
    ROUTINGS = param_dict['routings']
    SHARE_WEIGHTS = param_dict['share_weights']
    TIMEOUT = param_dict['timeout']

    num_classes = 10
github deephyper / deephyper / benchmarks / cifar10cnn / cifar10_cnn.py View on Github
def run(param_dict):
    param_dict = keras_cmdline.fill_missing_defaults(augment_parser, param_dict)
    optimizer = keras_cmdline.return_optimizer(param_dict)
    pprint(param_dict)
    start_time = time.time()
    timer.start('stage in')
    if param_dict['data_source']:
        data_source = param_dict['data_source']
    else:
        data_source = os.path.dirname(os.path.abspath(__file__))
        data_source = os.path.join(data_source, 'data')

    (x_train, y_train), (x_test, y_test) = load_data(
        origin=os.path.join(data_source, 'cifar-10-python.tar.gz'),
        dest=param_dict['stage_in_destination'],
    )

    timer.end()
github deephyper / deephyper / deephyper / benchmarks / b3 / babi_rnn.py View on Github
def run(param_dict):
    param_dict = keras_cmdline.fill_missing_defaults(augment_parser, param_dict)
    optimizer = keras_cmdline.return_optimizer(param_dict)
    pprint(param_dict)

    BATCH_SIZE = param_dict['batch_size']
    EPOCHS = param_dict['epochs']
    DROPOUT = param_dict['dropout']
    ACTIVATION = param_dict['activation']
    TIMEOUT = param_dict['timeout']
    
    if param_dict['rnn_type'] == 'GRU':
        RNN = layers.GRU
    elif param_dict['rnn_type'] == 'SimpleRNN':
        RNN = layers.SimpleRNN
    else:
        RNN = layers.LSTM

    EMBED_HIDDEN_SIZE = param_dict['embed_hidden_size']
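
Each snippet above is cut off before the optimizer is actually consumed. In every benchmark, the object returned by keras_cmdline.return_optimizer is passed straight to model.compile, just like a hand-built Keras optimizer. The sketch below shows that follow-through with a deliberately simplified model; the layers, loss, and the tensorflow.keras import are illustrative assumptions, not the benchmarks' actual architectures.

from tensorflow import keras  # the original benchmarks used standalone Keras; adjust the import to match your setup

def compile_model(param_dict, optimizer):
    # Toy model standing in for the benchmark-specific architecture.
    model = keras.Sequential([
        keras.layers.Dense(param_dict.get('nunits', 128),
                           activation=param_dict.get('activation', 'relu')),
        keras.layers.Dense(10, activation='softmax'),
    ])
    # The optimizer built by keras_cmdline.return_optimizer plugs directly
    # into model.compile.
    model.compile(optimizer=optimizer,
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    return model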