# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
latent_dim_list,
batch_size_list,
n_seeds, n_jobs, _run, _seed):
seed_list = check_random_state(_seed).randint(np.iinfo(np.uint32).max,
size=n_seeds)
param_grid = ParameterGrid(
{'datasets': [['archi', 'hcp']],
'dropout_latent': dropout_latent_list,
'dropout_input': dropout_input_list,
'batch_size': batch_size_list,
'latent_dim': latent_dim_list,
# Hack to iterate over seed first'
'aseed': seed_list})
# Robust labelling of experiments
client = pymongo.MongoClient()
database = client['amensch']
c = database[collection].find({}, {'_id': 1})
c = c.sort('_id', pymongo.DESCENDING).limit(1)
c = c.next()['_id'] + 1 if c.count() else 1
Parallel(n_jobs=n_jobs,
verbose=10)(delayed(single_run)(config_updates, c + i, _run._id)
for i, config_updates in enumerate(param_grid))
'human_voice': None}
transfer = [{'datasets': ['archi', 'hcp', 'brainomics', 'camcan'],
'geometric_reduction': True,
'latent_dim': 50,
'dropout_input': 0.25,
'dropout_latent': 0.5,
'train_size': train_size,
'optimizer': 'adam',
'seed': seed} for seed in seed_list]
# exps += multinomial
# exps += geometric_reduction
# exps += latent_dropout
exps += transfer
# Robust labelling of experiments
client = pymongo.MongoClient()
database = client['amensch']
c = database[collection].find({}, {'_id': 1})
c = c.sort('_id', pymongo.DESCENDING).limit(1)
c = c.next()['_id'] + 1 if c.count() else 1
exps = shuffle(exps)
Parallel(n_jobs=n_jobs,
verbose=10)(delayed(single_run)(config_updates, c + i, _run._id)
for i, config_updates in enumerate(exps))
'geometric_reduction': True,
'latent_dim': 50,
'dropout_input': 0.25,
'dropout_latent': 0.5,
'optimizer': 'adam',
'seed': seed} for seed in seed_list]
exps += multinomial
# exps += geometric_reduction
# exps += latent_dropout
# exps += transfer
# Robust labelling of experiments
client = pymongo.MongoClient()
database = client['amensch']
c = database[collection].find({}, {'_id': 1})
c = c.sort('_id', pymongo.DESCENDING).limit(1)
c = c.next()['_id'] + 1 if c.count() else 1
exps = shuffle(exps)
Parallel(n_jobs=n_jobs,
verbose=10)(delayed(single_run)(config_updates, c + i, _run._id)
for i, config_updates in enumerate(exps))
param_grid = ParameterGrid(
{'datasets': [['la5c', 'hcp']],
'dataset_weight': [dict(hcp=i, la5c=1 - i)
for i in [0, 0.25, 0.5, 0.75]],
'shared_supervised': shared_supervised_list,
'task_prob': task_prob_list,
'dropout_latent': dropout_latent_list,
'latent_dim': latent_dim_list,
# Hack to iterate over seed first'
'aseed': seed_list})
# Robust labelling of experiments
client = pymongo.MongoClient()
database = client['amensch']
c = database[collection].find({}, {'_id': 1})
c = c.sort('_id', pymongo.DESCENDING).limit(1)
c = c.next()['_id'] + 1 if c.count() else 1
Parallel(n_jobs=n_jobs,
verbose=10)(delayed(single_run)(config_updates, c + i, _run._id)
for i, config_updates in enumerate(param_grid))
def run(n_components_list, alpha_list, n_jobs):
    """Launch one experiment per (n_components, alpha) pair, in parallel.

    Parameters
    ----------
    n_components_list : iterable of int
        Candidate values for the 'n_components' config entry.
    alpha_list : iterable of float
        Candidate values for the 'alpha' config entry.
    n_jobs : int
        Number of parallel joblib workers.

    Each experiment is dispatched to ``single_run`` with a unique integer
    id, continuing after the highest ``_id`` already stored in MongoDB.
    """
    # Full cartesian product of the two hyper-parameter lists.
    update_list = [{'n_components': n_components, 'alpha': alpha}
                   for n_components in n_components_list
                   for alpha in alpha_list]
    # Robust labelling of experiments: resume numbering after the largest
    # existing _id in the 'runs' collection (start at 1 if it is empty).
    client = pymongo.MongoClient()
    try:
        database = client['amensch']
        # find_one with a sort replaces the deprecated Cursor.count() /
        # Cursor.next() pattern (both removed in PyMongo 4).
        last = database.runs.find_one({}, {'_id': 1},
                                      sort=[('_id', pymongo.DESCENDING)])
        c = last['_id'] + 1 if last is not None else 1
    finally:
        # Close the connection; the original leaked it.
        client.close()
    Parallel(n_jobs=n_jobs,
             verbose=10)(delayed(single_run)(config_updates, c + i)
                         for i, config_updates in enumerate(update_list))
'dropout_input': 0.25,
'dropout_latent': 0.5,
'seed': seed} for seed in seed_list]
transfer = [{'datasets': [dataset, 'hcp'],
'geometric_reduction': True,
'latent_dim': 50,
'dropout_input': 0.25,
'dropout_latent': 0.5,
'seed': seed} for seed in seed_list]
# exps += multinomial
exps += geometric_reduction
exps += latent_dropout
exps += transfer
# Robust labelling of experiments
client = pymongo.MongoClient()
database = client['amensch']
c = database[collection].find({}, {'_id': 1})
c = c.sort('_id', pymongo.DESCENDING).limit(1)
c = c.next()['_id'] + 1 if c.count() else 1
Parallel(n_jobs=n_jobs,
verbose=10)(delayed(single_run)(config_updates, c + i, _run._id)
for i, config_updates in enumerate(exps))
['camcan', 'hcp']
]
transfer_camcan = [{'datasets': dataset,
'geometric_reduction': True,
'latent_dim': 50,
'dropout_input': 0.25,
'dropout_latent': 0.5,
'train_size': train_size,
'optimizer': 'adam',
'seed': seed} for seed in seed_list
for train_size in train_sizes
for dataset in datasets_list]
exps += transfer_camcan
# Robust labelling of experiments
client = pymongo.MongoClient()
database = client['amensch']
c = database[collection].find({}, {'_id': 1})
c = c.sort('_id', pymongo.DESCENDING).limit(1)
c = c.next()['_id'] + 1 if c.count() else 1
exps = shuffle(exps)
Parallel(n_jobs=n_jobs,
verbose=10)(delayed(single_run)(config_updates, c + i, _run._id)
for i, config_updates in enumerate(exps))
seed_list = check_random_state(_seed).randint(np.iinfo(np.uint32).max,
size=n_seeds)
param_grid = ParameterGrid(
{'datasets': [['archi', 'hcp']],
'dropout_latent': dropout_latent_list,
'dropout_input': dropout_input_list,
'batch_size': batch_size_list,
'latent_dim': latent_dim_list,
# Hack to iterate over seed first'
'aseed': seed_list})
# Robust labelling of experiments
client = pymongo.MongoClient()
database = client['amensch']
c = database[collection].find({}, {'_id': 1})
c = c.sort('_id', pymongo.DESCENDING).limit(1)
c = c.next()['_id'] + 1 if c.count() else 1
Parallel(n_jobs=n_jobs,
verbose=10)(delayed(single_run)(config_updates, c + i, _run._id)
for i, config_updates in enumerate(param_grid))
seed_list = check_random_state(_seed).randint(np.iinfo(np.uint32).max,
size=n_seeds)
param_grid = ParameterGrid(
{'datasets': [['archi', 'hcp', 'brainomics', 'la5c']],
'shared_supervised': shared_supervised_list,
'task_prob': task_prob_list,
'dropout_latent': dropout_latent_list,
'latent_dim': latent_dim_list,
# Hack to iterate over seed first'
'aseed': seed_list})
# Robust labelling of experiments
client = pymongo.MongoClient()
database = client['amensch']
c = database[collection].find({}, {'_id': 1})
c = c.sort('_id', pymongo.DESCENDING).limit(1)
c = c.next()['_id'] + 1 if c.count() else 1
Parallel(n_jobs=n_jobs,
verbose=10)(delayed(single_run)(config_updates, c + i, _run._id)
for i, config_updates in enumerate(param_grid))
update_list = []
for train_size in train_size_list:
for n_components in n_components_list:
for alpha in alpha_list:
config_updates = {'task_data': {'train_size': 778},
'rest_data': {'train_size': train_size},
'components':
{'n_components': n_components,
'alpha': alpha},
}
update_list.append(config_updates)
client = pymongo.MongoClient()
database = client['amensch']
c = database.runs.find({}, {'_id': 1})
c = c.sort('_id', pymongo.DESCENDING).limit(1)
c = c.next()['_id'] + 1 if c.count() else 1
Parallel(n_jobs=n_jobs)(delayed(single_run)(config_updates, c + i)
for i, config_updates in enumerate(update_list))