[Non-source artifact — advertising banner from the page this snippet was scraped from:] Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# --- fragment: stage and dispatch LAMMPS tasks for the current task_type ---
# NOTE(review): indentation has been stripped from this chunk, so the original
# if/else nesting is lost; comments describe the visible statements only.
else:
# Absolute paths to the potential model files; `models` is not referenced in the
# visible lines — presumably used in the (unseen) original nesting. TODO confirm.
models = [os.path.join(model_dir,ii) for ii in model_name]
# Files uploaded per task and files retrieved after the run.
forward_files = ['conf.lmp', 'lammps.in']+model_name
backward_files = ['log.lammps',task_type+'.out']
# Files shared by all tasks in a job group (uploaded once per group).
common_files=['lammps.in']+model_name
# With multiple deepmd models LAMMPS also writes a model-deviation file; fetch it.
if len(model_name)>1 and task_type == 'deepmd':
backward_files = backward_files + ['model_devi.out']
else:
# NOTE(review): as flattened here this `else` would also reject a valid
# single-model deepmd run; the original nesting most likely raised only for
# unknown task types — verify against the upstream source before relying on it.
raise RuntimeError ("unknow task %s, something wrong" % task_type)
# Collect the task directories still pending; nothing to submit if none remain.
run_tasks = util.collect_task(all_task,task_type)
if len(run_tasks)==0: return
# Machine/resource/command settings come from the machine config for this task type.
machine,resources,command,group_size=util.get_machine_info(mdata,task_type)
# Submit the grouped jobs; per-task stdout/stderr go to <task_type>.out/.err.
disp = make_dispatcher(machine, resources, work_path, run_tasks, group_size)
disp.run_jobs(resources,
command,
work_path,
run_tasks,
group_size,
common_files,
forward_files,
backward_files,
outlog=task_type+'.out',
errlog=task_type+'.err')
# --- fragment: accumulate a task and submit jobs rooted at directory `ii` ---
# `jj` and `run_tasks` are defined in the (unseen) enclosing loop. TODO confirm.
run_tasks.append(os.path.basename(jj))
# NOTE(review): the dispatcher is built against work_path but run_jobs receives
# `ii` as the work directory — presumably `ii` is a per-system subdirectory being
# iterated over; verify against the full source.
disp = make_dispatcher(machine, resources, work_path, run_tasks, group_size)
disp.run_jobs(resources,
command,
ii,
run_tasks,
group_size,
common_files,
forward_files,
backward_files,
outlog=task_type+'.out',
errlog=task_type+'.err')
# --- fragment: fallback dispatch branch (pairs with an unseen `if` above) ---
else:
# Collect pending tasks for this task type; bail out early when all are done.
run_tasks = util.collect_task(all_task,task_type)
if len(run_tasks)==0: return
# Reuses machine/resources/command/group_size bound earlier in the (unseen)
# enclosing scope — unlike the sibling branch, no get_machine_info call here.
disp = make_dispatcher(machine, resources, work_path, run_tasks, group_size)
disp.run_jobs(resources,
command,
work_path,
run_tasks,
group_size,
common_files,
forward_files,
backward_files,
outlog=task_type+'.out',
errlog=task_type+'.err')
# --- fragment: LAMMPS dispatch variant that also stages a strain.out input ---
# Near-duplicate of the fragment above; only forward_files differs ('strain.out').
else:
# Model file paths; `models` unused in the visible lines — TODO confirm usage.
models = [os.path.join(model_dir,ii) for ii in model_name]
# 'strain.out' is an extra per-task input here (e.g. precomputed strains).
forward_files = ['conf.lmp', 'lammps.in','strain.out']+model_name
backward_files = ['log.lammps', task_type+'.out']
common_files=['lammps.in']+model_name
# Multiple deepmd models → also retrieve the model-deviation output.
if len(model_name)>1 and task_type == 'deepmd':
backward_files = backward_files + ['model_devi.out']
else:
# NOTE(review): same concern as the sibling fragment — as flattened, this
# rejects single-model deepmd; original nesting likely differs. Verify upstream.
raise RuntimeError ("unknow task %s, something wrong" % task_type)
# Collect pending tasks, resolve machine settings, and submit grouped jobs.
run_tasks = util.collect_task(all_task,task_type)
if len(run_tasks)==0: return
machine,resources,command,group_size=util.get_machine_info(mdata,task_type)
disp = make_dispatcher(machine, resources, work_path, run_tasks, group_size)
disp.run_jobs(resources,
command,
work_path,
run_tasks,
group_size,
common_files,
forward_files,
backward_files,
outlog=task_type+'.out',
errlog=task_type+'.err')
# --- fragment: gather training-data files and dispatch training jobs ---
# Multi-system layout: collect set.*, type*.raw and the optional `nopbc` marker
# from each subsystem directory under `ii`.
for single_sys in os.listdir(os.path.join(ii)):
trans_comm_data += glob.glob(os.path.join(ii, single_sys, 'set.*'))
trans_comm_data += glob.glob(os.path.join(ii, single_sys, 'type*.raw'))
trans_comm_data += glob.glob(os.path.join(ii, single_sys, 'nopbc'))
else:
# Single-system layout: the same files live directly under `ii`.
trans_comm_data += glob.glob(os.path.join(ii, 'set.*'))
trans_comm_data += glob.glob(os.path.join(ii, 'type*.raw'))
trans_comm_data += glob.glob(os.path.join(ii, 'nopbc'))
# Return to the directory saved (in unseen code) before the data walk.
os.chdir(cwd)
# Optional config key: how many training tasks to group per job (default 1).
# NOTE(review): the bare `except:` swallows every error, not just KeyError —
# flagged for a future fix; a doc-only edit cannot change it.
try:
train_group_size = mdata['train_group_size']
except:
train_group_size = 1
# Submit training jobs; both stdout and stderr are appended to train.log.
dispatcher = make_dispatcher(mdata['train_machine'], mdata['train_resources'], work_path, run_tasks, train_group_size)
dispatcher.run_jobs(mdata['train_resources'],
commands,
work_path,
run_tasks,
train_group_size,
trans_comm_data,
forward_files,
backward_files,
outlog = 'train.log',
errlog = 'train.log')
# --- fragment: resumable reaxff→build→fp→convert workflow driver ---
# Read the last completed step index from the record file to support resume.
for line in frec:
iter_rec = int(line.strip())
dlog.info("continue from task %02d" % iter_rec)
# Walk the numbered pipeline stages, skipping those already recorded as done.
for ii in range(numb_task):
sepline(str(ii), '-')
if ii <= iter_rec:
continue
elif ii == 0:
# Stage 0: link ReaxFF inputs into place.
link_reaxff(jdata)
elif ii == 1:
# Stage 1: run the ReaxFF MD on the configured machine.
dispatcher = make_dispatcher(mdata["reaxff_machine"])
run_reaxff(jdata, mdata, dispatcher)
elif ii == 2:
# Stage 2: link the resulting trajectory.
link_trj(jdata)
elif ii == 3:
# Stage 3: build the dataset from the trajectory.
dispatcher = make_dispatcher(mdata["build_machine"])
run_build_dataset(jdata, mdata, dispatcher)
elif ii == 4:
# Stage 4: link first-principles input templates.
link_fp_input()
elif ii == 5:
# Stage 5: run the first-principles calculations.
dispatcher = make_dispatcher(mdata["fp_machine"])
run_fp(jdata, mdata, dispatcher)
elif ii == 6:
# Stage 6: convert FP output into training data.
convert_data(jdata)
# Append the completed stage index so a restart can skip it.
with open(record, "a") as frec:
frec.write(str(ii)+'\n')
# NOTE(review): the stage chain below is a verbatim duplicate of the one above —
# almost certainly an artifact of how this chunk was assembled (two copies of the
# same loop body); compare with the original file before deduplicating.
if ii <= iter_rec:
continue
elif ii == 0:
link_reaxff(jdata)
elif ii == 1:
dispatcher = make_dispatcher(mdata["reaxff_machine"])
run_reaxff(jdata, mdata, dispatcher)
elif ii == 2:
link_trj(jdata)
elif ii == 3:
dispatcher = make_dispatcher(mdata["build_machine"])
run_build_dataset(jdata, mdata, dispatcher)
elif ii == 4:
link_fp_input()
elif ii == 5:
dispatcher = make_dispatcher(mdata["fp_machine"])
run_fp(jdata, mdata, dispatcher)
elif ii == 6:
convert_data(jdata)
with open(record, "a") as frec:
frec.write(str(ii)+'\n')
# --- fragment: load param/machine files, preferring monty's loadfn (YAML/JSON) ---
# NOTE(review): this sits inside an unseen `try:`; the bare `except:` below is the
# fallback path and swallows every error, including import failures. Flagged only —
# a doc-only edit cannot narrow it.
from monty.serialization import loadfn,dumpfn
# Silence ruamel's YAML-1.1 mantissa warning emitted while parsing floats.
warnings.simplefilter('ignore', ruamel.yaml.error.MantissaNoDotYAML1_1Warning)
jdata=loadfn(args.PARAM)
if args.MACHINE is not None:
mdata=loadfn(args.MACHINE)
except:
# Fallback: plain-JSON parsing when monty/ruamel is unavailable or loadfn fails.
with open (args.PARAM, 'r') as fp :
jdata = json.load (fp)
if args.MACHINE is not None:
with open (args.MACHINE, "r") as fp:
mdata = json.load(fp)
# With a machine config present, pick a suitable FP machine and build a dispatcher.
if args.MACHINE is not None:
# Selecting a proper machine
mdata = decide_fp_machine(mdata)
disp = make_dispatcher(mdata["fp_machine"])
# --- fragment: derive the working directory and MD settings from jdata ---
disp = make_dispatcher(mdata["fp_machine"])
# Decide work path
out_dir = out_dir_name(jdata)
jdata['out_dir'] = out_dir
dlog.info ("# working dir %s" % out_dir)
# Decide whether to use a given poscar
from_poscar = False
if 'from_poscar' in jdata :
from_poscar = jdata['from_poscar']
# Verify md_nstep
# 'md_nstep' is mandatory here (KeyError if absent); 'md_incar' below is optional.
md_nstep_jdata = jdata["md_nstep"]
try:
# When a user-supplied INCAR exists, read it (presumably to cross-check NSW
# against md_nstep — the check itself is outside this chunk; TODO confirm).
md_incar = jdata['md_incar']
if os.path.isfile(md_incar):
with open(md_incar , "r") as fr:
md_incar_lines = fr.readlines()
# --- fragment: stage files and dispatch model-deviation (model_devi) jobs ---
# Task names relative to work_path; run_tasks_ comes from the unseen caller scope.
run_tasks = [os.path.basename(ii) for ii in run_tasks_]
#dlog.info("all_task is ", all_task)
#dlog.info("run_tasks in run_model_deviation",run_tasks_)
# All frozen-model graphs in the work dir become common (shared) upload files.
all_models = glob.glob(os.path.join(work_path, 'graph*pb'))
model_names = [os.path.basename(ii) for ii in all_models]
forward_files = ['conf.lmp', 'input.lammps', 'traj']
# 'traj' is both uploaded and retrieved — presumably a directory populated during
# the run; TODO confirm against the full source.
backward_files = ['model_devi.out', 'model_devi.log', 'traj']
# PLUMED-enhanced sampling: stage the plumed input and pull back its outputs.
if use_plm:
forward_files += ['input.plumed']
# backward_files += ['output.plumed']
backward_files += ['output.plumed','COLVAR','dump.0.xyz']
# Path-metadynamics additionally needs the reference path PDB.
if use_plm_path:
forward_files += ['plmpath.pdb']
# `cwd` is saved but not restored in the visible lines — done later, presumably.
cwd = os.getcwd()
# Submit grouped model_devi jobs; stdout and stderr both go to model_devi.log.
dispatcher = make_dispatcher(mdata['model_devi_machine'], mdata['model_devi_resources'], work_path, run_tasks, model_devi_group_size)
dispatcher.run_jobs(mdata['model_devi_resources'],
commands,
work_path,
run_tasks,
model_devi_group_size,
model_names,
forward_files,
backward_files,
outlog = 'model_devi.log',
errlog = 'model_devi.log')