# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): fragment of a larger workflow-builder method — the `if`
# branch that this `elif` chain continues lies outside the visible region,
# and the original indentation was lost in extraction.
self.fw_id += 1
print(self.connections)  # debug dump of the connection table
# GW-type jobs: chain input-writing, prep-result-fetching and execution tasks.
elif job in ['G0W0', 'GW0', 'scGW0']:
launch_spec = {'task_type': 'GW job', '_category': 'cluster', '_queueadapter': 'qadapterdict'}
task = VaspGWInputTask(parameters)
tasks.append(task)
task = VaspGWGetPrepResTask(parameters)
tasks.append(task)
task = VaspGWExecuteTask(parameters)
tasks.append(task)
# Convergence runs append two extra tasks: write convergence data, then test it.
if parameters['spec']['converge']:
task = VaspGWWriteConDatTask(parameters)
tasks.append(task)
task = VaspGWTestConTask(parameters)
tasks.append(task)
fw = FireWork(tasks, spec=launch_spec, name=job, created_on=now(), fw_id=self.fw_id)
# Register the new FW with no children and hook it up as a child of the prep step.
self.connections[self.fw_id] = []
self.connections[self.prep_id].append(self.fw_id)
self.fw_id += 1
else:
fw = []
# Unknown job type: should have been rejected by earlier validation.
print('unspecified job, this should have been captured before !!')
exit()  # NOTE(review): sys.exit() is preferred over exit() in library code — TODO confirm
self.work_list.append(fw)
# NOTE(review): start of an SNL -> surface-workflow builder fragment; the
# enclosing function header is outside the visible region and indentation
# was lost in extraction, so nesting must be inferred from context.
connections = {}
parameters = parameters if parameters else {}
snl_priority = parameters.get('priority', 1)
priority = snl_priority * 2 # once we start a job, keep going!
# Reduced alphabetical formula, used below for human-readable FW names.
f = Composition.from_formula(snl.structure.composition.reduced_formula).alphabetical_formula
# add the SNL to the SNL DB and figure out duplicate group
tasks = [AddSNLTask()]
spec = {'task_type': 'Add to SNL database', 'snl': snl.to_dict, '_queueadapter': QA_DB, '_priority': snl_priority}
# Pre-assigned SNL group: force the mapping instead of deduplicating.
if 'snlgroup_id' in parameters and isinstance(snl, MPStructureNL):
spec['force_mpsnl'] = snl.to_dict
spec['force_snlgroup_id'] = parameters['snlgroup_id']
del spec['snl']
fws.append(FireWork(tasks, spec, name=get_slug(f + '--' + spec['task_type']), fw_id=0))
connections[0] = [1]  # SNL insertion (fw 0) precedes the optimization (fw 1)
# run GGA structure optimization for surfaces/bulk
spec={'vasp':{}}
# Copy the caller-supplied VASP inputs (incar/poscar/kpoints) into the spec.
spec['vasp']['incar']=snl.data['_vasp']["incar"]
for i in ['poscar', 'kpoints']:
spec['vasp'][i] = snl.data['_vasp'][i].to_dict
spec['vasp']['potcar'] = MPVaspInputSet().get_potcar(snl.data['_vasp']['poscar'].structure).to_dict
# Add run tags of pseudopotential
spec['run_tags'] = spec.get('run_tags', [spec['vasp']['potcar']['functional']])
spec['run_tags'].extend(spec['vasp']['potcar']['symbols'])
# Add run tags of +U
#u_tags = ['%s=%s' % t for t in
# zip(Poscar.from_dict(spec['vasp']['poscar']).site_symbols, spec['vasp']['incar'].get('LDAUU',
# [0] * len(Poscar.from_dict(spec['vasp']['poscar']).site_symbols)))]
# NOTE(review): fragment of a FireWork state-refresh routine (tree walk);
# the enclosing method signature is outside the visible region.
# what are the parent states?
parent_states = [self.id_fw[p].state for p in self.links.parent_links.get(fw_id, [])]
# Any parent that is not COMPLETED forces this FW to wait.
if len(parent_states) != 0 and not all([s == 'COMPLETED' for s in parent_states]):
m_state = 'WAITING'
else:
# my state depends on launch whose state has the highest 'score' in STATE_RANKS
max_score = 0
m_state = 'READY'
m_action = None
# TODO: pick the first launch in terms of end date that matches 'COMPLETED'; multiple might exist
for l in fw.launches:
if FireWork.STATE_RANKS[l.state] > max_score:
max_score = FireWork.STATE_RANKS[l.state]
m_state = l.state
# Only a COMPLETED launch carries an action to apply downstream.
if m_state == 'COMPLETED':
m_action = l.action
fw.state = m_state
# Only propagate when the state actually changed.
if m_state != prev_state:
if m_state == 'COMPLETED':
updated_ids = updated_ids.union(self.apply_action(m_action, fw.fw_id))
updated_ids.add(fw_id)
# refresh all the children
for child_id in self.links[fw_id]:
updated_ids = updated_ids.union(self.refresh(child_id, updated_ids))
def mol_to_wf(mol):
    """Wrap a single molecule in a one-FireWork Gaussian workflow.

    The molecule is serialized into the FireWork spec together with a
    fixed B3LYP/6-31+G(d) optimization setup, and the resulting
    FireWork is promoted to a Workflow.

    :param mol: a molecule object exposing ``to_dict``.
    :return: a Workflow containing a single GaussianTask FireWork.
    """
    job_spec = {
        'molecule': mol.to_dict,
        'charge': 0,
        'spin_multiplicity': 1,
        'title': 'first test job',
        'functional': 'B3LYP',
        'basis_set': '6-31+G(d)',
        'route_parameters': {'Opt': '', 'SCF': 'Tight'},
        'input_parameters': None,
        'link0_parameters': {'%mem': '100MW', '%chk': 'molecule'},
        '_category': 'Molecules',
    }
    firework = FireWork([GaussianTask()], job_spec)
    return Workflow.from_FireWork(firework)
def snl_to_wf(snl, inaccurate=False):
"""Build a VASP workflow (GGA root, optional GGA+U steps) from an SNL.

NOTE(review): this fragment ends without a return statement — the tail of
the function appears to lie past the visible region. Indentation was lost
in extraction.
"""
# TODO: clean this up once we're out of testing mode
# TODO: add WF metadata
fws = []
connections = {}
# add the root FW (GGA)
spec = _snl_to_spec(snl, enforce_gga=True, inaccurate=inaccurate)
tasks = [VASPWriterTask(), _get_custodian_task(spec)]
fws.append(FireWork(tasks, spec, fw_id=-1))
wf_meta = _get_metadata(snl)
# determine if GGA+U FW is needed
mpvis = MaterialsProjectVaspInputSet()
incar = mpvis.get_incar(snl.structure).to_dict
if 'LDAU' in incar and incar['LDAU']:
spec = {'task_type': 'GGA+U optimize structure (2x)', '_dupefinder': DupeFinderVASP().to_dict()}
spec.update(_get_metadata(snl))
# GGA+U optimization copies the .relax2 outputs of the GGA run first.
fws.append(FireWork([VASPCopyTask({'extension': '.relax2'}), SetupGGAUTask(), _get_custodian_task(spec)], spec, fw_id=-2))
connections[-1] = -2  # NOTE(review): scalar child id; other builders use lists (e.g. connections[0] = [1]) — confirm Workflow accepts both
spec = {'task_type': 'GGA+U static', '_dupefinder': DupeFinderVASP().to_dict()}
spec.update(_get_metadata(snl))
fws.append(
FireWork([VASPCopyTask({'extension': '.relax2'}), SetupStaticRunTask(), _get_custodian_task(spec)], spec, fw_id=-3))
connections[-2] = -3
def add_wf(self, wf):
    """
    :param wf: a Workflow object.
    """
    # Accept a bare FireWork by promoting it to a one-node Workflow.
    if isinstance(wf, FireWork):
        wf = Workflow.from_FireWork(wf)
    # Upsert the FireWorks; the store hands back an old-id -> new-id mapping.
    id_map = self._upsert_fws(wf.id_fw.values())
    # TODO: refresh_workflow probably takes care of this now
    # Rewrite the workflow's internal ids to match the assigned ones.
    wf._reassign_ids(id_map)
    # Persist the workflow links document.
    self.links.insert(wf.to_db_dict())
    # Recompute FW states starting from every root of the workflow.
    for root_id in wf.root_fw_ids:
        self._refresh_wf(wf, root_id)
# NOTE(review): fragment of a migration/import routine that reconstructs a
# completed FireWork + Launch from a legacy task document; the function
# header is outside the visible region.
fwaction = FWAction(stored_data=stored_data, update_spec=update_spec)
# Rebuild a minimal state history from the recorded completion timestamp.
if task_dict['completed_at']:
complete_date = datetime.datetime.strptime(task_dict['completed_at'], "%Y-%m-%d %H:%M:%S")
state_history = [{"created_on": complete_date, 'state': 'COMPLETED'}]
else:
state_history = []
launches = [Launch('COMPLETED', launch_dir, fworker=None, host=None, ip=None, action=fwaction,
state_history=state_history, launch_id=l_id, fw_id=fw_id)]
f = Composition.from_formula(task_dict['pretty_formula']).alphabetical_formula
# The FW is inserted already COMPLETED so it will not be re-run.
fw = FireWork(tasks, spec, name=get_slug(f + '--' + spec['task_type']), launches=launches, state='COMPLETED', created_on=None,
fw_id=fw_id)
wf_meta = get_meta_from_structure(Structure.from_dict(task_dict['snl']))
wf_meta['run_version'] = 'preproduction (0)'
wf = Workflow.from_FireWork(fw, name=f, metadata=wf_meta)
# reassign_all=False keeps the legacy fw_id instead of renumbering.
launchpad.add_wf(wf, reassign_all=False)
launchpad._upsert_launch(launches[0])
print 'ADDED', fw_id  # NOTE(review): Python 2 print statement
# return fw_id
return fw_id
def submit_new_workflow(self):
    """Claim one SUBMITTED job, build its prediction workflow, and queue it.

    Atomically claims a submitted job document (flipping its state to
    WAITING via find_and_modify), wraps a StructurePredictionTask in a
    one-FireWork Workflow tagged with the submission id, and adds it to
    the LaunchPad.  On any failure during workflow creation the job is
    marked ERROR and the traceback is printed, but the claim is kept.

    :return: the claimed job's submission_id, or None when no SUBMITTED
        job was found.
    """
    # find_and_modify claims the job atomically so two concurrent
    # submitters cannot grab the same document.
    job = self.jobs.find_and_modify({'state': 'SUBMITTED'},
                                    {'$set': {'state': 'WAITING'}})
    if not job:
        # Nothing to do; make the empty result explicit.
        return None
    submission_id = job['submission_id']
    try:
        firework = FireWork([StructurePredictionTask()],
                            spec={'species': job['species'],
                                  'threshold': job['threshold'],
                                  'submission_id': submission_id})
        wf = Workflow([firework], metadata={'submission_id': submission_id})
        self.launchpad.add_wf(wf)
        # print() call (was a Python 2 print statement) for operator feedback.
        print('ADDED WORKFLOW FOR {}'.format(job['species']))
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; flag the job so it is not silently retried.
        self.jobs.find_and_modify({'submission_id': submission_id},
                                  {'$set': {'state': 'ERROR'}})
        traceback.print_exc()
    return submission_id
# NOTE(review): continuation/tail of the surface-workflow builder whose
# earlier statements appear around the run_tags block above; indentation
# was lost in extraction.
# [0] * len(Poscar.from_dict(spec['vasp']['poscar']).site_symbols)))]
#spec['run_tags'].extend(u_tags)
spec['vaspinputset_name'] = "Surfaces"
spec['_priority'] = priority
spec['_queueadapter'] = QA_VASP
spec['task_type'] = "Vasp surface optimize static"
tasks = [VaspWriterTask(), get_custodian_task(spec)]
fws.append(FireWork(tasks, spec, name=get_slug(f + '--' + spec['task_type']), fw_id=1))
# insert into DB - GGA structure optimization
spec = {'task_type': 'VASP db insertion', '_priority': priority,
'_allow_fizzled_parents': True, '_queueadapter': QA_DB}
fws.append(
FireWork([VaspToDBTask()], spec, name=get_slug(f + '--' + spec['task_type']), fw_id=2))
connections[1] = [2]  # surface optimization (fw 1) feeds the DB insertion (fw 2)
wf_meta = get_meta_from_structure(snl.structure)
wf_meta['run_version'] = 'May 2013 (1)'
# Propagate the Materials Project submission id into the metadata when present.
if '_materialsproject' in snl.data and 'submission_id' in snl.data['_materialsproject']:
wf_meta['submission_id'] = snl.data['_materialsproject']['submission_id']
return Workflow(fws, connections, name=Composition.from_formula(
snl.structure.composition.reduced_formula).alphabetical_formula, metadata=wf_meta)