# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
return default
if "__main__" == __name__:
    # Command-line entry point: dispatch on the ACTION word given as the
    # first positional argument.  Duplicated/broken `elif` chains from the
    # corrupted original have been merged into a single dispatch.
    if len(args) < 1:
        logger.critical("%s: Need at least the ACTION argument."
                        " Type '%s --help' to get usage help."
                        % (PROG, PROG))
        sys.exit(1)
    if 'new' == args[0]:
        # Start a new session for one subset, a comma-separated list of
        # subsets, or the special name 'ALL' (every known subset).
        subset = get_required_argument(args, 1, "SUBSET")
        session = get_optional_argument(args, 2, "SESSION")
        if subset == 'ALL':
            subsets = GamessDb().list().keys()
        else:
            subsets = subset.split(',')
        for subset in subsets:
            new(subset, session, options.template)
    elif 'progress' == args[0]:
        # Report on the status of jobs in an existing session.
        session = get_required_argument(args, 1, "SESSION")
        progress(session)
    elif 'abort' == args[0]:
        # Abort all jobs in the given session.
        session = get_optional_argument(args, 1, "SESSION")
        abort(session)
    elif 'refdata' == args[0]:
        # Print the reference reaction energies for a subset; each
        # reaction is a {symbol: stoichiometric qty} mapping.
        subset = get_required_argument(args, 1, "SUBSET")
        for r, d in GamessDb().get_reference_data(subset):
            print ("%s = %.3f"
                   % (str.join(' + ',
                               [ ("%d*%s" % (qty, sy)) for sy, qty in r.items() ]),
                      d))
    elif 'download' == args[0]:
        # Fetch geometry input files for the given subset.
        subset = get_required_argument(args, 1, "SUBSET")
        GamessDb().get_geometries(subset)
    elif 'list' == args[0]:
        # Show all known GMTKN24 subsets and their download URLs.
        print ("Available subsets of GMTKN24:")
        ls = GamessDb().list()
        for name, url in ls.items():
            print (" %s --> %s" % (name, url))
    elif 'doctest' == args[0]:
        # Run this module's doctests (developer self-check).
        import doctest
        doctest.testmod(name="gmtkn24",
                        optionflags=doctest.NORMALIZE_WHITESPACE)
    else:
        # Unknown ACTION: report and exit non-zero, consistent with the
        # missing-argument path above.
        logger.critical("Unknown ACTION word '%s'."
                        " Type '%s --help' to get usage help."
                        % (args[0], PROG))
        sys.exit(1)
subsets.add(job.subset)
# map each molecule to its computed energy
energy = dict([ (job.molecule, job.energy)
for job in jobs.values() ])
print ("")
print ("STOICHIOMETRY DATA")
print ("")
print ("%-40s %-12s (%-s; %-s)"
% ("Reaction", "Comp. energy", "Ref. data", "deviation"))
print (78 * "=")
for subset in sorted(subsets):
# print subset name, centered
print ((78 - len(subset)) / 2) * ' ' + subset
print (78 * "-")
# print reaction data
for reaction,refdata in GamessDb().get_reference_data(subset):
# compute corresponding energy
computed_energy = sum([ (627.509*qty*energy[sy]) for sy,qty in reaction.items() ])
deviation = computed_energy - refdata
print ("%-40s %+.2f (%+.2f; %+.2f)"
% (
# symbolic reaction
str.join(' + ',
[ ("%d*%s" % (qty, sy)) for sy,qty in reaction.items() ]),
# numerical data
computed_energy, refdata, deviation)
)
Download geometry files for the specified GAMESS.UZH subset,
and save them into a `output_dir` subdirectory of the current
working directory.
Return list of extracted molecules/filenames.
"""
if not os.path.exists(output_dir):
os.mkdir(output_dir)
# download all links pointing to ".inp" files
subset_url = self._subsets[subset]
html = BeautifulSoup(self._browser.open(subset_url))
links = html.findAll(name="a", attrs={'class':"mapitem"})
molecules = [ ]
for a in links:
if a.string is not None:
name = GamessDb._illegal_chars_re.sub('_', a.string)
# ignore links that don't look like `.inp` files ...
if not GamessDb._inp_filename_re.search(name):
continue
# mechanize.retrieve always uses temp files
(filename, headers) = self._browser.retrieve(urlparse.urljoin(subset_url, a['href']))
shutil.copy(filename, os.path.join(output_dir, name))
molecules.append(os.path.splitext(name)[0])
logger.info("%s geometries downloaded into file '%s'", subset, output_dir)
return molecules
grid = Grid()
session_file_name = add_extension(session, 'csv')
logger.info("Will save jobs status to file '%s'", session_file_name)
session_inp_dir = add_extension(session, '.inp.d')
if not os.path.exists(session_inp_dir):
os.mkdir(session_inp_dir)
session_out_dir = add_extension(session, '.out.d')
if not os.path.exists(session_out_dir):
os.mkdir(session_out_dir)
template = read_file_contents(template_file_name)
# download geometries
subset_inp_dir = os.path.join(session_inp_dir, subset)
if not os.path.exists(subset_inp_dir):
os.mkdir(subset_inp_dir)
logger.info("Downloading %s geometries into '%s' ...", subset, subset_inp_dir)
molecules = GamessDb().get_geometries(subset, subset_inp_dir)
# prefix them with the GAMESS file snippet
for name in molecules:
dest_file_name = os.path.join(subset_inp_dir, name + '.inp')
geometry = read_file_contents(dest_file_name)
inp = open(dest_file_name, 'w')
inp.write(template)
inp.write(geometry)
inp.close()
# open session file
logger.info("Loading session file '%s' ...", session_file_name)
jobs = _load_jobs(session_file_name)
logger.info("Loaded %d jobs from session file.", len(jobs))
# append new jobs
for name in molecules:
inp_file_name = os.path.join(subset_inp_dir, name + '.inp')
# XXX: order of the following statements *is* important!
def _list_subsets(self):
    """Return dictionary mapping GAMESS.UZH subset names to download URLs."""
    # Scrape the index page; every subset is an <a class="mapitem"> link.
    page = BeautifulSoup(self._browser.open(GamessDb.BASE_URL))
    subsets = { }
    for anchor in page.findAll(name="a", attrs={'class':"mapitem"}):
        # Links with no text content carry no subset name; skip them.
        if anchor.string is None:
            continue
        # Resolve the (possibly relative) href against the base URL.
        subsets[anchor.string] = urlparse.urljoin(GamessDb.BASE_URL, anchor['href'])
    return subsets
working directory.
Return list of extracted molecules/filenames.
"""
if not os.path.exists(output_dir):
os.mkdir(output_dir)
# download all links pointing to ".inp" files
subset_url = self._subsets[subset]
html = BeautifulSoup(self._browser.open(subset_url))
links = html.findAll(name="a", attrs={'class':"mapitem"})
molecules = [ ]
for a in links:
if a.string is not None:
name = GamessDb._illegal_chars_re.sub('_', a.string)
# ignore links that don't look like `.inp` files ...
if not GamessDb._inp_filename_re.search(name):
continue
# mechanize.retrieve always uses temp files
(filename, headers) = self._browser.retrieve(urlparse.urljoin(subset_url, a['href']))
shutil.copy(filename, os.path.join(output_dir, name))
molecules.append(os.path.splitext(name)[0])
logger.info("%s geometries downloaded into file '%s'", subset, output_dir)
return molecules