print('\nCurrent skip={}\n-----------'.format(skip))
ret = mongo_storage.get_results(status=None, limit=max_limit, skip=skip)
mongo_res = ret['data']
print('mongo results returned: ', len(mongo_res), ', total: ', ret['meta']['n_found'])
# check if this batch has already been stored
if is_mapped(sql_storage, ResultMap, mongo_res[-1]['id']):
    print('Skipping first ', skip + max_limit)
    continue
# load mapped ids in memory
mongo_res = get_ids_map(sql_storage, ['molecule'], MoleculeMap, mongo_res)
mongo_res = get_ids_map(sql_storage, ['keywords'], KeywordsMap, mongo_res)
mongo_res = get_ids_map(sql_storage, ['stdout', 'stderr', 'error'], KVStoreMap, mongo_res)
results_py = [ResultRecord(**res) for res in mongo_res]
sql_inserted = sql_storage.add_results(results_py)['data']
print('Inserted in SQL:', len(sql_inserted))
# store the ids mapping in the sql DB
mongo_ids = [obj['id'] for obj in mongo_res]
store_ids_map(sql_storage, mongo_ids, sql_inserted, ResultMap)
if with_check:
    with sql_storage.session_scope() as session:
        res = session.query(ResultMap).filter_by(mongo_id=mongo_res[0]['id']).first().sql_id
    ret = sql_storage.get_results(id=[res])
    print('Get from SQL:', ret['data'])
    ret2 = mongo_storage.get_results(id=[mongo_res[0]['id']])
    print('Get from Mongo:', ret2['data'])
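# Hedged sketch, not part of the original source: the `skip` / `continue` logic above
# implies an enclosing pagination loop. Assuming `mongo_storage.get_results` reports the
# total match count in meta['n_found'] regardless of `limit` (as the prints above suggest),
# a minimal driver could look like this; `migrate_results_in_batches` and the default
# `max_limit` are hypothetical names/values used only for illustration.
def migrate_results_in_batches(mongo_storage, max_limit=1000):
    total = mongo_storage.get_results(status=None, limit=1)['meta']['n_found']
    skip = 0
    while skip < total:
        # ... per-batch body as shown above ...
        skip += max_limit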
procedure = output["result"]
# Add initial and final molecules
update_dict = {}
initial_mol, final_mol = self.storage.add_molecules(
    [Molecule(**procedure["initial_molecule"]), Molecule(**procedure["final_molecule"])]
)["data"]
assert initial_mol == rec.initial_molecule
update_dict["final_molecule"] = final_mol
# Parse trajectory computations and add task_id
traj_dict = {k: v for k, v in enumerate(procedure["trajectory"])}
results = parse_single_tasks(self.storage, traj_dict)
for k, v in results.items():
    v["task_id"] = output["task_id"]
    results[k] = ResultRecord(**v)
ret = self.storage.add_results(list(results.values()))
update_dict["trajectory"] = ret["data"]
update_dict["energies"] = procedure["energies"]
# Save stdout/stderr
stdout, stderr, error = self.storage.add_kvstore(
    [procedure["stdout"], procedure["stderr"], procedure["error"]]
)["data"]
update_dict["stdout"] = stdout
update_dict["stderr"] = stderr
update_dict["error"] = error
update_dict["provenance"] = procedure["provenance"]
rec = OptimizationRecord(**{**rec.dict(), **update_dict})
updates.append(rec)
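# For orientation (keys inferred from the code above; values are placeholders only):
# the `output` dict consumed by this block carries the originating task id plus an
# optimization result payload. `example_output` is a hypothetical name.
example_output = {
    "task_id": "<task id>",
    "result": {
        "initial_molecule": {},  # Molecule-compatible dict
        "final_molecule": {},    # Molecule-compatible dict
        "trajectory": [],        # per-step single-point result dicts
        "energies": [],          # energy per trajectory step
        "stdout": None,
        "stderr": None,
        "error": None,
        "provenance": {},
    },
}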
mols_map = copy_molecules(staging_storage, production_storage, mols)
keywords_map = copy_keywords(staging_storage, production_storage, keywords)
kvstore_map = copy_kv_store(staging_storage, production_storage, kvstore)
for result in prod_results:
    result['molecule'] = mols_map[result['molecule']]
    if result['keywords']:
        result['keywords'] = keywords_map[result['keywords']]
    if result['stdout']:
        result['stdout'] = kvstore_map[result['stdout']]
    if result['stderr']:
        result['stderr'] = kvstore_map[result['stderr']]
    if result['error']:
        result['error'] = kvstore_map[result['error']]
results_py = [ResultRecord(**res) for res in prod_results]
staging_ids = staging_storage.add_results(results_py)['data']
if VERBOSE:
    print('Inserted in SQL:', len(staging_ids))
    print('---- Done copying Results\n\n')
return {m1: m2 for m1, m2 in zip(results_ids, staging_ids)}
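# Hedged usage sketch (assumed, not in the original source): the id map returned above is
# presumably consumed the same way `mols_map` / `kvstore_map` are consumed in this block,
# i.e. to remap foreign keys on dependent records before copying them. `copy_results` and
# `prod_procedures` are hypothetical names used only for illustration:
#
#     results_map = copy_results(staging_storage, production_storage, results_ids)
#     for proc in prod_procedures:
#         proc["trajectory"] = [results_map[rid] for rid in proc["trajectory"]]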
# Grab the tag if available
meta = data.meta.dict()
tag = meta.pop("tag", None)
priority = meta.pop("priority", None)
# Construct full tasks
new_tasks = []
results_ids = []
existing_ids = []
for mol in molecule_list:
    if mol is None:
        results_ids.append(None)
        continue
    record = ResultRecord(**meta.copy(), molecule=mol.id)
    inp = record.build_schema_input(mol, keywords)
    inp.extras["_qcfractal_tags"] = {"program": record.program, "keywords": record.keywords}
    ret = self.storage.add_results([record])
    base_id = ret["data"][0]
    results_ids.append(base_id)
    # Result already exists (duplicate); no new task is needed
    if len(ret["meta"]["duplicates"]):
        existing_ids.append(base_id)
        continue
    # Build task object
    task = TaskRecord(
        **{
# bind and expand ids list
sql_statement = sql_statement.bindparams(bindparam("optimization_ids", expanding=True))
# column types:
columns = inspect(ResultORM).columns
sql_statement = sql_statement.columns(opt_id=Integer, *columns)
query_result = self.execute_query(sql_statement, optimization_ids=list(optimization_ids))
ret = {}
for rec in query_result:
    self._remove_excluded_keys(rec)
    key = rec.pop("opt_id")
    if key not in ret:
        ret[key] = []
    ret[key].append(ResultRecord(**rec))
return ret
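# Self-contained illustration of the SQLAlchemy pattern used above: a textual statement
# with an expanding IN bind parameter and explicit result-column types. The table name,
# columns, and data below are invented purely for this example.
from sqlalchemy import Integer, bindparam, create_engine, text

engine = create_engine("sqlite://")
with engine.connect() as conn:
    conn.execute(text("CREATE TABLE result (id INTEGER, opt_id INTEGER)"))
    conn.execute(text("INSERT INTO result VALUES (1, 10), (2, 10), (3, 11)"))

    stmt = text("SELECT opt_id, id FROM result WHERE opt_id IN :optimization_ids ORDER BY id")
    # expanding=True renders a Python list as a comma-separated IN list at execution time
    stmt = stmt.bindparams(bindparam("optimization_ids", expanding=True))
    stmt = stmt.columns(opt_id=Integer, id=Integer)

    rows = conn.execute(stmt, {"optimization_ids": [10, 11]}).fetchall()
    grouped = {}
    for opt_id, result_id in rows:
        grouped.setdefault(opt_id, []).append(result_id)
    # grouped is now {10: [1, 2], 11: [3]}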
def parse_output(self, result_outputs):
    # Add new runs to database
    completed_tasks = []
    updates = []
    for data in result_outputs:
        result = self.storage.get_results(id=data["base_result"].id)["data"][0]
        result = ResultRecord(**result)
        rdata = data["result"]
        stdout, stderr, error = self.storage.add_kvstore([rdata["stdout"], rdata["stderr"], rdata["error"]])["data"]
        rdata["stdout"] = stdout
        rdata["stderr"] = stderr
        rdata["error"] = error
        # Store Wavefunction data, if present
        wfn = data["result"].get("wavefunction", False)
        if wfn:
            available = set(wfn.keys()) - {"restricted", "basis"}
            return_map = {k: wfn[k] for k in wfn.keys() & _wfn_return_names}
            rdata["wavefunction"] = {
                "available": list(available),
                "restricted": wfn["restricted"],