def assertIterationDataRecorded(self, expected, tolerance, root):
if self.comm.rank != 0:
return
db = SqliteDict(self.filename, self.tablename_iterations)
_assertIterationDataRecorded(self, db, expected, tolerance)
db.close()
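# Hedged illustration (not taken from the project above): SqliteDict can keep
# several named tables in a single sqlite file, selected through the second
# (tablename) argument that the test helper above relies on. File, table and
# key names here are made up.
from sqlitedict import SqliteDict

iterations = SqliteDict('example_recordings.db', tablename='iterations', autocommit=True)
iterations['rank0|1'] = {'Unknowns': {'x': 1.0}}  # one pickled record per iteration
iterations.close()

metadata = SqliteDict('example_recordings.db', tablename='metadata', autocommit=True)
metadata['format_version'] = 1                    # separate table, same file
metadata.close()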
if save_index:
if self.fresh_index is not None:
self.fresh_index.save(self.location('index_fresh'))
if self.opt_index is not None:
self.opt_index.save(self.location('index_opt'))
if save_model:
if self.model is not None:
self.model.save(self.location('model'))
self.payload.commit()
if clear_buffer:
if hasattr(self, 'fresh_docs'):
try:
self.fresh_docs.terminate() # erase all buffered documents + file on disk
except:
pass
self.fresh_docs = SqliteDict() # buffer defaults to a random location in temp
self.fresh_docs.sync()
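# A minimal sketch of the scratch-buffer pattern above, assuming only documented
# SqliteDict behaviour: constructing it without a filename backs the dict with a
# temporary file, and terminate() drops the table and deletes that file, so a
# fresh instance starts from an empty buffer. Keys and values are made up.
from sqlitedict import SqliteDict

scratch = SqliteDict()                   # backed by a file in a temp location
scratch['doc1'] = {'tokens': ['alpha', 'beta']}
scratch.commit()

scratch.terminate()                      # erase contents and the file on disk
scratch = SqliteDict()                   # brand-new, empty buffer
scratch.sync()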
histIndex = ''
else:
histIndex = '_' + chr(histIndex + ord('A'))
if histFileName == 'as':
histFileName = 'aerostruct.db'
elif histFileName == 'a':
histFileName = 'vlm.db'
elif histFileName == 's':
histFileName = 'spatialbeam.db'
try:
db = shelve.open(histFileName, 'r')
OpenMDAO = False
except: # bare except because error is not in standard Python
db = SqliteDict(histFileName, 'iterations')
OpenMDAO = True
if db.keys() == []:
OpenMDAO = False
db = SqliteDict(histFileName)
if OpenMDAO:
string = db.keys()[-1].split('|')
if string[-1]=='derivs':
nkey = int(string[-2]) + 1 # OpenMDAO uses 1-indexing
else:
nkey = int(string[-1]) + 1 # OpenMDAO uses 1-indexing
solver_name = string[0]
else:
nkey = int(db['last'])
self.iter_type = np.zeros(nkey)
# Check to see if there is bounds information in the hst file
try:
self.bounds = dict(
db['varBounds'].items() + db['conBounds'].items())
except KeyError:
pass
if OpenMDAO:
deriv_keys = SqliteDict(histFileName, 'derivs').keys()
deriv_keys = [int(key.split('|')[-1]) for key in deriv_keys]
for i in xrange(nkey):
if OpenMDAO:
key = '{}|{}'.format(solver_name, i)
try:
f = db[key]['Unknowns']
if i in deriv_keys:
self.iter_type[i] = 1 # for 'major' iterations
else:
self.iter_type[i] = 2 # for 'minor' iterations
for key in sorted(f):
new_key = key + '{}'.format(histIndex)
if new_key not in self.func_data_all:
def load_db(self):
self.db = sqlitedict.SqliteDict(self.db_name, 'iterations')
self.twist = []
self.mesh = []
self.def_mesh = []
self.radius = []
self.thickness = []
sec_forces = []
normals = []
widths = []
self.lift = []
self.lift_ell = []
self.vonmises = []
alpha = []
rho = []
v = []
self.CL = []
from __future__ import division
import sqlitedict
import numpy
import matplotlib.pyplot as plt
def _get_lengths(self, A, B, axis):
return numpy.sqrt(numpy.sum((B - A)**2, axis=axis))
db = sqlitedict.SqliteDict('aerostruct.db', 'openmdao')
twist = []
mesh = []
sec_forces = []
normals = []
cos_dih = []
lift = []
for case_name, case_data in db.iteritems():
if "metadata" in case_name or "derivs" in case_name:
continue # don't plot these cases
n = case_data['Unknowns']['mesh'].shape[1]
# only grab one half of the wing
n21 = n // 2
sec_forces.append(case_data['Unknowns']['sec_forces'][n21:, :])
logger.exception("failed to delete %s" % (self.filename))
def __del__(self):
# like close(), but assume globals are gone by now (do not log!)
try:
self.close(do_log=False, force=True)
except Exception:
# prevent error-log flood in case multiple SqliteDicts are
# closed after the connection is lost (exceptions are always
# ignored in __del__ methods)
pass
# Adding extra methods for python 2 compatibility (at import time)
if major_version == 2:
SqliteDict.__nonzero__ = SqliteDict.__bool__
del SqliteDict.__bool__ # not needed and confusing
#endclass SqliteDict
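# Why the aliasing above works: Python 2 resolves truthiness through __nonzero__
# while Python 3 uses __bool__, so exposing the same method under both names keeps
# `if my_dict:` working on either interpreter. A tiny illustration (class name is
# hypothetical):
class _TruthyExample(object):
    def __bool__(self):        # looked up by bool()/if on Python 3
        return True
    __nonzero__ = __bool__     # the same method under the Python 2 name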
class SqliteMultithread(Thread):
"""
Wrap sqlite connection in a way that allows concurrent requests from multiple threads.
This is done by internally queueing the requests and processing them sequentially
in a separate thread (in the same order they arrived).
"""
def __init__(self, filename, autocommit, journal_mode):
super(SqliteMultithread, self).__init__()
self.filename = filename
self.autocommit = autocommit
self.journal_mode = journal_mode
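# A self-contained sketch of the pattern the docstring above describes, not
# sqlitedict's actual implementation: statements from any thread are pushed onto
# a queue, and a single worker thread owns the sqlite connection and applies them
# in arrival order. Class, file and table names are made up.
import sqlite3
from queue import Queue
from threading import Thread

class SerializedSqlite(Thread):
    _CLOSE = object()                      # sentinel telling the worker to stop

    def __init__(self, filename):
        super().__init__(daemon=True)
        self.filename = filename
        self.requests = Queue()
        self.start()

    def run(self):
        # the only place where the connection is created or used
        conn = sqlite3.connect(self.filename)
        while True:
            item = self.requests.get()
            if item is self._CLOSE:
                break
            sql, params = item
            conn.execute(sql, params)
            conn.commit()
        conn.close()

    def execute(self, sql, params=()):
        # callable from any thread; the statement is queued, not run directly
        self.requests.put((sql, params))

    def close(self):
        self.requests.put(self._CLOSE)
        self.join()

# usage: requests issued here are executed sequentially by the worker thread
store = SerializedSqlite('example_queue.sqlite')
store.execute('CREATE TABLE IF NOT EXISTS kv (k TEXT PRIMARY KEY, v TEXT)')
store.execute('REPLACE INTO kv (k, v) VALUES (?, ?)', ('greeting', 'hello'))
store.close()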
def load_db(self):
self.db = sqlitedict.SqliteDict(self.db_name, 'iterations')
self.twist = []
self.mesh = []
self.def_mesh = []
self.r = []
self.thickness = []
sec_forces = []
normals = []
widths = []
self.lift = []
self.lift_ell = []
self.vonmises = []
alpha = []
rho = []
v = []
self.obj = []
def insert_into_DB(tsv, DB, type):
with open(tsv, 'r', encoding="utf-8") as file:
# read source tsv file
file = list(DictReader(file, delimiter='\t'))
# write the ordered dicts to the new db
# open the db
with SqliteDict(DB, autocommit=False) as database:
# check for the console key and create an empty list in case it doesn't exist
if system not in database:
database[system] = []
system_database = database[system]
# if next((item for item in file if item['Title ID'] == "Tom" and item["age"] == 11), None) is not None:
for index_file, i in enumerate(file):
print(f"Processing {type}: {progress_bar( int(index_file/(len(file) - 1) * 100) )}",
f"({index_file}/{len(file) - 1})",
end="\r")
i["Type"] = type.upper()
i['System'] = system
# check whether an entry with the same 'Title ID', 'Region', 'Type' and 'System' is already in the database
try:
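# A minimal, runnable sketch of the pattern above (function, file and key names
# are made up): read a TSV with csv.DictReader and append each row to a
# per-console list stored in a SqliteDict. Note the write-back at the end --
# SqliteDict pickles values on assignment, so mutating a retrieved list in place
# does not persist unless it is reassigned to its key.
from csv import DictReader
from sqlitedict import SqliteDict

def insert_rows(tsv_path, db_path, system='example_console', row_type='game'):
    with open(tsv_path, 'r', encoding='utf-8') as handle:
        rows = list(DictReader(handle, delimiter='\t'))
    with SqliteDict(db_path, autocommit=False) as database:
        entries = database.get(system, [])     # empty list if the console is new
        for row in rows:
            row['Type'] = row_type.upper()
            row['System'] = system
            entries.append(row)
        database[system] = entries             # write the list back to persist it
        database.commit()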