def build_socket_fixture(stype, server=None):
    print("")

    # Build a backend-specific test database name
    storage_name = "test_qcfractal_storage" + stype

    # Connection details and cleanup are specific to each backend
    if stype == "sqlalchemy":
        server.create_database(storage_name)
        storage = storage_socket_factory(server.database_uri(), storage_name, db_type=stype, sql_echo=False)

        # Clean and re-init the database
        storage._clear_db(storage_name)
    else:
        raise KeyError("Storage type {} not understood".format(stype))

    yield storage

    if stype == "sqlalchemy":
        # todo: drop db
        # storage._clear_db(storage_name)
        pass
    else:
        raise KeyError("Storage type {} not understood".format(stype))
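As a rough sketch (not part of the original snippet), a generator like this would typically be exposed to tests through a parametrized pytest fixture; the `postgres_server` fixture name below is an assumption, standing in for whatever object provides `create_database()` and `database_uri()`:

import pytest

# Hypothetical wiring: parametrize over backends and delegate setup/teardown
# to the generator above. `postgres_server` is an assumed helper fixture.
@pytest.fixture(params=["sqlalchemy"])
def storage_socket(request, postgres_server):
    yield from build_socket_fixture(request.param, postgres_server)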
def server_user(args, config):
    print("QCFractal server user function.\n")
    print(f"QCFractal server base folder: {config.base_folder}")

    print("\n>>> Checking the PostgreSQL connection...")
    psql = PostgresHarness(config, quiet=False, logger=print)
    ensure_postgres_alive(psql)

    storage = storage_socket_factory(config.database_uri(safe=False))

    try:
        if args["user_command"] == "add":
            print("\n>>> Adding new user...")
            success, pw = storage.add_user(args["username"], password=args["password"], permissions=args["permissions"])
            if success:
                print(f"\n>>> New user successfully added, password:\n{pw}")
                if config.fractal.security is None:
                    print(
                        "Warning: security is disabled. To enable security, change the configuration YAML field "
                        "fractal:security to local."
                    )
            else:
                print("\n>>> Failed to add user. Perhaps the username is already taken?")
                sys.exit(1)

        elif args["user_command"] == "info":
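Independent of the CLI wrapper above, a minimal sketch of the same add-user call made directly against a storage socket; the URI, username, and permission list are placeholder assumptions:

# Placeholder URI and values for illustration only; add_user returns (success, password).
storage = storage_socket_factory("postgresql://localhost:5432/qcfractal_example")
success, pw = storage.add_user("alice", password=None, permissions=["read", "write"])
if not success:
    print("Failed to add user. Perhaps the username is already taken?")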
            Starts the background asyncio loop or not.
        reset_database : bool, optional
            Resets the database or not if a storage_uri is provided.
        """

        # Start a temporary PostgreSQL instance in a background process and a custom folder
        if storage_uri is None:
            self._storage = TemporaryPostgres(database_name=storage_project_name)
            self._storage_uri = self._storage.database_uri(safe=False, database="")
        else:
            self._storage = None
            self._storage_uri = storage_uri

            if reset_database:
                socket = storage_socket_factory(self._storage_uri, project_name=storage_project_name)
                socket._clear_db(socket._project_name)
                del socket

        # Boot workers if needed
        self.queue_socket = None
        if max_workers:
            self.queue_socket = ProcessPoolExecutor(max_workers=max_workers)

        # Add the loop to a background thread and init the server
        self.aioloop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.aioloop)
        IOLoop.clear_instance()
        IOLoop.clear_current()
        loop = IOLoop()

        self.loop = loop
        self.loop_thread = ThreadPoolExecutor(max_workers=2)
        elif ssl_options is False:
            ssl_ctx = None
        elif isinstance(ssl_options, dict):
            if ("crt" not in ssl_options) or ("key" not in ssl_options):
                raise KeyError("'crt' (SSL Certificate) and 'key' (SSL Key) fields are required for `ssl_options`.")

            ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
            ssl_ctx.load_cert_chain(ssl_options["crt"], ssl_options["key"])
        else:
            raise KeyError("ssl_options not understood")

        # Setup the database connection
        self.storage_database = storage_project_name
        self.storage_uri = storage_uri
        self.storage = storage_socket_factory(
            storage_uri,
            project_name=storage_project_name,
            bypass_security=storage_bypass_security,
            allow_read=allow_read,
            max_limit=query_limit,
        )

        if view_enabled:
            self.view_handler = ViewHandler(view_path)
        else:
            self.view_handler = None

        # Pull the current loop if we need it
        self.loop = loop or tornado.ioloop.IOLoop.current()

        # Build up the application
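For reference, a minimal sketch of the dict form of `ssl_options` accepted by the branch above; the certificate and key paths are placeholders:

# Illustrative placeholders; the check above only requires the "crt" and "key" keys to be present.
ssl_options = {
    "crt": "/path/to/server.crt",
    "key": "/path/to/server.key",
}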
def connect_to_DBs(mongo_uri, sql_uri, mongo_db_name, max_limit):
    mongo_storage = storage_socket_factory(mongo_uri, mongo_db_name, db_type="mongoengine",
                                           max_limit=max_limit)
    sql_storage = storage_socket_factory(sql_uri, 'qcarchivedb', db_type='sqlalchemy',
                                         max_limit=max_limit)

    print("DB limit: ", max_limit)

    return mongo_storage, sql_storage
def connect_to_DBs(staging_uri, production_uri, max_limit):
    staging_storage = storage_socket_factory(staging_uri, db_type='sqlalchemy', max_limit=max_limit)
    production_storage = storage_socket_factory(production_uri, db_type='sqlalchemy', max_limit=max_limit)

    print("DB limit: ", max_limit)

    return staging_storage, production_storage
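A hedged usage sketch for the helper above; both connection URIs are placeholders, not values from the original source:

# Placeholder URIs for illustration only.
staging_uri = "postgresql://localhost:5432/staging_qcarchivedb"
production_uri = "postgresql://localhost:5432/qcarchivedb"
staging_storage, production_storage = connect_to_DBs(staging_uri, production_uri, max_limit=100)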
import argparse

from qcfractal.storage_sockets import storage_socket_factory
from qcfractal.storage_sockets.sql_models import (ProcedureMap, OptimizationProcedureORM, OptimizationHistory,
                                                  TorsionDriveProcedureORM, GridOptimizationProcedureORM,
                                                  GridOptimizationAssociation, Trajectory, torsion_init_mol_association)

sql_uri = "postgresql+psycopg2://qcarchive:mypass@localhost:5432/qcarchivedb"
sql_storage = storage_socket_factory(sql_uri, 'qcarchivedb', db_type='sqlalchemy')

with sql_storage.engine.connect() as con:
    con.execute('ALTER TABLE optimization_history ' +
                'DROP CONSTRAINT optimization_history_pkey;')

    con.execute('ALTER TABLE optimization_history '
                'ADD CONSTRAINT optimization_history_pkey '