Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def create_pool(self, **kwargs):
    """Create an aiopg.sa pool against the local test database and
    (re)initialise the ``tbl`` table.

    Drops any pre-existing ``tbl`` and recreates it empty, so every
    test starts from a clean table. Returns the pool.
    """
    pool = yield from sa.create_pool(
        database='aiopg',
        user='aiopg',
        password='passwd',
        host='127.0.0.1',
        loop=self.loop,
        **kwargs)
    # Reset the fixture table before handing the pool to the test.
    with (yield from pool.cursor()) as cur:
        yield from cur.execute("DROP TABLE IF EXISTS tbl")
        yield from cur.execute("CREATE TABLE tbl "
                               "(id serial, name varchar(255))")
    return pool
async def init_postgres(conf, loop):
    """Build an aiopg.sa engine from the ``conf`` mapping.

    The pool is kept small (1-2 connections); the caller owns the
    returned engine and is responsible for closing it.
    """
    connect_kwargs = dict(
        database=conf['database'],
        user=conf['user'],
        password=conf['password'],
        host=conf['host'],
        port=conf['port'],
    )
    return await aiopg.sa.create_engine(
        minsize=1, maxsize=2, loop=loop, **connect_kwargs)
# Drive the coroutine to completion on the event loop so the engine is
# ready before the application starts serving.  `pg_params` is defined
# elsewhere in this file — presumably the postgres config mapping.
engine = loop.run_until_complete(init_postgres(pg_params, loop))
async def _get_connection_pool(self) -> aiopg.sa.Engine:
    """Return the connection pool, creating and caching it on first use."""
    if self._engine is not None:
        return self._engine
    self._engine = await aiopg.sa.create_engine(_CONNECTION_STRING)
    return self._engine
async def _get_connection_pool(self) -> aiopg.sa.Engine:
    """Lazily create the database connection pool and hand back the cached one."""
    pool = self._engine
    if pool is None:
        pool = await aiopg.sa.create_engine(_CONNECTION_STRING)
        self._engine = pool
    return pool
# Register the newly spawned process under its task id, both in the
# global task map and on the owning schedule execution.
task_process.task_id = task_id
self._task_processes[task_id] = task_process
self._schedule_executions[schedule.id].task_processes[task_id] = task_process
# Lazy %-style args: formatting is deferred until the record is emitted.
self._logger.info(
    "Process started: Schedule '%s' process '%s' task %s pid %s, %s active tasks\n%s",
    schedule.name, schedule.process_name, task_id, process.pid,
    len(self._task_processes), args)
if schedule.type == self._ScheduleType.STARTUP:
    # Startup tasks are not tracked in the tasks table
    asyncio.ensure_future(self._wait_for_task_completion(task_process))
else:
    # The task row needs to exist before the completion handler runs
    # (the handler presumably updates this row — confirm in
    # _wait_for_task_completion), so the insert is awaited first and
    # the watcher is scheduled only afterwards.
    async with aiopg.sa.create_engine(_CONNECTION_STRING) as engine:
        async with engine.acquire() as conn:
            await conn.execute(self._tasks_tbl.insert().values(
                id=str(task_id),
                pid=(self._schedule_executions[schedule.id].
                     task_processes[task_id].process.pid),
                process_name=schedule.process_name,
                state=int(Task.State.RUNNING),
                # NOTE(review): naive local time, not UTC — confirm the
                # tasks table expects local timestamps.
                start_time=datetime.datetime.now()))
    asyncio.ensure_future(self._wait_for_task_completion(task_process))
async def create_engine():
    """Build an aiopg.sa engine from the 'postgres' section of the app config."""
    pg_settings = get_config()['postgres']
    return await aiopg.sa.create_engine(**pg_settings)
async def init_pg(app):
    """Create the aiopg.sa engine from the app's ``database_url`` config
    and store it under ``app['db']``.

    Extra keyword arguments for the driver may be supplied via the
    optional ``database_extra`` config mapping.
    """
    cfg = app['config']
    parsed = make_url(cfg['database_url'])
    extra = cfg.get('database_extra', {})
    engine = await aiopg.sa.create_engine(
        database=parsed['database'],
        user=parsed['user'],
        password=parsed['password'],
        host=parsed['host'],
        port=parsed['port'],
        loop=app.loop,
        **extra,
    )
    app['db'] = engine
async def init_pg(app):
    """Create an aiopg.sa engine from the POSTGRES_* environment variables.

    The engine is published in three places: a ``config`` mapping defined
    elsewhere in this module (presumably — confirm), under ``app['db']``
    is NOT used here; instead it is set as an attribute on ``app``.
    """
    pg_engine = await aiopg.sa.create_engine(
        database=os.getenv('POSTGRES_DB'),
        user=os.getenv('POSTGRES_USER'),
        password=os.getenv('POSTGRES_PASSWORD'),
        host=os.getenv('POSTGRES_HOST'),
        # NOTE(review): os.getenv returns a str (or None) — the driver
        # apparently accepts a string port; confirm.
        port=os.getenv('POSTGRES_PORT'),
        minsize=1,
        maxsize=5,
        loop=app.loop)
    config['db'] = pg_engine
    setattr(app, 'db', pg_engine)
'''Sends incoming data to database'''
# Decode the CoAP payload; keep the untouched original around so the
# error log can show exactly what arrived.
original_payload = loads(request.payload)
payload = dict(original_payload)
key = payload.get('key')
if key is None:
    # No client-supplied key: generate one.
    key = uuid.uuid4()
else:
    # The key is stored in its own column, not inside the data blob.
    del payload['key']
# Demonstrate IntegrityError
# (every request uses the same key, so the second insert violates the
# unique constraint on purpose).
key = 'same'
conf = Configurator()
# NOTE(review): a fresh engine per request is expensive — presumably
# acceptable for this demo; confirm before reusing in production.
async with aiopg.sa.create_engine(conf.db_conn_str) as engine:
    async with engine.acquire() as conn:
        try:
            await conn.execute(__tbl__.insert().values(data=payload, key=key))
        except psycopg2.IntegrityError as e:
            # .exception() records the traceback alongside the message.
            logging.getLogger('coap-server').exception(
                "Duplicate key (%s) inserting sensor values: %s"
                , key # Maybe the generated key is the problem
                , original_payload)
# Respond with an empty payload regardless of insert outcome.
return aiocoap.Message(payload=''.encode("utf-8"))
async def init_pg(app):
    """Open an aiopg.sa engine using the app's 'postgres' config section
    and attach it to the application as ``app['db']``.
    """
    pg = app['config']['postgres']
    app['db'] = await aiopg.sa.create_engine(
        database=pg['database'],
        user=pg['user'],
        password=pg['password'],
        host=pg['host'],
        port=pg['port'],
        minsize=pg['minsize'],
        maxsize=pg['maxsize'],
        loop=app.loop,
    )