Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): fragment — the assignment for this query string was cut off
# above (presumably `aggregate_query = \`). `table` and `date` are interpolated
# directly into the SQL via f-string; safe only if both are trusted values —
# TODO confirm upstream.
f"""SELECT date, '__all__' AS package, category, sum(downloads) AS downloads
FROM {table} where date = '{date}' GROUP BY date, category"""
# NOTE(review): a `(table,)` parameter tuple is passed, but the query above
# contains no %s placeholder — psycopg2 raises on that mismatch. Looks like a
# bug (params left over from an earlier parameterized version); verify.
cursor.execute(aggregate_query, (table,))
values = cursor.fetchall()
# Remove any previously written '__all__' aggregate rows for this date so the
# insert below does not duplicate them.
delete_query = \
f"""DELETE FROM {table}
WHERE date = '{date}' and package = '__all__'"""
insert_query = \
f"""INSERT INTO {table} (date, package, category, downloads)
VALUES %s"""
try:
print(delete_query)
cursor.execute(delete_query)
print(insert_query)
# execute_values expands the VALUES %s template over all aggregate rows.
execute_values(cursor, insert_query, values)
connection.commit()
success[table] = True
# Duplicate-key (or similar constraint) failure: roll back and record failure
# for this table instead of aborting the whole run.
except psycopg2.IntegrityError as e:
connection.rollback()
success[table] = False
print("Elapsed: " + str(time.time() - start))
success["elapsed"] = time.time() - start
return success
def update_hijack_keys(cur, map_old_new_keys):
    """Bulk-rename hijack keys.

    For every (old_key -> new_key) entry in *map_old_new_keys*, set
    ``hijacks.key`` to the new value wherever it currently equals the old
    one, using a single batched UPDATE ... FROM (VALUES ...) statement.
    Any failure is reported and aborts the process.
    """
    pairs = [(new_key, old_key) for old_key, new_key in map_old_new_keys.items()]
    query = (
        "UPDATE hijacks SET key=data.new FROM (VALUES %s) "
        "AS data (new, old) WHERE key=data.old"
    )
    try:
        psycopg2.extras.execute_values(cur, query, pairs, page_size=1000)
    except Exception:
        # Match original behavior: report and terminate with a failure code.
        print("error on hijack key update")
        exit(-1)
# NOTE(review): fragment — `source`, `destination`, `query`, `template`,
# `destination_table`, and `cursor_name` are defined outside this view.
# A named (server-side) cursor streams the source result set instead of
# materializing it all in client memory.
cursor = source.cursor(name=cursor_name)
cursor.execute(query)
fetch_row_count = 100000
# Copy rows to the destination in batches until the source is exhausted.
while True:
rows = cursor.fetchmany(fetch_row_count)
if len(rows) == 0:
break
# we end up doing a lot of execute statements here, copying data.
# using the inner_cursor means we don't log all the noise
destination_cursor = destination.cursor().inner_cursor
# NOTE(review): this INSERT string is loop-invariant and could be hoisted
# out of the loop. Table name is format()-interpolated — assumes
# fully_qualified_table() yields a trusted identifier; confirm.
insert_query = 'INSERT INTO {} VALUES %s'.format(fully_qualified_table(destination_table))
execute_values(destination_cursor, insert_query, rows, template)
destination_cursor.close()
cursor.close()
# Single commit after all batches: the whole copy is one transaction.
destination.commit()
# NOTE(review): stray ')' — the statement it closed was lost when this
# fragment was extracted.
)
# NOTE(review): fragment — `cursor`, `cursor2`, `line_count`, `ordered_table`,
# `database`, and `field_names` come from outside this view.
# Assign a sequential rowid to each streamed row and insert in batches of 100.
lines = 0
rows = []
rowid = 0
for row in tqdm(cursor, total=line_count, leave=False):
lines += 1
rowid += 1
rows.append((rowid, row[0]))
# Flush a full batch of 100 buffered rows.
if lines == 100:
insert = "INSERT INTO {} ({}) VALUES %s".format(ordered_table, "rowid_ordered, source_year_target_year")
execute_values(cursor2, insert, rows)
rows = []
lines = 0
# Flush the final partial batch, if any rows remain buffered.
if lines:
insert = "INSERT INTO {} ({}) VALUES %s".format(ordered_table, "rowid_ordered, source_year_target_year")
execute_values(cursor2, insert, rows)
rows = []
lines = 0
print("Creating indexes...")
# Index rowid_ordered so ordered range scans over the new table are cheap.
cursor2.execute(
"CREATE INDEX {}_source_year_target_year_rowid_idx ON {} USING BTREE(rowid_ordered)".format(
ordered_table, ordered_table
)
)
database.commit()
database.close()
return field_names
# Delete ignored rows
# Pop in descending index order so earlier removals don't shift later indices.
for idx in sorted(delete_rows, reverse=True):
rows.pop(idx)
# NOTE(review): `table` and `date` are f-string-interpolated into the SQL —
# safe only if both are trusted; confirm they never carry user input.
# Replace all rows for this date: delete, then bulk-insert the cleaned rows.
delete_query = \
f"""DELETE FROM {table}
WHERE date = '{date}'"""
insert_query = \
f"""INSERT INTO {table} (date, package, category, downloads)
VALUES %s"""
try:
print(delete_query)
cursor.execute(delete_query)
print(insert_query)
execute_values(cursor, insert_query, rows)
connection.commit()
return True
# Constraint violation: roll back and signal failure to the caller.
except psycopg2.IntegrityError as e:
connection.rollback()
return False
def _populate_modules(self, repo_id, modules):
# Ensure every (name, arch, repo) module in `modules` exists in the `module`
# table: look up which are already present, then insert only the missing ones.
cur = self.conn.cursor()
names = set()
module_map = {}
arch_map = self._prepare_arch_map()
# Deduplicate the incoming modules into lookup keys.
for module in modules:
names.add((module['name'], arch_map[module['arch']], repo_id))
if names:
# Join the candidate keys against existing rows in one round trip.
execute_values(cur,
"""select id, name, arch_id, repo_id from module
inner join (values %s) t(name, arch_id, repo_id)
using (name, arch_id, repo_id)
""", list(names), page_size=len(names))
for row in cur.fetchall():
# Remember the id of each existing module; drop it from the
# to-insert set.
module_map[(row[1], row[2],)] = row[0]
names.remove((row[1], row[2], row[3]))
if names:
# Whatever is left in `names` was not found — insert those modules.
import_data = set()
for module in modules:
if (module['name'], arch_map[module['arch']], repo_id) in names:
import_data.add((module['name'], repo_id, arch_map[module['arch']]))
execute_values(cur,
"""insert into module (name, repo_id, arch_id)
values %s returning id, name, arch_id""",
list(import_data), page_size=len(import_data))
# NOTE(review): the `returning` rows are never fetched into module_map and
# module_map is never returned — this looks truncated; confirm against the
# original file whether a fetchall()/return follows here.
def insert_header(cursor, message):
    """Insert a row into rt.messages for this feed message's header
    timestamp and return the generated oid of the new row."""
    stmt = insert_stmt_returning('rt.messages', ['"timestamp"'], 'oid')
    header_row = (fromtimestamp(message.header.timestamp),)
    execute_values(cursor, stmt, [header_row])
    # NYCT-specific replacement-period persistence was disabled upstream:
    # nyct_feed_header = message.header.Extensions[nyct_subway_pb2.nyct_feed_header]
    # replacement_periods = [parse_replacement_period(e) + [messageid]
    #                        for e in nyct_feed_header.trip_replacement_period]
    # sql = insert_stmt('rt.replacement_periods', ['route_id', '"end"', 'mid'])
    # execute_values(cursor, sql, replacement_periods)
    return cursor.fetchone()[0]
def insert(self, table, nested):
# Bulk-insert the `nested` row tuples into `table` via execute_values.
# Validate the expected nested shape first (is_nested is defined elsewhere —
# presumably it raises on bad input; confirm).
is_nested(nested)
cur = self.cursor()
# NOTE(review): `table` is f-string-interpolated into the SQL — safe only if
# it is a trusted identifier, never user input; verify callers.
template = f'INSERT INTO {table} VALUES %s'
extras.execute_values(cur, template, nested)
# NOTE(review): this is the last visible line — the method may continue
# (e.g. commit/close) beyond this chunk.