# NOTE(review): a stray security-scanner banner was interleaved here; the
# comment on the next line continues a sentence whose opening line is
# missing from this chunk (it explains why rows are fetched in chunks).
# during select are undefined. Therefore, process data in chunks.
# This is also a nice opportunity to release the GIL...
query_chunk = self.db.get_list(
'SELECT name, name_id, inode FROM contents_v WHERE '
'parent_inode=? LIMIT %d' % batch_size, (id_p,))
reinserted = False
for (name, name_id, id_) in query_chunk:
if self.db.has_val('SELECT 1 FROM contents WHERE parent_inode=?', (id_,)):
# First delete subdirectories
if not reinserted:
queue.append(id_p)
reinserted = True
queue.append(id_)
else:
if is_open:
pyfuse3.invalidate_entry_async(id_p, name)
await self._remove(id_p, name, id_, force=True)
if query_chunk and not reinserted:
# Make sure to re-insert the directory to process the remaining
# contents and delete the directory itself.
queue.append(id_p)
dt = time.time() - stamp
batch_size = int(batch_size * CHECKPOINT_INTERVAL / dt)
batch_size = min(batch_size, 200) # somewhat arbitrary...
batch_size = max(batch_size, 20000)
log.debug('Adjusting batch_size to %d and yielding', batch_size)
await trio.hazmat.checkpoint()
log.debug('re-acquired lock')
stamp = time.time()
# Make sure to re-insert the directory to process the remaining
# contents and delete the directory itself.
queue.append(id_p)
dt = time.time() - stamp
batch_size = int(batch_size * CHECKPOINT_INTERVAL / dt)
batch_size = min(batch_size, 200) # somewhat arbitrary...
batch_size = max(batch_size, 20000)
log.debug('Adjusting batch_size to %d and yielding', batch_size)
await trio.hazmat.checkpoint()
log.debug('re-acquired lock')
stamp = time.time()
# Epilogue: remove the (presumably now-empty) tree root itself.
# Only invalidate the kernel's dentry cache if the inode is still tracked
# as open; invalidate_entry_async is fire-and-forget.
if id_p0 in self.open_inodes:
log.debug('invalidate_entry(%d, %r)', id_p0, name0)
pyfuse3.invalidate_entry_async(id_p0, name0)
await self._remove(id_p0, name0, id0, force=True)
# Release one lookup count on id0.
# NOTE(review): assumes exactly one reference was taken when the tree
# removal started — confirm against the function's prologue (not visible
# in this chunk).
await self.forget([(id0, 1)])
log.debug('finished')