import errno
import shutil
import tempfile

import mock
import pytest
import diskcache as dc


def test_init_makedirs():
    # Cache.__init__ should surface the failure when the cache directory
    # cannot be created.
    cache_dir = tempfile.mkdtemp()
    shutil.rmtree(cache_dir)
    makedirs = mock.Mock(side_effect=OSError(errno.EACCES))
    with pytest.raises(EnvironmentError):
        try:
            with mock.patch('os.makedirs', makedirs):
                cache = dc.Cache(cache_dir)
        except EnvironmentError:
            shutil.rmtree(cache_dir, ignore_errors=True)
            raise
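
The behavior under test: when the cache directory cannot be created, diskcache propagates the failure out of the constructor. A minimal sketch of triggering the same error for real, assuming an unwritable parent directory such as /proc on Linux:

import diskcache

try:
    cache = diskcache.Cache('/proc/no-such-dir/cache')  # parent is not writable
except EnvironmentError as exc:  # EnvironmentError is an alias of OSError on Python 3
    print('cache init failed:', exc)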
process_queue = [[] for _ in range(processes)]

# Distribute operations round-robin across the worker processes.
for index, ops in enumerate(operations):
    process_queue[index % processes].append(ops)

# Write each worker's slice of operations to its input file.
for num in range(processes):
    with open('input-%s.pkl' % num, 'wb') as writer:
        pickle.dump(process_queue[num], writer, protocol=2)

for process in subprocs:
    process.start()

for process in subprocs:
    process.join()

# Verify cache integrity after the concurrent run, ignoring benign warnings.
with Cache('tmp') as cache:
    warnings.simplefilter('error')
    warnings.simplefilter('ignore', category=UnknownFileWarning)
    warnings.simplefilter('ignore', category=EmptyDirWarning)
    cache.check()

# Merge the per-process timing results.
timings = co.defaultdict(list)

for num in range(processes):
    with open('output-%s.pkl' % num, 'rb') as reader:
        data = pickle.load(reader)
        for key in data:
            timings[key] += data[key]

if delete:
    for num in range(processes):
        os.remove('input-%s.pkl' % num)
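
The worker side of this harness is not shown. A minimal sketch of what each subprocess presumably does, assuming each operation is an (action, key, value) tuple — the tuple format and the worker name are assumptions, not taken from the benchmark source:

import collections as co
import pickle
import time

from diskcache import Cache

def worker(num):
    # Replay this process's slice of operations against the shared cache
    # and record a timing per action.
    with open('input-%s.pkl' % num, 'rb') as reader:
        operations = pickle.load(reader)

    timings = co.defaultdict(list)

    with Cache('tmp') as cache:
        for action, key, value in operations:
            start = time.time()
            if action == 'set':
                cache[key] = value
            elif action == 'get':
                cache.get(key)
            else:
                cache.delete(key)
            timings[action].append(time.time() - start)

    with open('output-%s.pkl' % num, 'wb') as writer:
        pickle.dump(dict(timings), writer, protocol=2)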
@pytest.fixture(params=[
    diskcache.Cache('/tmp/ring-test'),
])
def disk_cache(request):
    client = request.param
    client.ring = ring.disk
    client.is_binary = False
    client.has_touch = False
    return client
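
For context, a test consuming this fixture might look like the sketch below; the test body is an assumption, relying on ring's documented decorator and its .delete sub-function rather than anything from the original suite:

def test_disk_cache_roundtrip(disk_cache):
    calls = []

    @disk_cache.ring(disk_cache)
    def double(n):
        calls.append(n)
        return n * 2

    double.delete(3)       # start from a clean key
    assert double(3) == 6  # miss: computed and stored in diskcache
    assert double(3) == 6  # hit: served from diskcache
    assert calls == [3]    # the function body ran only once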
def _get_cached(path):
    # 1/ memory cache
    if path in JUMBO_FIELDS_MEMORY_CACHE:
        return JUMBO_FIELDS_MEMORY_CACHE[path]

    # 2/ disk cache
    if SIMPLEFLOW_ENABLE_DISK_CACHE:
        try:
            # NB: this cache may also be triggered on activity workers, where
            # it's not that useful. The performance hit should be minimal.
            # To be improved later.
            # NB2: cache has to be lazily instantiated here; cache objects do
            # not survive forks, see DiskCache docs.
            cache = Cache(constants.CACHE_DIR)
            # Generate a dedicated cache key because this cache may be shared
            # with other features of simpleflow at some point.
            cache_key = "jumbo_fields/" + path.split("/")[-1]
            if cache_key in cache:
                logger.debug(
                    "diskcache: getting key={} from cache_dir={}".format(
                        cache_key, constants.CACHE_DIR
                    )
                )
                return cache[cache_key]
        except OperationalError:
            logger.warning("diskcache: got an OperationalError, skipping cache usage")

    # Nothing to return, but better be explicit here.
    return
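
The write path is not shown here. A hypothetical counterpart mirroring the two layers — the name _set_cached is an assumption, not simpleflow's API — could look like:

def _set_cached(path, value):
    # Hypothetical writer: populate the in-memory layer first, then the
    # shared disk layer, tolerating the same SQLite-level failure.
    JUMBO_FIELDS_MEMORY_CACHE[path] = value
    if SIMPLEFLOW_ENABLE_DISK_CACHE:
        try:
            cache = Cache(constants.CACHE_DIR)  # lazily instantiated; see NB2 above
            cache["jumbo_fields/" + path.split("/")[-1]] = value
        except OperationalError:
            logger.warning("diskcache: got an OperationalError, skipping cache write")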
def _get_creds(self):
    with diskcache.Cache(directory=self._cachedir) as cache:
        return cache.get(self._CREDS_STORAGE_KEY)
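
A matching writer, assuming a hypothetical _store_creds method on the same class, might be:

def _store_creds(self, creds, ttl=3600):
    # Hypothetical counterpart (not from the original source): persist the
    # credentials under the same key and let diskcache expire them.
    with diskcache.Cache(directory=self._cachedir) as cache:
        cache.set(self._CREDS_STORAGE_KEY, creds, expire=ttl)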
if self.urlBase[-1] != '/':
    self.urlBase += '/'

self.token = ''
self._folderUploadCallbacks = []
self._itemUploadCallbacks = []
self._serverVersion = []
self._serverApiDescription = {}
self.incomingMetadata = {}
self.localMetadata = {}

if cacheSettings is None:
    self.cache = None
else:
    self.cache = diskcache.Cache(**cacheSettings)

if progressReporterCls is None:
    progressReporterCls = _NoopProgressReporter
self.progressReporterCls = progressReporterCls

self._session = None
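
For illustration, cacheSettings is unpacked straight into diskcache.Cache, so any of its keyword arguments work; a sketch of a settings mapping and the call it effectively results in:

import diskcache

cacheSettings = {
    'directory': '/tmp/client-cache',
    'size_limit': 2 ** 28,  # cull once the cache exceeds 256 MB
}
cache = diskcache.Cache(**cacheSettings)  # what the constructor above effectively does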
if fname:
    if not os.path.exists(fname):
        raise IOError("Background file {} does not exist!".format(fname))
    self.background = Fasta(fname)
    self.background_hash = file_checksum(fname)
    return

if not genome:
    if self.genome:
        genome = self.genome
    else:
        raise ValueError("Need either genome or filename for background.")

logger.debug("using background: genome {} with size {}".format(genome, size))

lock.acquire()
with Cache(CACHE_DIR) as cache:
    self.background_hash = "d{}:{}:{}:{}".format(
        genome, int(size), gc, str(gc_bins)
    )
    c = cache.get(self.background_hash)
    if c:
        fa, gc_bins = c
    else:
        fa = None

    if not fa:
        if gc:
            if gc_bins is None:
                gc_bins = [(0.0, 0.2), (0.8, 1)]
                for b in np.arange(0.2, 0.799, 0.05):
                    gc_bins.append((b, b + 0.05))
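
The get-or-compute-under-a-lock pattern in this excerpt generalizes; a self-contained sketch of the same idea with generic names (not gimmemotifs code), using a thread-level lock where the original uses whatever lock fits its execution model:

import threading

from diskcache import Cache

lock = threading.Lock()

def get_or_compute(cache_dir, key, compute):
    # Serialize the computation so concurrent callers don't duplicate work,
    # and memoize the result on disk for later runs.
    with lock:
        with Cache(cache_dir) as cache:
            value = cache.get(key)
            if value is None:
                value = compute()
                cache.set(key, value)
    return value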
def __init__(self, mainArgs, *args, **kw):
    logger.info(colored("- fuse 4 cloud driver -", 'red'))
    self.buffer = Cache('./cache/buffer-batchmeta')
    self.dir_buffer = Cache('./cache/dir_buffer-buffer-batchmeta')
    self.attr_requesting = Cache('./cache/attr-requesting')
    self.mainArgs = mainArgs
    self.traversed_folder = Cache('./cache/traversed-folder')
    self.disk = PCS(self.mainArgs)
    self.createLock = Lock()
    self.attrLock = Lock()
    self.writing_files = {}
    self.downloading_files = {}
    logger.info(f'mainArgs:{mainArgs}')
    q = json.loads(self.disk.quota())
    # only request once
    try:
        self.total_size = q['quota']
        self.used = q['used']
def process(self, unused, site, config):
    log = ""
    errors = False
    plugin_name = "image_resizer"
    input_dir = config.input_dir
    max_width = config.max_width
    quality = config.quality
    cache_file = os.path.join(site.config.dir.cache, plugin_name)
    site_output_dir = site.config.dir.output

    # opening cache
    start = time.time()
    cache = dc(cache_file)  # dc: the diskcache Cache class, presumably imported under an alias
    cache_timing = {
        'opening': time.time() - start,
        'fetching': 0,
        'writing': 0
    }

    # using the list of images from image_info
    if 'image_info' not in site.plugin_data:
        log += 'image_info not found in plugin_data. No images?'
        return (SiteFab.ERROR, plugin_name, log)
    images = site.plugin_data['image_info'].values()

    # processing images
    progress_bar = tqdm(total=len(images), unit=' image',
                        desc="Resizing images", leave=False)
    for img_info in images:
        thumb = {}