def test_wrap_pool(self):
    _tracer = tracer.Tracer()
    # Register the tracer instance (not the module) in the execution context
    # so the multiprocessing integration can propagate it to workers.
    execution_context.set_opencensus_tracer(_tracer)
    trace.trace_integration()
    context = _tracer.span_context
    print(context.trace_id)
    pool = Pool(processes=1)
    with _tracer.span(name='span1'):
        result = pool.apply_async(fake_pooled_func, ()).get(timeout=1)
    # The worker should see the same trace id as the parent process.
    self.assertEqual(result, context.trace_id)
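The test only passes if fake_pooled_func reads the propagated tracer from inside the worker. A minimal sketch of such a worker, assuming opencensus's execution_context API (the real helper in the test suite may differ):

from opencensus.trace import execution_context

def fake_pooled_func():
    # Runs in the pool worker; the multiprocessing integration is expected
    # to have restored the parent's tracer into this execution context.
    worker_tracer = execution_context.get_opencensus_tracer()
    return worker_tracer.span_context.trace_id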
def apply_async(self, func, args=(), kwds=None, callback=None):
    # Wrap the target so exceptions raised inside the worker get logged
    # instead of being silently dropped until AsyncResult.get() is called.
    return Pool.apply_async(self,
                            LogThreadExceptions(func),
                            args,
                            kwds if kwds is not None else {},
                            callback)
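LogThreadExceptions itself is not shown in the snippet. A plausible sketch of such a wrapper (the name comes from the code above, the body is assumed): it logs the traceback inside the worker before re-raising, because Pool otherwise hides exceptions until AsyncResult.get() is called.

import functools
import logging

class LogThreadExceptions:
    """Callable wrapper that logs worker exceptions before re-raising."""

    def __init__(self, func):
        self.func = func
        functools.update_wrapper(self, func)

    def __call__(self, *args, **kwargs):
        try:
            return self.func(*args, **kwargs)
        except Exception:
            logging.getLogger(__name__).exception('exception in pooled call')
            raise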
@contextlib.contextmanager  # decorator implied by the yield/terminate pattern
def _create_process_pool(processes=1):
    if _MULTIPROCESSING and processes:
        logger.info("creating pool with %i workers", processes)
        pool = multiprocessing.pool.Pool(processes=processes)
    else:
        logger.info("creating dummy pool")
        pool = DummyPool()
    try:
        yield pool
    finally:
        # terminate the pool even if the managed block raises
        pool.terminate()
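With the contextmanager decorator in place, the generator is consumed like any other context manager; a usage sketch (some_worker and work_items are placeholders):

with _create_process_pool(processes=4) as pool:
    results = pool.map(some_worker, work_items)
# the pool is terminated on exit, even if the body raised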
def __init__(self, n_trees=100, n_features=7, processes=0, p_samples=P_SAMPLES):
    self.n_trees = n_trees
    self.trees = []
    self.n_features = n_features
    self.p_samples = p_samples
    if processes <= 0:
        # Leave one core free, but never ask for a zero-sized pool
        # (Pool(0) raises ValueError on single-core machines).
        processes = max(cpu_count() - 1, 1)
    self.pool = Pool(processes)
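A pool created in the constructor like this is typically consumed later when the forest is fitted. A hypothetical sketch of that step (method and helper names are illustrative, not from the source):

import random

def _grow_tree(seed):
    # Placeholder for the per-tree training work; it must live at module
    # level so it can be pickled and shipped to the pool workers.
    return random.Random(seed).random()

class RandomForest:  # hypothetical host class for the __init__ above
    def fit(self):
        # one task per tree; map blocks until every tree is built
        self.trees = self.pool.map(_grow_tree, range(self.n_trees))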
def async_process_runner(jobs, force, pool):
    try:
        p = multiprocessing.pool.Pool(pool)
    except Exception:  # avoid a bare except so KeyboardInterrupt still propagates
        logger.warning("can't start pool, falling back to synchronous runner.")
        return sync_runner(jobs, force)
    return async_runner(jobs, force, pool, p)
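sync_runner is the fallback taken when the pool cannot be created (for example on systems where forking is restricted). An illustrative stand-in with the same signature (the real job-calling convention is unknown):

def sync_runner(jobs, force):
    # Hypothetical fallback: run each job in-process, one after another.
    return [job(force) for job in jobs]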
def run_recommenders_parallel(self):
    """
    Run the recommenders in parallel during the co-training process.

    :return: False if the stop threshold has been reached, True otherwise
    """
    flag = True
    pool = Pool()
    result = pool.map(self.run_recommenders, self.recommenders)
    pool.close()
    pool.join()
    for n, r in enumerate(self.recommenders):
        if not result[n][1]:
            flag = False
        else:
            self.recommenders_predictions.setdefault(r, result[n][0])
    return flag
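The result unpacking above implies that each run_recommenders call returns a (predictions, keep_going) pair. An illustrative stub with that contract (all names and bodies are assumed):

class CoTraining:  # hypothetical host class for the method above
    def run_recommenders(self, recommender):
        predictions = {}   # placeholder for the recommender's output
        keep_going = True  # becomes False once the stop threshold is reached
        return predictions, keep_going

Note that pool.map pickles the bound method self.run_recommenders, so the instance itself must be picklable.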
def download_list(api_k, hash_list):
    global api_key
    if api_k:
        api_key = api_k
    with open(hash_list) as fh:  # close the file instead of leaking the handle
        files = json.load(fh)
    pool = Pool(os.cpu_count())
    # imap_unordered yields results as downloads finish; tqdm shows progress
    for _ in tqdm.tqdm(pool.imap_unordered(download_file_by_hash, files),
                       total=len(files)):
        pass
    pool.close()
    pool.join()
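download_file_by_hash is not shown either; a hedged sketch of a worker with the same shape (the endpoint, auth scheme, and requests usage are invented placeholders):

import requests  # assumed dependency, not confirmed by the snippet

def download_file_by_hash(file_hash):
    # Illustrative only: the real service and parameters are unknown.
    resp = requests.get('https://example.invalid/files/' + file_hash,
                        params={'apikey': api_key},  # global set by download_list
                        timeout=60)
    resp.raise_for_status()
    with open(file_hash, 'wb') as out:
        out.write(resp.content)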
def _get_all_available_palettes(
        cls, image_path, use_whole_palette, quality_per_plugin
):  # pylint: disable=too-many-locals
    hex_palette = []
    from colorz import colorz  # pylint: disable=import-error
    from colorthief import ColorThief  # pylint: disable=import-error
    from haishoku.haishoku import Haishoku  # pylint: disable=import-error
    with Pool() as pool:
        oomox_future = pool.apply_async(apply_chain, (
            get_plugin_module,
            ('ima', os.path.join(PLUGIN_DIR, 'ima.py'), 'get_hex_palette'),
            (image_path, use_whole_palette, 48, quality_per_plugin[0])
        ))
        from functools import partial
        _opener = partial(open, image_path, 'rb')
        colorz_future = pool.apply_async(delayed_partial, (
            colorz,
            (
                (_opener, ()),
            ),
            (quality_per_plugin[1], 50, 200, ),
        ))
        colorthief_future = pool.apply_async(call_method_from_class, (
            ColorThief,
# POSIX only
try:
    class NonDaemonForkServerProcess(NonDaemonMixin, context.ForkServerProcess):
        pass

    class NonDaemonForkServerContext(context.ForkServerContext):
        Process = NonDaemonForkServerProcess

    _nondaemon_context_mapper['forkserver'] = NonDaemonForkServerContext()
except AttributeError:
    pass


class DreamPool(pool.Pool):
    def __init__(self, processes=None, initializer=None, initargs=(),
                 maxtasksperchild=None, context=None):
        if context is None:
            context = mp.get_context()
        # Swap the context for its non-daemonic counterpart so that
        # pool workers are allowed to spawn child processes.
        context = _nondaemon_context_mapper[context._name]
        super(DreamPool, self).__init__(processes=processes,
                                        initializer=initializer,
                                        initargs=initargs,
                                        maxtasksperchild=maxtasksperchild,
                                        context=context)
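The point of a non-daemonic pool is that its workers may spawn children of their own, which a stock Pool forbids ("daemonic processes are not allowed to have children"). A usage sketch assuming the DreamPool class above:

import multiprocessing as mp

def nested_job(n):
    # This inner pool would fail inside a regular, daemonic Pool worker.
    with mp.Pool(2) as inner:
        return sum(inner.map(abs, range(-n, n)))

if __name__ == '__main__':
    with DreamPool(processes=2) as outer:
        print(outer.map(nested_job, [3, 4]))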
def multi_task(file):
    pool = Pool(4)
    args = [(file, 'IED'), (file, 'AccessPoint'), (file, 'LDevice'),
            (file, 'DAI', 'DOI'), (file, 'LN', 'LDevice')]
    for arg in args:
        pool.apply_async(resolve, args=arg)
    pool.apply_async(resolve_struct, args=(file,))
    pool.close()
    pool.join()
    print('done!')
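One caveat with the fire-and-forget apply_async calls above: exceptions raised in the workers vanish unless each AsyncResult is checked. A variant that keeps the handles and re-raises (resolve and resolve_struct are the same functions as above):

def multi_task_checked(file):
    with Pool(4) as pool:
        args = [(file, 'IED'), (file, 'AccessPoint'), (file, 'LDevice'),
                (file, 'DAI', 'DOI'), (file, 'LN', 'LDevice')]
        handles = [pool.apply_async(resolve, args=arg) for arg in args]
        handles.append(pool.apply_async(resolve_struct, args=(file,)))
        for h in handles:
            h.get()  # re-raises any exception from the worker
    print('done!')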