# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def teardown(runtimes, request_handler, hostname):
    """Stop all runtimes and force-kill any leftover csruntime processes.

    Parameters
    ----------
    runtimes : list of dict
        Runtime descriptors; each carries its control handle under key "RT".
    request_handler : object
        Client used to authenticate and send a quit request to each runtime.
    hostname : str
        Host name the csruntime processes were started with; used to build
        the pkill match pattern.
    """
    import subprocess  # local import so this fix is self-contained

    request_handler.set_credentials({"user": "user0", "password": "pass0"})
    for runtime in runtimes:
        request_handler.quit(runtime["RT"])
    time.sleep(0.2)
    for p in multiprocessing.active_children():
        p.terminate()
    # They will die eventually (about 5 seconds) in most cases, but this
    # makes sure without wasting time.
    # SECURITY FIX: the original used os.system with a string built from
    # hostname, which is interpreted by a shell (injection risk). Pass an
    # argument list (shell=False); pkill receives the same -f pattern the
    # shell previously produced after stripping the single quotes.
    # NOTE(review): ports assumed to be 5000..500N (single-digit i) — confirm.
    for i in range(len(runtimes)):
        subprocess.call(
            ["pkill", "-9", "-f", "csruntime -n {} -p 500{}".format(hostname, i)]
        )
    time.sleep(0.2)
# NOTE(review): fragment of a larger method — `self`, `circs_done` and `log`
# are defined outside the visible span; this appears to be the tail of an
# exit-check in a circuit/stream measurement class. Confirm against the
# enclosing class before relying on these comments.
# Was every built circuit attached to a stream?
streams_done = (self.stats.finished_streams >=
(self.stats.successful_circuits -
self.stats.failed_circuits))
log.debug("failedCircs=%d, builtCircs=%d, totalCircs=%d, "
"finishedStreams=%d" % (self.stats.failed_circuits,
self.stats.successful_circuits,
self.stats.total_circuits,
self.stats.finished_streams))
# Once both circuits and streams are accounted for: mark finished, terminate
# leftover worker processes, run the module's optional teardown() hook, log
# final statistics, and exit the whole process.
if circs_done and streams_done:
self.already_finished = True
for proc in multiprocessing.active_children():
log.debug("Terminating remaining PID %d." % proc.pid)
proc.terminate()
# teardown() is optional on the module object, hence the hasattr guard.
if hasattr(self.module, "teardown"):
log.debug("Calling module's teardown() function.")
self.module.teardown()
log.info(self.stats)
sys.exit(0)
def run_pool(workers, n_jobs=-1, sleep=0.1):
    """Run the given (unstarted) worker processes, at most *n_jobs* at a time.

    Polls every *sleep* seconds, starting queued workers whenever a slot is
    free, and returns once every worker has been started and has exited.
    n_jobs < 1 means one slot per CPU. On KeyboardInterrupt/SystemExit all
    started processes are terminated and the exception is re-raised.
    """
    pending = workers[:]  # defensive copy; the caller's list is untouched
    if n_jobs < 1:
        n_jobs = multiprocessing.cpu_count()
    started = []
    current = None
    try:
        while True:
            running = multiprocessing.active_children()
            # Top the pool up to n_jobs while work remains.
            while pending and len(running) < n_jobs:
                current = pending.pop(0)
                current.start()
                started.append(current)
                running = multiprocessing.active_children()
            if not pending and not running:
                break
            time.sleep(sleep)
    except (KeyboardInterrupt, SystemExit):
        if current is not None:
            current.terminate()
        for proc in started:
            proc.terminate()
        raise
# Persist the computed embedding dictionaries under
# pred_embeddings/<exp_dir_info>/, creating the directory if needed.
# (exp_dir_info, weight_name and the two dicts are defined earlier in the
# script, outside this span.)
save_path = 'pred_embeddings/' + exp_dir_info + '/'
print(os.path.dirname(save_path))
if not os.path.exists(os.path.dirname(save_path)):
    print('os mkdir')
# BUG FIX: exist_ok=True closes the race between the exists() check above
# and the creation — another process creating the directory in between no
# longer raises FileExistsError.
os.makedirs(os.path.dirname(save_path), exist_ok=True)
with open(os.path.join(save_path, 'track_key_to_embedding_dict_' + weight_name + '.p'), 'wb') as handle:
    pickle.dump(all_track_key_to_embedding_dict, handle, protocol=pickle.HIGHEST_PROTOCOL)
with open(os.path.join(save_path, 'tag_key_to_embedding_dict_' + weight_name + '.p'), 'wb') as handle:
    pickle.dump(all_tag_key_to_embedding_dict, handle, protocol=pickle.HIGHEST_PROTOCOL)
# NOTE(review): orphaned `finally:` clause — its matching `try:` is not in
# the visible span. Best-effort cleanup: terminate any multiprocessing
# children still alive, regardless of how the try body ended.
finally:
for p in multiprocessing.active_children():
p.terminate()
def excepthook(exctype, value, traceback):
    """sys.excepthook replacement: kill child processes, then report.

    Terminates every live multiprocessing child so an uncaught exception in
    the parent does not leave orphaned workers, then delegates to the default
    hook so the traceback is still printed to stderr.
    """
    for p in multiprocessing.active_children():
        p.terminate()
    # BUG FIX: the original ended with a bare `raise`, which fails with
    # "RuntimeError: No active exception to re-raise" — an excepthook runs
    # outside any except block. Delegate to the default hook instead.
    sys.__excepthook__(exctype, value, traceback)
def Run_phasing_all(chr_start, chr_end, overlap_threshold, support_threshold, output_dir, h5_dir, sample_name):
    """Phase every chromosome in [chr_start, chr_end] in parallel.

    Spawns one pool worker per chromosome, each running Phase_start on that
    single chromosome, then waits until all workers have finished.
    """
    n_chroms = chr_end - chr_start + 1
    pool = Pool(n_chroms)
    for chrom in range(chr_start, chr_end + 1):
        worker_args = (output_dir, h5_dir, sample_name, chrom, chrom,
                       overlap_threshold, support_threshold, "xin")
        pool.apply_async(Phase_start, worker_args)
    pool.close()
    # Poll until only the pool's bookkeeping child remains, then join.
    while len(active_children()) > 1:
        time.sleep(0.5)
    pool.join()
    print("All Done~")
# --- Sort the input BAM by read name into <out_dir>/sorted_bam_<sample>/ ---
# (out_dir, sample_name, num_threads, bam_file, code_path, pool and
# sort_start are defined earlier in the script, outside this span.)
import shutil

bam_file_dir = out_dir + "/sorted_bam_" + sample_name + "/"
if os.path.exists(bam_file_dir):
    print("using existing output folder: " + bam_file_dir)
else:
    os.makedirs(bam_file_dir)
bam_sorted_file = bam_file_dir + "sorted_bam.bam"
# BUG FIX: the original wrapped plain string concatenation in a bare
# try/except, but building a string cannot raise, so the fallback to the
# bundled samtools was dead code. Probe the PATH explicitly instead.
if shutil.which("samtools"):
    sort_bam_cmd = "samtools sort -@ " + str(num_threads) + " -n " + bam_file + " -o " + bam_sorted_file
else:
    sort_bam_cmd = code_path + "samtools/" + "samtools sort -@ " + str(num_threads) + " -n " + bam_file + " -o " + bam_sorted_file
idx_bam_cmd = "touch " + bam_file_dir + "finish_bam.txt"
pool.apply_async(sort_start, (sort_bam_cmd, idx_bam_cmd, "xin"))
pool.close()
# Poll until only the pool's bookkeeping child remains, then join.
while len(active_children()) > 1:
    time.sleep(0.5)
pool.join()
print("Sorting bam finished...")