# imports used across these snippets; ff_utils and s3Utils come from dcicutils
import json
import time
from datetime import datetime
from operator import itemgetter

from dcicutils import ff_utils
from dcicutils.s3_utils import s3Utils


def run_missing_wfr(input_json, input_files, run_name, auth, env):
    """Creates the trigger json that is used by the fourfront endpoint."""
    all_inputs = []
    for arg, files in input_files.items():
        inp = extract_file_info(files, arg, auth, env)
        all_inputs.append(inp)
    # tweak to get bg2bw working
    all_inputs = sorted(all_inputs, key=itemgetter('workflow_argument_name'))
    my_s3_util = s3Utils(env=env)
    out_bucket = my_s3_util.outfile_bucket
    input_json['input_files'] = all_inputs
    input_json['output_bucket'] = out_bucket
    input_json["_tibanna"] = {
        "env": env,
        "run_type": input_json['app_name'],
        "run_id": run_name}
    # input_json['env_name'] = CGAP_ENV_WEBPROD  # e.g., 'fourfront-cgap'
    input_json['step_function_name'] = 'tibanna_zebra'
    # input_json['public_postrun_json'] = True
    try:
        e = ff_utils.post_metadata(input_json, 'WorkflowRun/run', key=auth)
        url = json.loads(e['input'])['_tibanna']['url']
        return url
    except Exception as e:
        # the original snippet is truncated inside the try block; a minimal handler
        # (assumed) so the fragment parses and callers get the error text back
        return str(e)
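# For context: a sketch of the trigger payload this helper assembles before posting it to
# the 'WorkflowRun/run' endpoint. App name, env, bucket, and run id below are placeholder
# values, not taken from the source:
example_trigger = {
    'app_name': 'md5',                                # placeholder workflow app name
    'input_files': [],                                # entries built by extract_file_info
    'output_bucket': 'example-outfile-bucket',        # comes from s3Utils(env=env).outfile_bucket
    '_tibanna': {'env': 'fourfront-cgap',             # placeholder env
                 'run_type': 'md5',
                 'run_id': 'md5_run_2020-01-01'},     # placeholder run name
    'step_function_name': 'tibanna_zebra',
}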
def extract_file_info(obj_id, arg_name, auth, env, rename=[]):
    """Takes file id, and creates info dict for tibanna"""
    my_s3_util = s3Utils(env=env)
    raw_bucket = my_s3_util.raw_file_bucket
    out_bucket = my_s3_util.outfile_bucket
    # Creates the formatted dictionary for files.
    # start a dictionary
    template = {"workflow_argument_name": arg_name}
    if rename:
        change_from = rename[0]
        change_to = rename[1]
    # if it is a list of items, change the structure
    if isinstance(obj_id, list):
        # if it is a list of lists, change the structure (used for RNA-seq)
        if isinstance(obj_id[0], list):
            # will only work with a single item in the outer list (was implemented for RNA-seq)
            assert len(obj_id) == 1
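# For orientation only: the branches above dispatch on the shape of obj_id. The values
# below are hypothetical placeholder @ids, just to illustrate the three accepted shapes:
single_input = '/files-fastq/EXAMPLE_A/'                                  # one file
paired_input = ['/files-fastq/EXAMPLE_A/', '/files-fastq/EXAMPLE_B/']     # list of files
rnaseq_input = [['/files-fastq/EXAMPLE_A/', '/files-fastq/EXAMPLE_B/']]   # single list of lists (RNA-seq)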
s_date = kwargs.get('start_date')
if s_date:
    query += '&date_created.from=' + s_date
# The search
res = ff_utils.search_metadata(query, key=my_auth)
if not res:
    check.summary = 'All Good!'
    return check
# if there are files, make sure they are not on s3
no_s3_file = []
running = []
missing_md5 = []
not_switched_status = []
# multiple failed runs
problems = []
my_s3_util = s3Utils(env=connection.ff_env)
raw_bucket = my_s3_util.raw_file_bucket
out_bucket = my_s3_util.outfile_bucket
for a_file in res:
    # lambda has a time limit (300 sec), kill before it is reached so we get some results
    now = datetime.utcnow()
    if (now - start).seconds > lambda_limit:
        check.brief_output.append('did not complete checking all')
        break
    # find bucket
    if 'FileProcessed' in a_file['@type']:
        my_bucket = out_bucket
    # elif 'FileVistrack' in a_file['@type']:
    #     my_bucket = out_bucket
    else:  # covers cases of FileFastq, FileReference, FileMicroscopy
        my_bucket = raw_bucket
    # check if file is in s3
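# The snippet ends right before the actual lookup. A minimal sketch of that step as a
# hypothetical helper, assuming s3Utils exposes does_key_exist(key, bucket) and that the
# file item carries its S3 key under 'upload_key' (assumptions, not shown in this snippet):
def file_missing_on_s3(a_file, my_bucket, my_s3_util):
    file_key = a_file.get('upload_key')
    return bool(file_key) and not my_s3_util.does_key_exist(file_key, my_bucket)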
def extract_file_info(obj_id, arg_name, auth, env, rename=[]):
    """Takes file id, and creates info dict for tibanna"""
    my_s3_util = s3Utils(env=env)
    raw_bucket = my_s3_util.raw_file_bucket
    out_bucket = my_s3_util.outfile_bucket
    # Creates the formatted dictionary for files.
    # start a dictionary
    template = {"workflow_argument_name": arg_name}
    if rename:
        change_from = rename[0]
        change_to = rename[1]
    # if it is list of items, change the structure
    if isinstance(obj_id, list):
        object_key = []
        uuid = []
        buckets = []
        for obj in obj_id:
            metadata = ff_utils.get_metadata(obj, key=auth)
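# For context: the loop above gathers per-file values (uuid, object key, bucket) that feed
# the input-file dict started in `template`. A sketch of the final shape for a list input;
# the bucket and key values are illustrative placeholders, and the keys other than
# workflow_argument_name are assumed from tibanna's input_files convention:
example_entry = {
    'workflow_argument_name': 'input_fastqs',
    'bucket_name': 'example-raw-files-bucket',
    'uuid': ['uuid-1', 'uuid-2'],
    'object_key': ['uuid-1/EXAMPLE_A.fastq.gz', 'uuid-2/EXAMPLE_B.fastq.gz'],
}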
def run_missing_wfr(input_json, input_files, run_name, auth, env, mount=False):
    """Creates the trigger json that is used by the fourfront endpoint."""
    time.sleep(load_wait)  # load_wait is defined elsewhere in the source module
    all_inputs = []
    for arg, files in input_files.items():
        inp = extract_file_info(files, arg, auth, env)
        all_inputs.append(inp)
    # tweak to get bg2bw working
    all_inputs = sorted(all_inputs, key=itemgetter('workflow_argument_name'))
    my_s3_util = s3Utils(env=env)
    out_bucket = my_s3_util.outfile_bucket
    input_json['input_files'] = all_inputs
    input_json['output_bucket'] = out_bucket
    input_json["_tibanna"] = {
        "env": env,
        "run_type": input_json['app_name'],
        "run_id": run_name}
    input_json['step_function_name'] = 'tibanna_pony'
    input_json['public_postrun_json'] = True
    if mount:
        # flag each input file so tibanna mounts it instead of downloading it
        for a_file in input_json['input_files']:
            a_file['mount'] = True
    try:
        e = ff_utils.post_metadata(input_json, 'WorkflowRun/run', key=auth)
        url = json.loads(e['input'])['_tibanna']['url']
        return url
    except Exception as e:
        # the source snippet is truncated inside the try block; the success path mirrors
        # the earlier variant, and this handler (assumed) returns the error text
        return str(e)
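# Hypothetical invocation sketch for this mount-enabled variant; the keypair, template,
# file @id, run name, and env below are placeholders rather than real values:
my_auth = {'key': 'XXXXXXXX', 'secret': 'XXXXXXXX', 'server': 'https://example-fourfront.org'}
wf_template = {'app_name': 'bg2bw', 'workflow_uuid': '00000000-0000-0000-0000-000000000000',
               'parameters': {}, 'config': {}}
url = run_missing_wfr(wf_template, {'bgfile': '/files-processed/EXAMPLE_BG/'},
                      'bg2bw_run_2020-01-01', my_auth, 'example-env', mount=True)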