How to use the dcicutils.ff_utils.post_metadata function in dcicutils

To help you get started, we’ve selected a few dcicutils examples based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

Example from GitHub: 4dn-dcic/foursight — chalicelib/checks/helpers/google_utils.py (external link)
# NOTE(review): fragment of an instance method — the enclosing `def` line was
# lost in extraction, which is why this first line's indentation differs from
# the rest of the body.
report_data = self.query_reports(**kwargs)

            # First make sure _all_ reporting methods defined on this class are included. Otherwise we might have tracking items in DB with missing data.
            for method_name in self.get_report_provider_method_names():
                # A missing key means the report set is incomplete — bail out
                # rather than persist a partial tracking item.
                if report_data['reports'].get(method_name) is None:
                    raise Exception("Not all potentially available data is included in report_data. Exiting.")
                # Per the tracking-item schema, each report must be a plain list
                # (no extra raw report / request payloads attached).
                if not isinstance(report_data['reports'][method_name], list):
                    raise Exception("Can only make tracking_item for report_data which does not contain extra raw report and request data, per the schema.")

            # Body of a TrackingItem of type 'google_analytics' as the portal expects it.
            tracking_item = {
                "status"            : "released",
                "tracking_type"     : "google_analytics",
                "google_analytics"  : report_data
            }
            if do_post_request:
                # POST to the portal; the owner's access key is extended with the
                # owner's server so ff_utils targets the right environment.
                response = ff_utils.post_metadata(tracking_item, 'tracking-items', key=dict(self.owner.access_key, server=self.owner.server))
                # post_metadata responses carry the created item(s) under '@graph'.
                return response['@graph'][0]
            else:
                # Dry run: return the would-be item without posting it.
                return tracking_item
Example from GitHub: 4dn-dcic/foursight — chalicelib/checks/helpers/wfr_utils.py (external link)
# NOTE(review): fragment — the enclosing `def` and its earlier setup lines were
# lost in extraction; the docstring below originally sat at the top of the function.
out_bucket = my_s3_util.outfile_bucket
    """Creates the trigger json that is used by fourfront endpoint.
    """
    input_json['input_files'] = all_inputs
    input_json['output_bucket'] = out_bucket
    # Tibanna bookkeeping: target environment, a run_type taken from the app
    # name, and the caller-supplied run id.
    input_json["_tibanna"] = {
        "env": env,
        "run_type": input_json['app_name'],
        "run_id": run_name}
    input_json['step_function_name'] = 'tibanna_pony'
    input_json['public_postrun_json'] = True
    if mount:
        # Mark every input file to be mounted instead of copied.
        for a_file in input_json['input_files']:
            a_file['mount'] = True
    try:
        # Kick off the workflow run via the portal's WorkflowRun/run endpoint.
        e = ff_utils.post_metadata(input_json, 'WorkflowRun/run', key=auth)
        # The response's 'input' field is a JSON string; its _tibanna.url holds
        # the run's tracking URL — presumably a step-function link; TODO confirm.
        url = json.loads(e['input'])['_tibanna']['url']
        return url
    except Exception as e:
        # Best-effort: return the error text rather than raising, so the caller
        # can report it (note this rebinds `e`, shadowing the response above).
        return str(e)
Example from GitHub: 4dn-dcic/Submit4DN — wranglertools/import_data.py (external link)
def update_item(verb, file_to_upload, post_json, filename_to_post, extrafiles, connection, identifier):
    """POST or PATCH an item's metadata and, when a file accompanies it, upload that file.

    Args:
        verb: 'POST' or 'PATCH'; anything else raises ValueError.
        file_to_upload: truthy when a data file must be uploaded with the metadata.
        filename_to_post: local path or ftp:// URL of that file.
        post_json: metadata body to send to the portal.
        extrafiles: extra files to process (handling is truncated in this excerpt).
        connection: portal connection object providing `.key` credentials.
        identifier: PATCH target identifier, or item type for POST.

    Returns:
        The portal's error-response dict when the request fails; otherwise the
        function continues into the upload step (its tail is not visible here).
    """
    # if FTP, grab the file from ftp
    ftp_download = False
    if file_to_upload and filename_to_post.startswith("ftp://"):
        ftp_download = True
        # Downloads the file locally; may rewrite post_json and the filename.
        file_to_upload, post_json, filename_to_post = ftp_copy(filename_to_post, post_json)
    # add the md5
    if file_to_upload and not post_json.get('md5sum'):
        print("calculating md5 sum for file %s " % (filename_to_post))
        post_json['md5sum'] = md5(filename_to_post)
    try:
        if verb == 'PATCH':
            e = ff_utils.patch_metadata(post_json, identifier, key=connection.key)
        elif verb == 'POST':
            e = ff_utils.post_metadata(post_json, identifier, key=connection.key)
        else:
            raise ValueError('Unrecognized verb - must be POST or PATCH')
    except Exception as problem:
        # Normalize portal exceptions into the same dict shape as a response.
        e = parse_exception(problem)
    if e.get('status') == 'error':
        return e
    if file_to_upload:
        # get s3 credentials
        if verb == 'PATCH':
            # PATCH responses lack upload credentials, so fetch them explicitly
            # and graft them onto the response before uploading.
            creds = get_upload_creds(e['@graph'][0]['accession'], connection)
            e['@graph'][0]['upload_credentials'] = creds
        # upload
        upload_file_item(e, filename_to_post)
        if ftp_download:
            # Clean up the temporary local copy fetched from FTP.
            os.remove(filename_to_post)
    if extrafiles:
        # NOTE(review): excerpt truncated here — the extrafiles branch continues
        # beyond this view.
Example from GitHub: 4dn-dcic/foursight — chalicelib/checks/helpers/cgap_utils.py (external link)
# NOTE(review): fragment — the enclosing `def` was lost in extraction; the
# docstring below originally sat at the top of the function.
all_inputs = sorted(all_inputs, key=itemgetter('workflow_argument_name'))
    my_s3_util = s3Utils(env=env)
    out_bucket = my_s3_util.outfile_bucket
    """Creates the trigger json that is used by fourfront endpoint.
    """
    input_json['input_files'] = all_inputs
    input_json['output_bucket'] = out_bucket
    # Tibanna bookkeeping: environment, run_type from the app name, caller's run id.
    input_json["_tibanna"] = {
        "env": env,
        "run_type": input_json['app_name'],
        "run_id": run_name}
    # input_json['env_name'] = CGAP_ENV_WEBPROD  # e.g., 'fourfront-cgap'
    # CGAP runs use the 'tibanna_zebra' step function (the fourfront variant
    # elsewhere in this file uses 'tibanna_pony').
    input_json['step_function_name'] = 'tibanna_zebra'
    # input_json['public_postrun_json'] = True
    try:
        # Trigger the workflow run via the portal's WorkflowRun/run endpoint.
        e = ff_utils.post_metadata(input_json, 'WorkflowRun/run', key=auth)
        # 'input' is a JSON string; its _tibanna.url is the run's tracking URL.
        url = json.loads(e['input'])['_tibanna']['url']
        return url
    except Exception as e:
        # Best-effort: return the error text instead of raising (rebinds `e`).
        return str(e)
Example from GitHub: 4dn-dcic/foursight — chalicelib/checks/wrangler_checks.py (external link)
# NOTE(review): fragment — the opening of the `fields2transfer` list and the
# enclosing function were lost in extraction; this `]` closes that list.
]
        fields2flag = ['static_headers', 'static_content']
        # Build the publication body from whichever transferable biorxiv fields
        # are actually present (None values are skipped). `post_metadata` here
        # is a local dict, distinct from ff_utils.post_metadata called below.
        post_metadata = {f: biorxiv.get(f) for f in fields2transfer if biorxiv.get(f) is not None}
        post_metadata['ID'] = pmid
        post_metadata['status'] = 'current'
        if 'url' in biorxiv:
            # Keep the old biorxiv URL reachable via the 'aka' field.
            post_metadata['aka'] = biorxiv.get('url')
        # Fields that cannot be transferred automatically get surfaced for a human.
        flags = {f: biorxiv.get(f) for f in fields2flag if biorxiv.get(f) is not None}
        if flags:
            action_log[buuid] = 'Static content to check: ' + str(flags)

        # first try to post the pub
        pub_upd_res = None
        pub = None
        try:
            pub_upd_res = ff_utils.post_metadata(post_metadata, 'publication', key=connection.ff_keys)
        except Exception as e:
            error = str(e)
        else:
            if pub_upd_res.get('status') != 'success':
                error = pub_upd_res.get('status')
        if error:
            # A 422 means the publication already exists; fall back to searching
            # for it by ID so it can be handled instead of re-posted.
            if "'code': 422" in error:
                # there is a conflict-see if pub is already in portal
                pub_search_res = None
                error = ''  # reset error
                try:
                    search = 'search/?type=Publication&ID={}&frame=object'.format(post_metadata['ID'])
                    pub_search_res = ff_utils.search_metadata(search, key=connection.ff_keys)
                except Exception as e:
                    error = 'SEARCH failure for {} - msg: {}'.format(pmid, str(e))
                else:
                    # NOTE(review): excerpt truncated here — the search-success
                    # branch continues beyond this view.
Example from GitHub: 4dn-dcic/Submit4DN — wranglertools/import_data.py (external link)
# NOTE(review): fragment of a sheet-processing loop — the enclosing `for` and
# `def` were lost in extraction.
if values[0].startswith("#"):
            continue
        # Get rid of the first empty cell
        values.pop(0)
        total += 1
        # build post_json and get existing if available
        post_json = build_tibanna_json(keys, types, values, connection)
        existing_data = get_existing(post_json['wfr_meta'], connection)
        if existing_data:
            # Already on the portal — report, count as an error, and skip re-posting.
            print('this workflow_run is already posted {}'.format(post_json['wfr_meta']['aliases'][0]))
            error += 1
            continue
        if post_json:
            # do the magic
            try:
                # pseudo-run endpoint registers the WorkflowRun metadata without
                # actually executing a workflow.
                e = ff_utils.post_metadata(post_json, '/WorkflowRun/pseudo-run', key=connection.key)
            except Exception as problem:
                # Normalize the exception into the same dict shape as a response.
                e = parse_exception(problem)
            if e.get("status") == "SUCCEEDED":
                post += 1
            else:
                print('can not post the workflow run {}'.format(post_json['wfr_meta']['aliases'][0]))
                print(e)  # to give a little more info even if not that informative
                error += 1
        else:
            error += 1
    # print final report
    # Patching does not apply to this sheet, hence the '-' placeholders.
    print("{sheet:<27}: {post:>2} posted /{not_posted:>2} not posted  \
    {patch:>2} patched /{not_patched:>2} not patched,{error:>2} errors"
          .format(sheet=sheet.upper()+"("+str(total)+")", post=post, not_posted=not_posted,
                  error=error, patch="-", not_patched="-"))
Example from GitHub: 4dn-dcic/Submit4DN — wranglertools/import_data.py (external link)
# NOTE(review): fragment of an item-processing loop — the enclosing `for` and
# `def` were lost in extraction.
# dryrun option
        if dryrun:
            if skip_dryrun:
                continue
            # simulate patch/post
            if existing_data.get("uuid"):
                # Drop fields marked for deletion before the simulated PATCH.
                post_json = remove_deleted(post_json)
                try:
                    # add_on="check_only=True" asks the portal to validate the
                    # request without writing anything.
                    e = ff_utils.patch_metadata(post_json, existing_data["uuid"], key=connection.key,
                                                add_on="check_only=True")
                except Exception as problem:
                    # Normalize the exception into the same dict shape as a response.
                    e = parse_exception(problem)
            else:
                post_json = remove_deleted(post_json)
                try:
                    e = ff_utils.post_metadata(post_json, sheet, key=connection.key, add_on="check_only=True")
                except Exception as problem:
                    e = parse_exception(problem)
            # check simulation status
            if e['status'] == 'success':
                pass
            else:
                # display the used alias with the error
                e_id = ""
                if post_json.get('aliases'):
                    e_id = post_json['aliases'][0]
                error_rep = error_report(e, sheet, all_aliases, connection, e_id)
                if error_rep:
                    error += 1
                    print(error_rep)
            continue