How to use the pyclowder.datasets.upload_metadata function in pyclowder

To help you get started, we've selected a few pyclowder examples based on popular ways the upload_metadata function is used in public projects.
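
Before the project snippets, here is a minimal sketch of a direct call to pyclowder.datasets.upload_metadata. Only the upload_metadata(connector, host, key, datasetid, metadata) call comes from the library; the wrapper function, its name, and the agent values are illustrative placeholders, and the JSON-LD envelope is modeled on the snippets below rather than taken from pyclowder itself.

# Minimal sketch: wrap some content in Clowder's JSON-LD envelope and attach it to a dataset.
# The wrapper name, user_id, and agent block are placeholders modeled on the examples below.
from pyclowder.datasets import upload_metadata

def attach_dataset_metadata(connector, host, secret_key, dataset_id, content, user_id):
    metadata = {
        "@context": ["https://clowder.ncsa.illinois.edu/contexts/metadata.jsonld"],
        "dataset_id": dataset_id,
        "content": content,
        "agent": {
            "@type": "cat:user",
            "user_id": "%sapi/users/%s" % (host, user_id)
        }
    }
    upload_metadata(connector, host, secret_key, dataset_id, metadata)

Inside an extractor, connector, host, and secret_key are the arguments pyclowder passes to process_message(); standalone scripts, like the second example below, supply their own connection details.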


Example from terraref/computing-pipeline: extractors/plotclipper/terra_plotclipper.py (view on GitHub)
                            fileid = upload_to_dataset(connector, host, secret_key, target_dsid, merged_out)
                            uploaded_file_ids.append(host + ("" if host.endswith("/") else "/") + "files/" + fileid)
                        self.created += 1
                        self.bytes += os.path.getsize(merged_out)

                        # Trigger las2height extractor
                        submit_extraction(connector, host, secret_key, target_dsid, "terra.3dscanner.las2height")


        # Tell Clowder this is completed so subsequent file updates don't daisy-chain
        extractor_md = build_metadata(host, self.extractor_info, resource['id'], {
            "files_created": uploaded_file_ids
        }, 'dataset')
        self.log_info(resource, "uploading extractor metadata to Level_1 dataset")
        remove_metadata(connector, host, secret_key, resource['id'], self.extractor_info['name'])
        upload_metadata(connector, host, secret_key, resource['id'], extractor_md)

        self.end_message(resource)
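
The last few lines above show a remove-then-upload pattern: the extractor clears the metadata it attached on earlier runs with remove_metadata before posting the fresh record, so reruns do not accumulate duplicate entries on the dataset. Below is a rough, self-contained sketch of that step, with the metadata document built by hand instead of via build_metadata; the exact envelope and the extractor agent block are assumptions modeled on the snippets on this page.

from pyclowder.datasets import remove_metadata, upload_metadata

def replace_extractor_metadata(connector, host, secret_key, dataset_id,
                               extractor_name, files_created):
    # Drop any metadata this extractor attached on earlier runs.
    remove_metadata(connector, host, secret_key, dataset_id, extractor_name)

    # Assumed JSON-LD envelope, modeled on the agent blocks shown on this page.
    extractor_md = {
        "@context": ["https://clowder.ncsa.illinois.edu/contexts/metadata.jsonld"],
        "dataset_id": dataset_id,
        "content": {"files_created": files_created},
        "agent": {
            "@type": "cat:extractor",
            "extractor_id": "%sapi/extractors/%s" % (host, extractor_name)
        }
    }
    upload_metadata(connector, host, secret_key, dataset_id, extractor_md)
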
Example from terraref/computing-pipeline: scripts/rebuild_scripts/loadDanforthSnapshots.py (view on GitHub)
            # Create Clowder dataset and add metadata
            snap_md = {
                "@context": ["https://clowder.ncsa.illinois.edu/contexts/metadata.jsonld"],
                "content": base_md['experiment'],
                "agent": {
                    "@type": "cat:user",
                    "user_id": "%sapi/users/%s" % (clowder_host, clowder_uid)
                }
            }
            if not dry_run:
                snap_dataset = get_dataset_or_create(clowder_host, clowder_key, clowder_user, clowder_pass, snap_dir,
                                                     experiment_coll, root_space)
                logger.debug("Created dataset %s [%s]" % (snap_dir, snap_dataset))
                snap_md["dataset_id"] = snap_dataset
                upload_dataset_metadata(conn, clowder_host, clowder_key, snap_dataset, snap_md)
                logger.debug("Uploaded metadata to [%s]" % snap_dataset)
            else:
                logger.debug("Skipping dataset %s [%s]" % (snap_dir, "DRY RUN"))

            # Upload files and metadata to Clowder
            snap_files = os.listdir(os.path.join(experiment_root, snap_dir))
            for img_file in snap_files:
                logger.debug("Uploading %s" % img_file)
                img_path = os.path.join(experiment_root, snap_dir, img_file)
                img_md = formatImageMetadata(img_file, experiment_md['metadata'], snap_details)
                file_md = {
                    "@context": ["https://clowder.ncsa.illinois.edu/contexts/metadata.jsonld"],
                    "content": img_md,
                    "agent": {
                        "@type": "cat:user",
                        "user_id": "%sapi/users/%s" % (clowder_host, clowder_uid)