How to use the pyclowder.connectors.Connector class in pyclowder

To help you get started, we've selected a few pyclowder examples based on popular ways Connector is used in public projects.
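Connector is the base class pyclowder uses to communicate with a Clowder instance; the helper functions in pyclowder.files and pyclowder.datasets take a connector as their first argument. Below is a minimal sketch of standalone (non-extractor) use; the host URL, API key, dataset ID, and file path are placeholders, and upload_to_dataset is assumed to follow the pyclowder 2 API:

import pyclowder.files
from pyclowder.connectors import Connector

host = "https://clowder.example.org/clowder/"  # placeholder Clowder URL
key = "YOUR_API_KEY"                           # placeholder API key
dataset_id = "DATASET_ID_HERE"                 # placeholder dataset ID

# A bare connector with no extractor behind it; mounted_paths maps paths as
# Clowder sees them to the same locations on the local filesystem, letting
# pyclowder read files from disk instead of downloading them.
conn = Connector("", {}, mounted_paths={"/home/clowder/sites": "/home/clowder/sites"})

# Upload a local file to an existing dataset and get back its Clowder file ID.
file_id = pyclowder.files.upload_to_dataset(conn, host, key, dataset_id, "/tmp/example.csv")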


From terraref/computing-pipeline: scripts/rebuild_scripts/loadDanforthSnapshots.py
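# Tail of the per-image metadata dictionary assembled for each Danforth snapshot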
        'perspective' : perspective,
        'rotation_angle' : rotation_angle,
        'zoom' : zoom,
        'imager_stage_vertical_position' : stage_position,
        'camera_gain' : camera_gain,
        'camera_exposure' : camera_exposure,
        'image_id' : img_id,
        'imagedate' : snap_details['timestamp'],
        'species' : species,
        'genotype' : genotype,
        'treatment' : treatment,
        'sample_id' : barcode['unique_id']
    }


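# Standalone connector (no extractor); mounted_paths maps Clowder's file mount
# points to their local equivalents so files can be accessed directly on disk.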
conn = Connector({}, mounted_paths={"/home/clowder/sites":"/home/clowder/sites"})
experiment_root = sys.argv[1]
experiment_name = os.path.basename(experiment_root)
if os.path.exists(experiment_root):
    logger.debug("Searching for index files in %s" % experiment_root)
    md_file  = os.path.join(experiment_root, experiment_name+"_metadata.json")
    csv_file = os.path.join(experiment_root, "SnapshotInfo.csv")

    if not os.path.isfile(md_file):
        logger.debug("%s not found" % md_file)
        sys.exit(1)
    if not os.path.isfile(csv_file):
        logger.debug("%s not found" % csv_file)
        sys.exit(1)

    logger.debug("Found index files; loading %s" % md_file)
    base_md = loadJsonFile(md_file)
From terraref/computing-pipeline: scripts/filecounter/filecounter.py
import collections.abc
import os

from pyclowder.connectors import Connector
from terrautils.sensors import Sensors

import utils
import counts


config = {}
app_dir = '/home/filecounter'
SCAN_LOCK = False
count_defs = counts.SENSOR_COUNT_DEFINITIONS
DEFAULT_COUNT_START = None
DEFAULT_COUNT_END = None

CLOWDER_HOST = "https://terraref.ncsa.illinois.edu/clowder/"
CLOWDER_KEY = os.getenv('CLOWDER_KEY', False)
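# Shared connector reused by all counting routines below; again, mounted_paths
# allows reading files from the local mount rather than over HTTP.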
CONN = Connector("", {}, mounted_paths={"/home/clowder/sites":"/home/clowder/sites"})


# UTILITIES ----------------------------
def update_nested_dict(existing, new):
    """Nested update of python dictionaries for config parsing
    Adapted from http://stackoverflow.com/questions/3232943/update-value-of-a-nested-dictionary-of-varying-depth
    """
    for k, v in new.items():
        if isinstance(existing, collections.abc.Mapping):
            if isinstance(v, collections.abc.Mapping):
                r = update_nested_dict(existing.get(k, {}), v)
                existing[k] = r
            else:
                existing[k] = new[k]
        else:
            existing = {k: new[k]}
    return existing
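For instance, a nested update merges override keys into a default config without discarding sibling values (an illustrative call, not from the original script):

defaults  = {"paths": {"root": "/data", "cache": "/tmp"}, "workers": 2}
overrides = {"paths": {"cache": "/scratch"}}

merged = update_nested_dict(defaults, overrides)
# merged == {"paths": {"root": "/data", "cache": "/scratch"}, "workers": 2}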
From terraref/computing-pipeline: scripts/upload_directories_to_clowder.py
from pyclowder.connectors import Connector

# Set to True to upload one dataset from each product and then stop (a test run)
test_one = True

# ---------------------------------
# Clowder instance configuration
# ---------------------------------
clowder_admin_key = "SECRET_KEY"
clowder_host = "https://terraref.ncsa.illinois.edu/clowder/"
# The following user will be shown as the creator and owner of uploaded datasets
clowder_user   = "terrarefglobus+uamac@ncsa.illinois.edu"
clowder_pass   = "PASSWORD"
clowder_userid = "57adcb81c0a7465986583df1"
# The following space in Clowder will contain all uploaded datasets and collections
clowder_space = "571fb3e1e4b032ce83d95ecf"
# Maps local file paths to the paths Clowder sees when mount points differ
conn = Connector("", {}, mounted_paths={"/home/clowder/sites":"/home/clowder/sites"})

# ---------------------------------
# Filesystem configuration
# ---------------------------------
root_path = "/home/clowder/sites/ua-mac"

# Defines which products to upload (e.g. remove the "raw_data" key to upload only Level_1 data)
products_to_upload = {
    "raw_data": ["stereoTop", "flirIrCamera", "VNIR", "SWIR", "EnvironmentLogger"],
    "Level_1": ["scanner3DTop"]
}

# Products whose datasets are organized by date only (no timestamp level)
no_timestamps = ["EnvironmentLogger"]

# Defines start and end date to upload (inclusive)