# Assemble a CSV string (field-name row plus trait-value row) for the metadata payload
csv_data = ','.join(map(str, fields)) + '\n' + ','.join(map(str, trait_list)) + '\n'
print(csv_data)  # debug
# Attach the trait CSV to the dataset as JSON-LD metadata and mark the extraction complete
metadata = {
    "@context": {
        "@vocab": "https://clowder.ncsa.illinois.edu/clowder/assets/docs/api/index.html#!/files/uploadToDataset"
    },
    "dataset_id": parameters["datasetId"],
    "content": {"status": "COMPLETED", "csv": csv_data},
    "agent": {
        "@type": "cat:extractor",
        "extractor_id": parameters['host'] + "/api/extractors/" + extractorName
    }
}
extractors.upload_dataset_metadata_jsonld(mdata=metadata, parameters=parameters)
# Write the averaged traits to a local CSV, upload it to the dataset, then clean up
outfile = 'avg_traits.csv'
pcia.generate_average_csv(outfile, fields, trait_list)
extractors.upload_file_to_dataset(outfile, parameters)
os.remove(outfile)
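# Note: 'avg_traits.csv' is created in the current working directory. If
# several extractions can run concurrently on the same host, a unique
# per-run temporary file would avoid collisions -- a minimal sketch using
# the standard library (an assumption, not part of the original extractor):
#
#     import tempfile
#     fd, outfile = tempfile.mkstemp(suffix='.csv')
#     os.close(fd)
#     pcia.generate_average_csv(outfile, fields, trait_list)
#     extractors.upload_file_to_dataset(outfile, parameters)
#     os.remove(outfile)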
# Post a COMPLETED status for the dataset before uploading the image outputs
metadata = {
    "@context": {
        "@vocab": "https://clowder.ncsa.illinois.edu/clowder/assets/docs/api/index.html#!/files/uploadToDataset"
    },
    "dataset_id": parameters["datasetId"],
    "content": {"status": "COMPLETED"},
    "agent": {
        "@type": "cat:extractor",
        "extractor_id": parameters['host'] + "/api/extractors/" + extractorName
    }
}
extractors.upload_dataset_metadata_jsonld(mdata=metadata, parameters=parameters)
# Upload the processed left and right TIFF outputs to the dataset
extractors.upload_file_to_dataset(left_tiff_out, parameters)
extractors.upload_file_to_dataset(right_tiff_out, parameters)
# Tell Clowder this is completed so subsequent file updates don't daisy-chain
metadata = {
    "@context": {
        "@vocab": "https://clowder.ncsa.illinois.edu/clowder/assets/docs/api/index.html#!/files/uploadToDataset"
    },
    "dataset_id": parameters["datasetId"],
    "content": {"status": "COMPLETED"},
    "agent": {
        "@type": "cat:extractor",
        "extractor_id": parameters['host'] + "/api/extractors/" + extractorName
    }
}
extractors.upload_dataset_metadata_jsonld(mdata=metadata, parameters=parameters)
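
# The JSON-LD payload above is assembled three times with only "content"
# varying. A small helper could centralize the duplication -- a minimal
# sketch (the helper name is hypothetical, not part of the original extractor):
def build_dataset_metadata(parameters, extractor_name, content):
    """Build the JSON-LD metadata payload Clowder expects from this extractor."""
    return {
        "@context": {
            "@vocab": "https://clowder.ncsa.illinois.edu/clowder/assets/docs/api/index.html#!/files/uploadToDataset"
        },
        "dataset_id": parameters["datasetId"],
        "content": content,
        "agent": {
            "@type": "cat:extractor",
            "extractor_id": parameters['host'] + "/api/extractors/" + extractor_name
        }
    }

# Usage, equivalent to the inline dicts above:
#   metadata = build_dataset_metadata(parameters, extractorName,
#                                     {"status": "COMPLETED", "csv": csv_data})
#   extractors.upload_dataset_metadata_jsonld(mdata=metadata, parameters=parameters)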