# Extraction artifact (web banner, not part of the program): Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# Tail of the LAS-merge branch (indentation was lost in extraction; nesting
# below is implied, not shown). `merged_txt` is a sidecar text file listing
# every input LAS already folded into `merged_out`, so re-runs don't
# double-merge the same source file.
if os.path.exists(merged_txt):
# Check if contents
# Scan the sidecar line-by-line; a line equal to file_path means this
# input was merged on a previous run.
with open(merged_txt, 'r') as contents:
for entry in contents.readlines():
if entry.strip() == file_path:
already_merged = True
break
# Not seen before: clip this LAS to the plot boundary (writing the
# per-plot shard to out_file and appending into merged_out), then record
# file_path in the sidecar so the next run skips it.
if not already_merged:
clip_las(file_path, tuples, out_path=out_file, merged_path=merged_out)
with open(merged_txt, 'a') as contents:
contents.write(file_path+"\n")
# Upload the individual plot shards for optimizing las2height later
# check_file_in_dataset with remove=self.overwrite deletes any stale copy
# first when overwriting; upload only if absent or overwrite is requested.
found_in_dest = check_file_in_dataset(connector, host, secret_key, target_dsid, out_file, remove=self.overwrite)
if not found_in_dest or self.overwrite:
fileid = upload_to_dataset(connector, host, secret_key, target_dsid, out_file)
# Record the full Clowder file URL (host may or may not end with "/").
uploaded_file_ids.append(host + ("" if host.endswith("/") else "/") + "files/" + fileid)
self.created += 1
self.bytes += os.path.getsize(out_file)
# Upload the merged result if necessary
found_in_dest = check_file_in_dataset(connector, host, secret_key, target_dsid, merged_out, remove=self.overwrite)
if not found_in_dest or self.overwrite:
fileid = upload_to_dataset(connector, host, secret_key, target_dsid, merged_out)
uploaded_file_ids.append(host + ("" if host.endswith("/") else "/") + "files/" + fileid)
self.created += 1
self.bytes += os.path.getsize(merged_out)
# Trigger las2height extractor
# Kick off the downstream canopy-height extractor on the target dataset.
submit_extraction(connector, host, secret_key, target_dsid, "terra.3dscanner.las2height")
timestamp[:4], timestamp[5:7], timestamp[8:10], leaf_dataset))
# Resolve (creating as needed) the Clowder dataset hierarchy
# season / experiment / plot / YYYY / MM / DD / leaf dataset, and get the
# dataset ID that clipped outputs will be uploaded into.
# NOTE(review): assumes `timestamp` starts with an ISO date (YYYY-MM-DD...).
target_dsid = build_dataset_hierarchy_crawl(host, secret_key, self.clowder_user, self.clowder_pass, self.clowderspace,
                                            season_name, experiment_name, plot_display_name,
                                            timestamp[:4], timestamp[5:7], timestamp[8:10], leaf_ds_name=leaf_dataset)

# Local output path for the clipped product; make sure its directory exists.
out_file = self.sensors.create_sensor_path(timestamp, plot=plotname, subsensor=sensor_name, filename=filename)
out_dir = os.path.dirname(out_file)
if not os.path.exists(out_dir):
    os.makedirs(out_dir)

if filename.endswith(".tif") and (not file_exists(out_file) or self.overwrite):
    # If file is a geoTIFF, simply clip it to the plot boundary and upload it to Clowder.
    clip_raster(file_path, tuples, out_path=out_file, compress=True)

    # BUG FIX: this branch previously checked/uploaded `merged_out`, which is
    # only assigned in the LAS branch — a NameError on the first .tif, or a
    # stale LAS merge otherwise. Upload the raster we just wrote: `out_file`.
    found_in_dest = check_file_in_dataset(connector, host, secret_key, target_dsid, out_file, remove=self.overwrite)
    if not found_in_dest or self.overwrite:
        fileid = upload_to_dataset(connector, host, secret_key, target_dsid, out_file)
        # Record the full Clowder file URL (host may or may not end with "/").
        uploaded_file_ids.append(host + ("" if host.endswith("/") else "/") + "files/" + fileid)
        self.created += 1
        self.bytes += os.path.getsize(out_file)
elif filename.endswith(".las"):
"""If file is LAS, we can merge with any existing scan+plot output safely"""
# Merged per-scan LAS output lives next to out_file; a sidecar
# "<scan>_merged_contents.txt" records which inputs were already merged,
# preventing double-merging across re-runs.
merged_out = os.path.join(os.path.dirname(out_file), target_scan+"_merged.las")
merged_txt = merged_out.replace(".las", "_contents.txt")
already_merged = False
if os.path.exists(merged_txt):
# Check if contents
# Scan the sidecar for this exact file_path.
# NOTE(review): this span is cut off mid-loop by the chunk boundary;
# presumably a `break` follows (as in the copy at the top of this chunk) — confirm.
with open(merged_txt, 'r') as contents:
for entry in contents.readlines():
if entry.strip() == file_path:
already_merged = True