# NOTE(review): stray vendor text (Snyk banner) was injected into this file as a
# bare statement; kept as a comment so the module remains syntactically valid.
process_msg=None,
written=False,
write_msg=message
)
elif data is None:
message = "output empty, nothing written"
logger.debug((process_tile.id, message))
return ProcessInfo(
tile=process_tile,
processed=False,
process_msg=None,
written=False,
write_msg=message
)
else:
with Timer() as t:
self.config.output.write(process_tile=process_tile, data=data)
message = "output written in %s" % t.elapsed
logger.debug((process_tile.id, message))
return ProcessInfo(
tile=process_tile,
processed=False,
process_msg=None,
written=True,
write_msg=message
)
def _interpolate_from_baselevel(self, tile=None, baselevel=None):
    """
    Generate output for *tile* by resampling from a neighboring zoom level.

    Parameters
    ----------
    tile : tile object with ``get_parent()`` and ``affine`` — presumably a
        BufferedTile; TODO confirm against callers
    baselevel : str
        "higher": resample from the parent tile's raw output;
        "lower": mosaic the children tiles' raw output and resample from that
    """
    # Timer wraps the whole interpolation; its elapsed value is presumably
    # logged/returned in the part of this function not visible here.
    with Timer() as t:
        # resample from parent tile
        if baselevel == "higher":
            parent_tile = tile.get_parent()
            process_data = raster.resample_from_array(
                # raw output of the parent zoom level, read-only to avoid
                # recursively triggering baselevel interpolation
                in_raster=self.get_raw_output(parent_tile, _baselevel_readonly=True),
                in_affine=parent_tile.affine,
                out_tile=tile,
                # resampling method is configured per direction in config.baselevels
                resampling=self.config.baselevels["higher"],
                nodataval=self.config.output.nodata
            )
        # resample from children tiles
        elif baselevel == "lower":
            # NOTE(review): SOURCE is truncated here — the create_mosaic(...)
            # call is unclosed and the remainder of the "lower" branch (and
            # this function's return) is missing from this view.
            mosaic, mosaic_affine = raster.create_mosaic([
                (
                    child_tile,
                    self.get_raw_output(child_tile, _baselevel_readonly=True)
def _run_without_multiprocessing(process, zoom_levels):
    """
    Run the process sequentially in the current interpreter.

    Iterates over all process tiles of every requested zoom level, processes
    each one with ``_process_worker`` and yields the resulting ProcessInfo.
    Progress and total runtime are reported via debug logging.
    """
    logger.debug("run without multiprocessing")
    tiles_done = 0
    total_tiles = process.count_tiles(min(zoom_levels), max(zoom_levels))
    logger.debug("run process on %s tiles using 1 worker", total_tiles)
    with Timer() as duration:
        for zoom in zoom_levels:
            for tile in process.get_process_tiles(zoom):
                # process one tile at a time and hand the result to the caller
                result = _process_worker(process, tile)
                tiles_done += 1
                logger.debug("tile %s/%s finished", tiles_done, total_tiles)
                yield result
    logger.debug("%s tile(s) iterated in %s", str(tiles_done), duration.elapsed)
process.config.mode == "continue" and
process.config.output.tiles_exist(process_tile) and
_is_baselevel(config=process.config, zoom=process_tile.zoom)
):
logger.debug((process_tile.id, "tile exists, skipping"))
return ProcessInfo(
tile=process_tile,
processed=False,
process_msg="output already exists",
written=False,
write_msg="nothing written"
)
# execute on process tile
else:
with Timer() as t:
try:
output = process.execute(process_tile, raise_nodata=True)
except MapcheteNodataTile:
output = None
processor_message = "processed in %s" % t.elapsed
logger.debug((process_tile.id, processor_message))
writer_info = process.write(process_tile, output)
return ProcessInfo(
tile=process_tile,
processed=True,
process_msg=processor_message,
written=writer_info.written,
write_msg=writer_info.write_msg
)
def _run_with_multiprocessing(process, zoom_levels, multi, max_chunksize):
logger.debug("run with multiprocessing")
num_processed = 0
total_tiles = process.count_tiles(min(zoom_levels), max(zoom_levels))
logger.debug("run process on %s tiles using %s workers", total_tiles, multi)
with Timer() as t:
f = partial(_process_worker, process)
for zoom in zoom_levels:
pool = Pool(multi, _worker_sigint_handler)
try:
for process_info in pool.imap_unordered(
f,
process.get_process_tiles(zoom),
# set chunksize to between 1 and max_chunksize
chunksize=max_chunksize
):
num_processed += 1
logger.debug("tile %s/%s finished", num_processed, total_tiles)
yield process_info
except KeyboardInterrupt:
logger.error("Caught KeyboardInterrupt, terminating workers")
pool.terminate()