Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def __sub__(self, other):
    """Return a new Timer holding the difference of both elapsed times."""
    difference = self._elapsed - other._elapsed
    return Timer(elapsed=difference)
# NOTE(review): fragment — the enclosing function definition (and the `if`
# branch this code belongs to) lies outside this view.
try:
    # Let the output driver normalize the data before writing; an empty
    # tile is signalled by raising MapcheteNodataTile.
    output_data = output_writer.streamline_output(output_data)
except MapcheteNodataTile:
    output_data = None
if output_data is None:
    # Nothing to write: report an unwritten ProcessInfo for this tile.
    message = "output empty, nothing written"
    logger.debug((process_info.tile.id, message))
    return ProcessInfo(
        tile=process_info.tile,
        processed=process_info.processed,
        process_msg=process_info.process_msg,
        written=False,
        write_msg=message
    )
else:
    # Time the write so the duration can be included in the log message.
    with Timer() as t:
        output_writer.write(process_tile=process_info.tile, data=output_data)
    message = "output written in %s" % t
    logger.debug((process_info.tile.id, message))
    return ProcessInfo(
        tile=process_info.tile,
        processed=process_info.processed,
        process_msg=process_info.process_msg,
        written=True,
        write_msg=message
    )
else:
    # NOTE(review): this `else` pairs with an `if` outside this view; when
    # that branch is taken, the existing ProcessInfo is passed through
    # unchanged (nothing new was processed or written).
    return process_info
def _execute(self):
    """Run the user process for this tile and return its output data.

    If baselevels are configured and this tile's zoom lies outside the
    baselevel zoom range, the output is interpolated from other zoom
    levels instead of running the user process.

    Returns
    -------
    process output data as returned by the user process function.

    Raises
    ------
    MapcheteNodataTile
        Propagated unchanged so callers can treat the tile as empty.
    MapcheteProcessException
        Wraps any other exception raised inside the user process.
    """
    # If baselevel is active and zoom is outside of baselevel,
    # interpolate from other zoom levels.
    if self.config_baselevels:
        if self.tile.zoom < min(self.config_baselevels["zooms"]):
            return self._interpolate_from_baselevel("lower")
        elif self.tile.zoom > max(self.config_baselevels["zooms"]):
            return self._interpolate_from_baselevel("higher")
    # Otherwise, execute from process file.
    process_func = get_process_func(
        process_path=self.process_path, config_dir=self.config_dir
    )
    try:
        with Timer() as t:
            # Actually run process.
            process_data = process_func(
                MapcheteProcess(
                    tile=self.tile,
                    params=self.process_func_params,
                    input=self.input,
                    output_params=self.output_params
                ),
                **self.process_func_params
            )
    except MapcheteNodataTile:
        # An empty tile is expected control flow, not an error.
        raise
    except Exception as e:
        # Log process time together with the failing tile before wrapping.
        logger.exception((self.tile.id, "exception in user process", e, str(t)))
        # FIX: the original assigned the wrapped exception to an unused
        # variable (followed by a stray parenthesis) instead of raising it,
        # which both swallowed the error and broke the syntax.
        raise MapcheteProcessException(format_exc()) from e
    # Success path (restored from the truncated source): log timing and
    # hand the data back to the caller.
    logger.debug((self.tile.id, "processed in %s" % t))
    return process_data
# NOTE(review): fragment — the enclosing function definition is outside
# this view; it appears to return an (output, ProcessInfo) tuple.
# skip execution if overwrite is disabled and tile exists
if tile_process.skip:
    logger.debug((tile_process.tile.id, "tile exists, skipping"))
    # No output payload; report that nothing was processed or written.
    return None, ProcessInfo(
        tile=tile_process.tile,
        processed=False,
        process_msg="output already exists",
        written=False,
        write_msg="nothing written"
    )
# execute on process tile
else:
    # Time the execution so the duration can be reported.
    with Timer() as t:
        try:
            output = tile_process.execute()
        except MapcheteNodataTile:
            # An empty tile is valid; represented by the sentinel "empty".
            output = "empty"
    processor_message = "processed in %s" % t
    logger.debug((tile_process.tile.id, processor_message))
    # written/write_msg are None here — presumably writing is handled by
    # the caller; TODO confirm against the surrounding code.
    return output, ProcessInfo(
        tile=tile_process.tile,
        processed=True,
        process_msg=processor_message,
        written=None,
        write_msg=None
    )
def __add__(self, other):
    """Return a new Timer holding the sum of both elapsed times."""
    combined = self._elapsed + other._elapsed
    return Timer(elapsed=combined)
def _interpolate_from_baselevel(self, baselevel=None):
    """Create this tile's output by resampling from baselevel output.

    Parameters
    ----------
    baselevel : str or None
        "higher" resamples from the parent tile, "lower" from lower-zoom
        (children) tiles.
    """
    # This is a special tile derived from a pyramid which has the pixelbuffer setting
    # from the output pyramid but metatiling from the process pyramid. This is due to
    # performance reasons as for the usual case overview tiles do not need the
    # process pyramid pixelbuffers.
    tile = self.config_baselevels["tile_pyramid"].tile(*self.tile.id)
    # get output_tiles that intersect with process tile
    output_tiles = (
        list(self.output_reader.pyramid.tiles_from_bounds(tile.bounds, tile.zoom))
        if tile.pixelbuffer > self.output_reader.pyramid.pixelbuffer
        else self.output_reader.pyramid.intersecting(tile)
    )
    with Timer() as t:
        # resample from parent tile
        if baselevel == "higher":
            parent_tile = self.tile.get_parent()
            process_data = raster.resample_from_array(
                self.output_reader.read(parent_tile),
                in_affine=parent_tile.affine,
                out_tile=self.tile,
                resampling=self.config_baselevels["higher"],
                nodata=self.output_reader.output_params["nodata"]
            )
        # resample from children tiles
        elif baselevel == "lower":
            if self.output_reader.pyramid.pixelbuffer:
                lower_tiles = set([
                    y for y in chain(*[
                        self.output_reader.pyramid.tiles_from_bounds(
                            # NOTE(review): SOURCE is truncated here — the
                            # rest of this method is missing from view.
# NOTE(review): fragment — the `def` line and leading parameters of this
# function are missing from view; these are its trailing keyword defaults.
multiprocessing_start_method=None,
multiprocessing_module=None,
write_in_parent_process=False,
fkwargs=None,
skip_output_check=False
):
    # Total tile count over the full zoom range, used for worker sizing
    # and progress logging.
    total_tiles = process.count_tiles(min(zoom_levels), max(zoom_levels))
    # Never spawn more workers than there are tiles to process.
    workers = min([multi, total_tiles])
    num_processed = 0
    logger.debug("run process on %s tiles using %s workers", total_tiles, workers)
    # here we store the parents of processed tiles so we can update overviews
    # also in "continue" mode in case there were updates at the baselevel
    overview_parents = set()
    with Timer() as t, Executor(
        max_workers=workers,
        start_method=multiprocessing_start_method,
        multiprocessing_module=multiprocessing_module
    ) as executor:
        # Iterate zoom levels in order — presumably so baselevels are
        # processed before the overviews built from them; TODO confirm.
        for i, zoom in enumerate(zoom_levels):
            if skip_output_check:
                # don't check outputs and simply proceed
                todo = process.get_process_tiles(zoom)
            else:
                # check which process output already exists and which process tiles need
                # to be added to todo list
                todo = set()
                for process_info in _filter_skipable(
                    process=process,
                    # NOTE(review): SOURCE is truncated here.