def read(self, filename, fields=None, **kwargs):
    scene = Scene(
        reader=self.satpy_reader,
        filenames=[filename.path]
    )
    # If the user has not passed any fields to us, we load all of them by default.
    if fields is None:
        fields = scene.available_dataset_ids()
    # Load all selected fields
    scene.load(fields, **kwargs)
    if isinstance(fields[0], str):
        data_arrays = {field: scene.get(field) for field in fields}
    else:
        data_arrays = {field.name: scene.get(field) for field in fields}
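
# For reference, a minimal self-contained sketch of the same satpy pattern;
# the reader name and file path below are placeholders, not values taken
# from the snippet above.
from satpy import Scene

scene = Scene(reader="seviri_l1b_hrit", filenames=["/path/to/segment"])
names = scene.available_dataset_names()      # everything the reader can offer
scene.load(names)
data_arrays = {name: scene[name] for name in names}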
if levels[min(3, args.verbosity)] > logging.DEBUG:
    import warnings
    warnings.filterwarnings("ignore")
LOG.debug("Starting script with arguments: %s", " ".join(sys.argv))
# Set up dask and the number of workers
if args.num_workers:
    from multiprocessing.pool import ThreadPool
    dask.config.set(pool=ThreadPool(args.num_workers))
# Parse the provided files and search for files if directories were provided
scene_args['filenames'] = get_input_files(scene_args['filenames'])
# Create a Scene and analyze the provided files
LOG.info("Sorting and reading input files...")
try:
    scn = Scene(**scene_args)
except ValueError as e:
    LOG.error("{} | Enable debug messages (-vvv) or see log file for details.".format(str(e)))
    LOG.debug("Further error information: ", exc_info=True)
    return -1
except OSError:
    LOG.error("Could not open files. Enable debug messages (-vvv) or see log file for details.")
    LOG.debug("Further error information: ", exc_info=True)
    return -1
if args.list_products:
    print("\n".join(sorted(scn.available_dataset_names(composites=True))))
    return 0
# Rename the log file
if rename_log:
    rename_log_file(glue_name + scn.attrs['start_time'].strftime("_%Y%m%d_%H%M%S.log"))
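
# The dask worker setup above can be reproduced on its own; the pool size
# here is just an illustrative value, not one taken from the script.
import dask
from multiprocessing.pool import ThreadPool

dask.config.set(pool=ThreadPool(4))  # run dask's threaded scheduler on 4 workers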
b = gridded_scene.pop(v[2])
new_info = r.copy()
new_info["grid_data"] = new_info["grid_data"].replace(v[0], rgb_name)
new_info["product_name"] = rgb_name
data = np.memmap(new_info["grid_data"], dtype=new_info["data_type"],
                 mode="w+", shape=(3, new_info["grid_definition"]["height"], new_info["grid_definition"]["width"]))
data[0] = r.get_data_array()[:]
data[1] = g.get_data_array()[:]
data[2] = b.get_data_array()[:]
gridded_scene[rgb_name] = new_info
del data, new_info
# Create composites that satpy couldn't complete until after remapping
composite_names = f.missing_datasets
if composite_names:
    tmp_scene = Scene()
    for k, v in gridded_scene.items():
        ds_id = DatasetID.from_dict(v)
        dask_arr = da.from_array(v.get_data_array(), chunks=CHUNK_SIZE)
        tmp_scene[ds_id] = DataArray(dask_arr, attrs=v)
        tmp_scene[ds_id].attrs["area"] = this_grid_definition.to_satpy_area()
        if isinstance(v["sensor"], set):
            tmp_scene.attrs["sensor"].update(v["sensor"])
        else:
            tmp_scene.attrs["sensor"].add(v["sensor"])
    # Overwrite the wishlist so it includes the datasets assigned above
    tmp_scene.wishlist = f.wishlist.copy()
    comps, mods = tmp_scene.cpl.load_compositors(tmp_scene.attrs["sensor"])
    tmp_scene.dep_tree.compositors = comps
    tmp_scene.dep_tree.modifiers = mods
    tmp_scene.dep_tree.find_dependencies(tmp_scene.wishlist.copy())
    tmp_scene.generate_composites()
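
# The wrap-an-array-as-a-dask-backed-DataArray step used in the loop above,
# shown in isolation; the array shape, chunk size, and product name are
# placeholder values.
import numpy as np
import dask.array as da
from xarray import DataArray
from satpy import Scene

arr = np.zeros((512, 512), dtype=np.float32)   # stand-in for a remapped product
tmp_scene = Scene()
tmp_scene["example_product"] = DataArray(da.from_array(arr, chunks=256),
                                         attrs={"name": "example_product"})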
                    help='each occurrence increases verbosity 1 level through ERROR-WARNING-INFO-DEBUG (default INFO)')
parser.add_argument('-l', '--log', dest="log_fn", default=None,
                    help="specify the log filename")
subgroups = add_scene_argument_groups(parser)
subgroups += add_writer_argument_groups(parser)
args = parser.parse_args()
scene_args = {ga.dest: getattr(args, ga.dest) for ga in subgroups[0]._group_actions}
load_args = {ga.dest: getattr(args, ga.dest) for ga in subgroups[1]._group_actions}
writer_init_args = {ga.dest: getattr(args, ga.dest) for ga in subgroups[2]._group_actions}
writer_call_args = {ga.dest: getattr(args, ga.dest) for ga in subgroups[3]._group_actions}
levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
logging.basicConfig(level=levels[min(3, args.verbosity)], filename=args.log_fn)
scn = Scene(**scene_args)
scn.load(load_args['datasets'])
writer_args = {}
writer_args.update(writer_init_args)
writer_args.update(writer_call_args)
scn.save_datasets(writer='scmi', **writer_args)
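
# Stripped of the argparse plumbing, the flow above reduces to a few satpy
# calls; the reader name, file path, channel names, and output directory
# below are placeholders (the 'scmi' writer mirrors the script above).
from satpy import Scene

scn = Scene(reader="abi_l1b", filenames=["/path/to/OR_ABI-L1b-RadC.nc"])
scn.load(["C01", "C02"])
scn.save_datasets(writer="scmi", base_dir="/tmp/scmi_out")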
The ``fls_day_extra`` dataset, as produced by the `FogCompositorDayExtra` and
loaded using ``.load(["fls_day_extra"])``, is unique in the sense that it is
an `xarray.Dataset` rather than an `xarray.DataArray`. This means it can't
be stored with the usual satpy routines. Because some of its attributes
contain special types, it can't be stored with `Dataset.to_netcdf` either.
This function transfers the data variables as direct members of a new
`Scene` object and then uses the `cf_writer` to write them to a NetCDF file.

Args:
    sc : Scene
        Scene object with the already loaded ``fls_day_extra`` "composite"
    fn : str-like or path
        Path to which to write the NetCDF file
"""
s = satpy.Scene()
ds = sc["fls_day_extra"]
for k in ds.data_vars:
    s[k] = ds[k]
s.save_datasets(
    writer="cf",
    datasets=ds.data_vars.keys(),
    filename=str(fn))
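
# A quick way to sanity-check the written file is to open it back up with
# xarray; the filename here is a placeholder.
import xarray as xr

with xr.open_dataset("fls_day_extra.nc") as ds_out:
    print(list(ds_out.data_vars))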
def __init__(self, path_dem, *args, **kwargs):
    dem = pathlib.Path(appdirs.user_data_dir("fogpy")) / path_dem
    if not dem.exists():
        dl_dem(dem)
    filenames = [dem]
    self.elevation = satpy.Scene(reader="generic_image",
                                 filenames=filenames)
    self.elevation.load(["image"])
    super().__init__(*args, **kwargs)
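
# Loading a DEM (or any single-image raster) through the generic_image reader
# works the same way outside the class; the file path is a placeholder.
import satpy

elevation = satpy.Scene(reader="generic_image", filenames=["/path/to/dem.tif"])
elevation.load(["image"])
dem = elevation["image"]  # xarray.DataArray holding the raster band(s)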
def _create_scenes(self):
    """Create Scene objects for the selected files."""
    all_available_products = set()
    for group_id, file_group in self.file_groups.items():
        scn = self.scenes.get(group_id)
        if scn is None:
            # Need to create the Scene for the first time.
            # file_group includes what reader to use.
            # NOTE: We only allow a single reader at a time.
            self.scenes[group_id] = scn = Scene(filenames=file_group)
        all_available_products.update(scn.available_dataset_ids())
    # Update the widgets
    self.all_available_products = sorted(all_available_products)
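
# One way to build file groups that Scene(filenames=...) accepts as shown
# above is satpy.readers.group_files, which returns reader-to-file-list
# dicts; the reader name and paths below are placeholders.
from satpy import Scene
from satpy.readers import group_files

files = ["/path/to/granule_1.nc", "/path/to/granule_2.nc"]
groups = group_files(files, reader="abi_l1b")
scenes = {i: Scene(filenames=grp) for i, grp in enumerate(groups)}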
    for c in args.compositors:
        compositor_objects[c] = compositor_manager.get_compositor(c, **args.global_kwargs)
except (ValueError, KeyError):
    LOG.debug("Compositor initialization exception: ", exc_info=True)
    LOG.error("Compositor initialization failed (see log for details)")
    return STATUS_COMP_FAIL
try:
    LOG.info("Extracting swaths from data files available...")
    scene = f.create_scene(**args.subgroup_args["Frontend Swath Extraction"])

    # Determine whether we have a satpy Scene and whether it should be
    # converted to a P2G Scene to continue processing
    resample_method = args.subgroup_args["Remapping"].get("remap_method")
    is_satpy_resample_method = resample_method in SATPY_RESAMPLERS
    if is_satpy_resample_method and not isinstance(scene, Scene):
        raise RuntimeError("Resampling method '{}' only supports 'satpy' readers".format(resample_method))
    elif not is_satpy_resample_method and isinstance(scene, Scene):
        # Convert the satpy Scene to a P2G Scene to be compatible with old P2G resamplers
        scene = convert_satpy_to_p2g_swath(f, scene)

    if isinstance(scene, Scene):
        if not scene.datasets:
            LOG.error("No products were returned by the frontend")
            raise RuntimeError("No products were returned by the frontend")
        if args.keep_intermediate:
            raise RuntimeError("satpy readers do not currently support saving intermediate files")
    else:
        if (isinstance(scene, Scene) and not scene.datasets) or not scene:
            LOG.error("No products were returned by the frontend")
            raise RuntimeError("No products were returned by the frontend")
        if args.keep_intermediate: