# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
except KeyError:
return
optional_datasets = self._get_prereq_datasets(
comp_node.name,
optional_prereqs,
keepables,
skip=True
)
try:
composite = compositor(prereq_datasets,
optional_datasets=optional_datasets,
**self.attrs)
req_ids = [DatasetID.from_dict(req.attrs) for req in prereq_datasets]
opt_ids = [DatasetID.from_dict(req.attrs) for req in optional_datasets]
composite.attrs['prerequisites'] = req_ids
composite.attrs['optional_prerequisites'] = opt_ids
cid = DatasetID.from_dict(composite.attrs)
self.datasets[cid] = composite
# update the node with the computed DatasetID
if comp_node.name in self.wishlist:
self.wishlist.remove(comp_node.name)
self.wishlist.add(cid)
comp_node.name = cid
except IncompatibleAreas:
LOG.warning("Delaying generation of %s "
"because of incompatible areas",
str(compositor.id))
# Handle in-line composites
if 'compositor' in item:
# Create an unique temporary name for the composite
sub_comp_name = '_' + composite_name + '_dep_{}'.format(dep_num)
dep_num += 1
# Minimal composite config
sub_conf = {composite_type: {sub_comp_name: item}}
self._process_composite_config(
sub_comp_name, sub_conf, composite_type, sensor_id,
composite_config, **kwargs)
else:
# we want this prerequisite to act as a query with
# 'modifiers' being None otherwise it will be an empty
# tuple
item.setdefault('modifiers', None)
key = DatasetID.from_dict(item)
prereqs.append(key)
else:
prereqs.append(item)
options[prereq_type] = prereqs
if composite_type == 'composites':
options.update(**kwargs)
key = DatasetID.from_dict(options)
comp = loader(**options)
compositors[key] = comp
elif composite_type == 'modifiers':
modifiers[composite_name] = loader, options
sub_comp_name, sub_conf, composite_type, sensor_id,
composite_config, **kwargs)
else:
# we want this prerequisite to act as a query with
# 'modifiers' being None otherwise it will be an empty
# tuple
item.setdefault('modifiers', None)
key = DatasetID.from_dict(item)
prereqs.append(key)
else:
prereqs.append(item)
options[prereq_type] = prereqs
if composite_type == 'composites':
options.update(**kwargs)
key = DatasetID.from_dict(options)
comp = loader(**options)
compositors[key] = comp
elif composite_type == 'modifiers':
modifiers[composite_name] = loader, options
def replace_anc(dataset, parent_dataset):
    """Replace *dataset* in the *parent_dataset*'s `ancillary_variables` field.

    Scans the parent's ``attrs['ancillary_variables']`` list for the entry
    whose DatasetID matches *dataset* and swaps it in place.  A ``None``
    parent means there is nothing to update, so the call is a no-op.
    """
    if parent_dataset is None:
        return
    target_id = DatasetID.from_dict(dataset.attrs)
    ancillary = parent_dataset.attrs['ancillary_variables']
    for position, candidate in enumerate(ancillary):
        if target_id == DatasetID.from_dict(candidate.attrs):
            ancillary[position] = dataset
            return
optional_prereqs,
keepables,
skip=True
)
try:
composite = compositor(prereq_datasets,
optional_datasets=optional_datasets,
**self.attrs)
req_ids = [DatasetID.from_dict(req.attrs) for req in prereq_datasets]
opt_ids = [DatasetID.from_dict(req.attrs) for req in optional_datasets]
composite.attrs['prerequisites'] = req_ids
composite.attrs['optional_prerequisites'] = opt_ids
cid = DatasetID.from_dict(composite.attrs)
self.datasets[cid] = composite
# update the node with the computed DatasetID
if comp_node.name in self.wishlist:
self.wishlist.remove(comp_node.name)
self.wishlist.add(cid)
comp_node.name = cid
except IncompatibleAreas:
LOG.warning("Delaying generation of %s "
"because of incompatible areas",
str(compositor.id))
preservable_datasets = set(self.datasets.keys())
prereq_ids = set(p.name for p in prereqs)
opt_prereq_ids = set(p.name for p in optional_prereqs)
keepables |= preservable_datasets & (prereq_ids | opt_prereq_ids)
# even though it wasn't generated keep a list of what
except KeyError:
if composite_name in compositors or composite_name in modifiers:
return conf
raise ValueError("'compositor' missing or empty in {0}. Option keys = {1}".format(
composite_config, str(options.keys())))
options['name'] = composite_name
for prereq_type in ['prerequisites', 'optional_prerequisites']:
prereqs = []
for item in options.get(prereq_type, []):
if isinstance(item, dict):
# we want this prerequisite to act as a query with
# 'modifiers' being None otherwise it will be an empty
# tuple
item.setdefault('modifiers', None)
key = DatasetID.from_dict(item)
prereqs.append(key)
else:
prereqs.append(item)
options[prereq_type] = prereqs
if composite_type == 'composites':
options.update(**kwargs)
key = DatasetID.from_dict(options)
comp = loader(**options)
compositors[key] = comp
elif composite_type == 'modifiers':
modifiers[composite_name] = loader, options
def _resampled_scene(cls, datasets, destination, **resample_kwargs):
"""Generate a new scene with resampled *datasets*."""
new_scn = cls()
new_datasets = {}
destination_area = None
resamplers = {}
resampler = resample_kwargs.get('resampler')
for dataset, parent_dataset in dataset_walker(datasets):
ds_id = DatasetID.from_dict(dataset.attrs)
pres = None
if parent_dataset is not None:
pres = new_datasets[DatasetID.from_dict(parent_dataset.attrs)]
if ds_id in new_datasets:
replace_anc(dataset, pres)
continue
if dataset.attrs.get('area') is None:
if parent_dataset is None:
new_scn[ds_id] = dataset
else:
replace_anc(dataset, pres)
continue
if destination_area is None:
# FIXME: We should allow users to freeze based with specific
# dataset
destination_area = get_frozen_area(destination,
prereqs = []
for item in options.get(prereq_type, []):
if isinstance(item, dict):
# we want this prerequisite to act as a query with
# 'modifiers' being None otherwise it will be an empty
# tuple
item.setdefault('modifiers', None)
key = DatasetID.from_dict(item)
prereqs.append(key)
else:
prereqs.append(item)
options[prereq_type] = prereqs
if composite_type == 'composites':
options.update(**kwargs)
key = DatasetID.from_dict(options)
comp = loader(**options)
compositors[key] = comp
elif composite_type == 'modifiers':
modifiers[composite_name] = loader, options
def replace_anc(dataset, parent_dataset):
    """Replace *dataset* in the *parent_dataset*'s `ancillary_variables` field.

    Parameters:
        dataset: data array whose ``.attrs`` identify it via
            ``DatasetID.from_dict``.
        parent_dataset: object whose ``attrs['ancillary_variables']`` list is
            searched; ``None`` means there is no parent and nothing to do.
    """
    if parent_dataset is None:
        return
    current_dsid = DatasetID.from_dict(dataset.attrs)
    for idx, ds in enumerate(parent_dataset.attrs['ancillary_variables']):
        if current_dsid == DatasetID.from_dict(ds.attrs):
            # Swap the stale entry for the (possibly recomputed) dataset,
            # keeping its position in the list, then stop searching.
            parent_dataset.attrs['ancillary_variables'][idx] = dataset
            return