# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def _assert_internal_invariants(xarray_obj: Union[DataArray, Dataset, Variable]) -> None:
    """Validate that an xarray object satisfies its own internal invariants.

    This exists for the benefit of xarray's own test suite, but may be useful
    in external projects if they (ill-advisedly) create objects using xarray's
    private APIs.

    Parameters
    ----------
    xarray_obj : DataArray, Dataset, or Variable
        Object whose internal invariants are checked.

    Raises
    ------
    TypeError
        If ``xarray_obj`` is not one of the three supported xarray types.
    """
    # Dispatch to the type-specific checker. Variable is tested first because
    # it is the base building block; DataArray/Dataset checks cover their own
    # wrapped variables.
    if isinstance(xarray_obj, Variable):
        _assert_variable_invariants(xarray_obj)
    elif isinstance(xarray_obj, DataArray):
        _assert_dataarray_invariants(xarray_obj)
    elif isinstance(xarray_obj, Dataset):
        _assert_dataset_invariants(xarray_obj)
    else:
        # Bug fix: the original text was truncated and lacked the closing
        # parenthesis of this ``raise`` call, making the block a syntax error.
        raise TypeError(
            "{} is not a supported type for xarray invariant checks".format(
                type(xarray_obj)
            )
        )
# NOTE(review): truncated fragment — it begins inside a dict comprehension
# whose opening (presumably ``v_indexers = {``) lies outside this chunk, and
# it duplicates the body of ``remap_label_indexers`` that appears later in
# this file. Kept byte-identical; only comments added.
k: v.variable.data if isinstance(v, DataArray) else v
for k, v in indexers.items()
}
# Remap label-based indexers to positional ones via the low-level helper.
pos_indexers, new_indexes = indexing.remap_label_indexers(
obj, v_indexers, method=method, tolerance=tolerance
)
# attach indexer's coordinate to pos_indexers
for k, v in indexers.items():
if isinstance(v, Variable):
# Re-wrap the positional result so its dimension labels survive.
pos_indexers[k] = Variable(v.dims, pos_indexers[k])
elif isinstance(v, DataArray):
# drop coordinates found in indexers since .sel() already
# ensures alignments
coords = {k: var for k, var in v._coords.items() if k not in indexers}
pos_indexers[k] = DataArray(pos_indexers[k], coords=coords, dims=v.dims)
return pos_indexers, new_indexes
"""Remap indexers from obj.coords.
If indexer is an instance of DataArray and it has coordinate, then this coordinate
will be attached to pos_indexers.
Returns
-------
pos_indexers: Same type of indexers.
np.ndarray or Variable or DataArray
new_indexes: mapping of new dimensional-coordinate.
"""
from .dataarray import DataArray
indexers = either_dict_or_kwargs(indexers, indexers_kwargs, "remap_label_indexers")
v_indexers = {
k: v.variable.data if isinstance(v, DataArray) else v
for k, v in indexers.items()
}
pos_indexers, new_indexes = indexing.remap_label_indexers(
obj, v_indexers, method=method, tolerance=tolerance
)
# attach indexer's coordinate to pos_indexers
for k, v in indexers.items():
if isinstance(v, Variable):
pos_indexers[k] = Variable(v.dims, pos_indexers[k])
elif isinstance(v, DataArray):
# drop coordinates found in indexers since .sel() already
# ensures alignments
coords = {k: var for k, var in v._coords.items() if k not in indexers}
pos_indexers[k] = DataArray(pos_indexers[k], coords=coords, dims=v.dims)
return pos_indexers, new_indexes
# NOTE(review): truncated fragment — the enclosing rolling-reduction method
# and the computation of ``shift``, ``axis``, ``padded`` and ``func`` lie
# outside this chunk.
valid = (slice(None),) * axis + (slice(-shift, None),)
padded = padded.pad_with_fill_value({self.dim: (0, -shift)})
if isinstance(padded.data, dask_array_type):
# Guard: the dask path is not expected to be taken here; the call below
# is intentionally unreachable dead code kept from the original.
raise AssertionError("should not be reachable")
values = dask_rolling_wrapper(
func, padded.data, window=self.window, min_count=min_count, axis=axis
)
else:
values = func(
padded.data, window=self.window, min_count=min_count, axis=axis
)
if self.center:
# Trim the padding so the result is centred on the original windows.
values = values[valid]
result = DataArray(values, self.obj.coords)
return result
# NOTE(review): truncated fragment — this is the dispatch tail of an
# ``apply_ufunc``-style function; the signature and the construction of
# ``this_apply`` / ``variables_vfunc`` lie outside this chunk.
keep_attrs=keep_attrs,
dask=dask,
)
return apply_groupby_func(this_apply, *args)
elif any(is_dict_like(a) for a in args):
# Any dict-like argument routes everything through the Dataset path.
return apply_dataset_vfunc(
variables_vfunc,
*args,
signature=signature,
join=join,
exclude_dims=exclude_dims,
dataset_join=dataset_join,
fill_value=dataset_fill_value,
keep_attrs=keep_attrs,
)
elif any(isinstance(a, DataArray) for a in args):
# DataArray arguments (with no Dataset present) take the DataArray path.
return apply_dataarray_vfunc(
variables_vfunc,
*args,
signature=signature,
join=join,
exclude_dims=exclude_dims,
keep_attrs=keep_attrs,
)
elif any(isinstance(a, Variable) for a in args):
# Bare Variables are applied directly, with no coordinate handling.
return variables_vfunc(*args)
else:
# Plain arrays / scalars: fall through to the raw ufunc application.
return apply_array_ufunc(func, *args, dask=dask)
def _coord_to_xr(name: str, c: Coordinate) -> XrDataArray:
    """Turn a named ``Coordinate`` object into an ``xr.DataArray``.

    The returned array is suitable for defining coordinates on an
    ``xr.Dataset`` or ``xr.DataArray``.
    """
    attrs = {'units': c.units, 'resolution': c.resolution}
    return XrDataArray(
        c.values,
        coords={name: c.values},
        dims=(name,),
        attrs=attrs,
    )
# NOTE(review): truncated fragment — tail of an ``apply_dataarray_vfunc``-style
# helper; the enclosing ``if`` that pairs with the ``else`` below, and the
# function signature, lie outside this chunk.
name = args[0].name
else:
name = result_name(args)
result_coords = build_output_coords(args, signature, exclude_dims)
# Strip DataArrays down to their Variables before applying ``func``.
data_vars = [getattr(a, "variable", a) for a in args]
result_var = func(*data_vars)
if signature.num_outputs > 1:
# One DataArray per output, pairing each result with its coords.
out = tuple(
DataArray(variable, coords, name=name, fastpath=True)
for variable, coords in zip(result_var, result_coords)
)
else:
(coords,) = result_coords
out = DataArray(result_var, coords, name=name, fastpath=True)
return out
that were pandas objects have been converted into native xarray objects.
"""
# NOTE(review): truncated fragment — the ``def`` line and the start of the
# docstring above lie outside this chunk; ``objects`` is a parameter of the
# enclosing function.
from .dataarray import DataArray
from .dataset import Dataset
out = []
for obj in objects:
if isinstance(obj, Dataset):
# Datasets already behave like mappings of variables; pass through.
variables: "DatasetLike" = obj
else:
variables = {}
if isinstance(obj, PANDAS_TYPES):
# NOTE(review): ``iteritems`` was removed in pandas 2.0 — confirm the
# pinned pandas version, or this should become ``obj.items()``.
obj = dict(obj.iteritems())
for k, v in obj.items():
if isinstance(v, PANDAS_TYPES):
# Wrap pandas values as DataArrays so downstream code sees
# native xarray objects only.
v = DataArray(v)
variables[k] = v
out.append(variables)
return out
zeros_like
ones_like
"""
# NOTE(review): truncated fragment — the ``def full_like(...)`` line and the
# start of its docstring lie outside this chunk; ``other``, ``fill_value``
# and ``dtype`` are its parameters.
from .dataarray import DataArray
from .dataset import Dataset
from .variable import Variable
if isinstance(other, Dataset):
# Fill every data variable, preserving the coords and attrs of ``other``.
data_vars = {
k: _full_like_variable(v, fill_value, dtype)
for k, v in other.data_vars.items()
}
return Dataset(data_vars, coords=other.coords, attrs=other.attrs)
elif isinstance(other, DataArray):
return DataArray(
_full_like_variable(other.variable, fill_value, dtype),
dims=other.dims,
coords=other.coords,
attrs=other.attrs,
name=other.name,
)
elif isinstance(other, Variable):
return _full_like_variable(other, fill_value, dtype)
else:
raise TypeError("Expected DataArray, Dataset, or Variable")
# NOTE(review): truncated fragment — tail of a cdms2-to-xarray converter;
# ``variable`` (a cdms2 variable) is supplied by the enclosing function.
values = np.asarray(variable)
name = variable.id
dims = variable.getAxisIds()
coords = {}
# One 1-D coordinate per axis, with cdms2 bookkeeping attributes filtered out.
for axis in variable.getAxisList():
coords[axis.id] = DataArray(
np.asarray(axis),
dims=[axis.id],
attrs=_filter_attrs(axis.attributes, cdms2_ignored_attrs),
)
grid = variable.getGrid()
if grid is not None:
# Curvilinear grids carry 2-D lon/lat coordinates not listed as axes;
# add them keyed by the grid's axis ids.
ids = [a.id for a in grid.getAxisList()]
for axis in grid.getLongitude(), grid.getLatitude():
if axis.id not in variable.getAxisIds():
coords[axis.id] = DataArray(
np.asarray(axis[:]),
dims=ids,
attrs=_filter_attrs(axis.attributes, cdms2_ignored_attrs),
)
attrs = _filter_attrs(variable.attributes, cdms2_ignored_attrs)
dataarray = DataArray(values, dims=dims, coords=coords, name=name, attrs=attrs)
# Round-trip through a Dataset so CF conventions are decoded consistently.
return decode_cf(dataarray.to_dataset())[dataarray.name]