# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_Variable():
    """Check Variable equality semantics: equal to itself, not equal to non-Variables."""
    # NOTE: the trailing comma is required — ('lat') is just the string 'lat',
    # while dimensions is declared as a tuple of names (see Variable usage
    # elsewhere in this file, e.g. dimensions=('latitude',)).
    res = dataset.Variable(dimensions=('lat',), data=np.array([0.0]), attributes={})
    assert res == res
    assert res != 1
'source': index.filestream.path,
'filter_by_keys': filter_by_keys,
'encode_cf': encode_cf,
}
attributes = build_dataset_attributes(index, filter_by_keys, encoding)
return dimensions, variables, attributes, encoding
@attr.attrs()
class Dataset(object):
    """
    Map a GRIB file to the NetCDF Common Data Model with CF Conventions.
    """
    # Dimension name -> size (number of points along that dimension).
    dimensions = attr.attrib(type=T.Dict[str, int])
    # Variable name -> Variable, covering both data and coordinate variables.
    variables = attr.attrib(type=T.Dict[str, Variable])
    # File-level (global) attributes.
    attributes = attr.attrib(type=T.Dict[str, T.Any])
    # Open/encoding details; includes keys such as 'source', 'filter_by_keys'
    # and 'encode_cf' as assembled by the builder in this module.
    encoding = attr.attrib(type=T.Dict[str, T.Any])
def open_fileindex(
    path, grib_errors='warn', indexpath='{path}.{short_hash}.idx', index_keys=ALL_KEYS
):
    """Open the GRIB file at *path* and return a message index over *index_keys*.

    *grib_errors* controls how decoding problems are handled by the stream;
    *indexpath* is the template for the on-disk index file location.
    """
    filestream = messages.FileStream(
        path, message_class=cfmessage.CfMessage, errors=grib_errors
    )
    return filestream.index(index_keys, indexpath=indexpath)
def open_file(
path,
grib_errors='warn',
indexpath='{path}.{short_hash}.idx',
filter_by_keys={},
offsets=offsets,
missing_value=missing_value,
geo_ndim=len(geo_dims),
)
if 'time' in coord_vars and 'step' in coord_vars:
# add the 'valid_time' secondary coordinate
dims, time_data = cfmessage.build_valid_time(
coord_vars['time'].data,
coord_vars['step'].data,
)
attrs = COORD_ATTRS['valid_time']
coord_vars['valid_time'] = Variable(dimensions=dims, data=time_data, attributes=attrs)
data_var_attrs['coordinates'] = ' '.join(coord_vars.keys())
data_var = Variable(dimensions=dimensions, data=data, attributes=data_var_attrs)
dims = collections.OrderedDict((d, s) for d, s in zip(dimensions, data_var.data.shape))
return dims, data_var, coord_vars
latitudes = np.array(first['distinctLatitudes'])
geo_coord_vars['latitude'] = Variable(
dimensions=('latitude',), data=latitudes, attributes=COORD_ATTRS['latitude'].copy()
)
if latitudes[0] > latitudes[-1]:
geo_coord_vars['latitude'].attributes['stored_direction'] = 'decreasing'
geo_coord_vars['longitude'] = Variable(
dimensions=('longitude',),
data=np.array(first['distinctLongitudes']),
attributes=COORD_ATTRS['longitude'],
)
elif 'geography' in encode_cf and grid_type in GRID_TYPES_2D_NON_DIMENSION_COORDS:
geo_dims = ('y', 'x')
geo_shape = (index.getone('Ny'), index.getone('Nx'))
try:
geo_coord_vars['latitude'] = Variable(
dimensions=('y', 'x'),
data=np.array(first['latitudes']).reshape(geo_shape),
attributes=COORD_ATTRS['latitude'],
)
geo_coord_vars['longitude'] = Variable(
dimensions=('y', 'x'),
data=np.array(first['longitudes']).reshape(geo_shape),
attributes=COORD_ATTRS['longitude'],
)
except KeyError: # pragma: no cover
if errors != 'ignore':
log.warning('ecCodes provides no latitudes/longitudes for gridType=%r', grid_type)
else:
geo_dims = ('values',)
geo_shape = (index.getone('numberOfPoints'),)
# add secondary coordinates if ecCodes provides them
)
geo_coord_vars['longitude'] = Variable(
dimensions=('y', 'x'),
data=np.array(first['longitudes']).reshape(geo_shape),
attributes=COORD_ATTRS['longitude'],
)
except KeyError: # pragma: no cover
if errors != 'ignore':
log.warning('ecCodes provides no latitudes/longitudes for gridType=%r', grid_type)
else:
geo_dims = ('values',)
geo_shape = (index.getone('numberOfPoints'),)
# add secondary coordinates if ecCodes provides them
try:
latitude = first['latitudes']
geo_coord_vars['latitude'] = Variable(
dimensions=('values',), data=np.array(latitude), attributes=COORD_ATTRS['latitude']
)
longitude = first['longitudes']
geo_coord_vars['longitude'] = Variable(
dimensions=('values',),
data=np.array(longitude),
attributes=COORD_ATTRS['longitude'],
)
except KeyError: # pragma: no cover
if errors != 'ignore':
log.warning('ecCodes provides no latitudes/longitudes for gridType=%r', grid_type)
return geo_dims, geo_shape, geo_coord_vars
data = OnDiskArray(
stream=index.filestream,
shape=shape,
offsets=offsets,
missing_value=missing_value,
geo_ndim=len(geo_dims),
)
if 'time' in coord_vars and 'step' in coord_vars:
# add the 'valid_time' secondary coordinate
dims, time_data = cfmessage.build_valid_time(
coord_vars['time'].data,
coord_vars['step'].data,
)
attrs = COORD_ATTRS['valid_time']
coord_vars['valid_time'] = Variable(dimensions=dims, data=time_data, attributes=attrs)
data_var_attrs['coordinates'] = ' '.join(coord_vars.keys())
data_var = Variable(dimensions=dimensions, data=data, attributes=data_var_attrs)
dims = collections.OrderedDict((d, s) for d, s in zip(dimensions, data_var.data.shape))
return dims, data_var, coord_vars
log=LOG, # type: logging.Logger
):
# type: (...) -> T.Tuple[T.Tuple[str, ...], T.Tuple[int, ...], T.Dict[str, Variable]]
first = index.first()
geo_coord_vars = collections.OrderedDict() # type: T.Dict[str, Variable]
grid_type = index.getone('gridType')
if 'geography' in encode_cf and grid_type in GRID_TYPES_DIMENSION_COORDS:
geo_dims = ('latitude', 'longitude') # type: T.Tuple[str, ...]
geo_shape = (index.getone('Ny'), index.getone('Nx')) # type: T.Tuple[int, ...]
latitudes = np.array(first['distinctLatitudes'])
geo_coord_vars['latitude'] = Variable(
dimensions=('latitude',), data=latitudes, attributes=COORD_ATTRS['latitude'].copy()
)
if latitudes[0] > latitudes[-1]:
geo_coord_vars['latitude'].attributes['stored_direction'] = 'decreasing'
geo_coord_vars['longitude'] = Variable(
dimensions=('longitude',),
data=np.array(first['distinctLongitudes']),
attributes=COORD_ATTRS['longitude'],
)
elif 'geography' in encode_cf and grid_type in GRID_TYPES_2D_NON_DIMENSION_COORDS:
geo_dims = ('y', 'x')
geo_shape = (index.getone('Ny'), index.getone('Nx'))
try:
geo_coord_vars['latitude'] = Variable(
dimensions=('y', 'x'),
data=np.array(first['latitudes']).reshape(geo_shape),
attributes=COORD_ATTRS['latitude'],
)
geo_coord_vars['longitude'] = Variable(
dimensions=('y', 'x'),
data=np.array(first['longitudes']).reshape(geo_shape),