# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_getitem_list(cache):
    """List-based isel/sel on 'number' must match numpy fancy indexing."""
    data = xarray_store.open_dataset(TEST_DATA, cache=cache).data_vars['t']
    values = data.values[:]
    # Check both an ordered and a shuffled index list.
    for subset in ([2, 3, 4, 5], [4, 3, 2, 5]):
        assert data.isel(number=subset).mean() == values[subset].mean()
        assert data.sel(number=subset).mean() == values[subset].mean()
def test_reanalysis_Dataset(test_file):
    """Fetch a CDS reanalysis sample, open it with cfgrib and round-trip to netCDF.

    Passes if ``open_dataset`` and ``to_netcdf`` both complete without raising.
    """
    dataset, request, key_count = TEST_FILES[test_file]
    path = cdscommon.ensure_data(dataset, request, name='cds-' + test_file + '-{uuid}.grib')
    res = cfgrib.xarray_store.open_dataset(path)
    # Strip the '.grib' suffix explicitly rather than slicing a magic -5.
    res.to_netcdf(path[: -len('.grib')] + '.nc')
def test_cds():
    """translate_coords with the CDS data model must rename dims and coords."""
    source = xarray_store.open_dataset(TEST_DATA1)
    translated = cfcoords.translate_coords(source, coord_model=datamodels.CDS)

    expected_dims = {'forecast_reference_time', 'lat', 'lon', 'plev', 'realization'}
    assert set(translated.dims) == expected_dims
    # The coordinate set adds the non-dimension coordinates.
    assert set(translated.coords) == expected_dims | {'leadtime', 'time'}

    # The second sample must open without raising.
    xarray_store.open_dataset(TEST_DATA2)
def test_open_dataset():
    """Open the sample GRIB and sanity-check metadata, dims and error handling."""
    dataset = xarray_store.open_dataset(TEST_DATA)
    assert dataset.attrs['GRIB_edition'] == 1

    temperature = dataset['t']
    assert temperature.attrs['GRIB_gridType'] == 'regular_ll'
    assert temperature.attrs['units'] == 'K'
    assert temperature.dims == ('number', 'time', 'isobaricInhPa', 'latitude', 'longitude')
    assert temperature.mean() > 0.0

    # Asking for a different engine on a GRIB file must be rejected.
    with pytest.raises(ValueError):
        xarray_store.open_dataset(TEST_DATA, engine='netcdf4')

    # errors='warn' skips problematic messages but still yields a dataset.
    partial = xarray_store.open_dataset(TEST_IGNORE, backend_kwargs={'errors': 'warn'})
    assert 'isobaricInhPa' in partial.dims
def test_large_Dataset():
    """Fetch a deliberately large CDS sample and round-trip it to netCDF.

    Inflates the request (many lead times, coarse 1x1 grid) to exercise the
    open/convert path on a big file; passes if no exception is raised.
    """
    dataset, request, key_count = TEST_FILES['seasonal-original-pressure-levels-ecmwf']
    # make the request large
    request['leadtime_hour'] = list(range(720, 1445, 24))
    request['grid'] = ['1', '1']
    path = cdscommon.ensure_data(dataset, request, name='cds-' + dataset + '-LARGE-{uuid}.grib')
    res = cfgrib.xarray_store.open_dataset(path)
    # Strip the '.grib' suffix explicitly rather than slicing a magic -5.
    res.to_netcdf(path[: -len('.grib')] + '.nc')
def test_all(cache):
    """Reading all values must produce the expected shape and the same mean."""
    data = xarray_store.open_dataset(TEST_DATA, cache=cache).data_vars['t']
    values = data.values[:]
    assert values.shape == (10, 4, 2, 61, 120)
    assert data.mean() == values.mean()
def test_canonical_dataset_to_grib(grib_name, tmpdir):
    """Round-trip a dataset through canonical_dataset_to_grib and re-check it."""
    grib_path = os.path.join(SAMPLE_DATA_FOLDER, grib_name + '.grib')
    out_path = str(tmpdir.join(grib_name + '.grib'))

    original = xarray_store.open_dataset(grib_path)
    xarray_to_grib.canonical_dataset_to_grib(original, out_path)
    # The re-read file must be identical to what was written.
    assert original.equals(xarray_store.open_dataset(out_path))
    assert original.attrs['GRIB_edition'] == 1

    temperature = original['t']
    assert temperature.attrs['GRIB_gridType'] == 'regular_ll'
    assert temperature.attrs['units'] == 'K'
    assert temperature.dims == ('number', 'time', 'isobaricInhPa', 'latitude', 'longitude')
    assert temperature.mean() > 0.0

    # A non-cfgrib engine on a GRIB file must be rejected.
    with pytest.raises(ValueError):
        xarray_store.open_dataset(TEST_DATA, engine='netcdf4')

    # Both lenient error policies still yield a usable dataset.
    for policy in ('warn', 'ignore'):
        partial = xarray_store.open_dataset(TEST_IGNORE, backend_kwargs={'errors': policy})
        assert 'isobaricInhPa' in partial.dims

    # The strict policy must raise on the same file.
    with pytest.raises(ValueError):
        xarray_store.open_dataset(TEST_IGNORE, backend_kwargs={'errors': 'raise'})
def test_open_dataset(grib_name):
    """Each sample file must open without caching; print its repr for the log."""
    path = os.path.join(SAMPLE_DATA_FOLDER, grib_name + '.grib')
    dataset = xarray_store.open_dataset(path, cache=False)
    print(dataset)
def test_Dataset(test_file):
    """Fetch a CDS sample, open it with cfgrib and round-trip to netCDF.

    Passes if ``open_dataset`` and ``to_netcdf`` both complete without raising.
    """
    dataset, request, key_count = TEST_FILES[test_file]
    path = cdscommon.ensure_data(dataset, request, name='cds-' + test_file + '-{uuid}.grib')
    res = cfgrib.xarray_store.open_dataset(path)
    # Strip the '.grib' suffix explicitly rather than slicing a magic -5.
    res.to_netcdf(path[: -len('.grib')] + '.nc')