# Imports required by the test methods below; the methods belong to a
# unittest.TestCase subclass that also defines self.area_def (see the sketch
# after the last test for one way such an area could be constructed).
import os

import numpy

from pyresample import geometry, kd_tree


def test_nearest_multi_unraveled(self):
    data = numpy.fromfunction(lambda y, x: y * x, (50, 10))
    lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
    lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
    swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
    data_multi = numpy.dstack((data, data, data))
    res = kd_tree.resample_nearest(swath_def, data_multi,
                                   self.area_def, 50000, segments=1)
    cross_sum = res.sum()
    expected = 3 * 15874591.0
    self.assertEqual(cross_sum, expected,
                     msg='Swath multi channel resampling nearest failed')

def test_masked_fill_int(self):
    data = numpy.ones((50, 10)).astype('int')
    lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
    lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
    swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
    res = kd_tree.resample_nearest(swath_def, data.ravel(),
                                   self.area_def, 50000, fill_value=None,
                                   segments=1)
    expected_fill_mask = numpy.fromfile(os.path.join(os.path.dirname(__file__),
                                                     'test_files',
                                                     'mask_test_fill_value.dat'),
                                        sep=' ').reshape((800, 800))
    fill_mask = res.mask
    self.assertTrue(numpy.array_equal(fill_mask, expected_fill_mask),
                    msg='Failed to create fill mask on integer data')
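
# Note: passing fill_value=None above makes kd_tree.resample_nearest return a
# numpy masked array (cells with no neighbour inside the radius are masked
# rather than filled), which is why the test inspects res.mask.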

def test_nearest_empty_multi_masked(self):
    data = numpy.fromfunction(lambda y, x: y * x, (50, 10))
    lons = numpy.fromfunction(lambda y, x: 165 + x, (50, 10))
    lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
    data_multi = numpy.column_stack((data.ravel(), data.ravel(),
                                     data.ravel()))
    swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
    res = kd_tree.resample_nearest(swath_def, data_multi,
                                   self.area_def, 50000, segments=1,
                                   fill_value=None)
    self.assertEqual(res.shape, (800, 800, 3),
                     msg='Swath resampling nearest empty multi masked failed')


def test_nearest_remap(self):
    data = numpy.fromfunction(lambda y, x: y * x, (50, 10))
    lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
    lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
    swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
    res = kd_tree.resample_nearest(swath_def, data.ravel(),
                                   self.area_def, 50000, segments=1)
    remap = kd_tree.resample_nearest(self.area_def, res.ravel(),
                                     swath_def, 5000, segments=1)
    cross_sum = remap.sum()
    expected = 22275.0
    self.assertEqual(cross_sum, expected,
                     msg='Grid remapping nearest failed')


def test_masked_nearest_1d(self):
    data = numpy.ones((800, 800))
    data[:400, :] = 2
    lons = numpy.fromfunction(lambda x: 3 + x / 100., (500,))
    lats = numpy.fromfunction(lambda x: 75 - x / 10., (500,))
    swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
    mask = numpy.ones((800, 800))
    mask[400:, :] = 0
    masked_data = numpy.ma.array(data, mask=mask)
    res = kd_tree.resample_nearest(self.area_def, masked_data.ravel(),
                                   swath_def, 50000, segments=1)
    self.assertEqual(res.mask.sum(), 108,
                     msg='Swath resampling masked nearest 1d failed')
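
# The tests above reference self.area_def, which is defined by the test
# fixture and not shown in this excerpt.  As an illustration only, an
# 800 x 800 stereographic target area for kd_tree resampling could be built
# with pyresample's geometry.AreaDefinition like this; the projection
# parameters and extent below are assumptions, not necessarily the fixture's
# actual values.
area_def = geometry.AreaDefinition(
    'areaD', 'Example stereographic area', 'areaD',
    {'proj': 'stere', 'lat_0': 50.0, 'lon_0': 8.0,
     'lat_ts': 50.0, 'a': 6378144.0, 'b': 6356759.0},
    800, 800,
    (-1370912.72, -909968.64, 1029087.28, 1490031.36))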
# Note: the longitude count below divides by new_grid_delta_lat, i.e. the
# grid spacing is assumed to be the same in latitude and longitude.
num_lons = int((new_grid_max_lon - new_grid_min_lon) / new_grid_delta_lat) + 1

if (num_lats > 0) and (num_lons > 0):
    # linspace is preferred when using floats!
    lat_tmp = np.linspace(new_grid_min_lat, new_grid_max_lat, num=int(num_lats))
    lon_tmp = np.linspace(new_grid_min_lon, new_grid_max_lon, num=int(num_lons))

    new_grid_lon, new_grid_lat = np.meshgrid(lon_tmp, lat_tmp)

    # define the lat/lon points of the new grid
    new_grid = pr.geometry.GridDefinition(lons=new_grid_lon,
                                          lats=new_grid_lat)

if mapping_method == 'nearest_neighbor':
    data_latlon_projection = \
        pr.kd_tree.resample_nearest(orig_grid, orig_field, new_grid,
                                    radius_of_influence=radius_of_influence,
                                    fill_value=None,
                                    nprocs=nprocs_user)
elif mapping_method == 'bin_average':
    # uniform weights: every neighbour within the radius counts equally
    wf = lambda r: 1
    data_latlon_projection = \
        pr.kd_tree.resample_custom(orig_grid, orig_field, new_grid,
                                   radius_of_influence=radius_of_influence,
                                   weight_funcs=wf,
                                   fill_value=None,
                                   nprocs=nprocs_user)
else:
    raise ValueError('mapping_method must be nearest_neighbor or bin_average.\n'
                     'Found mapping_method = %s' % mapping_method)
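
# Sketch (not part of the original function): the 'bin_average' branch above
# uses uniform weights (wf = lambda r: 1), so every neighbour inside
# radius_of_influence contributes equally.  An inverse-distance weighting
# could be passed to pr.kd_tree.resample_custom instead; the function name
# and the small epsilon guard against division by zero are illustrative
# assumptions.
def inverse_distance_weight(r):
    """Weight neighbours by 1 / distance (r is the neighbour distance in metres)."""
    return 1.0 / (r + 1e-10)

# Example usage with the same arguments as the bin_average branch:
# pr.kd_tree.resample_custom(orig_grid, orig_field, new_grid,
#                            radius_of_influence=radius_of_influence,
#                            weight_funcs=inverse_distance_weight,
#                            fill_value=None, nprocs=nprocs_user)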
# Fragment from a larger method: df, index, cmaqvar, grid1, grid2, r, n,
# weight_func, sites, lons and NaN are defined earlier in the enclosing code.
utc = df.utcoffset.values[index]
vals = pd.Series(dtype=df.Obs.dtype)
date = pd.Series(dtype=df.datetime.dtype)
site = pd.Series(dtype=df.Site_Code.dtype)
utcoffset = pd.Series(dtype=df.utcoffset.dtype)
for i, j in enumerate(self.cmaq.indexdates):
    if interp.lower() == 'idw':
        val = kd_tree.resample_custom(grid1, cmaqvar[i, :, :].squeeze(), grid2,
                                      radius_of_influence=r, fill_value=NaN,
                                      neighbours=n, weight_funcs=weight_func,
                                      nprocs=2).squeeze()
    elif interp.lower() == 'gauss':
        val = kd_tree.resample_gauss(grid1, cmaqvar[i, :, :].squeeze(), grid2,
                                     radius_of_influence=r, sigmas=r / 2.,
                                     fill_value=NaN, neighbours=n, nprocs=2).squeeze()
    else:
        interp = 'nearest'
        val = kd_tree.resample_nearest(grid1, cmaqvar[i, :, :].squeeze(), grid2,
                                       radius_of_influence=r, fill_value=NaN,
                                       nprocs=2).squeeze()
    # Series.append was removed in pandas 2.0; use pd.concat to accumulate.
    vals = pd.concat([vals, pd.Series(val)], ignore_index=True)
    date = pd.concat([date, pd.Series([self.cmaq.dates[j] for k in lons])], ignore_index=True)
    site = pd.concat([site, pd.Series(sites)], ignore_index=True)
    utcoffset = pd.concat([utcoffset, pd.Series(utc)], ignore_index=True)
dfs = pd.concat([vals, date, site, utcoffset], axis=1,
                keys=['CMAQ', 'datetime', 'Site_Code', 'utcoffset'])
dfs.index = dfs.datetime
r = dfs.groupby('Site_Code').resample('24H').mean().reset_index()
df = pd.merge(df, r, how='left', on=['Site_Code', 'datetime', 'utcoffset']).dropna(subset=['CMAQ'])
# Use .loc to avoid chained-assignment issues when flagging negative observations.
df.loc[df['Obs'] < 0, 'Obs'] = NaN
df.dropna(subset=['Obs'], inplace=True)
return df
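
# Sketch only (the names below are illustrative, not from the original code):
# the 'gauss' branch above calls resample_gauss with sigmas=r / 2., which in
# pyresample weights each neighbour by exp(-dist**2 / sigma**2) for neighbour
# distance dist.  Expressed as an explicit weight function that could be
# handed to kd_tree.resample_custom, it would look roughly like this:
import numpy as np


def example_gauss_weight(dist, sigma):
    """Gaussian distance weighting exp(-dist**2 / sigma**2); dist and sigma in metres."""
    return np.exp(-dist ** 2 / sigma ** 2)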