# Secure your code as it's written. Use Snyk Code to scan source code in minutes — no build needed — and fix issues immediately.
lats=np.ma.array(lat_out)
)
with warnings.catch_warnings():
warnings.simplefilter("ignore", UserWarning)
if len(data.shape) == 2 and data.shape[1] != 1:
output = []
# multiple depths
for d in range(0, data.shape[1]):
masked_lon_in.mask = masked_lat_in.mask = \
data[:, d].view(np.ma.MaskedArray).mask
input_def = pyresample.geometry.SwathDefinition(
lons=masked_lon_in,
lats=masked_lat_in
)
output.append(pyresample.kd_tree.resample_custom(
input_def, data[:, d], output_def,
radius_of_influence=float(radius),
neighbours=10,
weight_funcs=weight,
fill_value=None, nprocs=4
))
output = np.ma.array(output).transpose()
else:
masked_lon_in.mask = masked_lat_in.mask = \
var[:].view(np.ma.MaskedArray).mask
input_def = pyresample.geometry.SwathDefinition(
lons=masked_lon_in,
lats=masked_lat_in
)
output = pyresample.kd_tree.resample_custom(
mask_data = var[time[-1], miny:maxy, minx:maxx]
masked_lon.mask = masked_lat.mask = mask_data.view(
np.ma.MaskedArray).mask
orig_def = SwathDefinition(lons=masked_lon, lats=masked_lat)
target_def = SwathDefinition(lons=np.array([lon]),
lats=np.array([lat]))
radius = grid.interpolation_radius(lat, lon)
if depthall:
origshape = d.shape
d = d.reshape([d.shape[0], d.shape[1], -1])
wf = [lambda r: 1 / r ** 2] * d.shape[-1]
resampled = resample_custom(
orig_def, d, target_def,
radius_of_influence=radius,
neighbours=10,
weight_funcs=wf,
fill_value=None, nprocs=4)
if depthall:
resampled = resampled.reshape([origshape[2], origshape[3]])
d = resampled
def do_save(filename, data):
    """Pickle *data* to *filename* via numpy's ndarray.dump.

    Fix: the original body dumped the enclosing-scope variable ``d``
    and silently ignored the ``data`` parameter; dump the argument
    the caller actually passed.
    """
    data.dump(filename)
if not os.path.isdir(CACHE_DIR):
os.makedirs(CACHE_DIR)
dates = self.cmaq.dates[self.cmaq.indexdates]
# only interpolate to sites with latitude and longitude
df.dropna(subset=['Latitude', 'Longitude'], inplace=True)
vals, index = unique(df.Site_Code, return_index=True)
lats = df.Latitude.values[index]
lons = df.Longitude.values[index]
grid2 = geometry.GridDefinition(lons=vstack(lons), lats=vstack(lats))
sites = df.Site_Code.values[index]
utc = df.utcoffset.values[index]
vals = pd.Series(dtype=df.Obs.dtype)
date = pd.Series(dtype=df.datetime.dtype)
site = pd.Series(dtype=df.Site_Code.dtype)
utcoffset = pd.Series(dtype=df.utcoffset.dtype)
for i, j in enumerate(self.cmaq.indexdates):
if interp.lower() == 'idw':
val = kd_tree.resample_custom(grid1, cmaqvar[i, :, :].squeeze(), grid2, radius_of_influence=r,
fill_value=NaN, neighbours=n, weight_funcs=weight_func,
nprocs=2).squeeze()
elif interp.lower() == 'gauss':
val = kd_tree.resample_gauss(grid1, cmaqvar[i, :, :].squeeze(), grid2, radius_of_influence=r,
sigmas=r / 2., fill_value=NaN, neighbours=n, nprocs=2).squeeze()
else:
interp = 'nearest'
val = kd_tree.resample_nearest(grid1, cmaqvar[i, :, :].squeeze(), grid2, radius_of_influence=r,
fill_value=NaN, nprocs=2).squeeze()
vals = vals.append(pd.Series(val)).reset_index(drop=True)
date = date.append(pd.Series([self.cmaq.dates[j] for k in lons])).reset_index(drop=True)
site = site.append(pd.Series(sites)).reset_index(drop=True)
utcoffset = utcoffset.append(pd.Series(utc)).reset_index(drop=True)
dfs = pd.concat([vals, date, site, utcoffset], axis=1, keys=['CMAQ', 'datetime', 'Site_Code', 'utcoffset'])
dfs.index = dfs.datetime
r = dfs.groupby('Site_Code').resample('24H').mean().reset_index()
input_def, data[:, d], output_def,
radius_of_influence=float(radius),
neighbours=10,
weight_funcs=weight,
fill_value=None, nprocs=4
))
output = np.ma.array(output).transpose()
else:
masked_lon_in.mask = masked_lat_in.mask = \
var[:].view(np.ma.MaskedArray).mask
input_def = pyresample.geometry.SwathDefinition(
lons=masked_lon_in,
lats=masked_lat_in
)
output = pyresample.kd_tree.resample_custom(
input_def, data, output_def,
radius_of_influence=float(radius),
neighbours=10,
weight_funcs=weight,
fill_value=None, nprocs=4
)
if len(origshape) == 3:
output = output.reshape(origshape[1:])
return np.squeeze(output)
# define the lat lon points of the two parts.
new_grid = pr.geometry.GridDefinition(lons=new_grid_lon,
lats=new_grid_lat)
if mapping_method == 'nearest_neighbor':
data_latlon_projection = \
pr.kd_tree.resample_nearest(orig_grid, orig_field, new_grid,
radius_of_influence=radius_of_influence,
fill_value=None,
nprocs=nprocs_user)
elif mapping_method == 'bin_average':
wf = lambda r: 1
data_latlon_projection = \
pr.kd_tree.resample_custom(orig_grid, orig_field, new_grid,
radius_of_influence=radius_of_influence,
weight_funcs = wf,
fill_value=None,
nprocs=nprocs_user)
else:
raise ValueError('mapping_method must be nearest_neighbor or bin_average. \n'
'Found mapping_method = %s ' % mapping_method)
else:
raise ValueError('Number of lat and lon points to interpolate to must be > 0. \n'
'Found num_lats = %d, num lons = %d' % (num_lats,num_lons))
return new_grid_lon, new_grid_lat, data_latlon_projection
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
lon = self.cmaq.longitude
grid1 = geometry.GridDefinition(lons=lon, lats=lat)
vals = array([], dtype=cmaqvar.dtype)
date = array([], dtype='O')
site = array([], dtype=df.SCS.dtype)
print ' Interpolating using ' + interp + ' method'
for i, j in enumerate(dates):
con = df.datetime == j
lats = df[con].Latitude.values
lons = df[con].Longitude.values
grid2 = geometry.GridDefinition(lons=vstack(lons), lats=vstack(lats))
if interp.lower() == 'nearest':
val = kd_tree.resample_nearest(grid1, cmaqvar[i, :, :].squeeze(), grid2, radius_of_influence=r,
fill_value=NaN, nprocs=2).squeeze()
elif interp.lower() == 'idw':
val = kd_tree.resample_custom(grid1, cmaqvar[i, :, :].squeeze(), grid2, radius_of_influence=r,
fill_value=NaN, neighbours=n, weight_funcs=weight_func,
nprocs=2).squeeze()
elif interp.lower() == 'gauss':
val = kd_tree.resample_gauss(grid1, cmaqvar[i, :, :].squeeze(), grid2, radius_of_influence=r,
sigmas=r / 2., fill_value=NaN, neighbours=n, nprocs=2).squeeze()
vals = append(vals, val)
dd = empty(lons.shape[0], dtype=date.dtype)
dd[:] = j
date = append(date, dd)
site = append(site, df[con].SCS.values)
vals = pd.Series(vals)
date = pd.Series(date)
site = pd.Series(site)
dfs = concat([vals, date, site], axis=1, keys=['CMAQ', 'datetime', 'SCS'])
df = pd.merge(df, dfs, how='left', on=['SCS', 'datetime'])
input_def = SwathDefinition(lons=masked_lon, lats=masked_lat)
target_def = SwathDefinition(lons=out_lon, lats=out_lat)
if method == 'inv_square':
res = resample_custom(
input_def,
data,
target_def,
radius_of_influence=radius_of_influence,
neighbours=neighbours,
weight_funcs=lambda r: 1 / np.clip(r, 0.0625,
np.finfo(r.dtype).max) ** 2,
fill_value=None,
nprocs=nprocs)
elif method == 'bilinear':
res = resample_custom(
input_def,
data,
target_def,
radius_of_influence=radius_of_influence,
neighbours=4,
weight_funcs=lambda r: 1 / np.clip(r, 0.0625,
np.finfo(r.dtype).max),
fill_value=None,
nprocs=nprocs)
elif method == 'nn':
res = resample_nearest(
input_def,
data,
target_def,
radius_of_influence=radius_of_influence,
fill_value=None,
return pyresample.kd_tree.resample_gauss(input_def, data,
output_def, radius_of_influence=float(self.radius), sigmas=self.radius / 2, fill_value=None,
nprocs=8)
# Bilinear weighting
elif self.interp == "bilinear":
"""
Weight function used to determine the effect of surrounding points
on a given point
"""
def weight(r):
    """Inverse-distance (bilinear-style) weight for a distance array *r*.

    Distances are clamped to the representable range of r's float dtype
    so that a zero distance cannot produce a division-by-zero.
    """
    finfo = np.finfo(r.dtype)
    clamped = np.clip(r, finfo.eps, finfo.max)
    return 1. / clamped
return pyresample.kd_tree.resample_custom(input_def, data,
output_def, radius_of_influence=float(self.radius), neighbours=self.neighbours, fill_value=None,
weight_funcs=weight, nprocs=8)
# Inverse-square weighting
elif self.interp == "inverse":
"""
Weight function used to determine the effect of surrounding points
on a given point
"""
def weight(r):
    """Inverse-square-distance weight for a distance array *r*.

    Distances are clamped to the representable range of r's float dtype
    so that a zero distance cannot produce a division-by-zero.
    """
    finfo = np.finfo(r.dtype)
    clamped = np.clip(r, finfo.eps, finfo.max)
    return 1. / clamped ** 2
return pyresample.kd_tree.resample_custom(input_def, data,
output_def, radius_of_influence=float(self.radius), neighbours=self.neighbours, fill_value=None,