def test_nearest_swath_segments(self):
    data = numpy.fromfunction(lambda y, x: y * x, (50, 10))
    data = numpy.dstack(3 * (data,))
    lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
    lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
    swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
    swath_con = image.ImageContainerNearest(data, swath_def, 50000, segments=2)
    area_con = swath_con.resample(self.area_def)
    res = area_con.image_data
    cross_sum = res.sum()
    expected = 3 * 15874591.0
    self.assertEqual(cross_sum, expected,
                     msg='ImageContainer swath segments resampling nearest failed')
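
# A minimal standalone sketch of the same pattern outside the test harness.
# The target area below is illustrative only (it is NOT the self.area_def the
# test uses); any AreaDefinition overlapping the swath would do.
import numpy as np
from pyresample import geometry, image

lons = np.fromfunction(lambda y, x: 3 + x, (50, 10))
lats = np.fromfunction(lambda y, x: 75 - y, (50, 10))
data = np.fromfunction(lambda y, x: y * x, (50, 10))
swath_def = geometry.SwathDefinition(lons=lons, lats=lats)

# Hypothetical 400x400 LAEA target grid covering part of the swath
target_def = geometry.AreaDefinition('laea_otf', 'On-the-fly LAEA grid',
                                     'laea_otf',
                                     {'proj': 'laea', 'lat_0': '70',
                                      'lon_0': '7.5', 'ellps': 'WGS84'},
                                     400, 400,
                                     (-600000., -600000., 600000., 600000.))

# segments=2 makes pyresample query the kd-tree in two slices of the target grid
swath_con = image.ImageContainerNearest(data, swath_def, 50000, segments=2)
result = swath_con.resample(target_def).image_data   # shape (400, 400)
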
    if lons.shape != self.shape or lats.shape != self.shape:
        # Data on tiepoint grid:
        interpolate = True
        if not tiepoint_grid:
            errmsg = ("Interpolation needed but insufficient "
                      "information on the tiepoint grid")
            raise IOError(errmsg)
    else:
        # Geolocation available on the full grid:
        # We need to mask out nodata (VIIRS Bow-tie deletion...)
        # We do it for all instruments, checking only against the nodata
        lons = np.ma.masked_array(lons, nodata_mask)
        lats = np.ma.masked_array(lats, nodata_mask)
        self.area = geometry.SwathDefinition(lons=lons, lats=lats)
elif hasattr(self, "region") and self.region.data["area_extent"].any():
    region = self.region.data
    proj_dict = dict([elt.split('=')
                      for elt in region["pcs_def"].split(',')])
    self.area = geometry.AreaDefinition(region["id"],
                                        region["name"],
                                        region["proj_id"],
                                        proj_dict,
                                        region["xsize"],
                                        region["ysize"],
                                        region["area_extent"])

if interpolate:
    from geotiepoints import SatelliteInterpolator

# Create kd-tree
try:
    resample_kdtree = _create_resample_kdtree(source_lons, source_lats,
                                              valid_input_index,
                                              nprocs=nprocs)
except EmptyResult:
    # Handle if all input data is reduced away
    valid_output_index, index_array, distance_array = \
        _create_empty_info(source_geo_def, target_geo_def, neighbours)
    return (valid_input_index, valid_output_index, index_array,
            distance_array)

if segments > 1:
    # Iterate through segments
    for i, target_slice in enumerate(geometry._get_slice(segments,
                                                         target_geo_def.shape)):
        # Query on slice of target coordinates
        next_voi, next_ia, next_da = \
            _query_resample_kdtree(resample_kdtree, source_geo_def,
                                   target_geo_def,
                                   radius_of_influence, target_slice,
                                   neighbours=neighbours,
                                   epsilon=epsilon,
                                   reduce_data=reduce_data,
                                   nprocs=nprocs)

        # Build result iteratively
        if i == 0:
            # First iteration
            valid_output_index = next_voi
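
# The segment loop above appears to be internal to
# pyresample.kd_tree.get_neighbour_info().  A hedged sketch of how that public
# API is normally used: precompute the neighbour info once, then apply it to
# any number of datasets on the same swath.  The swath and area below are
# illustrative only.
import numpy as np
from pyresample import geometry, kd_tree

lons = np.fromfunction(lambda y, x: 3 + 0.1 * x, (500, 100))
lats = np.fromfunction(lambda y, x: 75 - 0.1 * y, (500, 100))
swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
area_def = geometry.AreaDefinition('laea_otf', 'On-the-fly LAEA grid',
                                   'laea_otf',
                                   {'proj': 'laea', 'lat_0': '60',
                                    'lon_0': '7.5', 'ellps': 'WGS84'},
                                   400, 400,
                                   (-600000., -600000., 600000., 600000.))

valid_input_index, valid_output_index, index_array, distance_array = \
    kd_tree.get_neighbour_info(swath_def, area_def, 50000,
                               neighbours=1, segments=2)

# Re-use the same neighbour info for any dataset defined on the same swath
data = np.fromfunction(lambda y, x: y * x, (500, 100))
result = kd_tree.get_sample_from_neighbour_info('nn', area_def.shape, data,
                                                valid_input_index,
                                                valid_output_index,
                                                index_array)
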
# Get the orbit number
if not satscene.orbit:
    mda = self.data.attributes()["CoreMetadata.0"]
    orbit_idx = mda.index("ORBITNUMBER")
    satscene.orbit = int(mda[orbit_idx + 111:orbit_idx + 116])

# Get the geolocation
# if resolution != 1000:
#     logger.warning("Cannot load geolocation at this resolution (yet).")
#     return

for band_name in loaded_bands:
    lon, lat = self.get_lonlat(
        satscene[band_name].resolution, satscene.time_slot, cores)
    area = geometry.SwathDefinition(lons=lon, lats=lat)
    satscene[band_name].area = area

# Trimming out dead sensor lines (detectors) on aqua:
# (in addition channel 21 is noisy)
if satscene.satname == "aqua":
    for band in ["6", "27", "36"]:
        if not satscene[band].is_loaded() or satscene[band].data.mask.all():
            continue
        width = satscene[band].data.shape[1]
        height = satscene[band].data.shape[0]
        indices = satscene[band].data.mask.sum(1) < width
        if indices.sum() == height:
            continue
        satscene[band] = satscene[band].data[indices, :]
        satscene[band].area = geometry.SwathDefinition(
            lons=satscene[band].area.lons[indices, :],
            lats=satscene[band].area.lats[indices, :])

def interp_to_obs_new(var, df, lat, lon, radius=12000.):
    from numpy import NaN, vstack
    from pyresample import geometry, image
    from pandas import to_timedelta, DataFrame
    # define CMAQ pyresample grid (source)
    grid1 = geometry.GridDefinition(lons=lon, lats=lat)
    # get unique sites from df
    dfn = df.drop_duplicates(subset=['Latitude', 'Longitude'])
    # define site grid (target)
    lats = dfn.Latitude.values
    lons = dfn.Longitude.values
    grid2 = geometry.GridDefinition(lons=vstack(lons), lats=vstack(lats))
    # create image container
    i = image.ImageContainerNearest(var.transpose('ROW', 'COL', 'TSTEP').values,
                                    grid1, radius_of_influence=radius,
                                    fill_value=NaN)
    # resample
    ii = i.resample(grid2).image_data.squeeze()
    # recombine data
    e = DataFrame(ii, index=dfn.SCS, columns=var.TSTEP.values)
    w = e.stack().reset_index().rename(columns={'level_1': 'datetime', 0: 'CMAQ'})
    w = w.merge(dfn.drop(['datetime', 'datetime_local', 'Obs'], axis=1),
                on='SCS', how='left')
    w = w.merge(df[['datetime', 'SCS', 'Obs']], on=['SCS', 'datetime'], how='left')
    # calculate datetime local
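
# A self-contained sketch of the station-sampling trick used above: the point
# locations are wrapped into an (N, 1) GridDefinition (vstack turns a 1-D array
# into a column of shape (N, 1)) so that nearest-neighbour image resampling can
# pick the model value at each site.  All values below are made up for
# illustration.
import numpy as np
from pyresample import geometry, image

field = np.arange(40.0 * 60.0).reshape(40, 60)              # fake gridded field
grid_lons, grid_lats = np.meshgrid(np.linspace(-10., 10., 60),
                                   np.linspace(35., 55., 40))
source_grid = geometry.GridDefinition(lons=grid_lons, lats=grid_lats)

site_lons = np.array([-5.2, 0.0, 7.8])
site_lats = np.array([48.9, 51.5, 40.4])
target_sites = geometry.GridDefinition(lons=np.vstack(site_lons),
                                       lats=np.vstack(site_lats))

container = image.ImageContainerNearest(field, source_grid,
                                        radius_of_influence=12000.,
                                        fill_value=np.nan)
site_values = container.resample(target_sites).image_data.squeeze()   # shape (3,)
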
cols_full = np.arange(shape[1])
rows_full = np.arange(shape[0])

satint = SatelliteInterpolator((lons, lats),
                               (row_indices,
                                column_indices),
                               (rows_full, cols_full))
# satint.fill_borders("y", "x")
lons, lats = satint.interpolate()

try:
    from pyresample import geometry
    lons = np.ma.masked_array(lons, nodata_mask)
    lats = np.ma.masked_array(lats, nodata_mask)
    area = geometry.SwathDefinition(lons=lons,
                                    lats=lats)
except ImportError:
    area = None

for chn in read_external_geo.values():
    if area:
        chn.area = area
    else:
        chn.lat = lats
        chn.lon = lons

LOG.info("Loading PPS parameters done.")
return
def _query_resample_kdtree(resample_kdtree,
                           source_geo_def,
                           target_geo_def,
                           radius_of_influence,
                           data_slice,
                           neighbours=8,
                           epsilon=0,
                           reduce_data=True,
                           nprocs=1):
    """Query kd-tree on slice of target coordinates"""

    # Check validity of input
    if not isinstance(target_geo_def, geometry.BaseDefinition):
        raise TypeError('target_geo_def must be of geometry type')
    elif not isinstance(radius_of_influence, (long, int, float)):
        raise TypeError('radius_of_influence must be number')
    elif not isinstance(neighbours, int):
        raise TypeError('neighbours must be integer')
    elif not isinstance(epsilon, (long, int, float)):
        raise TypeError('epsilon must be number')

    # Get sliced target coordinates
    target_lons, target_lats = target_geo_def.get_lonlats(nprocs=nprocs,
                                                          data_slice=data_slice,
                                                          dtype=source_geo_def.dtype)

    # Find indices of reduced target coordinates
    valid_output_index = _get_valid_output_index(source_geo_def,
                                                 target_geo_def,
                                                 target_lons.ravel(),

# box-average kernel normalised by the window area
kernel = (np.ones((average_window, average_window), dtype=np.float)
          / (average_window * average_window))

# do convolution
data = ndi.filters.correlate(ch.data.astype(np.float), kernel,
                             mode='nearest')

# downscale
data = data[1::downscaling_factor, 1::downscaling_factor]

# New area, and correct for integer truncation.
p_size_x, p_size_y = (ch.area.pixel_size_x * downscaling_factor,
                      ch.area.pixel_size_y * downscaling_factor)
area_extent = (ch.area.area_extent[0],
               ch.area.area_extent[1],
               ch.area.area_extent[0] + data.shape[1] * p_size_x,
               ch.area.area_extent[1] + data.shape[0] * p_size_y)

area = geometry.AreaDefinition(self._data_holder.satname +
                               self._data_holder.instrument_name +
                               str(area_extent) +
                               str(data.shape),
                               "On-the-fly area",
                               ch.area.proj_id, ch.area.proj_dict,
                               data.shape[1], data.shape[0],
                               area_extent)

return GeoImage(data, area, self.time_slot,
                fill_value=(0,), mode='L')
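
# Illustrative, self-contained version of the same average-then-subsample
# downscaling shown above; the function name and defaults here are my own,
# not mpop's API.
import numpy as np
from scipy import ndimage as ndi

def box_downscale(arr, factor, window=None):
    """Box-average ``arr`` over ``window`` pixels, then keep every ``factor``-th pixel."""
    window = window or factor
    kernel = np.ones((window, window), dtype=float) / (window * window)
    smoothed = ndi.correlate(arr.astype(float), kernel, mode='nearest')
    return smoothed[1::factor, 1::factor]

small = box_downscale(np.arange(100.0).reshape(10, 10), 2)   # shape (5, 5)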