# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def get_membership_functions(filename):
    """Reads membership function parameters from wradlib-data file.

    Parameters
    ----------
    filename : str
        Filename of wradlib-data file

    Returns
    -------
    msf : :class:`numpy:numpy.ndarray`
        Array of membership functions with shape
        (hm-classes, observables, indep-ranges, 5)
    """
    gzip = util.import_optional('gzip')
    with gzip.open(filename, 'rb') as f:
        # first two header lines look like "name: value"
        nclass = int(f.readline().decode().split(':')[1].strip())
        nobs = int(f.readline().decode().split(':')[1].strip())
        with warnings.catch_warnings():
            # genfromtxt emits UserWarnings for skipped invalid rows;
            # those rows are expected separators here, so suppress them
            warnings.filterwarnings('ignore', category=UserWarning)
            data = np.genfromtxt(f, skip_header=10, autostrip=True,
                                 invalid_raise=False)
    # split the flat table per observable
    # (use floor division: exact integer index arithmetic)
    data = np.reshape(data, (nobs, data.shape[0] // nobs, data.shape[1]))
    # then split each observable's rows by hydrometeor class
    msf = np.reshape(data, (data.shape[0], nclass, data.shape[1] // nclass,
                            data.shape[2]))
    # reorder leading axes to (hm-classes, observables, ...)
    msf = np.swapaxes(msf, 0, 1)
    return msf
The algorithm is based on the paper of :cite:`Wang2009`.
Parameters
----------
phidp : :class:`numpy:numpy.ndarray`
array of shape (...,nr) with nr being the number of range bins
rho : :class:`numpy:numpy.ndarray`
array of same shape as ``phidp``
width : int
Width of the analysis window
copy : bool
Leaves original ``phidp`` array unchanged if set to True
(default: False)
"""
# Check whether fast Fortran implementation is available
speedup = util.import_optional("wradlib.speedup")
shape = phidp.shape
assert rho.shape == shape, "rho and phidp must have the same shape."
phidp = phidp.reshape((-1, shape[-1]))
if copy:
phidp = phidp.copy()
rho = rho.reshape((-1, shape[-1]))
gradphi = util.gradient_from_smoothed(phidp)
beams, rs = phidp.shape
# Compute the standard deviation within windows of 9 range bins
stdarr = np.zeros(phidp.shape, dtype=np.float32)
for r in range(rs - 9):
stdarr[..., r] = np.std(phidp[..., r:r + 9], -1)
def get_radolan_filehandle(fname):
    """Opens radolan file and returns file handle.

    Tries to open the file as gzip first; if the gzip probe fails, falls
    back to a plain binary file handle.

    Parameters
    ----------
    fname : str
        filename

    Returns
    -------
    f : object
        binary filehandle, positioned at the start of the file
    """
    gzip = util.import_optional('gzip')
    # open file handle: probe one byte to verify gzip compression
    f = None
    try:
        f = gzip.open(fname, 'rb')
        f.read(1)
    except IOError:
        # not gzip-compressed: close the dangling gzip handle (it was
        # leaked in the original implementation) before falling back
        if f is not None:
            f.close()
        f = open(fname, 'rb')
        f.read(1)
    # rewind file so the caller reads from the beginning
    f.seek(0, 0)
    return f
proj : osr spatial reference object
GDAL OSR Spatial Reference Object describing projection
Returns
-------
output : :class:`numpy:numpy.ndarray`
(num volume bins, 3)
Examples
--------
See :ref:`/notebooks/workflow/recipe2.ipynb`.
"""
# make sure that elevs is an array
elevs = np.array([elevs]).ravel()
# create polar grid
el, az, r = util.meshgrid_n(elevs, azimuths, ranges)
# get projected coordinates
coords = georef.spherical_to_proj(r, az, el, sitecoords, proj=proj)
coords = coords.reshape(-1, 3)
return coords
@util.deprecated(reproject_raster_dataset)
def resample_raster_dataset(src_ds, **kwargs):
"""Resample given dataset according to keyword arguments
.. versionadded:: 0.6.0
# function inspired from github project
# https://github.com/profLewis/geogg122
Parameters
----------
src_ds : gdal.Dataset
raster image with georeferencing (GeoTransform at least)
spacing : float
float or tuple of two floats
pixel spacing of resampled dataset, same unit as pixel coordinates
size : int
for longitude in range(max(lonmin, 0), lonmax+1):
georef = "S%02gE%03g" % (-latitude, longitude)
filelist.append(georef)
for latitude in range(max(0, latmin), latmax+1):
for longitude in range(lonmin, min(lonmax, 0)):
georef = "N%02gW%03g" % (latitude, -longitude)
filelist.append(georef)
for longitude in range(max(lonmin, 0), lonmax+1):
georef = "N%02gE%03g" % (latitude, longitude)
filelist.append(georef)
if version == 3:
filelist = ["%s.SRTMGL%s" % (f, resolution) for f in filelist]
filelist = ["%s.hgt.zip" % (f) for f in filelist]
wrl_data_path = util.get_wradlib_data_path()
srtm_path = os.path.join(wrl_data_path, "geo")
if not os.path.exists(srtm_path) and download is not None:
os.makedirs(srtm_path)
demlist = []
for filename in filelist:
path = os.path.join(srtm_path, filename)
if os.path.exists(path):
demlist.append(path)
continue
if download is not None:
download_srtm(filename, path, version, resolution, **download)
if os.path.exists(path):
demlist.append(path)
demlist = [gdal.Open(d) for d in demlist]
if not merge:
print("Could not create an array from argument .")
print("The following exception was raised:")
raise
assert (elevs.ndim == 1) and (elevs.dtype != np.dtype("object")), \
"Argument in wradlib.volcoords_from_polar must be a 1-D array."
# now: is there one azimuths array for all elevation angles
# or one for each?
try:
azimuths = np.array(azimuths)
except Exception:
print("Could not create an array from argument .")
print("The following exception was raised:")
raise
if len(azimuths) == len(elevs):
# are the items of arrays themselves?
isseq = [util.issequence(elem) for elem in azimuths]
assert not ((False in isseq) and (True in isseq)), \
"Argument contains both iterable " \
"and non-iterable items."
if True in isseq:
# we expect one azimuth array for each elevation angle
oneaz4all = False
# now: is there one ranges array for all elevation angles or one for each?
try:
ranges = np.array(ranges)
except Exception:
print("Could not create an array from argument .")
print("The following exception was raised:")
raise
if len(ranges) == len(elevs):
# are the items of arrays themselves?
isseq = [util.issequence(elem) for elem in ranges]