# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_declarative_barb_earth_relative():
    """Test making a contour plot overlaid with earth-relative wind barbs.

    The original docstring said "contour plot", but the function's purpose
    (per its name) is the barb plot built from the u/v wind components.
    """
    import numpy as np

    data = xr.open_dataset(get_test_data('NAM_test.nc', as_file_obj=False))

    # 300-hPa geopotential height contours, every 120 m.
    contour = ContourPlot()
    contour.data = data
    contour.field = 'Geopotential_height_isobaric'
    contour.level = 300 * units.hPa
    contour.linecolor = 'red'
    contour.linestyle = '-'
    contour.linewidth = 2
    contour.contours = np.arange(0, 20000, 120).tolist()

    # 300-hPa wind barbs, thinned to every 5th point in each direction.
    barb = BarbPlot()
    barb.data = data
    barb.level = 300 * units.hPa
    barb.time = datetime(2016, 10, 31, 12)
    barb.field = ['u-component_of_wind_isobaric', 'v-component_of_wind_isobaric']
    barb.skip = (5, 5)
def test_declarative_contour_options():
    """Test making a contour plot."""
    dataset = xr.open_dataset(get_test_data('narr_example.nc', as_file_obj=False))

    # Contoured 700-hPa temperature with explicit line styling and labels.
    temp_contour = ContourPlot()
    temp_contour.data = dataset
    temp_contour.field = 'Temperature'
    temp_contour.level = 700 * units.hPa
    temp_contour.contours = 30
    temp_contour.linewidth = 1
    temp_contour.linecolor = 'red'
    temp_contour.linestyle = 'dashed'
    temp_contour.clabels = True

    # Lambert-conformal CONUS map panel hosting the contour plot.
    map_panel = MapPanel()
    map_panel.area = 'us'
    map_panel.proj = 'lcc'
    map_panel.layers = ['coastline', 'borders', 'usstates']
    map_panel.plots = [temp_contour]
def test_declarative_sfc_obs():
    """Test making a surface observation plot."""
    # ``parse_dates`` alone handles the 'valid' column; ``infer_datetime_format``
    # was deprecated in pandas 2.0 (and has been a no-op since), so it is omitted.
    data = pd.read_csv(get_test_data('SFC_obs.csv', as_file_obj=False),
                       parse_dates=['valid'])

    # Temperature observations near 12 UTC 12 March 1993 (±15-minute window).
    obs = PlotObs()
    obs.data = data
    obs.time = datetime(1993, 3, 12, 12)
    obs.time_window = timedelta(minutes=15)
    obs.level = None
    obs.fields = ['tmpf']
    obs.color = ['black']

    # Panel for plot with Map features
    panel = MapPanel()
    panel.layout = (1, 1, 1)
    panel.projection = ccrs.PlateCarree()
    panel.area = 'in'
    panel.layers = ['states']
# NOTE(review): orphaned fragment — the enclosing test function's ``def`` line
# (and the definitions of ``method``, ``boundary_coords``, ``xp``, ``yp`` and
# ``z``) are not visible here; presumably the body of a parametrized
# interpolation test. Code left untouched; only comments added.
extra_kw = {}
# Method-specific keyword arguments for the deprecated ``interpolate`` call.
if method == 'cressman':
    extra_kw['search_radius'] = 200
    extra_kw['minimum_neighbors'] = 1
elif method == 'barnes':
    extra_kw['search_radius'] = 400
    extra_kw['minimum_neighbors'] = 1
    extra_kw['gamma'] = 1
# Optionally constrain the analysis to explicit grid boundaries.
if boundary_coords is not None:
    extra_kw['boundary_coords'] = boundary_coords
# ``interpolate`` is expected to emit a deprecation warning when called.
with pytest.warns(MetpyDeprecationWarning):
    _, _, img = interpolate(xp, yp, z, hres=10, interp_type=method, **extra_kw)
# Compare against the stored reference grid for this interpolation method.
with get_test_data('{0}_test.npz'.format(method)) as fobj:
    truth = np.load(fobj)['img']
assert_array_almost_equal(truth, img)
def test_grid():
    r"""Return grid locations used for tests in this file."""
    # Load the stored grid archive and pull out the x/y location arrays
    # while the file object is still open (npz access is lazy).
    with get_test_data('interpolation_test_grid.npz') as fobj:
        grid = np.load(fobj)
        xg, yg = grid['xg'], grid['yg']
        return xg, yg
# NOTE(review): orphaned fragment — the enclosing test function's ``def`` line
# (and the definitions of ``method``, ``xp``, ``yp`` and ``z``) are not visible
# here; ``test_grid`` is presumably injected as a pytest fixture value.
# Code left untouched; only comments added.
xg, yg = test_grid
extra_kw = {}
# Method-specific keyword arguments plus the matching reference-data file.
if method == 'cressman':
    extra_kw['r'] = 20
    extra_kw['min_neighbors'] = 1
    test_file = 'cressman_r20_mn1.npz'
elif method == 'barnes':
    extra_kw['r'] = 40
    extra_kw['kappa'] = 100
    test_file = 'barnes_r40_k100.npz'
# ``inverse_distance`` is expected to emit a deprecation warning when called.
with pytest.warns(MetpyDeprecationWarning):
    img = inverse_distance(xp, yp, z, xg, yg, kind=method, **extra_kw)
# Compare against the stored reference grid for this method.
with get_test_data(test_file) as fobj:
    truth = np.load(fobj)['img']
assert_array_almost_equal(truth, img)
def test_declarative_upa_obs():
    """Test making a full upperair observation plot."""
    upa_data = pd.read_csv(get_test_data('UPA_obs.csv', as_file_obj=False))

    # 500-hPa station plot at 00 UTC 14 March 1993: temperature, dewpoint,
    # and a 3-character height field, plus wind vectors; no point thinning.
    upa = PlotObs()
    upa.data = upa_data
    upa.time = datetime(1993, 3, 14, 0)
    upa.level = 500 * units.hPa
    upa.fields = ['temperature', 'dewpoint', 'height']
    upa.locations = ['NW', 'SW', 'NE']
    upa.formats = [None, None, lambda v: format(v, '.0f')[:3]]
    upa.vector_field = ('u_wind', 'v_wind')
    upa.reduce_points = 0

    # Panel for plot with Map features
    map_panel = MapPanel()
    map_panel.layout = (1, 1, 1)
    map_panel.area = (-124, -72, 20, 53)
    map_panel.projection = 'lcc'
def test_msg15():
    """Check proper decoding of message type 15."""
    level2 = Level2File(get_test_data('KTLX20130520_201643_V06.gz', as_file_obj=False))

    # The clutter filter map's first range bin should decode to a list,
    # and its timestamp should match the known value for this volume.
    clutter_map = level2.clutter_filter_map
    first_bin = clutter_map['data'][0][0]
    assert isinstance(first_bin, list)

    expected_time = datetime(2013, 5, 19, 0, 0, 0, 315000)
    assert clutter_map['datetime'] == expected_time
import xarray as xr
import metpy.calc as mpcalc
from metpy.cbook import get_test_data
from metpy.interpolate import cross_section
##############################
# **Getting the data**
#
# This example uses NARR reanalysis data for 18 UTC 04 April 1987 from NCEI
# (https://www.ncdc.noaa.gov/data-access/model-data).
#
# We use MetPy's CF parsing to get the data ready for use, and squeeze down the size-one time
# dimension.
# NOTE(review): top-level example-script fragment; ``data`` and ``cross`` are
# module-level names that later code (possibly outside this view) may use.
data = xr.open_dataset(get_test_data('narr_example.nc', False))
data = data.metpy.parse_cf().squeeze()
print(data)
##############################
# Define start and end points:
# (latitude, longitude) pairs — presumably degrees north/east; confirm against
# the dataset's coordinate conventions.
start = (37.0, -105.0)
end = (35.5, -65.0)
##############################
# Get the cross section, and convert lat/lon to supplementary coordinates:
cross = cross_section(data, start, end).set_coords(('lat', 'lon'))
print(cross)
##############################
##############################
def station_test_data(variable_names, proj_from=None, proj_to=None):
    """Load station observations from the test data file.

    Reads ``station_data.txt``, decodes station IDs, and extracts the
    requested ``variable_names`` columns along with longitude/latitude.

    NOTE(review): this definition is truncated in the visible source —
    ``proj_from``/``proj_to`` are not used in the lines shown and ``lon``,
    ``lat``, ``value`` are assigned but not yet returned; the remainder
    (presumably an optional projection step and the return) lies past
    this view. Code left untouched.
    """
    with get_test_data('station_data.txt') as f:
        # Structured dtype: station id, position, and the observed fields.
        all_data = np.loadtxt(f, skiprows=1, delimiter=',',
                              usecols=(1, 2, 3, 4, 5, 6, 7, 17, 18, 19),
                              dtype=np.dtype([('stid', '3S'), ('lat', 'f'), ('lon', 'f'),
                                              ('slp', 'f'), ('air_temperature', 'f'),
                                              ('cloud_fraction', 'f'), ('dewpoint', 'f'),
                                              ('weather', '16S'),
                                              ('wind_dir', 'f'), ('wind_speed', 'f')]))

    # Decode byte station IDs to text.
    all_stids = [s.decode('ascii') for s in all_data['stid']]

    # Rebuild the record array one station row at a time (preserves order).
    data = np.concatenate([all_data[all_stids.index(site)].reshape(1, ) for site in all_stids])

    value = data[variable_names]
    lon = data['lon']
    lat = data['lat']