Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def _get_metadata(self):
    """Download the dataset's metadata catalog and cache its contents.

    Sets ``self.metadata`` from the remote ``dataset.xml`` catalog and
    ``self.variables`` to the set of base variable names (text before the
    first ``'/'`` of each metadata variable key), then loads the station
    list via ``self._get_stations()``.
    """
    dataset_catalog = TDSCatalog(self.url_path('dataset.xml'))
    self.metadata = dataset_catalog.metadata
    # Keep only the portion of each key before the first '/'; duplicates
    # collapse because we collect into a set.
    names = set()
    for key in self.metadata['variables']:
        names.add(key.split('/')[0])
    self.variables = names
    self._get_stations()
def connect_to_catalog(self):
    """Connect to the THREDDS catalog and resolve this model's catalog.

    Reads ``self.catalog_url``, ``self.model_type`` and ``self.model_name``;
    sets ``self.catalog``, ``self.fm_models``, ``self.fm_models_list`` and
    ``self.model``.

    Raises
    ------
    ParseError
        If the named model cannot be found in the forecast-model catalog.
    HTTPError
        If the model catalog cannot be fetched after one retry.
    """
    self.catalog = TDSCatalog(self.catalog_url)
    self.fm_models = TDSCatalog(
        self.catalog.catalog_refs[self.model_type].href)
    self.fm_models_list = sorted(self.fm_models.catalog_refs)
    try:
        model_url = self.fm_models.catalog_refs[self.model_name].href
    except (KeyError, ParseError):
        # BUG FIX: a missing model name raises KeyError from the mapping
        # lookup, which the original ParseError-only handler never caught;
        # translate both into the friendly ParseError callers expect.
        raise ParseError(self.model_name + ' model may be unavailable.')
    try:
        self.model = TDSCatalog(model_url)
    except HTTPError:
        # Retry once in case of a transient server error before giving up.
        try:
            self.model = TDSCatalog(model_url)
        except HTTPError:
            raise HTTPError(self.model_name + ' model may be unavailable.')
def connect_to_catalog(self):
    """Connect to the THREDDS catalog and resolve this model's catalog.

    Reads ``self.catalog_url``, ``self.model_type`` and ``self.model_name``;
    sets ``self.catalog``, ``self.fm_models``, ``self.fm_models_list`` and
    ``self.model``.

    Raises
    ------
    ParseError
        If the named model cannot be found in the forecast-model catalog.
    HTTPError
        If the model catalog cannot be fetched after one retry.
    """
    self.catalog = TDSCatalog(self.catalog_url)
    self.fm_models = TDSCatalog(
        self.catalog.catalog_refs[self.model_type].href)
    self.fm_models_list = sorted(self.fm_models.catalog_refs)
    try:
        model_url = self.fm_models.catalog_refs[self.model_name].href
    except (KeyError, ParseError):
        # BUG FIX: a missing model name raises KeyError from the mapping
        # lookup, which the original ParseError-only handler never caught;
        # translate both into the friendly ParseError callers expect.
        raise ParseError(self.model_name + ' model may be unavailable.')
    try:
        self.model = TDSCatalog(model_url)
    except HTTPError:
        # Retry once in case of a transient server error before giving up.
        try:
            self.model = TDSCatalog(model_url)
        except HTTPError:
            raise HTTPError(self.model_name + ' model may be unavailable.')
from datetime import datetime, timedelta

import matplotlib.pyplot as plt
import metpy.calc
from metpy.units import units
from netCDF4 import num2date
import numpy as np
from siphon.catalog import TDSCatalog

###########################
# Getting the data
#
# In this example, the latest GFS forecast data from the National Centers for
# Environmental Information (https://nomads.ncdc.noaa.gov) will be used, courtesy of the
# University Corporation for Atmospheric Research Thredds Data Server.

# Latest GFS Dataset
cat = TDSCatalog('http://thredds.ucar.edu/thredds/catalog/grib/'
                 'NCEP/GFS/Global_0p5deg/catalog.xml')
ncss = cat.latest.subset()

# Find the start of the model run and define time range.
# BUG FIX: `datetime` and `timedelta` were used below but never imported,
# which raised NameError at runtime; the import is now at the top.
start_time = ncss.metadata.time_span['begin']
start = datetime.strptime(start_time, '%Y-%m-%dT%H:%M:%Sz')
end = start + timedelta(hours=9)

# Query for Latest GFS Run: a 9-hour window of NetCDF-4 data for the four
# isobaric fields, with lon/lat coordinate variables attached.
gfsdata = ncss.query().time_range(start, end).accept('netcdf4')
gfsdata.variables('Temperature_isobaric',
                  'u-component_of_wind_isobaric',
                  'v-component_of_wind_isobaric',
                  'Relative_humidity_isobaric').add_lonlat()
# This is currently a placeholder for a better example
from __future__ import print_function
from siphon.catalog import TDSCatalog
from siphon.http_util import session_manager
###########################################
# Point at the top-level Unidata THREDDS catalog and list the names of its
# child catalog references.
cat = TDSCatalog('http://thredds.ucar.edu/thredds/catalog.xml')
print(list(cat.catalog_refs))
###########################################
# Basic HTTP authentication can also be used by using the HTTP session manager
# and setting some default options for HTTP sessions
# NOTE(review): placeholder credentials — replace with real RDA credentials
# before running; the request below will otherwise fail to authenticate.
session_manager.set_session_options(auth=('username', 'password'))
cat = TDSCatalog('https://rda.ucar.edu/thredds/catalog.xml')
==========
Use Siphon to query the NetCDF Subset Service (NCSS).
"""
from datetime import datetime
import matplotlib.pyplot as plt
import numpy as np
from siphon.catalog import TDSCatalog
###########################################
# First we construct a TDSCatalog instance pointing to our dataset of interest, in
# this case TDS' "Best" virtual dataset for the GFS global 0.5 degree collection of
# GRIB files. We see this catalog contains a single dataset.
best_gfs = TDSCatalog('http://thredds.ucar.edu/thredds/catalog/grib/NCEP/GFS/'
'Global_0p5deg/catalog.xml?dataset=grib/NCEP/GFS/Global_0p5deg/Best')
# Show the dataset mapping in this catalog (expected to hold a single entry).
print(best_gfs.datasets)
###########################################
# We pull out this dataset and get the NCSS access point
# NOTE(review): integer indexing of `datasets` relies on siphon's indexable
# catalog mapping — confirm the installed siphon version supports it.
best_ds = best_gfs.datasets[0]
ncss = best_ds.subset()
###########################################
# We can then use the `ncss` object to create a new query object, which
# facilitates asking for data from the server.
query = ncss.query()
###########################################
# We construct a query asking for data corresponding to latitude 40N and longitude 105W,
# for the current time. We also ask for NetCDF version 4 data, for the variables
def connect_to_catalog(self):
    """Connect to the THREDDS catalog, resolve this model's catalog, and
    prepare its dataset list.

    Reads ``self.catalog_url``, ``self.model_type`` and ``self.model_name``;
    sets ``self.catalog``, ``self.fm_models``, ``self.fm_models_list``,
    ``self.model``, ``self.datasets_list`` and ``self.connected``, and
    selects a dataset via ``self.set_dataset()``.

    Raises
    ------
    ParseError
        If the named model cannot be found in the forecast-model catalog.
    HTTPError
        If the model catalog cannot be fetched after one retry.
    """
    self.catalog = TDSCatalog(self.catalog_url)
    self.fm_models = TDSCatalog(
        self.catalog.catalog_refs[self.model_type].href)
    self.fm_models_list = sorted(self.fm_models.catalog_refs)
    try:
        model_url = self.fm_models.catalog_refs[self.model_name].href
    except (KeyError, ParseError):
        # BUG FIX: a missing model name raises KeyError from the mapping
        # lookup, which the original ParseError-only handler never caught;
        # translate both into the friendly ParseError callers expect.
        raise ParseError(self.model_name + ' model may be unavailable.')
    try:
        self.model = TDSCatalog(model_url)
    except HTTPError:
        # Retry once in case of a transient server error before giving up.
        try:
            self.model = TDSCatalog(model_url)
        except HTTPError:
            raise HTTPError(self.model_name + ' model may be unavailable.')
    self.datasets_list = list(self.model.datasets.keys())
    self.set_dataset()
    # Only mark connected after every remote catalog has been resolved.
    self.connected = True
from datetime import datetime
import cartopy.crs as ccrs
import cartopy.feature as cfeature
import matplotlib.pyplot as plt
from netCDF4 import num2date
import numpy as np
from siphon.catalog import TDSCatalog
###########################################
# First we construct a `TDSCatalog` instance pointing to our dataset of interest, in
# this case TDS' "Best" virtual dataset for the GFS global 0.25 degree collection of
# GRIB files. This will give us a good resolution for our map. This catalog contains a
# single dataset.
best_gfs = TDSCatalog('http://thredds.ucar.edu/thredds/catalog/grib/NCEP/GFS/'
'Global_0p25deg/catalog.xml?dataset=grib/NCEP/GFS/Global_0p25deg/Best')
# List the dataset names in this catalog (expected to hold a single entry).
print(list(best_gfs.datasets))
###########################################
# We pull out this dataset and get the NCSS access point
# NOTE(review): integer indexing of `datasets` relies on siphon's indexable
# catalog mapping — confirm the installed siphon version supports it.
best_ds = best_gfs.datasets[0]
ncss = best_ds.subset()
###########################################
# We can then use the `ncss` object to create a new query object, which
# facilitates asking for data from the server.
query = ncss.query()
###########################################
# We construct a query asking for data corresponding to a latitude and longitude box where 43
# lat is the northern extent, 35 lat is the southern extent, -111 long is the western extent
# Import appropriate libraries
from datetime import datetime, timedelta
from matplotlib.dates import AutoDateLocator, DateFormatter
import matplotlib.pyplot as plt
import metpy.calc as mpcalc
from metpy.units import units
from siphon.catalog import TDSCatalog
from siphon.ncss import NCSS
########################################
# Begin Data Ingest
# -----------------
# Request METAR data from TDS
metar = TDSCatalog('http://thredds.ucar.edu/thredds/catalog/nws/'
'metar/ncdecoded/catalog.xml')
# Take the first dataset in the catalog and show its available access URLs.
dataset = list(metar.datasets.values())[0]
print(list(dataset.access_urls))
########################################
# What variables are available in dataset?
# Access netcdf subset and use siphon to request data
# NOTE(review): constructing NCSS directly from the access URL is the older
# siphon interface (`dataset.subset()` is the newer one) — confirm against
# the installed siphon version.
ncss_url = dataset.access_urls['NetcdfSubset']
ncss = NCSS(ncss_url)
print(ncss.variables)
########################################
# Set query to get desired data from Thredds server
# get current date and time
Parameters
----------
catalog_url : str
The URL of a top level data catalog
access_method : str
    desired data access method (e.g. "OPENDAP", "NetcdfSubset", "WMS", etc.)
Returns
-------
access_url : str
Data access URL to be used to access the latest data available from a
given catalog using the specified `access_method`. Typically a single string,
but not always.
"""
return TDSCatalog(catalog_url).latest.access_urls[access_method]