# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_getyrdoy_leap_year(self):
    """Test date to (year, day-of-year) conversion for a leap year.

    Dec 31 of a leap year (2008) must map to day-of-year 366.
    """
    # `pandas.datetime` was only an alias of `datetime.datetime` and was
    # removed in pandas 1.0 -- use the stdlib class directly.
    from datetime import datetime

    date = datetime(2008, 12, 31)
    yr, doy = pysat.utils.getyrdoy(date)
    # Scalar boolean comparisons: logical `and`, not bitwise `&` (which
    # binds tighter than `==` and forced the original's extra parens).
    assert yr == 2008 and doy == 366
# --- Fragment: interior of a seasonal-averaging routine. `iterator`,
# `data_label`, `by_orbit`, `by_file`, and `mean_val` are defined above
# the visible span -- TODO confirm against the full source.
# Iterate over the season (per-day, per-orbit, or per-file Instrument
# views) and record the mean of `data_label` for each step.
for inst in iterator:
    if not inst.data.empty:
        # Compute mean using pandas functions and store.
        # Data could be an image, or lower dimension; account for 2D
        # and lower via computational_form below.
        data = inst[data_label]
        # Drop missing samples so the mean is over valid values only.
        data.dropna(inplace=True)
        if by_orbit or by_file:
            # Orbit/file iteration: key the result by the first
            # timestamp of the loaded chunk.
            date = inst.data.index[0]
        else:
            # Daily iteration: the Instrument's own date is the key.
            date = inst.date
        # Perform average; computational_form presumably flattens
        # higher-dimensional data into a pandas object -- verify.
        mean_val[date] = \
            pysat.utils.computational_form(data).mean(axis=0,
                                                      skipna=True)
# Release the iterator (and the Instrument data it holds) before return.
del iterator
return mean_val
# --- Fragment: interior of Instrument.download. `date_array`, `freq`,
# `user`, `password`, and `**kwargs` come from the enclosing signature
# (not visible here).
# NOTE(review): upstream pysat wraps this yesterday/tomorrow defaulting
# in an `if start is None and stop is None:` guard -- confirm the
# surrounding context before assuming it always runs.
# Defaults are computed here, not at import: if an Instrument object
# persists longer than a day, module-level defaults would go stale.
print('Downloading the most recent data by default ',
      '(yesterday through tomorrow).')
start = self.yesterday()
stop = self.tomorrow()
print('Downloading data to: ', self.files.data_path)
if date_array is None:
    # Create the range of dates to download data for; filter inputs
    # down to whole days first.
    start = self._filter_datetime_input(start)
    stop = self._filter_datetime_input(stop)
    date_array = utils.time.season_date_range(start, stop, freq=freq)
# Dispatch to the instrument-specific download routine, forwarding
# credentials only when a user was supplied.
if user is None:
    self._download_rtn(date_array,
                       tag=self.tag,
                       sat_id=self.sat_id,
                       data_path=self.files.data_path,
                       **kwargs)
else:
    self._download_rtn(date_array,
                       tag=self.tag,
                       sat_id=self.sat_id,
                       data_path=self.files.data_path,
                       user=user,
                       password=password, **kwargs)
# Refresh the known file date range now that new files may exist.
first_date = self.files.start_date
    DataFrame while metadata is a pysat.Meta instance.
    Note
    ----
    Any additional keyword arguments passed to pysat.Instrument
    upon instantiation are passed along to this routine.
    Examples
    --------
    ::
    inst = pysat.Instrument('icon', 'euv', sat_id='a', tag='level_2')
    inst.load(2019,1)
    """
# Delegate to the generic netCDF4 loader, mapping this instrument's
# CDF-style attribute names onto pysat's metadata labels.
return pysat.utils.load_netcdf4(fnames, epoch_name='Epoch',
                                units_label='Units',
                                name_label='Long_Name',
                                notes_label='Var_Notes',
                                desc_label='CatDesc',
                                plot_label='FieldNam',
                                axis_label='LablAxis',
                                scale_label='ScaleTyp',
                                min_label='ValidMin',
                                max_label='ValidMax',
                                fill_label='FillVal')
# --- Fragment: interior of the F10.7 combine routine. `good_times`,
# `fill_val`, `f107_times`, `f107_values`, `notes`, `start`, and `stop`
# are defined above the visible span -- TODO confirm.
# Keep only forecast values that are not the fill value.
good_vals = forecast_inst['f107'][good_times] != fill_val
# Save desired data and cycle time.
f107_times.extend(list(forecast_inst.index[good_times][good_vals]))
f107_values.extend(list(forecast_inst['f107'][good_times][good_vals]))
# Advance the cursor one day past the last accepted sample.
itime = f107_times[-1] + pds.DateOffset(days=1)
notes += "{:})".format(itime.date())
inst_flag = None
# NOTE(review): `inst_flag` was just set to None, so this branch can
# never execute -- dead code or a fragment artifact; verify against the
# full source.
if inst_flag is not None:
    notes += "{:})".format(itime.date())
# Determine if the beginning or end of the time series needs to be padded.
freq = pysat.utils.time.calc_freq(f107_times)
date_range = pds.date_range(start=start, end=stop-pds.DateOffset(days=1),
                            freq=freq)
if date_range[0] < f107_times[0]:
    # Extend the time and value arrays from their beginning with fill
    # values. The reverse/extend/reverse dance prepends in place
    # without building a fresh list object.
    itime = abs(date_range - f107_times[0]).argmin()
    f107_times.reverse()
    f107_values.reverse()
    extend_times = list(date_range[:itime])
    extend_times.reverse()
    f107_times.extend(extend_times)
    f107_values.extend([fill_val for kk in extend_times])
    f107_times.reverse()
    f107_values.reverse()
# --- Fragment: interior of the JRO ISR beam-location routine.
# `good_dir` (valid beam direction numbers) and `kk` come from the loop
# above the visible span; the final statement below is cut off at the
# end of this fragment.
print("WARNING: unknown direction number [{:}]".format(kk))
# Calculate the geodetic latitude and longitude for each direction.
if len(good_dir) == 0:
    raise ValueError("No matching azimuth and elevation data included")
for dd in good_dir:
    # Format the per-beam data variable keys.
    az_key = 'azdir{:d}'.format(dd)
    el_key = 'eldir{:d}'.format(dd)
    lat_key = 'gdlat{:d}'.format(dd)
    lon_key = 'gdlon{:d}'.format(dd)
    # JRO is located 520 m (0.52 km) above sea level
    # (jro.igp.gob.pe./english/). Altitude has already been calculated.
    gdaltr = np.ones(shape=self['gdlonr'].shape) * 0.52
    gdlat, gdlon, _ = utils.local_horizontal_to_global_geo(self[az_key],
                                                           self[el_key],
                                                           self['range'],
                                                           self['gdlatr'],
                                                           self['gdlonr'],
                                                           gdaltr,
                                                           geodetic=True)
    # Assign under literal placeholder names first, then rename: assign()
    # keyword arguments cannot be dynamic, so this two-step ensures the
    # number of coordinates matches the number of data dimensions.
    self.data = self.data.assign(lat_key=gdlat, lon_key=gdlon)
    # NOTE(review): xarray removed the `inplace` keyword from
    # Dataset.rename (deprecated 0.11, removed 0.12) -- confirm the
    # pinned xarray version supports this call.
    self.data.rename({"lat_key": lat_key, "lon_key": lon_key},
                     inplace=True)
    # Add metadata for the new data values (statement continues past the
    # end of this fragment).
    bm_label = "Beam {:d} ".format(dd)
    self.meta[lat_key] = {self.meta.units_label: 'degrees',