# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# create an artificial satellite data set
# NOTE(review): fragment of a pysat test-instrument load routine; the
# enclosing `def` (supplying `fnames`, `os`, `np`, `pysat`) is outside this
# view and the original indentation has been stripped.
# filename assumed to start with 'YYYY-MM-DD' -- TODO confirm against the
# matching list_files routine
parts = os.path.split(fnames[0])[-1].split('-')
yr = int(parts[0])
month = int(parts[1])
day = int(parts[2][0:2])
date = pysat.datetime(yr, month, day)
# scalar divisor below used to reduce the number of time samples
# covered by the simulation per day. The higher the number the lower
# the number of samples (86400/scalar)
scalar = 100
# NOTE(review): under Python 3 this is true division and `num` becomes a
# float (864.0); np.arange accepts it, but `86400 // scalar` would be the
# explicit integer form -- confirm intended Python version
num = 86400/scalar
# basic time signal in UTS (universal time, seconds of day)
uts = np.arange(num) * scalar
num_array = np.arange(num) * scalar
# seed DataFrame with UT array
data = pysat.DataFrame(uts, columns=['uts'])
# need to create simple orbits here. Have start of first orbit
# at 2009,1, 0 UT. 14.84 orbits per day
# figure out how far in time from the root start
# use that info to create a signal that is continuous from that start
# going to presume there are 5820 seconds per orbit (97 minute period)
time_delta = date - pysat.datetime(2009, 1, 1)
# root start: seconds into the current orbit at the start of this day
uts_root = np.mod(time_delta.total_seconds(), 5820)
# mlt (magnetic local time) runs 0-24 each orbit.
mlt = np.mod(uts_root+np.arange(num)*scalar, 5820) * (24./5820.)
data['mlt'] = mlt
# do slt (solar local time), 20 second offset from mlt
uts_root = np.mod(time_delta.total_seconds() + 20, 5820)
data['slt'] = np.mod(uts_root + np.arange(num) * scalar,
5820) * (24./5820.)
# NOTE(review): fragment of a second simulated-instrument load routine; the
# enclosing `def` (supplying `parts`, `yr`, `month`, `tag`, `root_date`,
# `sim_multi_file_right`, `sim_multi_file_left`) is outside this view.
# Indentation restored; one bug fixed below.
day = int(parts[-2])
date = pysat.datetime(yr, month, day)
# shift the simulated data window +/- 12 hours when emulating data files
# whose boundaries do not fall on 00:00 UT
if sim_multi_file_right:
    root_date = root_date or pysat.datetime(2009, 1, 1, 12)
    data_date = date + pds.DateOffset(hours=12)
elif sim_multi_file_left:
    root_date = root_date or pysat.datetime(2008, 12, 31, 12)
    data_date = date - pds.DateOffset(hours=12)
else:
    root_date = root_date or pysat.datetime(2009, 1, 1)
    data_date = date
# BUG FIX: the original used `tag is ''`, an identity test that only works
# because CPython interns the empty string literal; it is False for an
# equal-but-distinct string object. Use value equality instead.
num = 86400 if tag == '' else int(tag)
num_array = np.arange(num)
uts = num_array
# seed DataFrame with UT array (one sample per second)
data = pysat.DataFrame(uts, columns=['uts'])
# need to create simple orbits here. Have start of first orbit
# at 2009,1, 0 UT. 14.84 orbits per day (5820 s = 97 minute period)
time_delta = date - root_date
# seconds into the current orbit at the start of this day
uts_root = np.mod(time_delta.total_seconds(), 5820)
# mlt runs 0-24 each orbit
mlt = np.mod(uts_root + num_array, 5820) * (24. / 5820.)
data['mlt'] = mlt
# fake orbit number, counted continuously from a fixed 2008-01-01 root
fake_delta = date - pysat.datetime(2008, 1, 1)
fake_uts_root = fake_delta.total_seconds()
data['orbit_num'] = ((fake_uts_root + num_array) / 5820.).astype(int)
# create a fake longitude, resets every 6240 seconds
# sat moves at 360/5820 deg/s, Earth rotates at 360/86400, takes extra time
# create a fake longitude, resets every 6240 seconds
# sat moves at 360/5820 deg/s, Earth rotates at 360/86400, takes extra time
indexed by Epoch. data.ix[0, 'item']
Returns
-------
data, meta
"""
# NOTE(review): fragment -- tail of a routine that reshapes CDAWeb CDF
# metadata into a pysat.Meta object; the enclosing `def` (providing `self`)
# is outside this view. `string` and `pandas` appear unused in the visible
# portion -- they may be used in the cut-off remainder.
import string
import pysat
import pandas
# copy data
cdata = self.data.copy()
# build a Meta object: one row per variable, columns are CDF attributes
meta = pysat.Meta(pysat.DataFrame.from_dict(self.meta,
orient='index'))
# all column names should be lower case
lower_names = [name.lower() for name in meta.data.columns] #map(str.lower, meta.data.columns)
meta.data.columns = lower_names
# replace standard CDAWeb terms with more pysat friendly versions
# NOTE(review): the drop below raises KeyError if 'long_name' is absent
# while 'lablaxis' is present -- presumably both always coexist in CDAWeb
# output; verify against a sample file
if 'lablaxis' in meta.data.columns:
meta.data.drop('long_name', inplace=True, axis=1)
meta.data.rename(columns={'lablaxis': 'long_name'}, inplace=True)
if 'catdesc' in meta.data.columns:
meta.data.rename(columns={'catdesc': 'description'}, inplace=True)
# account for different possible cases for Epoch, epoch, EPOCH, epOch
lower_names = [name.lower() for name in meta.data.index.values] #lower_names = map(str.lower, meta.data.index.values)
for name, true_name in zip(lower_names, meta.data.index.values):
if name == 'epoch':
meta.data.rename(index={true_name: 'Epoch'}, inplace=True)
# within the new instrument module, at the top level define
# a new variable named load, and set it equal to this load method
# code below taken from cnofs_ivm.py.
# support load routine
# use the default CDAWeb method
load = cdw.load
"""
import pysatCDF
if len(fnames) <= 0:
return pysat.DataFrame(None), None
else:
# going to use pysatCDF to load the CDF and format
# data and metadata for pysat using some assumptions.
# Depending upon your needs the resulting pandas DataFrame may
# need modification
# currently only loads one file, which handles more situations via
# pysat than you may initially think
if fake_daily_files_from_monthly:
# parse out date from filename
fname = fnames[0][0:-11]
date = pysat.datetime.strptime(fnames[0][-10:], '%Y-%m-%d')
with pysatCDF.CDF(fname) as cdf:
# convert data to pysat format
data, meta = cdf.to_pysat(flatten_twod=flatten_twod)
# select data from monthly
# NOTE(review): interior fragment of pysat.Meta.__init__; the signature is
# outside this view.  Stores the caller-chosen column-label strings used to
# index the metadata DataFrame.
self._units_label = units_label
self._name_label = name_label
self._notes_label = notes_label
self._desc_label = desc_label
self._plot_label = plot_label
self._axis_label = axis_label
self._scale_label = scale_label
self._min_label = min_label
self._max_label = max_label
self._fill_label = fill_label
# init higher order (nD) data structure container, a dict
self._ho_data = {}
# use any user provided data to instantiate object with data
# attribute; unit and name labels are applied within
if metadata is not None:
if isinstance(metadata, DataFrame):
self._data = metadata
# make sure defaults are taken care of for required metadata
self.accept_default_labels(self)
else:
raise ValueError(''.join(('Input must be a pandas DataFrame',
'type. See other constructors for',
' alternate inputs.')))
else:
self._data = DataFrame(None, columns=[self._units_label,
self._name_label,
self._desc_label,
self._plot_label,
self._axis_label,
self._scale_label,
self.notes_label,
self._min_label,
tag : (str or NoneType)
tag or None (default=None)
sat_id : (str or NoneType)
satellite id or None (default=None)
Returns
---------
data : (pandas.DataFrame)
Object containing satellite data
meta : (pysat.Meta)
Object containing metadata such as column names and units
"""
# NOTE(review): body of a minimal pysatCDF load routine; the `def` and its
# docstring head are outside this view.
import pysatCDF
# no files requested -- empty frame, no metadata
if len(fnames) <= 0 :
return pysat.DataFrame(None), None
else:
# going to use pysatCDF to load the CDF and format
# data and metadata for pysat using some assumptions.
# Depending upon your needs the resulting pandas DataFrame may
# need modification
# currently only loads one file, which handles more situations via pysat
# than you may initially think
with pysatCDF.CDF(fnames[0]) as cdf:
# to_pysat returns (DataFrame, Meta)
return cdf.to_pysat()
def load(fnames, tag=None, sat_id=None):
# NOTE(review): davitpy-based SuperDARN grdex load routine; this fragment
# is cut off mid-dict-literal below -- the remainder is outside this view.
# `tag` is passed as the hemisphere selector to sdDataPtr.
import davitpy
if len(fnames) <= 0:
# no files -- empty data and metadata
return pysat.DataFrame(None), pysat.Meta(None)
elif len(fnames) == 1:
# wide sTime/eTime bounds so the single named file is always accepted
myPtr = davitpy.pydarn.sdio.sdDataPtr(sTime=pysat.datetime(1980, 1, 1),
fileType='grdex',
eTime=pysat.datetime(2250, 1, 1),
hemi=tag,
fileName=fnames[0])
myPtr.open()
in_list = []
# accumulators for per-record fields (fragment continues past this view)
in_dict = {'stid': [],
'channel': [],
'noisemean': [],
'noisesd': [],
'gsct': [],
'nvec': [],
# NOTE(review): fragment of an SGP4-based simulated-satellite load routine;
# `times`, `satellite`, `pysat`, and `ephem` come from the enclosing scope
# outside this view.
# keep CI runs fast by truncating to the first 100 time steps
on_travis = os.environ.get('ONTRAVIS') == 'True'
if on_travis:
times = times[0:100]
# create list to hold satellite position, velocity
position = []
velocity = []
for time in times:
# orbit propagator - computes x,y,z position and velocity
pos, vel = satellite.propagate(time.year, time.month, time.day,
time.hour, time.minute, time.second)
# pos/vel are 3-tuples; extend produces flat [x0,y0,z0,x1,y1,z1,...]
position.extend(pos)
velocity.extend(vel)
# put data into DataFrame, de-interleaving the flat lists by stride 3
data = pysat.DataFrame({'position_eci_x': position[::3],
'position_eci_y': position[1::3],
'position_eci_z': position[2::3],
'velocity_eci_x': velocity[::3],
'velocity_eci_y': velocity[1::3],
'velocity_eci_z': velocity[2::3]},
index=times)
data.index.name = 'Epoch'
# add position and velocity in ECEF
# add call for GEI/ECEF translation here
# instead, since available, I'll use an orbit predictor from another
# package that outputs in ECEF
# it also supports ground station calculations
# the observer's (ground station) position on the Earth surface
site = ephem.Observer()
data : pandas.Series
Series of numbers, Series, DataFrames
Returns
-------
pandas.Series, DataFrame, or Panel
repacked data, aligned by indices, ready for calculation
"""
from pysat import DataFrame, Series, datetime, Panel
if isinstance(data.iloc[0], DataFrame):
dslice = Panel.from_dict(dict([(i, data.iloc[i])
for i in xrange(len(data))]))
elif isinstance(data.iloc[0], Series):
dslice = DataFrame(data.tolist())
dslice.index = data.index
else:
dslice = data
return dslice
# NOTE(review): interior fragment of a COSMIC netCDF load loop; the
# enclosing `for i, ...` loop, the `try:` matching the `except` below, and
# `new`/`output`/`drop_idx` all originate outside this view.
# get list of file attributes
# ncattrsList = data.ncattrs()
# uses the private netCDF4 attribute dict rather than the public ncattrs()
ncattrsList = data._attributes.keys()
for d in ncattrsList:
new[d] = data._attributes[d] # data.getncattr(d)
# load all of the variables in the netCDF
loadedVars = {}
keys = data.variables.keys()
for key in keys:
# normalize byte order to native so pandas/numpy operate correctly
if data.variables[key][:].dtype.byteorder != '=':
loadedVars[key] = \
data.variables[key][:].byteswap().newbyteorder()
else:
loadedVars[key] = data.variables[key][:]
new['profiles'] = pysat.DataFrame(loadedVars)
output[i] = new
data.close()
except RuntimeError:
# some of the files have zero bytes, which causes a read error
# this stores the index of these zero byte files so I can drop
# the Nones the gappy file leaves behind
drop_idx.append(i)
# drop anything that came from the zero byte files
# reverse so deletions do not shift the not-yet-deleted indices
drop_idx.reverse()
for i in drop_idx:
del output[i]
if tag == 'ionprf':
if altitude_bin is not None: