How to use the pysat.utils.time.getyrdoy function in pysat

To help you get started, we’ve selected a few pysat examples based on popular ways pysat.utils.time.getyrdoy is used in public projects.
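Before looking at the repository excerpts below, here is a minimal, self-contained sketch of the call itself: getyrdoy takes a datetime and returns the matching (year, day of year) pair. The chosen date is arbitrary.

import datetime as dt
import pysat

# Convert a datetime into a (year, day-of-year) pair.
date = dt.datetime(2009, 1, 15)
year, doy = pysat.utils.time.getyrdoy(date)
print(year, doy)  # 2009 15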


github pysat / pysat / pysat / _instrument.py View on GitHub
    def _set_load_parameters(self, date=None, fid=None):
        # filter the supplied date so that it contains only year, month,
        # and day, then store it as part of the Instrument object;
        # filtering is intrinsic to the assignment
        self.date = date
        self._fid = fid

        if date is not None:
            year, doy = utils.time.getyrdoy(date)
            self.yr = year
            self.doy = doy
            self._load_by_date = True
        else:
            self.yr = None
            self.doy = None
            self._load_by_date = False
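For context, the (yr, doy) pair stored above is the same pair an Instrument accepts when loading by year and day of year. A short sketch, assuming pysat's built-in 'testing' instrument is available and that the chosen date falls inside its simulated file range:

import pysat

# Load by year and day of year; these keywords correspond to the
# self.yr / self.doy attributes set in _set_load_parameters above.
inst = pysat.Instrument(platform='pysat', name='testing')
inst.load(yr=2009, doy=15)
print(inst.date, inst.yr, inst.doy)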
github pysat / pysat / pysat / instruments / cosmic_gps.py View on GitHub
    elif tag == 'sonprf':
        sub_dir = 'sonPrf'
    elif tag == 'wetprf':
        sub_dir = 'wetPrf'
    elif tag == 'atmPrf':
        sub_dir = 'atmPrf'
    else:
        raise ValueError('Unknown cosmic_gps tag')

    if (user is None) or (password is None):
        raise ValueError('CDAAC user account information must be provided.')

    for date in date_array:
        print('Downloading COSMIC data for ' + date.strftime('%D'))
        sys.stdout.flush()
        yr, doy = pysat.utils.time.getyrdoy(date)
        yrdoystr = '{year:04d}.{doy:03d}'.format(year=yr, doy=doy)
        # Try re-processed data (preferred)
        try:
            dwnld = ''.join(("https://cdaac-www.cosmic.ucar.edu/cdaac/rest/",
                             "tarservice/data/cosmic2013/"))
            dwnld = dwnld + sub_dir + '/{year:04d}.{doy:03d}'.format(year=yr,
                                                                     doy=doy)
            top_dir = os.path.join(data_path, 'cosmic2013')
            req = requests.get(dwnld, auth=HTTPBasicAuth(user, password))
            req.raise_for_status()
        except requests.exceptions.HTTPError:
            # if the response is negative, try post-processed data
            try:
                dwnld = ''.join(("https://cdaac-www.cosmic.ucar.edu/cdaac/",
                                 "rest/tarservice/data/cosmic/"))
                dwnld = dwnld + sub_dir + '/{year:04d}.{doy:03d}'
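The excerpt above is cut off by the example viewer, but the getyrdoy call feeds the 'YYYY.DDD' directory string used on the CDAAC server. A standalone sketch of just that step, with an arbitrary date:

import datetime as dt
import pysat

# Build the 'YYYY.DDD' directory string from a date.
date = dt.datetime(2014, 5, 1)
yr, doy = pysat.utils.time.getyrdoy(date)
yrdoystr = '{year:04d}.{doy:03d}'.format(year=yr, doy=doy)
print(yrdoystr)  # '2014.121'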
github pysat / pysat / pysat / _instrument.py View on GitHub
        self.meta = meta

        # check if load routine actually returns meta
        if self.meta.data.empty:
            self.meta[self.variables] = {self.name_label: self.variables,
                                         self.units_label: [''] *
                                         len(self.variables)}

        # if loading by file set the yr, doy, and date
        if not self._load_by_date:
            if self.pad is not None:
                temp = first_time
            else:
                temp = self.index[0]
            self.date = pds.datetime(temp.year, temp.month, temp.day)
            self.yr, self.doy = utils.time.getyrdoy(self.date)

        # ensure data is unique and monotonic
        # check occurs after all the data padding loads, or individual load
        # thus it can potentially check issues with padding or with raw data
        if self.strict_time_flag:
            if (not self.index.is_monotonic_increasing) or (not self.index.is_unique):
                raise ValueError(('Loaded data is not unique ({}) or not '
                                  'monotonic increasing ({})').format(
                                      not self.index.is_unique,
                                      not self.index.is_monotonic_increasing))

        # apply default instrument routine, if data present
        if not self.empty:
            self._default_rtn(self)

        # clean data, if data is present and cleaning requested
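The uniqueness and monotonicity test in the strict_time_flag branch relies only on pandas index properties. A minimal sketch of that check with a small stand-in DatetimeIndex:

import pandas as pds

# A repeated timestamp makes the index non-unique; the order is still increasing.
index = pds.DatetimeIndex(['2009-01-15 00:00:00',
                           '2009-01-15 00:00:01',
                           '2009-01-15 00:00:01'])
print(index.is_unique)                # False
print(index.is_monotonic_increasing)  # True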
github pysat / pysat / pysat / instruments / cosmic2013_gps.py View on GitHub
    elif tag == 'sonprf':
        sub_dir = 'sonPrf'
    elif tag == 'wetprf':
        sub_dir = 'wetPrf'
    elif tag == 'atmPrf':
        sub_dir = 'atmPrf'
    else:
        raise ValueError('Unknown cosmic_gps tag')

    if (user is None) or (password is None):
        raise ValueError('CDAAC user account information must be provided.')

    for date in date_array:
        print('Downloading COSMIC data for '+date.strftime('%D'))
        sys.stdout.flush()
        yr, doy = pysat.utils.time.getyrdoy(date)
        yrdoystr = '{year:04d}.{doy:03d}'.format(year=yr, doy=doy)
        dwnld = ''.join(("https://cdaac-www.cosmic.ucar.edu/cdaac/rest/",
                         "tarservice/data/cosmic2013/"))
        dwnld = dwnld + sub_dir + '/{year:04d}.{doy:03d}'.format(year=yr,
                                                                 doy=doy)
        req = requests.get(dwnld, auth=HTTPBasicAuth(user, password))
        fname = os.path.join(data_path,
                             'cosmic_' + sub_dir + '_' + yrdoystr + '.tar')
        with open(fname, "wb") as local_file:
            local_file.write(req.content)
        # uncompress files
        with tarfile.open(fname) as tar:
            tar.extractall(path=data_path)
        # move files
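For reference, the download-and-extract pattern in the loop above reduces to a few requests and tarfile calls. A sketch under the assumption of valid CDAAC credentials and a writable data_path; the user, password, local paths, and the specific year.day directory are placeholders:

import os
import tarfile
import requests
from requests.auth import HTTPBasicAuth

data_path = '/tmp/cosmic'  # placeholder local directory
os.makedirs(data_path, exist_ok=True)
url = ''.join(('https://cdaac-www.cosmic.ucar.edu/cdaac/rest/',
               'tarservice/data/cosmic2013/atmPrf/2014.121'))
req = requests.get(url, auth=HTTPBasicAuth('user', 'password'))
req.raise_for_status()
fname = os.path.join(data_path, 'cosmic_atmPrf_2014.121.tar')
with open(fname, 'wb') as local_file:
    local_file.write(req.content)
with tarfile.open(fname) as tar:
    tar.extractall(path=data_path)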
github pysat / pysat / pysat / _instrument.py View on GitHub
        self.meta = meta

        # check if load routine actually returns meta
        if self.meta.data.empty:
            self.meta[self.variables] = {self.name_label: self.variables,
                                         self.units_label: [''] *
                                         len(self.variables)}

        # if loading by file set the yr, doy, and date
        if not self._load_by_date:
            if self.pad is not None:
                temp = first_time
            else:
                temp = self.index[0]
            self.date = pds.datetime(temp.year, temp.month, temp.day)
            self.yr, self.doy = utils.time.getyrdoy(self.date)

        # ensure data is unique and monotonic
        # check occurs after all the data padding loads, or individual load
        # thus it can potentially check issues with padding or with raw data
        if self.strict_time_flag:
            if (not self.index.is_monotonic_increasing) or (not self.index.is_unique):
                raise ValueError(('Loaded data is not unique ({}) or not '
                                  'monotonic increasing ({})').format(
                                      not self.index.is_unique,
                                      not self.index.is_monotonic_increasing))
        else:
            warnings.warn('Strict times will eventually be enforced upon all instruments.'
                          ' (strict_time_flag)', DeprecationWarning)

        # apply default instrument routine, if data present
        if not self.empty:
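Finally, going the other way, from a (year, day-of-year) pair back to a date, needs only the standard library; this is plain Python rather than a pysat helper:

import datetime as dt

# Invert getyrdoy: day of year 1 corresponds to January 1.
yr, doy = 2009, 15
date = dt.datetime(yr, 1, 1) + dt.timedelta(days=doy - 1)
print(date)  # 2009-01-15 00:00:00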