Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): fragment of OGGM's `_get_rgi_dir_unlocked` -- the function is
# truncated here (the download/extract step that normally follows the manifest
# glob is missing) and the original indentation was lost in this excerpt.
def _get_rgi_dir_unlocked(version=None, reset=False):
# Resolve the target RGI directory, defaulting to the configured version.
rgi_dir = cfg.PATHS['rgi_dir']
if version is None:
version = cfg.PARAMS['rgi_version']
# Single-digit versions ('5', '6') are normalized to two digits ('50', '60').
if len(version) == 1:
version += '0'
# Be sure the user gave a sensible path to the RGI dir
if not rgi_dir:
# NOTE(review): message renders as "...has to bespecified..." -- the two
# string literals concatenate without a separating space.
raise InvalidParamsError('The RGI data directory has to be'
'specified explicitly.')
rgi_dir = os.path.abspath(os.path.expanduser(rgi_dir))
rgi_dir = os.path.join(rgi_dir, 'RGIV' + version)
mkdir(rgi_dir, reset=reset)
# Pick the download URL matching the requested RGI version.
# NOTE(review): there is no else branch -- an unknown version would leave
# `dfile` undefined (NameError on later use); verify against the full source.
if version == '50':
dfile = 'http://www.glims.org/RGI/rgi50_files/rgi50.zip'
elif version == '60':
dfile = 'http://www.glims.org/RGI/rgi60_files/00_rgi60.zip'
elif version == '61':
dfile = 'https://cluster.klima.uni-bremen.de/data/rgi/rgi_61.zip'
elif version == '62':
dfile = 'https://cluster.klima.uni-bremen.de/~fmaussion/misc/rgi62.zip'
# Glob pattern used to detect an already-extracted RGI directory.
test_file = os.path.join(rgi_dir,
'*_rgi*{}_manifest.txt'.format(version))
# NOTE(review): orphaned fragment -- interior of OGGM's cached-download
# helper; the enclosing `def` and the names `fb_cache_dir`, `cache_obj_name`,
# `dl_func`, `check_fb_dir` and `reset` are defined outside this excerpt.
cache_dir = fb_cache_dir
# NOTE(review): `cache_ro` is hard-coded False here, so the read-only branch
# below is dead as shown -- in the full source it presumably comes from config.
cache_ro = False
# Fallback cache location; reuse the file if it is already present.
fb_path = os.path.join(fb_cache_dir, cache_obj_name)
if not reset and os.path.isfile(fb_path):
return fb_path
# Primary cache location, same reuse logic.
cache_path = os.path.join(cache_dir, cache_obj_name)
if not reset and os.path.isfile(cache_path):
return cache_path
# Read-only cache: downloads must land in the fallback directory instead.
if cache_ro:
if check_fb_dir:
# Add a manual check that we are caching sample data download
if 'oggm-sample-data' not in fb_path:
raise InvalidParamsError('Attempting to download something '
'with invalid global settings.')
cache_path = fb_path
# A download is required from here on; honour the global offline switch.
if not cfg.PARAMS['has_internet']:
raise NoInternetException("Download required, but "
"`has_internet` is False.")
mkdir(os.path.dirname(cache_path))
try:
cache_path = _call_dl_func(dl_func, cache_path)
except BaseException:
# Never leave a half-written file in the cache on any failure
# (including KeyboardInterrupt); re-raise afterwards.
if os.path.exists(cache_path):
os.remove(cache_path)
raise
# NOTE(review): fragment -- the docstring tail below belongs to an OGGM
# `run_from_climate_data`-style task whose signature is outside this excerpt;
# the final `return` is cut mid-argument-list.
If True, the ice thickness is set to zero before the simulation
bias : float
bias of the mb model. Default is to use the calibrated one, which
is often a better idea. For t* experiments it can be useful to set it
to zero
kwargs : dict
kwargs to pass to the FluxBasedModel instance
"""
# Default start year: the glacier's RGI date (may be a datetime or an int).
if ys is None:
try:
ys = gdir.rgi_date.year
except AttributeError:
ys = gdir.rgi_date
if ye is None:
raise InvalidParamsError('Need to set the `ye` kwarg!')
# Optionally extend the run backwards so it starts no later than `min_ys`.
if min_ys is not None:
ys = ys if ys < min_ys else min_ys
# Optionally initialise the flowlines from a previously stored model run.
if init_model_filesuffix is not None:
fp = gdir.get_filepath('model_run', filesuffix=init_model_filesuffix)
with FileModel(fp) as fmod:
if init_model_yr is None:
init_model_yr = fmod.last_yr
fmod.run_until(init_model_yr)
init_model_fls = fmod.fls
# Mass balance driven by the past-climate file for this glacier.
mb = MultipleFlowlineMassBalance(gdir, mb_model_class=PastMassBalance,
filename=climate_filename, bias=bias,
input_filesuffix=climate_input_filesuffix)
# NOTE(review): truncated mid-call in this excerpt.
return robust_model_run(gdir, output_filesuffix=output_filesuffix,
# NOTE(review): fragment of OGGM's `_get_cru_file_unlocked`; the final
# `raise RuntimeError(...)` is cut mid-expression and the no-match case is
# missing from this excerpt. Original indentation was lost.
def _get_cru_file_unlocked(var=None):
cru_dir = cfg.PATHS['cru_dir']
# Be sure the user gave a sensible path to the climate dir
if not cru_dir:
# NOTE(review): renders as "...has to bespecified..." -- missing space
# between the concatenated string literals.
raise InvalidParamsError('The CRU data directory has to be'
'specified explicitly.')
cru_dir = os.path.abspath(os.path.expanduser(cru_dir))
mkdir(cru_dir)
# Be sure input makes sense
if var not in ['tmp', 'pre']:
raise InvalidParamsError('CRU variable {} does not exist!'.format(var))
# The user files may have different dates, so search for patterns
bname = 'cru_ts*.{}.dat.nc'.format(var)
search = glob.glob(os.path.join(cru_dir, bname))
# Exactly one match is the happy path; more than one is ambiguous.
if len(search) == 1:
ofile = search[0]
elif len(search) > 1:
raise RuntimeError('You seem to have more than one file in your CRU '
'directory: {}. Help me by deleting the one'
# NOTE(review): fragment -- docstring tail and opening of a
# `FlowlineModel.run_until_and_store`-style method; the enclosing `def`
# and the rest of the body are outside this excerpt.
run_ds : xarray.Dataset
stores the entire glacier geometry. It is useful to visualize the
glacier geometry or to restart a new run from a modelled geometry.
The glacier state is stored at the beginning of each hydrological
year (not in between in order to spare disk space).
diag_ds : xarray.Dataset
stores a few diagnostic variables such as the volume, area, length
and ELA of the glacier.
"""
# Only whole years are accepted as end dates.
if int(y1) != y1:
raise InvalidParamsError('run_until_and_store only accepts '
'integer year dates.')
if not self.mb_model.hemisphere:
raise InvalidParamsError('run_until_and_store needs a '
'mass-balance model with an unambiguous '
'hemisphere.')
# time
yearly_time = np.arange(np.floor(self.yr), np.floor(y1)+1)
# Default to monthly output when the mass-balance model steps monthly.
if store_monthly_step is None:
store_monthly_step = self.mb_step == 'monthly'
if store_monthly_step:
monthly_time = utils.monthly_timeseries(self.yr, y1)
else:
monthly_time = np.arange(np.floor(self.yr), np.floor(y1)+1)
# Hydrological start month depends on the hemisphere setting.
sm = cfg.PARAMS['hydro_month_' + self.mb_model.hemisphere]
yrs, months = utils.floatyear_to_date(monthly_time)
# NOTE(review): orphaned fragment -- interior of a WGMS reference
# mass-balance loader (an OGGM `GlacierDirectory` method); `wid` and
# `mbdatadir` are defined by lines missing from this excerpt.
if len(wid) == 0:
raise RuntimeError('Not a reference glacier!')
wid = wid.WGMS_ID.values[0]
# file
reff = os.path.join(mbdatadir,
'mbdata_WGMS-{:05d}.csv'.format(wid))
# list of years
mb_df = pd.read_csv(reff).set_index('YEAR')
# Quality checks
if 'ANNUAL_BALANCE' not in mb_df:
raise InvalidParamsError('Need an "ANNUAL_BALANCE" column in the '
'dataframe.')
# NOTE(review): `Index.is_integer` is deprecated in recent pandas --
# verify the pandas version this targets.
if not mb_df.index.is_integer():
raise InvalidParamsError('The index needs to be integer years')
mb_df.index.name = 'YEAR'
# Cache the loaded dataframe on the instance.
self._mbdf = mb_df
# NOTE(review): fragment of OGGM's `_get_histalp_file_unlocked`; the excerpt
# ends right after the glob, so the found/not-found handling is missing.
# Original indentation was lost.
def _get_histalp_file_unlocked(var=None):
# HISTALP files are looked up in the CRU directory -- presumably intentional
# (shared climate dir), but confirm against the full source.
cru_dir = cfg.PATHS['cru_dir']
# Be sure the user gave a sensible path to the climate dir
if not cru_dir:
# NOTE(review): renders as "...has to bespecified..." -- missing space
# between the concatenated string literals.
raise InvalidParamsError('The CRU data directory has to be'
'specified explicitly.')
cru_dir = os.path.abspath(os.path.expanduser(cru_dir))
mkdir(cru_dir)
# Be sure input makes sense
if var not in ['tmp', 'pre']:
raise InvalidParamsError('HISTALP variable {} '
'does not exist!'.format(var))
# File to look for
if var == 'tmp':
bname = 'HISTALP_temperature_1780-2014.nc'
else:
bname = 'HISTALP_precipitation_all_abs_1801-2014.nc'
search = glob.glob(os.path.join(cru_dir, bname))
# NOTE(review): fragment -- docstring tail and opening of an
# `init_glacier_regions`-style workflow function; signature and remainder
# are outside this excerpt.
Returns
-------
gdirs : list of :py:class:`oggm.GlacierDirectory` objects
the initialised glacier directories
"""
# Ask for confirmation before wiping directories, unless forced.
if reset and not force:
reset = utils.query_yes_no('Delete all glacier directories?')
if prepro_border is None:
prepro_border = int(cfg.PARAMS['border'])
# Pre-processed directories only exist for a fixed set of border sizes
# (check skipped for test download URLs).
if from_prepro_level and prepro_border not in [10, 80, 160, 250]:
if 'test' not in utils._downloads.GDIR_URL:
raise InvalidParamsError("prepro_border or cfg.PARAMS['border'] "
"should be one of: 10, 80, 160, 250.")
# if reset delete also the log directory
if reset:
fpath = os.path.join(cfg.PATHS['working_dir'], 'log')
if os.path.exists(fpath):
rmtree(fpath)
gdirs = []
new_gdirs = []
# With no RGI dataframe, glacier dirs are discovered from the working dir.
if rgidf is None:
if reset:
raise ValueError('Cannot use reset without setting rgidf')
log.workflow('init_glacier_regions by parsing available folders '
'(can be slow).')
# The dirs should be there already
# NOTE(review): orphaned fragment -- interior of the WGMS reference
# mass-balance loader (a `GlacierDirectory` method: uses `self.rgi_version`
# and `self.rgi_id`); largely duplicates an earlier fragment in this excerpt.
flink, mbdatadir = get_wgms_files()
# Lookup column name depends on the major RGI version, e.g. 'RGI50_ID'.
c = 'RGI{}0_ID'.format(self.rgi_version[0])
wid = flink.loc[flink[c] == self.rgi_id]
if len(wid) == 0:
raise RuntimeError('Not a reference glacier!')
wid = wid.WGMS_ID.values[0]
# file
reff = os.path.join(mbdatadir,
'mbdata_WGMS-{:05d}.csv'.format(wid))
# list of years
mb_df = pd.read_csv(reff).set_index('YEAR')
# Quality checks
if 'ANNUAL_BALANCE' not in mb_df:
raise InvalidParamsError('Need an "ANNUAL_BALANCE" column in the '
'dataframe.')
# NOTE(review): `Index.is_integer` is deprecated in recent pandas --
# verify the pandas version this targets.
if not mb_df.index.is_integer():
raise InvalidParamsError('The index needs to be integer years')
mb_df.index.name = 'YEAR'
# Cache the loaded dataframe on the instance.
self._mbdf = mb_df
# NOTE(review): fragment -- a second copy of the `run_until_and_store`
# docstring tail and opening; duplicates an earlier fragment in this excerpt
# and is likewise truncated.
defaults to yearly (see __init__).
Returns
-------
run_ds : xarray.Dataset
stores the entire glacier geometry. It is useful to visualize the
glacier geometry or to restart a new run from a modelled geometry.
The glacier state is stored at the beginning of each hydrological
year (not in between in order to spare disk space).
diag_ds : xarray.Dataset
stores a few diagnostic variables such as the volume, area, length
and ELA of the glacier.
"""
# Only whole years are accepted as end dates.
if int(y1) != y1:
raise InvalidParamsError('run_until_and_store only accepts '
'integer year dates.')
if not self.mb_model.hemisphere:
raise InvalidParamsError('run_until_and_store needs a '
'mass-balance model with an unambiguous '
'hemisphere.')
# time
yearly_time = np.arange(np.floor(self.yr), np.floor(y1)+1)
# Default to monthly output when the mass-balance model steps monthly.
if store_monthly_step is None:
store_monthly_step = self.mb_step == 'monthly'
if store_monthly_step:
monthly_time = utils.monthly_timeseries(self.yr, y1)
else:
monthly_time = np.arange(np.floor(self.yr), np.floor(y1)+1)