"instead.")
if cfg.PARAMS['baseline_climate'] != 'HISTALP':
raise InvalidParamsError("cfg.PARAMS['baseline_climate'] should be "
"set to HISTALP.")
# read the time out of the plain netCDF file (before opening it with salem)
ft = utils.get_histalp_file('tmp')
fp = utils.get_histalp_file('pre')
with utils.ncDataset(ft) as nc:
vt = nc.variables['time']
assert vt[0] == 0
assert vt[-1] == vt.shape[0] - 1
t0 = vt.units.split(' since ')[1][:7]
time_t = pd.date_range(start=t0, periods=vt.shape[0], freq='MS')
with utils.ncDataset(fp) as nc:
vt = nc.variables['time']
assert vt[0] == 0.5
assert vt[-1] == vt.shape[0] - .5
t0 = vt.units.split(' since ')[1][:7]
time_p = pd.date_range(start=t0, periods=vt.shape[0], freq='MS')
# Now open with salem
nc_ts_tmp = salem.GeoNetcdf(ft, time=time_t)
nc_ts_pre = salem.GeoNetcdf(fp, time=time_p)
# set temporal subset for the ts data (hydro years)
# the reference time is given by precip, which is shorter
sm = cfg.PARAMS['hydro_month_nh']
em = sm - 1 if (sm > 1) else 12
yrs = nc_ts_pre.time.year
y0, y1 = yrs[0], yrs[-1]
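# As an aside, the hydrological-month arithmetic and the monthly time axis built
# above can be checked in isolation. This is only a sketch with hard-coded
# example values: sm and t0 are stand-ins for the values read from cfg.PARAMS
# and from the netCDF units string.
import pandas as pd

sm = 10                                # hydro year starts in October (NH)
em = sm - 1 if (sm > 1) else 12        # -> 9, i.e. the hydro year ends in September

t0 = '1801-01'                         # stand-in for vt.units.split(' since ')[1][:7]
time_t = pd.date_range(start=t0, periods=24, freq='MS')
print(em, time_t[0], time_t[-1])       # 9 1801-01-01 00:00:00 1802-12-01 00:00:00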
cfg.PARAMS['smooth_window'] (i.e. a size in meters). Set to zero to
suppress smoothing.
varname_suffix : str
add a suffix to the variable written in the file (for experiments)
"""
# Variables
grids_file = gdir.get_filepath('gridded_data')
# See if we have the masks, else compute them
with utils.ncDataset(grids_file) as nc:
has_masks = 'glacier_ext_erosion' in nc.variables
if not has_masks:
from oggm.core.gis import gridded_attributes
gridded_attributes(gdir)
with utils.ncDataset(grids_file) as nc:
glacier_mask = nc.variables['glacier_mask'][:]
glacier_ext = nc.variables['glacier_ext_erosion'][:]
ice_divides = nc.variables['ice_divides'][:]
if add_slope:
slope_factor = nc.variables['slope_factor'][:]
else:
slope_factor = 1.
# Thickness to interpolate
thick = glacier_ext * np.NaN
thick[(glacier_ext-ice_divides) == 1] = 0.
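# i.e. start from an all-NaN grid and pin the thickness to zero on outline
# pixels that are not ice divides; the remaining NaNs are the values to be
# interpolated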
# TODO: domain border too, for convenience for a start
thick[0, :] = 0.
thick[-1, :] = 0.
thick[:, 0] = 0.
thick[:, -1] = 0.
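# A minimal sketch of how such a zero-boundary / NaN-interior array can be
# filled by interpolation (assuming scipy.interpolate.griddata; the real
# routine may use different sample points and methods):
import numpy as np
from scipy.interpolate import griddata

demo = np.full((6, 6), np.nan)
demo[0, :] = demo[-1, :] = demo[:, 0] = demo[:, -1] = 0.   # known boundary values
demo[2, 2] = 120.                                          # one known interior point

known = np.array(np.nonzero(np.isfinite(demo))).T
unknown = np.array(np.nonzero(~np.isfinite(demo))).T
demo[tuple(unknown.T)] = griddata(known, demo[tuple(known.T)], unknown,
                                  method='linear')
print(np.isfinite(demo).all())   # True: the interior has been filled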
return get_yearly_mb_temp_prcp(gdir, time_range=[t0, t1])
# get needed parameters
temp_all_solid = cfg.PARAMS['temp_all_solid']
temp_melt = cfg.PARAMS['temp_melt']
prcp_fac = cfg.PARAMS['prcp_scaling_factor']
default_grad = cfg.PARAMS['temp_default_gradient']
g_minmax = cfg.PARAMS['temp_local_gradient_bounds']
# Marzeion et al., 2012 used a precipitation lapse rate of 3%/100m.
# But the prcp gradient is omitted for now.
# prcp_grad = 3e-4
prcp_grad = 0
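# (Illustration, not part of the original routine.) If the 3%/100 m lapse rate
# were used, a local precipitation correction for a point sitting dz metres
# above the reference climate height would look roughly like:
#     prcp_grad = 3e-4                              # fraction per metre
#     prcp_local = prcp * prcp_fac * (1 + prcp_grad * dz)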
# read the climate file
igrad = None
with utils.ncDataset(gdir.get_filepath('climate_monthly'), mode='r') as nc:
# time
time = nc.variables['time']
time = netCDF4.num2date(time[:], time.units)
# limit data to given time range and
# raise errors if bounds are outside the available data
if time_range is not None:
p0 = np.where(time == time_range[0])[0]
try:
p0 = p0[0]
except IndexError:
raise climate.MassBalanceCalibrationError('time_range[0] '
'not found in file')
p1 = np.where(time == time_range[1])[0]
try:
p1 = p1[0]
except IndexError:
Parameters
----------
gdir : :py:class:`oggm.GlacierDirectory`
where to write the data
"""
# variables
flowlines = gdir.read_pickle('inversion_flowlines')
catchment_indices = gdir.read_pickle('geometries')['catchment_indices']
# Topography is used to filter out the unrealistic lines afterwards.
# We take the non-smoothed topography and remove the boundary pixels,
# because they are likely to be higher
fpath = gdir.get_filepath('gridded_data')
with utils.ncDataset(fpath) as nc:
topo = nc.variables['topo'][:]
mask_ext = nc.variables['glacier_ext'][:]
mask_glacier = nc.variables['glacier_mask'][:]
topo[np.where(mask_glacier == 0)] = np.NaN
topo[np.where(mask_ext == 1)] = np.NaN
# Intersects between catchments/glaciers
gdfi = gpd.GeoDataFrame(columns=['geometry'])
if gdir.has_file('catchments_intersects'):
# read and transform to grid
gdf = gdir.read_shapefile('catchments_intersects')
salem.transform_geopandas(gdf, gdir.grid, inplace=True)
gdfi = pd.concat([gdfi, gdf[['geometry']]])
if gdir.has_file('intersects'):
# read and transform to grid
gdf = gdir.read_shapefile('intersects')
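# A sketch of the intersect-collection pattern above, with a made-up geometry
# instead of a shapefile read from the glacier directory (the salem grid
# transform is omitted because it needs a real grid):
import geopandas as gpd
import pandas as pd
from shapely.geometry import LineString

gdfi = gpd.GeoDataFrame(columns=['geometry'])
gdf = gpd.GeoDataFrame({'geometry': [LineString([(0, 0), (5, 5)])]})
gdfi = pd.concat([gdfi, gdf[['geometry']]])
print(len(gdfi))   # 1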
"""Reads the DEM and computes the minimal and maximal glacier surface
elevation in meters asl, from the given (RGI) glacier outline.
Parameters
----------
gdir : :py:class:`oggm.GlacierDirectory`
Returns
-------
[float, float]
minimal and maximal glacier surface elevation [m asl.]
"""
# open DEM file and mask the glacier surface area
fpath = gdir.get_filepath('gridded_data')
with ncDataset(fpath) as nc:
mask = nc.variables['glacier_mask'][:]
topo = nc.variables['topo'][:]
# get relevant elevation information
min_elev = np.min(topo[np.where(mask == 1)])
max_elev = np.max(topo[np.where(mask == 1)])
return min_elev, max_elev
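# For illustration, the same min/max extraction on a toy DEM and glacier mask
# (a standalone sketch, no GlacierDirectory needed):
import numpy as np

topo = np.array([[3000., 3100., 3200.],
                 [2900., 2800., 2700.],
                 [2600., 2500., 2400.]])
mask = np.array([[0, 1, 1],
                 [0, 1, 0],
                 [0, 0, 0]])
print(np.min(topo[np.where(mask == 1)]),
      np.max(topo[np.where(mask == 1)]))   # 2800.0 3200.0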
The rest of the job (merging centerlines + downstream into
one single glacier) is realized by
:py:func:`~oggm.tasks.init_present_time_glacier`.
Parameters
----------
gdir : :py:class:`oggm.GlacierDirectory`
where to write the data
"""
# For tidewater glaciers no need for all this
if gdir.is_tidewater:
return
with utils.ncDataset(gdir.get_filepath('gridded_data')) as nc:
topo = nc.variables['topo_smoothed'][:]
glacier_ext = nc.variables['glacier_ext'][:]
# Look for the starting points
p = gdir.read_pickle('centerlines')[-1].tail
head = (int(p.y), int(p.x))
# Make going up very costly
topo = topo**4
# We add an artificial cost proportional to the distance from the glacier.
# This should not have too much influence on mountain glaciers, but it helps
# for tidewater candidates
topo = topo + distance_transform_edt(1 - glacier_ext)
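# A small standalone sketch of the artificial cost term above (assuming
# scipy.ndimage.distance_transform_edt): pixels far from the glacier outline
# get a larger cost, pixels on the outline get zero.
import numpy as np
from scipy.ndimage import distance_transform_edt

ext = np.zeros((5, 5), dtype=int)
ext[2, 2] = 1                                # a single outline pixel
cost = distance_transform_edt(1 - ext)
print(cost[2, 2], cost[0, 0])                # 0.0 on the outline, ~2.83 at the corner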
if smooth_radius is None:
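# default: convert cfg.PARAMS['smooth_window'] (a size in metres) into a
# radius in grid pixels using the map resolution dx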
smooth_radius = np.rint(cfg.PARAMS['smooth_window'] / dx)
thick = gaussian_blur(thick, int(smooth_radius))
thick = np.where(glacier_mask, thick, 0.)
# Re-mask
thick[glacier_mask == 0] = np.NaN
assert np.all(np.isfinite(thick[glacier_mask == 1]))
# Conserve volume
tmp_vol = np.nansum(thick * dx**2)
thick *= init_vol / tmp_vol
# write
grids_file = gdir.get_filepath('gridded_data')
with utils.ncDataset(grids_file, 'a') as nc:
vn = 'distributed_thickness' + varname_suffix
if vn in nc.variables:
v = nc.variables[vn]
else:
v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
v.units = '-'
v.long_name = 'Distributed ice thickness'
v[:] = thick
return thick
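# The volume-conservation step above, illustrated with made-up numbers
# (dx and init_vol are hypothetical stand-ins):
import numpy as np

dx = 100.0                                    # grid spacing [m]
init_vol = 1.2e9                              # target glacier volume [m3]
thick = np.random.RandomState(0).uniform(0, 300, (50, 50))

tmp_vol = np.nansum(thick * dx**2)            # volume implied by the raw field
thick *= init_vol / tmp_vol                   # rescale to conserve volume
assert np.isclose(np.nansum(thick * dx**2), init_vol)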
def plot_inversion(gdirs, ax=None, smap=None, linewidth=3, vmax=None):
"""Plots the result of the inversion out of a glacier directory."""
gdir = gdirs[0]
with utils.ncDataset(gdir.get_filepath('gridded_data')) as nc:
topo = nc.variables['topo'][:]
# Dirty optim
try:
smap.set_topography(topo)
except ValueError:
pass
toplot_th = np.array([])
toplot_lines = []
toplot_crs = []
vol = []
for gdir in gdirs:
crs = gdir.grid.center_grid
geom = gdir.read_pickle('geometries')
inv = gdir.read_pickle('inversion_output')
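# A hedged usage sketch for the plotting helper above: it assumes `gdirs` is a
# list of prepared oggm.GlacierDirectory objects for which the inversion has
# been run, and that matplotlib is available. In OGGM the graphics wrapper
# normally supplies the salem map, so only the axes need to be passed here.
import matplotlib.pyplot as plt
from oggm import graphics

fig, ax = plt.subplots(figsize=(8, 8))
graphics.plot_inversion(gdirs, ax=ax, linewidth=2, vmax=500)
plt.show()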