utils.mkdir(cfg.PATHS['rgi_dir'])
# Use multiprocessing?
cfg.PARAMS['use_multiprocessing'] = False
cfg.PARAMS['border'] = 20
cfg.PARAMS['continue_on_error'] = False
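# ('border' is the number of DEM grid points kept around the glacier
# outlines; with continue_on_error=False a failing task raises instead
# of being silently skipped)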
# Read in the RGI file
rgisel = os.path.join(WORKING_DIR, 'rgi_selection.shp')
if not os.path.exists(rgisel):
rgi_dir = utils.get_rgi_dir()
regions = ['{:02d}'.format(int(p)) for p in range(1, 20)]
files = [glob.glob(os.path.join(rgi_dir, '*', r + '_rgi50_*.shp'))[0] for r in regions]
rgidf = []
for fs in files:
    sh = salem.read_shapefile(fs, cached=True)  # glob already returned full paths
percs = np.asarray([0, 25, 50, 75, 100])
idppercs = np.round(percs * 0.01 * (len(sh)-1)).astype(int)
rgidf.append(sh.sort_values(by='Area').iloc[idppercs])
rgidf.append(sh.sort_values(by='CenLon').iloc[idppercs])
rgidf.append(sh.sort_values(by='CenLat').iloc[idppercs])
rgidf = gpd.GeoDataFrame(pd.concat(rgidf))
rgidf = rgidf.drop_duplicates('RGIId')
rgidf.to_file(rgisel)
else:
rgidf = salem.read_shapefile(rgisel)
rgidf = rgidf.loc[~rgidf.RGIId.isin(['RGI50-10.00012', 'RGI50-17.00850',
'RGI50-19.01497', 'RGI50-19.00990',
'RGI50-19.01440'])]
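# The selection above keeps, for each RGI region, the glaciers at the
# 0/25/50/75/100th percentiles of area, longitude and latitude: a small
# but geographically and size-wise diverse benchmark set.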
# name the WGMS entity and add it to the selection
sel.loc[:, 'Name'] = name
rgidf.append(sel)
_rgi_ids_for_overwrite.extend(wgms_df.RGI_ID.values)
# GTD glaciers which are not already there
# Actually we should remove the data of those 2 to be honest...
gtd_df = gtd_df.loc[~ gtd_df.RGI_ID.isin(_rgi_ids_for_overwrite)]
log.info('N glaciers GTD: {}'.format(len(gtd_df)))
for i, row in gtd_df.iterrows():
rid = row.RGI_ID
reg = rid.split('-')[1].split('.')[0]
# read the rgi region
rgi_shp = find_path(RGI_DIR, reg + '_rgi50_*.shp')
rgi_df = salem.read_shapefile(rgi_shp, cached=True)
sel = rgi_df.loc[rgi_df.RGIId.isin([rid])].copy()
assert len(sel) == 1
# add glacier name to the entity
_corname = row.NAME.replace('/', 'or').replace('.', '').replace(' ', '-')
name = ['G:' + _corname] * len(sel)
    for n in name:
        if len(n) > 48:
            raise RuntimeError('glacier name too long: {}'.format(n))
sel.loc[:, 'Name'] = name
rgidf.append(sel)
# Save so we don't have to recompute this each time
rgidf = pd.concat(rgidf)
rgidf.to_pickle(df_rgi_file)
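# Illustration of the name sanitization above (hypothetical input):
# 'Academy of Sciences Ice Cap/N' -> 'G:Academy-of-Sciences-Ice-CaporN'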
_m = os.path.basename(trib.get_filepath('local_mustar')).split('.')
muin = _m[0] + in_id + '.' + _m[1]
muout = _m[0] + '_' + out_id + '.' + _m[1]
shutil.copyfile(os.path.join(trib.dir, muin),
os.path.join(main.dir, muout))
# sort flowlines descending
tfls.sort(key=lambda x: x.order, reverse=True)
# loop over tributaries and reproject to main glacier
for nr, tfl in enumerate(tfls):
    # Step 1: reproject the tributary line to the main glacier's grid
    _line = salem.transform_geometry(tfl.line,
                                     crs=trib.grid, to_crs=main.grid)
    # Step 2: set the new line
    tfl.set_line(_line)
    # Step 3: set the map attributes
dx = [shpg.Point(tfl.line.coords[i]).distance(
shpg.Point(tfl.line.coords[i+1]))
for i, pt in enumerate(tfl.line.coords[:-1])] # get distance
# and check if equally spaced
if not np.allclose(dx, np.mean(dx), atol=1e-2):
raise RuntimeError('Flowline is not evenly spaced.')
tfl.dx = np.mean(dx).round(2)
tfl.map_dx = mfl.map_dx
tfl.dx_meter = tfl.map_dx * tfl.dx
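# A minimal self-contained sketch of the even-spacing check used above,
# on a toy line (shapely/numpy only; the names below are illustrative):
import numpy as np
import shapely.geometry as shpg

toy = shpg.LineString([(0, 0), (1, 0), (2, 0), (3, 0)])
toy_dx = [shpg.Point(toy.coords[i]).distance(shpg.Point(toy.coords[i + 1]))
          for i in range(len(toy.coords) - 1)]
assert np.allclose(toy_dx, np.mean(toy_dx), atol=1e-2)  # evenly spaced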
gname = os.path.basename(dgn)
print(gname)
ifile = find_path(os.path.join(DATA_DIR, 'itmix', 'glaciers_sorted'),
'02_surface_' + gname + '*.asc')
ds = salem.EsriITMIX(ifile)
itmix_topo = ds.get_vardata()
ifiles = find_path(ITMIX_ODIR, '*' + gname + '*.asc', allow_more=True)
for ifile in ifiles:
    ds2 = salem.EsriITMIX(ifile)
    oggm_topo = ds2.get_vardata()
    thick = itmix_topo - oggm_topo
    cm = salem.Map(ds.grid)
    cm.set_plot_params(nlevels=256)
    cm.set_cmap(plt.get_cmap('viridis'))
    cm.set_data(thick)
    cm.visualize()
    pname = os.path.basename(ifile).split('.')[0]
    plt.savefig(os.path.join(pdir, pname) + '.png')
    plt.close()
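# Each output png presumably shows the ITMIX surface minus an OGGM-derived
# topography, i.e. an ice-thickness-like difference map per submission.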
utils.compile_run_output(gdirs, filesuffix='_defaults')
utils.compile_run_output(gdirs, filesuffix='_tbias')
# ds = xr.open_dataset(os.path.join(base_dir, 'run_output_defaults.nc'))
# (ds.volume.sum(dim='rgi_id') * 1e-9).plot()
# plt.show()
# exit()
# Prepare the plot: it needs our own map to proceed.
# Let's use a local mercator grid
g = salem.mercator_grid(center_ll=(-19.61, 63.63),
extent=(18000, 14500))
# And a map accordingly
sm = salem.Map(g, countries=False)
# sm.set_lonlat_contours(add_xtick_labels=False, yinterval=0.05, xinterval=0.1)
sm.set_lonlat_contours(interval=0)
z = sm.set_topography('/home/mowglie/disk/OGGM_INPUT/tmp/ISL.tif')
sm.set_data(z)
# Figs
fac = 0.75
f, axs = plt.subplots(2, 1, figsize=(7 * fac, 10 * fac))
graphics.plot_domain(gdirs, ax=axs[0], smap=sm)
sm.set_data()
# sm.set_lonlat_contours(yinterval=0.05, xinterval=0.1)
sm.set_lonlat_contours(interval=0)
sm.set_geometry()
sm.set_text()
# Get the HISTALP baseline climate files; the 'tmp' argument is inferred
# from the nc_ts_tmp naming below, 'pre' is as in the original snippet
ft = utils.get_histalp_file('tmp')
fp = utils.get_histalp_file('pre')
with utils.ncDataset(ft) as nc:
vt = nc.variables['time']
assert vt[0] == 0
assert vt[-1] == vt.shape[0] - 1
t0 = vt.units.split(' since ')[1][:7]
time_t = pd.date_range(start=t0, periods=vt.shape[0], freq='MS')
with utils.ncDataset(fp) as nc:
vt = nc.variables['time']
assert vt[0] == 0.5
assert vt[-1] == vt.shape[0] - .5
t0 = vt.units.split(' since ')[1][:7]
time_p = pd.date_range(start=t0, periods=vt.shape[0], freq='MS')
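# The asserts above encode the file conventions: temperature time steps
# sit at integer offsets, precipitation at half-month offsets; both series
# are rebuilt as month-start ('MS') dates anchored at the reference date
# parsed from the units string.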
# Now open with salem
nc_ts_tmp = salem.GeoNetcdf(ft, time=time_t)
nc_ts_pre = salem.GeoNetcdf(fp, time=time_p)
# set temporal subset for the ts data (hydro years)
# the reference time is given by precip, which is shorter
sm = cfg.PARAMS['hydro_month_nh']
em = sm - 1 if (sm > 1) else 12
yrs = nc_ts_pre.time.year
y0, y1 = yrs[0], yrs[-1]
if cfg.PARAMS['baseline_y0'] != 0:
y0 = cfg.PARAMS['baseline_y0']
if cfg.PARAMS['baseline_y1'] != 0:
y1 = cfg.PARAMS['baseline_y1']
nc_ts_tmp.set_period(t0='{}-{:02d}-01'.format(y0, sm),
t1='{}-{:02d}-01'.format(y1, em))
nc_ts_pre.set_period(t0='{}-{:02d}-01'.format(y0, sm),
t1='{}-{:02d}-01'.format(y1, em))
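# With the OGGM default hydro_month_nh = 10 this selects October of y0
# through September of y1, i.e. complete NH hydrological years.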
try:
    nx = len(ds.dimensions['west_east'])
    ny = len(ds.dimensions['south_north'])
except AttributeError:
    # maybe an xarray dataset
    nx = ds.dims['west_east']
    ny = ds.dims['south_north']
if hasattr(ds, 'PROJ_ENVI_STRING'):
# HAR
x0 = ds['west_east'][0]
y0 = ds['south_north'][0]
else:
# Normal WRF file
e, n = gis.transform_proj(wgs84, proj, cen_lon, cen_lat)
    x0 = -(nx-1) / 2. * dx + e  # down-left ("DL") corner
    y0 = -(ny-1) / 2. * dy + n  # down-left ("DL") corner
grid = gis.Grid(nxny=(nx, ny), x0y0=(x0, y0), dxdy=(dx, dy), proj=proj)
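# The grid is now fully defined by nx/ny, the down-left corner (x0, y0)
# and the spacing (dx, dy), all expressed in the file's map projection.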
if tmp_check_wrf:
# Temporary asserts
if 'XLONG' in ds.variables:
# Normal WRF
reflon = ds.variables['XLONG']
reflat = ds.variables['XLAT']
elif 'XLONG_M' in ds.variables:
# geo_em
reflon = ds.variables['XLONG_M']
reflat = ds.variables['XLAT_M']
elif 'lon' in ds.variables:
# HAR
reflon = ds.variables['lon']
reflat = ds.variables['lat']
    else:
        # the remaining branches are elided in this snippet
        pass

lon = utils.str_in_list(vns, utils.valid_names['lon_var'])
lat = utils.str_in_list(vns, utils.valid_names['lat_var'])
if (lon is None) or (lat is None):
return None
# OK, get it
lon = nc.variables[lon][:]
lat = nc.variables[lat][:]
if len(lon.shape) != 1:
raise RuntimeError('Coordinates not of correct shape')
# Make the grid
dx = lon[1]-lon[0]
dy = lat[1]-lat[0]
args = dict(nxny=(lon.shape[0], lat.shape[0]), proj=wgs84, dxdy=(dx, dy))
args['corner'] = (lon[0], lat[0])
return gis.Grid(**args)
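# A hedged usage sketch of the constructor call above, with hypothetical
# 1-degree coordinate arrays (numpy assumed imported as np):
# lon = np.arange(-10., 10.)
# lat = np.arange(40., 50.)
# g = gis.Grid(nxny=(lon.shape[0], lat.shape[0]), proj=wgs84,
#              dxdy=(lon[1] - lon[0], lat[1] - lat[0]),
#              corner=(lon[0], lat[0]))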
    lut : ndarray, optional
        computing the lookup table can be expensive. If you have several
        operations to do with the same grid, set ``lut`` to an existing
        table obtained from a previous call to :py:meth:`Grid.grid_lookup`
    return_lut : bool, optional
        set to True if you want to return the lookup table for later use;
        in this case, a tuple is returned.
Returns
-------
An aggregated ndarray of the data, in ``self`` coordinates.
If ``return_lut==True``, also return the lookup table
"""
# Input checks
if grid is None:
grid = check_crs(data) # xarray
if not isinstance(grid, Grid):
raise ValueError('grid should be a Grid instance')
if hasattr(data, 'values'):
data = data.values # xarray
# dimensional check
in_shape = data.shape
ndims = len(in_shape)
if (ndims < 2) or (ndims > 4):
raise ValueError('data dimension not accepted')
if (in_shape[-1] != grid.nx) or (in_shape[-2] != grid.ny):
raise ValueError('data dimension not compatible')
if lut is None:
lut = self.grid_lookup(grid)
# Prepare the output
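# A hedged usage sketch of the lut reuse described in the docstring,
# assuming two salem grids `fine` and `coarse` and matching 2D arrays:
# agg1, lut = coarse.lookup_transform(data1, grid=fine, return_lut=True)
# agg2 = coarse.lookup_transform(data2, grid=fine, lut=lut)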