if RUN_GIS_PREPRO:
    # GIS pre-processing tasks
    for task in task_list:
        execute_entity_task(task, gdirs)

if RUN_CLIMATE_PREPRO:
    # Climate related tasks
    # see if we can distribute
    execute_entity_task(tasks.process_cru_data, gdirs)
    tasks.compute_ref_t_stars(gdirs)
    tasks.distribute_t_stars(gdirs)
    execute_entity_task(tasks.apparent_mb, gdirs)

if RUN_INVERSION:
    # Inversion
    execute_entity_task(tasks.prepare_for_inversion, gdirs)
    tasks.optimize_inversion_params(gdirs)
    execute_entity_task(tasks.volume_inversion, gdirs)

if RUN_DYNAMICS:
    # Random dynamics
    execute_entity_task(tasks.init_present_time_glacier, gdirs)
    execute_entity_task(tasks.random_glacier_evolution, gdirs)

# Plots (if you want)
PLOTS_DIR = ''
if PLOTS_DIR == '':
    exit()
utils.mkdir(PLOTS_DIR)
for gd in gdirs:
    bname = os.path.join(PLOTS_DIR, gd.name + '_' + gd.rgi_id + '_')
    graphics.plot_googlemap(gd)
    plt.savefig(bname + 'ggl.png')
    plt.close()
    graphics.plot_domain(gd)
    plt.savefig(bname + 'dom.png')
    plt.close()
    graphics.plot_centerlines(gd)
    plt.savefig(bname + 'cls.png')
    plt.close()
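# Whether the execute_entity_task() calls above actually run in parallel is
# controlled by OGGM's multiprocessing settings, set once before the workflow
# starts. A minimal sketch (assuming cfg.initialize() was already called):
#
#     cfg.PARAMS['use_multiprocessing'] = True  # distribute entity tasks
#     cfg.PARAMS['mp_processes'] = -1           # -1: use all available CPUs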
execute_entity_task(tasks.local_t_star, gdirs)
execute_entity_task(tasks.mu_star_calibration, gdirs)

# Inversion tasks
execute_entity_task(tasks.prepare_for_inversion, gdirs)
# We use the default parameters for this run
execute_entity_task(tasks.mass_conservation_inversion, gdirs)
execute_entity_task(tasks.filter_inversion_output, gdirs)

# Final preparation for the run
execute_entity_task(tasks.init_present_time_glacier, gdirs)

# Constant climate representative for the tstar climate, without bias.
# In an ideal world this would imply that the glaciers remain stable,
# but it doesn't have to be so.
execute_entity_task(tasks.run_constant_climate, gdirs,
                    bias=0, nyears=100,
                    output_filesuffix='_tstar')
execute_entity_task(tasks.run_constant_climate, gdirs,
                    y0=1990, nyears=100,
                    output_filesuffix='_pd')

# Compile output
utils.compile_glacier_statistics(gdirs)
utils.compile_run_output(gdirs, input_filesuffix='_tstar')
utils.compile_run_output(gdirs, input_filesuffix='_pd')
utils.compile_climate_input(gdirs)

return gdirs
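# The two run_constant_climate() calls above differ only in the climate they
# average: bias=0 keeps the t* calibration climate with the mass-balance
# residual removed, while y0=1990 averages the baseline climate around 1990
# (y0 +/- 15 years by default, cf. the '1985-2015' comment for y0=2000
# further down this page).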
execute_entity_task(tasks.prepare_for_inversion, col_gdir)
execute_entity_task(tasks.volume_inversion, gdirs)

pdir = os.path.join(PLOTS_DIR, 'out_raw') + '/'
if not os.path.exists(pdir):
    os.mkdir(pdir)
for gd in gdirs:
    _addt = addt if 'Columbia' in gd.name else ''
    graphics.plot_inversion(gd, add_title_comment=_addt)
    plt.savefig(pdir + gd.name + '_' + gd.rgi_id + '_inv.png')
    plt.close()

# V1: per-altitude thickness distribution, with slope factor and smoothing
distrib = partial(distribute_thickness, how='per_altitude',
                  add_slope=True,
                  smooth=True)
execute_entity_task(distrib, gdirs)
pdir = os.path.join(PLOTS_DIR, 'out_dis') + '/'
if not os.path.exists(pdir):
    os.mkdir(pdir)
for gd in gdirs:
    itmix.write_itmix_ascii(gd, 1)
    graphics.plot_distributed_thickness(gd)
    plt.savefig(pdir + gd.name + '_' + gd.rgi_id + '_d1.png')
    plt.close()

# V2: same distribution, but without the slope factor
distrib = partial(distribute_thickness, how='per_altitude',
                  add_slope=False,
                  smooth=True)
execute_entity_task(distrib, gdirs)
for gd in gdirs:
    itmix.write_itmix_ascii(gd, 2)
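# Note: partial() is one way to bind keyword arguments to an entity task;
# execute_entity_task() also forwards extra kwargs directly (as the
# run_constant_climate calls on this page do), so an equivalent call would be:
#
#     execute_entity_task(distribute_thickness, gdirs,
#                         how='per_altitude', add_slope=False, smooth=True)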
    Parameters
    ----------
    gdirs : list of :py:class:`oggm.GlacierDirectory` objects
        the glacier directories to process
    filesuffix : str
        add suffix to output file
    path : str, bool
        Set to "True" in order to store the info in the working directory
        Set to a path to store the file to your chosen location
    inversion_only : bool
        if one wants to summarize the inversion output only (including calving)
    """
    from oggm.workflow import execute_entity_task

    out_df = execute_entity_task(glacier_statistics, gdirs,
                                 inversion_only=inversion_only)
    out = pd.DataFrame(out_df).set_index('rgi_id')
    if path:
        if path is True:
            out.to_csv(os.path.join(cfg.PATHS['working_dir'],
                                    ('glacier_statistics' +
                                     filesuffix + '.csv')))
        else:
            out.to_csv(path)
    return out
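# A minimal usage sketch for the compiler above (available columns depend on
# what glacier_statistics() produced and are not guaranteed here):
#
#     df = compile_glacier_statistics(gdirs, path=True)  # also writes the csv
#     print(df.head())  # one row per glacier, indexed by rgi_id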
rgidf = utils.get_rgi_glacier_entities(rids, version=rgi_version)
log.info('For RGIV{} we have {} candidate reference '
         'glaciers.'.format(rgi_version, len(rgidf)))

# We have to check which of them actually have enough mb data.
# Let OGGM do it:
gdirs = workflow.init_glacier_regions(rgidf)

# We need to know which period we have data for
log.info('Process the climate data...')
if baseline == 'CRU':
    execute_entity_task(tasks.process_cru_data, gdirs, print_log=False)
elif baseline == 'HISTALP':
    # exclude glaciers outside of the Alps
    gdirs = [gdir for gdir in gdirs if gdir.rgi_subregion == '11-01']
    execute_entity_task(tasks.process_histalp_data, gdirs, print_log=False)

gdirs = utils.get_ref_mb_glaciers(gdirs)

# Keep only these
rgidf = rgidf.loc[rgidf.RGIId.isin([g.rgi_id for g in gdirs])]

# Save
log.info('For RGIV{} and {} we have {} reference glaciers.'.format(
    rgi_version, baseline, len(rgidf)))
rgidf.to_file(os.path.join(WORKING_DIR, 'mb_ref_glaciers.shp'))

# Sort for more efficient parallel computing
rgidf = rgidf.sort_values('Area', ascending=False)

# Go - initialize glacier directories
    tasks.init_present_time_glacier,
]
for task in task_list:
    start = time.time()
    workflow.execute_entity_task(task, gdirs)
    _add_time_to_df(odf, task.__name__, time.time()-start)

# Runs
start = time.time()
workflow.execute_entity_task(tasks.run_random_climate, gdirs,
                             nyears=250, bias=0, seed=0,
                             output_filesuffix='_tstar')
_add_time_to_df(odf, 'run_random_climate_tstar_250', time.time()-start)

start = time.time()
workflow.execute_entity_task(tasks.run_random_climate, gdirs,
                             nyears=250, y0=1995, seed=0,
                             output_filesuffix='_commit')
_add_time_to_df(odf, 'run_random_climate_commit_250', time.time()-start)

# Compile results
start = time.time()
utils.compile_glacier_statistics(gdirs)
_add_time_to_df(odf, 'compile_glacier_statistics', time.time()-start)

start = time.time()
utils.compile_climate_statistics(gdirs,
                                 add_climate_period=[1920, 1960, 2000])
_add_time_to_df(odf, 'compile_climate_statistics', time.time()-start)

start = time.time()
utils.compile_run_output(gdirs, input_filesuffix='_tstar')
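# _add_time_to_df() is not defined in this snippet. A minimal compatible
# helper, assuming odf is a pandas.DataFrame indexed by step name (an
# assumption, not necessarily the original implementation):
def _add_time_to_df(df, index, t):
    # store the elapsed wall-clock time (in seconds) for one workflow step
    df.loc[index, 'time'] = t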
rgidf = rgidf.sort_values('Area', ascending=False)

log.workflow('Starting OGGM run')
log.workflow('Number of glaciers: {}'.format(len(rgidf)))

# Go - get the pre-processed glacier directories
gdirs = workflow.init_glacier_regions(rgidf, from_prepro_level=4)

# We can step directly to a new experiment!
# Random climate representative for the recent climate (1985-2015)
# This is a kind of "commitment" run
workflow.execute_entity_task(tasks.run_random_climate, gdirs,
                             nyears=300, y0=2000, seed=1,
                             output_filesuffix='_commitment')

# Now we add a positive and a negative bias to the random temperature series
workflow.execute_entity_task(tasks.run_random_climate, gdirs,
                             nyears=300, y0=2000, seed=2,
                             temperature_bias=0.5,
                             output_filesuffix='_bias_p')
workflow.execute_entity_task(tasks.run_random_climate, gdirs,
                             nyears=300, y0=2000, seed=3,
                             temperature_bias=-0.5,
                             output_filesuffix='_bias_m')

# Write the compiled output
utils.compile_glacier_statistics(gdirs)
utils.compile_run_output(gdirs, input_filesuffix='_commitment')
utils.compile_run_output(gdirs, input_filesuffix='_bias_p')
utils.compile_run_output(gdirs, input_filesuffix='_bias_m')

# Log
m, s = divmod(time.time() - start, 60)
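# divmod() above splits the elapsed seconds into minutes and seconds for the
# final log message; a typical closing line would be (sketch):
#
#     log.workflow('OGGM is done! Time needed: {:.0f}m {:.0f}s'.format(m, s))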
    tasks.catchment_width_geom,
    tasks.catchment_width_correction,
]
for task in task_list:
    execute_entity_task(task, gdirs)

# Climate tasks -- only data IO and tstar interpolation!
execute_entity_task(tasks.process_cru_data, gdirs)
# TODO: Remember to update these paths on the cluster to run Columbia
Columbia_itmix = os.path.join(DATA_INPUT,
                              'RGI50-01.10689_itmixrun_new/')
dem_cp = os.path.join(Columbia_itmix, 'dem.tif')
dem_source_cp = os.path.join(Columbia_itmix, 'dem_source.pkl')
grid_json_cp = os.path.join(Columbia_itmix, 'glacier_grid.json')

# This is commented because we only need to replace the DEM once
# os.remove(filename)
# os.remove(dem_source)
# os.remove(grid_json)
# shutil.copy(dem_cp, filename)
# shutil.copy(dem_source_cp, dem_source)
# shutil.copy(grid_json_cp, grid_json)

execute_entity_task(tasks.glacier_masks, gdirs)

# Pre-processing tasks
task_list = [
    tasks.compute_centerlines,
    tasks.initialize_flowlines,
    tasks.catchment_area,
    tasks.catchment_intersections,
    tasks.catchment_width_geom,
    tasks.catchment_width_correction,
]

if RUN_GIS_PREPRO:
    for task in task_list:
        execute_entity_task(task, gdirs)

if RUN_CLIMATE_PREPRO: