# We can't do Antarctica
rids = [rid for rid in rids if not ('-19.' in rid)]
# For HISTALP only RGI reg 11
if baseline == 'HISTALP':
    rids = [rid for rid in rids if '-11.' in rid]
# Make a new dataframe with those (this takes a while)
log.info('Reading the RGI shapefiles...')
rgidf = utils.get_rgi_glacier_entities(rids, version=rgi_version)
log.info('For RGIV{} we have {} candidate reference '
         'glaciers.'.format(rgi_version, len(rgidf)))
# We have to check which of them actually have enough mb data.
# Let OGGM do it:
gdirs = workflow.init_glacier_regions(rgidf)
# We need to know which period we have data for
log.info('Process the climate data...')
if baseline == 'CRU':
    execute_entity_task(tasks.process_cru_data, gdirs, print_log=False)
elif baseline == 'HISTALP':
    # Exclude glaciers outside of the Alps
    gdirs = [gdir for gdir in gdirs if gdir.rgi_subregion == '11-01']
    execute_entity_task(tasks.process_histalp_data, gdirs, print_log=False)
gdirs = utils.get_ref_mb_glaciers(gdirs)
# Keep only these
rgidf = rgidf.loc[rgidf.RGIId.isin([g.rgi_id for g in gdirs])]
# Save the selection for later use
rgidf.to_file(os.path.join(WORKING_DIR, 'mb_ref_glaciers.shp'))

# Extract the downloaded RGI archive and read it back in
# (`rgif` is the downloaded zip file and `n` the shapefile base name,
# both assumed to be defined further up)
with zipfile.ZipFile(rgif) as zf:
    zf.extractall(WORKING_DIR)
rgif = os.path.join(WORKING_DIR, n + '.shp')
rgidf = salem.read_shapefile(rgif, cached=True)
# Sort for more efficient parallel computing
rgidf = rgidf.sort_values('Area', ascending=False)
print('Number of glaciers: {}'.format(len(rgidf)))
# Go - initialize working directories
# -----------------------------------
# you can use the command below to reset your run -- use with caution!
# gdirs = workflow.init_glacier_regions(rgidf, reset=True, force=True)
gdirs = workflow.init_glacier_regions(rgidf)
# Prepro tasks
task_list = [
    tasks.glacier_masks,
    tasks.compute_centerlines,
    tasks.compute_downstream_line,
    tasks.initialize_flowlines,
    tasks.compute_downstream_bedshape,
    tasks.catchment_area,
    tasks.catchment_intersections,
    tasks.catchment_width_geom,
    tasks.catchment_width_correction,
]
for task in task_list:
    execute_entity_task(task, gdirs)
# Compress all in output directory
l_base_dir = os.path.join(base_dir, 'L1')
workflow.execute_entity_task(utils.gdir_to_tar, gdirs, delete=False,
                             base_dir=l_base_dir)
utils.base_dir_to_tar(l_base_dir)
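# The tarballs written above can be re-opened directly in a later run;
# a sketch following the reset example above (from_tar/delete_tar are
# keyword arguments of init_glacier_regions in recent OGGM versions):
# gdirs = workflow.init_glacier_regions(rgidf, from_tar=True, delete_tar=True)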
_time_log()
return
if dem_source:
    # Force a given source
    rgidf['DEM_SOURCE'] = dem_source.upper()
# L1 - go
gdirs = workflow.init_glacier_regions(rgidf, reset=True, force=True)
# Glacier stats
sum_dir = os.path.join(base_dir, 'L1', 'summary')
utils.mkdir(sum_dir)
opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
utils.compile_glacier_statistics(gdirs, path=opath)
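# Quick sanity check of the compiled statistics, a sketch: pandas is
# assumed to be imported as pd, as elsewhere in this file, and the exact
# column set varies with the OGGM version (rgi_area_km2 is a standard one)
stats = pd.read_csv(opath, index_col=0)
log.workflow('Total glacier area: {:.1f} km2'.format(stats['rgi_area_km2'].sum()))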
# L1 OK - compress all in output directory
l_base_dir = os.path.join(base_dir, 'L1')
workflow.execute_entity_task(utils.gdir_to_tar, gdirs, delete=False,
                             base_dir=l_base_dir)
utils.base_dir_to_tar(l_base_dir)
if max_level == 1:
    _time_log()
    return
# Read in the Benchmark RGI file
rgif = 'https://dl.dropboxusercontent.com/u/20930277/rgi_benchmark.zip'
rgif = utils.file_downloader(rgif)
with zipfile.ZipFile(rgif) as zf:
    zf.extractall(WORKING_DIR)
rgif = os.path.join(WORKING_DIR, 'rgi_benchmark.shp')
rgidf = salem.read_shapefile(rgif, cached=True)
# Sort for more efficient parallel computing
rgidf = rgidf.sort_values('Area', ascending=False)
log.info('Number of glaciers: {}'.format(len(rgidf)))
# Go - initialize working directories
gdirs = workflow.init_glacier_regions(rgidf) # reset=True, force=True
# Prepro tasks
task_list = [
    tasks.glacier_masks,
    tasks.compute_centerlines,
    tasks.compute_downstream_line,
    tasks.initialize_flowlines,
    tasks.compute_downstream_bedshape,
    tasks.catchment_area,
    tasks.catchment_intersections,
    tasks.catchment_width_geom,
    tasks.catchment_width_correction,
]
for task in task_list:
    execute_entity_task(task, gdirs)
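# Each GlacierDirectory now holds the preprocessed files; a minimal
# inspection sketch (rgi_id and rgi_area_km2 are GlacierDirectory
# attributes):
gdir = gdirs[0]
log.info('{}: {:.2f} km2'.format(gdir.rgi_id, gdir.rgi_area_km2))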
if baseline == 'HISTALP':
    # Other params: see https://oggm.org/2018/08/10/histalp-parameters/
    cfg.PARAMS['prcp_scaling_factor'] = 1.75
    cfg.PARAMS['temp_melt'] = -1.75
# Local paths (where to find the OGGM run output)
dirname = 'OGGM_ref_mb_{}_RGIV{}_OGGM{}'.format(baseline, rgi_version,
                                                oggm.__version__)
WORKING_DIR = utils.gettempdir(dirname, home=True)
cfg.PATHS['working_dir'] = WORKING_DIR
# Read the rgi file
rgidf = gpd.read_file(os.path.join(WORKING_DIR, 'mb_ref_glaciers.shp'))
# Go - initialize glacier directories
gdirs = workflow.init_glacier_regions(rgidf)
# Cross-validation
file = os.path.join(cfg.PATHS['working_dir'], 'ref_tstars.csv')
ref_df = pd.read_csv(file, index_col=0)
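# ref_tstars.csv lists, for each reference glacier, the calibrated t*
# and the associated mass-balance bias; dropping the current glacier
# from it below is what makes the recalibration "blind".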
for i, gdir in enumerate(gdirs):
    print('Cross-validation iteration {} of {}'.format(i + 1, len(ref_df)))
    # Now recalibrate the model blindly
    tmp_ref_df = ref_df.loc[ref_df.index != gdir.rgi_id]
    tasks.local_t_star(gdir, ref_df=tmp_ref_df)
    tasks.mu_star_calibration(gdir)
    # Mass-balance model with cross-validated parameters instead
    mb_mod = MultipleFlowlineMassBalance(gdir, mb_model_class=PastMassBalance,
                                         use_inversion_flowlines=True)
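    # Score the blind calibration against the WGMS observations stored in
    # the glacier directory (a sketch: get_ref_mb_data and get_specific_mb
    # are OGGM API, ANNUAL_BALANCE is the WGMS column name):
    mbdf = gdir.get_ref_mb_data()
    mbdf['OGGM'] = mb_mod.get_specific_mb(year=mbdf.index)
    bias = mbdf.OGGM.mean() - mbdf.ANNUAL_BALANCE.mean()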
log.workflow('Number of glaciers: {}'.format(len(rgidf)))
# Input
if test_topofile:
    cfg.PATHS['dem_file'] = test_topofile
# L1 - initialize working directories
# Which DEM source?
if dem_source.upper() == 'ALL':
    # This is the complex one, just do the job and leave
    log.workflow('Running prepro on ALL sources')
    for i, s in enumerate(utils.DEM_SOURCES):
        rs = i == 0
        rgidf['DEM_SOURCE'] = s
        log.workflow('Running prepro on source: {}'.format(s))
        gdirs = workflow.init_glacier_regions(rgidf, reset=rs, force=rs)
        workflow.execute_entity_task(_rename_dem_folder, gdirs, source=s)
    # Make a GeoTiff mask of the glacier, choose any source
    workflow.execute_entity_task(gis.rasterio_glacier_mask,
                                 gdirs, source='ALL')
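    # Reading the mask back (a sketch; 'glacier_mask' is assumed to be the
    # OGGM file basename for this GeoTiff, and rasterio is assumed to be
    # available):
    import rasterio
    with rasterio.open(gdirs[0].get_filepath('glacier_mask')) as src:
        mask = src.read(1)  # 1 on the glacier, 0 elsewhere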
    # Compress all in output directory
    l_base_dir = os.path.join(base_dir, 'L1')
    workflow.execute_entity_task(utils.gdir_to_tar, gdirs, delete=False,
                                 base_dir=l_base_dir)
    utils.base_dir_to_tar(l_base_dir)
    _time_log()
    return
if dem_source:
    # Force a given source
    rgidf['DEM_SOURCE'] = dem_source.upper()

# Download and extract the demo RGI file for Eyjafjallajoekull
# (`rgif` is assumed to hold the download URL at this point)
rgif = utils.file_downloader(rgif)
with zipfile.ZipFile(rgif) as zf:
    zf.extractall(base_dir)
rgif = os.path.join(base_dir, 'rgiv5_Eyjafjallajoekull.shp')
rgidf = gpd.read_file(rgif)
# Pre-download other files which will be needed later
_ = utils.get_cru_file(var='tmp')
_ = utils.get_cru_file(var='pre')
# Sort for more efficient parallel computing
rgidf = rgidf.sort_values('Area', ascending=False)
# Go - initialize working directories
# -----------------------------------
gdirs = workflow.init_glacier_regions(rgidf)
# Prepro tasks
task_list = [
    tasks.glacier_masks,
    tasks.compute_centerlines,
    tasks.initialize_flowlines,
    tasks.compute_downstream_line,
    tasks.compute_downstream_bedshape,
    tasks.catchment_area,
    tasks.catchment_intersections,
    tasks.catchment_width_geom,
    tasks.catchment_width_correction,
]
for task in task_list:
    execute_entity_task(task, gdirs)
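# Quick visual sanity check (a sketch; oggm.graphics and matplotlib are
# assumed to be available in this environment):
from oggm import graphics
import matplotlib.pyplot as plt
graphics.plot_centerlines(gdirs[0])
plt.show()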
_add_time_to_df(odf, 'Read RGI', time.time()-start)
# Sort for more efficient parallel computing
rgidf = rgidf.sort_values('Area', ascending=False)
log.workflow('Starting prepro run for RGI reg: {} '
             'and border: {}'.format(rgi_reg, border))
log.workflow('Number of glaciers: {}'.format(len(rgidf)))
# Input
if test_topofile:
    cfg.PATHS['dem_file'] = test_topofile
# Initialize working directories
start = time.time()
gdirs = workflow.init_glacier_regions(rgidf, reset=True, force=True)
_add_time_to_df(odf, 'init_glacier_regions', time.time()-start)
# Pre-download other files just in case
if test_crudir is None:
    _ = utils.get_cru_file(var='tmp')
    _ = utils.get_cru_file(var='pre')
else:
    cfg.PATHS['cru_dir'] = test_crudir
# Tasks
task_list = [
    tasks.process_cru_data,
    tasks.glacier_masks,
    tasks.compute_centerlines,
    tasks.initialize_flowlines,
    tasks.compute_downstream_line,
]
for task in task_list:
    execute_entity_task(task, gdirs)

# ITMIX experiment parameters
cfg.PARAMS['invert_with_sliding'] = False
cfg.PARAMS['min_slope'] = 2
cfg.PARAMS['max_shape_param'] = 0.006
cfg.PARAMS['max_thick_to_width_ratio'] = 0.5
cfg.PARAMS['base_binsize'] = 100.
cfg.PARAMS['temp_use_local_gradient'] = False
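# Note: overrides like these only take effect if they are set after
# cfg.initialize() (which fills cfg.PARAMS with the defaults) and before
# the tasks that read them.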
# Either do calibration (takes a long time) or do itmix
do_calib = True
do_itmix = True
log.info('Number of glaciers: {}'.format(len(rgidf)))
# Go - initialize working directories
# gdirs = workflow.init_glacier_regions(rgidf, reset=True, force=True)
gdirs = workflow.init_glacier_regions(rgidf)
# For calibration
if do_calib:
    # gdirs = [gd for gd in gdirs if gd.glacier_type != 'Ice cap']
    # gdirs = [gd for gd in gdirs if gd.terminus_type == 'Land-terminating']
    # Basic tasks
    task_list = [
        itmix.glacier_masks_itmix,
        tasks.compute_centerlines,
        tasks.catchment_area,
        tasks.initialize_flowlines,
        tasks.catchment_width_geom,
        tasks.catchment_width_correction,
    ]
    for task in task_list:
        execute_entity_task(task, gdirs)
gdirs = utils.get_ref_mb_glaciers(gdirs)
# Keep only these
rgidf = rgidf.loc[rgidf.RGIId.isin([g.rgi_id for g in gdirs])]
# Save
log.info('For RGIV{} and {} we have {} reference '
         'glaciers.'.format(rgi_version, baseline, len(rgidf)))
rgidf.to_file(os.path.join(WORKING_DIR, 'mb_ref_glaciers.shp'))
# Sort for more efficient parallel computing
rgidf = rgidf.sort_values('Area', ascending=False)
# Go - initialize glacier directories
gdirs = workflow.init_glacier_regions(rgidf)
# Prepro tasks
task_list = [
    tasks.glacier_masks,
    tasks.compute_centerlines,
    tasks.initialize_flowlines,
    tasks.catchment_area,
    tasks.catchment_intersections,
    tasks.catchment_width_geom,
    tasks.catchment_width_correction,
]
for task in task_list:
    execute_entity_task(task, gdirs)
# Climate tasks
tasks.compute_ref_t_stars(gdirs)
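# compute_ref_t_stars writes ref_tstars.csv to the working directory; the
# usual next steps (a sketch, mirroring the per-glacier calls in the
# cross-validation above) are the local calibrations:
execute_entity_task(tasks.local_t_star, gdirs)
execute_entity_task(tasks.mu_star_calibration, gdirs)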