How to use the oggm.cfg module in oggm

To help you get started, we’ve selected a few oggm.cfg examples, based on popular ways it is used in public projects.

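oggm.cfg holds OGGM's run configuration in module-level containers: cfg.PARAMS for numerical and behavioural parameters, and cfg.PATHS for input and output directories. Every workflow starts by calling cfg.initialize() and then overriding entries as needed. A minimal sketch of this pattern, with a placeholder working directory:

import oggm.cfg as cfg
from oggm import utils

# Read OGGM's default configuration file
cfg.initialize()

# Override paths and parameters before starting any workflow
WORKING_DIR = '/tmp/oggm_demo'  # placeholder, use your own path
utils.mkdir(WORKING_DIR)
cfg.PATHS['working_dir'] = WORKING_DIR
cfg.PARAMS['use_multiprocessing'] = False
cfg.PARAMS['border'] = 80

The examples below show the same pattern in real OGGM scripts and modules.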

github OGGM / oggm / oggm / sandbox / run_demtests.py
# Libs
import os
import numpy as np
import pandas as pd
import geopandas as gpd
import matplotlib.pyplot as plt
import salem
# Locals
import oggm
import oggm.cfg as cfg
from oggm import workflow
from oggm import tasks
from oggm.workflow import execute_entity_task
from oggm import graphics, utils

# Initialize OGGM
cfg.initialize()

# Local paths (where to write output and where to download input)
DATA_DIR = '/home/mowglie/disk/OGGM_INPUT'
WORKING_DIR = '/home/mowglie/disk/OGGM_RUNS/TEST_DEMS'
PLOTS_DIR = os.path.join(WORKING_DIR, 'plots')

cfg.PATHS['working_dir'] = WORKING_DIR
cfg.PATHS['topo_dir'] = os.path.join(DATA_DIR, 'topo')
cfg.PATHS['rgi_dir'] = os.path.join(DATA_DIR, 'rgi')
utils.mkdir(WORKING_DIR)
utils.mkdir(cfg.PATHS['topo_dir'])
utils.mkdir(cfg.PATHS['rgi_dir'])

# Use multiprocessing?
cfg.PARAMS['use_multiprocessing'] = False
cfg.PARAMS['border'] = 20
github OGGM / oggm / oggm / utils / _downloads.py
def get_dl_verify_data():
    """Returns a dictionary with all known download object hashes.

    The returned dictionary resolves str: cache_obj_name
    to a tuple (int: size, bytes: sha256).
    """
    global _dl_verify_data

    if _dl_verify_data is not None:
        return _dl_verify_data

    verify_file_path = os.path.join(cfg.CACHE_DIR, 'downloads.sha256.xz')

    try:
        with requests.get(CHECKSUM_VALIDATION_URL) as req:
            req.raise_for_status()
            verify_file_sha256 = req.text.split(maxsplit=1)[0]
            verify_file_sha256 = bytearray.fromhex(verify_file_sha256)
    except Exception as e:
        verify_file_sha256 = None
        logger.warning('Failed getting verification checksum: ' + repr(e))

    def do_verify():
        if os.path.isfile(verify_file_path) and verify_file_sha256:
            sha256 = hashlib.sha256()
            with open(verify_file_path, 'rb') as f:
                for b in iter(lambda: f.read(0xFFFF), b''):
                    sha256.update(b)
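The truncated do_verify helper above hashes the checksum file in blocks of 0xFFFF bytes (just under 64 KiB), so memory use stays constant no matter how large the file is. A self-contained sketch of the same chunked-hashing pattern (the helper name sha256_of_file is ours):

import hashlib

def sha256_of_file(path, chunk_size=0xFFFF):
    # Feed the file to the hash in fixed-size blocks instead of
    # reading it into memory all at once
    sha256 = hashlib.sha256()
    with open(path, 'rb') as f:
        for block in iter(lambda: f.read(chunk_size), b''):
            sha256.update(block)
    return sha256.digest()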
github OGGM / oggm / benchmarks / track_model_results.py
        utils.mkdir(self.testdir, reset=True)
        self.cfg_init()

        # Pre-download other files which will be needed later
        utils.get_cru_cl_file()
        utils.get_cru_file(var='tmp')
        utils.get_cru_file(var='pre')

        # Get the RGI glaciers for the run.
        rgi_list = ['RGI60-01.10299', 'RGI60-11.00897', 'RGI60-18.02342']
        rgidf = utils.get_rgi_glacier_entities(rgi_list)

        # We use intersects
        db = utils.get_rgi_intersects_entities(rgi_list, version='61')
        cfg.set_intersects_db(db)

        # Sort for more efficient parallel computing
        rgidf = rgidf.sort_values('Area', ascending=False)

        # Go - initialize glacier directories
        gdirs = workflow.init_glacier_regions(rgidf)

        # Preprocessing tasks
        task_list = [
            tasks.glacier_masks,
            tasks.compute_centerlines,
            tasks.initialize_flowlines,
            tasks.compute_downstream_line,
            tasks.compute_downstream_bedshape,
            tasks.catchment_area,
            tasks.catchment_intersections,
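The task list is cut off by the snippet viewer, but the usual OGGM pattern is to loop over such a list and run every task on all glacier directories via execute_entity_task (a sketch, assuming the task_list and gdirs defined above and `from oggm import workflow` as in the first example):

for task in task_list:
    workflow.execute_entity_task(task, gdirs)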
github OGGM / oggm / oggm / core / massbalance.py
            time = netCDF4.num2date(time[:], time.units)
            ny, r = divmod(len(time), 12)
            if r != 0:
                raise ValueError('Climate data should be N full years')
            # This is where we switch to hydro float year format
            # Last year gives the tone of the hydro year
            self.years = np.repeat(np.arange(time[-1].year-ny+1,
                                             time[-1].year+1), 12)
            self.months = np.tile(np.arange(1, 13), ny)
            # Read timeseries
            self.temp = nc.variables['temp'][:]
            self.prcp = nc.variables['prcp'][:] * prcp_fac
            if 'gradient' in nc.variables:
                grad = nc.variables['gradient'][:]
                # Security for stuff that can happen with local gradients
                g_minmax = cfg.PARAMS['temp_local_gradient_bounds']
                grad = np.where(~np.isfinite(grad), default_grad, grad)
                grad = clip_array(grad, g_minmax[0], g_minmax[1])
            else:
                grad = self.prcp * 0 + default_grad
            self.grad = grad
            self.ref_hgt = nc.ref_hgt
            self.ys = self.years[0] if ys is None else ys
            self.ye = self.years[-1] if ye is None else ye
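The year and month bookkeeping above is plain NumPy: np.repeat stretches each hydrological year over its twelve months, while np.tile cycles the month numbers. A small worked example:

import numpy as np

ny = 2  # two full years, the last one being 2000
years = np.repeat(np.arange(2000 - ny + 1, 2000 + 1), 12)
months = np.tile(np.arange(1, 13), ny)
# years  -> 1999 repeated 12 times, then 2000 repeated 12 times
# months -> 1, 2, ..., 12, 1, 2, ..., 12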
github OGGM / oggm / oggm / core / gis.py
def _parse_source_text():
    fp = os.path.join(os.path.abspath(os.path.dirname(cfg.__file__)),
                      'data', 'dem_sources.txt')

    out = dict()
    cur_key = None
    with open(fp, 'r') as fr:
        this_text = []
        for l in fr.readlines():
            l = l.strip()
            if l and (l[0] == '[' and l[-1] == ']'):
                if cur_key:
                    out[cur_key] = '\n'.join(this_text)
                this_text = []
                cur_key = l.strip('[]')
                continue
            this_text.append(l)
    out[cur_key] = '\n'.join(this_text)
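_parse_source_text implements a tiny INI-like parser: a line of the form [KEY] starts a new entry, and the following lines, joined with newlines, become its value. The same logic on an in-memory example (the section names and texts are made up for illustration):

import io

sample = io.StringIO('[SRTM]\nSRTM 90m data.\n[DEM3]\nViewfinder DEM3 data.\n')
out, cur_key, this_text = dict(), None, []
for l in sample:
    l = l.strip()
    if l and (l[0] == '[' and l[-1] == ']'):
        if cur_key:
            out[cur_key] = '\n'.join(this_text)
        this_text = []
        cur_key = l.strip('[]')
        continue
    this_text.append(l)
out[cur_key] = '\n'.join(this_text)
# out -> {'SRTM': 'SRTM 90m data.', 'DEM3': 'Viewfinder DEM3 data.'}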
github OGGM / oggm / oggm / core / preprocessing / geometry.py
    # variables
    flowlines = gdir.read_pickle('inversion_flowlines', div_id=div_id)
    catchment_indices = gdir.read_pickle('catchment_indices', div_id=div_id)

    # Topography for altitude-area distribution
    # I take the non-smoothed topography and remove the borders
    fpath = gdir.get_filepath('gridded_data', div_id=div_id)
    with netCDF4.Dataset(fpath) as nc:
        topo = nc.variables['topo'][:]
        ext = nc.variables['glacier_ext'][:]
    topo[np.where(ext==1)] = np.NaN

    # Param
    nmin = int(cfg.PARAMS['min_n_per_bin'])
    smooth_ws = int(cfg.PARAMS['smooth_widths_window_size'])

    # Per flowline (important so that later, the indices can be moved)
    catchment_heights = []
    for ci in catchment_indices:
        _t = topo[tuple(ci.T)][:]
        catchment_heights.append(list(_t[np.isfinite(_t)]))

    # Loop over lines in a reverse order
    for fl, catch_h in zip(flowlines, catchment_heights):

        # Interpolate widths
        widths = utils.interp_nans(fl.widths)
        widths = np.clip(widths, 0.1, np.max(widths))

        # Get topo per catchment and per flowline point
        fhgt = fl.surface_h
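utils.interp_nans fills NaN gaps in the width array before the values are clipped to a minimum of 0.1 grid points. A rough standalone equivalent built on np.interp (a sketch, not the actual OGGM implementation):

import numpy as np

def interp_nans_sketch(arr):
    # Linearly interpolate over the NaN entries of a 1D array
    arr = np.array(arr, dtype=float)
    x = np.arange(len(arr))
    nans = ~np.isfinite(arr)
    arr[nans] = np.interp(x[nans], x[~nans], arr[~nans])
    return arr

interp_nans_sketch([1.0, np.nan, 3.0, np.nan, 5.0])
# -> array([1., 2., 3., 4., 5.])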
github OGGM / oggm / oggm / core / climate.py
    (time, tempformelt, prcpsol)::
        - time: array of shape (nt,)
        - tempformelt:  array of shape (len(heights), nt)
        - prcpsol:  array of shape (len(heights), nt)
    """

    if year_range is not None:
        sm = cfg.PARAMS['hydro_month_' + gdir.hemisphere]
        em = sm - 1 if (sm > 1) else 12
        t0 = datetime.datetime(year_range[0]-1, sm, 1)
        t1 = datetime.datetime(year_range[1], em, 1)
        return mb_climate_on_height(gdir, heights, time_range=[t0, t1])

    # Parameters
    temp_all_solid = cfg.PARAMS['temp_all_solid']
    temp_all_liq = cfg.PARAMS['temp_all_liq']
    temp_melt = cfg.PARAMS['temp_melt']
    prcp_fac = cfg.PARAMS['prcp_scaling_factor']
    default_grad = cfg.PARAMS['temp_default_gradient']
    g_minmax = cfg.PARAMS['temp_local_gradient_bounds']

    # Read file
    igrad = None
    with utils.ncDataset(gdir.get_filepath('climate_monthly'), mode='r') as nc:
        # time
        time = nc.variables['time']
        time = netCDF4.num2date(time[:], time.units)
        if time_range is not None:
            p0 = np.where(time == time_range[0])[0]
            try:
                p0 = p0[0]
            except IndexError:
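The hydro-month arithmetic at the top of this snippet is compact: the end month em is the month just before the start month sm (December when sm is 1), and the period starts one calendar year before year_range[0]. A worked example with a Northern Hemisphere October start:

import datetime

sm = 10  # hydrological year starts in October
em = sm - 1 if (sm > 1) else 12  # ... and ends in September
year_range = (1990, 2000)
t0 = datetime.datetime(year_range[0] - 1, sm, 1)  # 1989-10-01
t1 = datetime.datetime(year_range[1], em, 1)      # 2000-09-01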
github OGGM / oggm / oggm / sandbox / run_rgi_reg_from_precalibrated_mb.py
# North Asia 10
# Central Europe 11
# Caucasus and Middle East 12
# Central Asia 13
# South Asia West 14
# South Asia East 15
# Low Latitudes 16
# Southern Andes 17
# New Zealand 18
# Antarctic and Subantarctic 19
rgi_reg = '07'

# Initialize OGGM and set up the run parameters
# ---------------------------------------------

cfg.initialize()

# Local paths (where to write output and where to download input)
WORKING_DIR = '/home/mowglie/disk/OGGM_Runs/TESTS'
utils.mkdir(WORKING_DIR)
cfg.PATHS['working_dir'] = WORKING_DIR

PLOTS_DIR = os.path.join(WORKING_DIR, 'plots')
utils.mkdir(PLOTS_DIR)

# Use multiprocessing?
cfg.PARAMS['use_multiprocessing'] = False

# How many grid points around the glacier?
# Make it large if you expect your glaciers to grow large
cfg.PARAMS['border'] = 100
github OGGM / oggm / oggm / sandbox / run_alaska.py
    RGI_FILE = os.path.join(RGI_PATH, '01_rgi50_Alaska.shp')
    DATA_INPUT = '/home/users/bea/oggm_run_alaska/input_data/'


# Configuration of the run
cfg.PARAMS['rgi_version'] = "5"
cfg.PATHS['working_dir'] = WORKING_DIR
# How many grid points around the glacier?
# Make it large if you expect your glaciers to grow large
cfg.PARAMS['border'] = 80
cfg.PARAMS['optimize_inversion_params'] = True
#cfg.PARAMS['inversion_glen_a'] = 2.6e-24

# Set to True for cluster runs
if Cluster:
    cfg.PARAMS['use_multiprocessing'] = True
    cfg.PARAMS['continue_on_error'] = True
else:
    cfg.PARAMS['use_multiprocessing'] = False
    cfg.PARAMS['continue_on_error'] = False

# We use intersects
# (this is slow, it could be replaced with a subset of the global file)
rgi_dir = utils.get_rgi_intersects_dir()
cfg.set_intersects_db(os.path.join(rgi_dir, '00_rgi50_AllRegs',
                                'intersects_rgi50_AllRegs.shp'))

# Pre-download other files which will be needed later
utils.get_cru_cl_file()
utils.get_cru_file(var='tmp')
utils.get_cru_file(var='pre')