excluded_vars = ['e_cap_net']

def _setkey(d, key, value):  # fillna, map_numeric, map_any passed implicitly
    if fillna is not None and np.isnan(value):
        value = fillna
    if map_numeric:
        try:
            # TypeError if not a number, we don't want to apply numeric maps to strings
            value = map_numeric(value)
        except TypeError:
            pass  # Ignore if not a number
    if map_any:
        value = map_any(value)
    d.set_key(key, value)

d = utils.AttrDict()

# Get a list of default constraints, so that we know which constraints
# exist in a form that includes sub-constraints (like '.max')
o = solution.config_model
possible_constraints = list(o.techs.defaults.constraints.keys())
default_constraints = list(o.techs.defaults.constraints.keys_nested())
max_min_equals_constraints = set([c.split('.')[0]
                                  for c in default_constraints
                                  if '.max' in c])

# Set up the list of locations, techs, constraints
locations = solution.coords['x'].values
techs_in_solution = [i for i in solution.coords['y'].values if ':' not in i]
if not techs:
    techs = techs_in_solution
if not constraints:
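# Illustrative sketch (not part of the original source): how the `_setkey`
# closure above might be driven by its enclosing function. `fillna`,
# `map_numeric` and `map_any` are assumed to be optional arguments of that
# enclosing function; a plain dict stands in for AttrDict.
import numpy as np

def build_output(values, fillna=None, map_numeric=None, map_any=None):
    out = {}

    def _setkey(d, key, value):
        if fillna is not None and isinstance(value, float) and np.isnan(value):
            value = fillna
        if map_numeric:
            try:
                value = map_numeric(value)
            except TypeError:
                pass  # not a number, leave unchanged
        if map_any:
            value = map_any(value)
        d[key] = value

    for key, value in values.items():
        _setkey(out, key, value)
    return out

# build_output({'e_cap.max': float('nan'), 'r': 5.126},
#              fillna=0, map_numeric=lambda v: round(v, 2))
# -> {'e_cap.max': 0, 'r': 5.13}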
def _save_csv(self):
    """Save solution as CSV files to ``self.config_run.output.path``"""
    for k in self.solution.data_vars:
        out_path = os.path.join(self.config_run.output.path, '{}.csv'.format(k))
        self.solution[k].to_dataframe().to_csv(out_path)

    # Metadata
    md = utils.AttrDict()
    for k in ['config_model', 'config_run', 'run_time', 'calliope_version']:
        md[k] = self.solution.attrs[k]
    md.to_yaml(os.path.join(self.config_run.output.path, 'metadata.yaml'))

    return self.config_run.output.path
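# Illustrative sketch (not part of the original source): the per-variable CSV
# export done by `_save_csv` above, shown standalone with a small made-up
# xarray Dataset and the current directory standing in for
# `config_run.output.path`.
import os
import numpy as np
import xarray as xr

solution = xr.Dataset(
    {'e_cap': (('x', 'y'), np.ones((2, 2))),
     'e_prod': (('x', 'y'), np.zeros((2, 2)))},
    coords={'x': ['r1', 'r2'], 'y': ['pv', 'ccgt']},
)
out_dir = '.'
for k in solution.data_vars:
    out_path = os.path.join(out_dir, '{}.csv'.format(k))
    # Each variable becomes a long-format table indexed by its coordinates
    solution[k].to_dataframe().to_csv(out_path)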
def node_costs(model):
    m = model.m
    time_res = model.data['_time_res'].to_series()
    weights = model.data['_weights'].to_series()
    cost_getter = utils.cost_getter(model.get_option)
    depreciation_getter = utils.depreciation_getter(model.get_option)
    cost_per_distance_getter = utils.cost_per_distance_getter(model.config_model)

    @utils.memoize
    def _depreciation_rate(y, x, k):
        return depreciation_getter(y, x, k)

    @utils.memoize
    def _cost(cost, y, k, x=None):
        return cost_getter(cost, y, k, x=x)

    @utils.memoize
    def _cost_per_distance(cost, y, k, x):
        return cost_per_distance_getter(cost, y, k, x)

    def _check_and_set(cost, loc_tech, y, x, k):
        """
def get_cost_param(model, param_string, k, y, x, t):
    """
    Get values for constraints which can optionally be
    loaded from file (so may have time dependency).

    model = calliope model
    param_string = cost parameter name, e.g. 'om_fuel'
    k = cost type, e.g. 'monetary'
    y = technology
    x = location
    t = timestep

    """
    cost_getter = utils.cost_getter(model.get_option)

    @utils.memoize
    def _cost(cost, y, k, x=None):
        return cost_getter(cost, y, k, x=x)

    if param_string in model.data and y in model._sets['y_' + param_string + '_timeseries']:
        return getattr(model.m, param_string + '_param')[y, x, t, k]
    else:  # Search in model.config_model
        return _cost(param_string, y, k, x=x)
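# Illustrative sketch (not part of the original source): the lookup pattern in
# `get_cost_param` above, with `@utils.memoize` approximated by
# functools.lru_cache and plain dicts (`timeseries_costs`, `static_costs`)
# standing in for the Pyomo parameter and the model configuration.
import functools

static_costs = {('om_fuel', 'ccgt', 'monetary'): 0.05}
timeseries_costs = {}  # e.g. {('om_fuel', 'ccgt', 'monetary', 'r1', 0): 0.04}

@functools.lru_cache(maxsize=None)
def _cost(cost, y, k, x=None):
    return static_costs[(cost, y, k)]

def get_cost(param_string, k, y, x, t):
    # Prefer a time-dependent value if one was loaded from file,
    # otherwise fall back to the static configuration
    key = (param_string, y, k, x, t)
    if key in timeseries_costs:
        return timeseries_costs[key]
    return _cost(param_string, y, k, x=x)

# get_cost('om_fuel', 'monetary', 'ccgt', 'r1', 0) -> 0.05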
time_config = self.config_run.get('time', False)
if not time_config:
    return None  # Nothing more to do here
else:
    # For analysis purposes, keep old data around
    self.data_original = self.data.copy(deep=True)

##
# Process masking and get list of timesteps to keep at high res
##
if 'masks' in time_config:
    masks = {}
    # time.masks is a list of {'function': .., 'options': ..} dicts
    for entry in time_config.masks:
        entry = utils.AttrDict(entry)
        mask_func = utils.plugin_load(entry.function,
                                      builtin_module='time_masks')
        mask_kwargs = entry.get_key('options', default={})
        masks[entry.to_yaml()] = mask_func(self.data, **mask_kwargs)
    self._masks = masks  # FIXME a better place to put masks
    # Concatenate the DatetimeIndexes by using dummy Series
    chosen_timesteps = pd.concat([pd.Series(0, index=m)
                                  for m in masks.values()]).index
    # timesteps: a list of timesteps NOT picked by masks
    timesteps = pd.Index(self.data.t.values).difference(chosen_timesteps)
else:
    timesteps = None

##
# Process function, apply resolution adjustments
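# Illustrative sketch (not part of the original source): how the masking code
# above combines several DatetimeIndexes through dummy Series and then takes
# the complement to find timesteps not picked by any mask. The timestamps are
# made up for the example.
import pandas as pd

all_timesteps = pd.date_range('2005-01-01', periods=8, freq='h')
masks = {
    'mask_a': all_timesteps[:3],
    'mask_b': all_timesteps[2:5],
}
# Concatenate the DatetimeIndexes by using dummy Series
chosen_timesteps = pd.concat([pd.Series(0, index=m)
                              for m in masks.values()]).index
# Timesteps NOT picked by any mask (here, the last three hours)
timesteps = pd.Index(all_timesteps).difference(chosen_timesteps)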
def df_tech_table(model, columns, parents=['supply', 'supply_plus']):
    """
    Return a pandas DataFrame of technologies from the given model whose
    parent is one of ``parents``, with a column for each of the tech
    constraints/options given in ``columns``.
    """
    get_any_option = utils.any_option_getter(model)
    cm = model.config_model
    techs = []
    for p in parents:
        techs.extend([
            k for k in cm.techs
            if model.ischild(k, p) and 'name' in cm.techs[k]
        ])
    data = []
    for t in techs:
        item = {'name': cm.techs[t].name}
        for c in columns:
            item[c] = get_any_option(t + '.' + c)
        data.append(item)
    return pd.DataFrame.from_records(data)
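# Illustrative sketch (not part of the original source): the record-building
# pattern used by `df_tech_table`, with a plain dict standing in for the
# model's tech configuration and option getter.
import pandas as pd

tech_config = {
    'ccgt': {'name': 'Gas turbine', 'constraints.e_cap.max': 50},
    'pv': {'name': 'Solar PV', 'constraints.e_cap.max': 10},
}
columns = ['constraints.e_cap.max']

data = []
for t, conf in tech_config.items():
    item = {'name': conf['name']}
    for c in columns:
        item[c] = conf.get(c)
    data.append(item)

df = pd.DataFrame.from_records(data)
# df has a 'name' column plus one column per requested option, one row per tech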
@utils.memoize
def get_children(parent, childless_only=True):
    """
    Return the children of ``parent``. If childless_only is True, only
    children that have no children themselves are returned.
    """
    locations = model._locations
    children = list(locations[locations._within == parent].index)
    if childless_only:  # FIXME childless_only param needs tests
        children = [i for i in children if len(get_children(i)) == 0]
    return children
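# Illustrative sketch (not part of the original source): the recursive
# `get_children` lookup, with functools.lru_cache approximating
# `@utils.memoize` and a small made-up DataFrame whose `_within` column maps
# each location to its parent (None for top-level locations).
import functools
import pandas as pd

locations = pd.DataFrame(
    {'_within': [None, 'region1', 'region1', 'site1']},
    index=['region1', 'site1', 'site2', 'unit1'],
)

@functools.lru_cache(maxsize=None)
def get_children(parent, childless_only=True):
    children = list(locations[locations['_within'] == parent].index)
    if childless_only:
        children = [i for i in children if len(get_children(i)) == 0]
    return children

# get_children('region1') -> ['site2']  (site1 is excluded: it has a child)
# get_children('region1', childless_only=False) -> ['site1', 'site2']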
def _get_iteration_config(self, config, index_str, iter_row):
    iter_c = config.copy()  # iter_c is this iteration's config
    # `iter_row` is a pandas Series (a single dataframe row);
    # build up an AttrDict with the overrides it specifies
    override_c = utils.AttrDict()
    for k, v in iter_row.to_dict().items():
        # NaN values can show up in this row if some but not all iterations
        # specify a value, so we simply skip them
        if not isinstance(v, list) and pd.isnull(v):
            # NB the isinstance and pd.isnull checks should cover all cases,
            # i.e. either not a list (which is definitely not null) or a
            # single value that could be null. But this could blow up in
            # unexpected edge cases...
            continue
        # Convert numpy dtypes to Python ones, else YAML chokes
        if isinstance(v, np.generic):
            v = v.item()  # was np.asscalar(v), which is removed in newer numpy
        if isinstance(v, dict):
            override_c.set_key(k, utils.AttrDict(v))
        else:
            override_c.set_key(k, copy.copy(v))
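# Illustrative sketch (not part of the original source): the override-building
# loop above applied to one DataFrame row, with a plain dict collecting the
# overrides (calliope's AttrDict.set_key additionally expands dotted keys into
# nested structures). The column names are made up for the example.
import copy
import numpy as np
import pandas as pd

runs = pd.DataFrame(
    {'techs.pv.constraints.e_cap.max': [10, np.nan],
     'run.solver': ['glpk', 'cbc']},
    index=['run1', 'run2'],
)
iter_row = runs.loc['run2']

override = {}
for k, v in iter_row.to_dict().items():
    if not isinstance(v, list) and pd.isnull(v):
        continue  # this iteration does not override this setting
    if isinstance(v, np.generic):
        v = v.item()  # convert numpy scalar to a plain Python type for YAML
    override[k] = copy.copy(v)

# override == {'run.solver': 'cbc'}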