##### Define earthquake epicenter, magnitude, and depth
# In[3]:
epicenter = (210,110) # x,y location
magnitude = 5 # Richter scale
depth = 10000 # m, shallow depth
##### Plot location of epicenter on the network
# In[4]:
wntr.network.draw_graph(wn, node_size=0, figsize=(10,8), dpi=100)
plt.hold(True)
plt.scatter(epicenter[0], epicenter[1], s=1000, c='r', marker='*', zorder=2)
##### Generate the earthquake scenario
# In[5]:
# This scenario assumes a uniform pipe and soil type throughout the network; these
# parameters can also be set for individual pipes. Reference PGA values:
#   PGA = 0.001 g (0.01 m/s2) – perceptible by people
#   PGA = 0.02 g (0.2 m/s2) – people lose their balance
#   PGA = 0.50 g (5 m/s2) – very high; well-designed buildings can survive if the duration is short
# A repair rate of 1/km (0.001/m) has been suggested as an upper bound.
earthquake = wntr.scenario.Earthquake(epicenter, magnitude, depth)
earthquake.generate(wn, coordinate_scale=200, correct_length=False)
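# The repair-rate bound noted above translates into a pipe failure probability through
# a standard Poisson damage model, P_fail = 1 - exp(-RR*L). A minimal illustrative
# sketch (not part of the WNTR earthquake API; the 1 km pipe length is made up):
import numpy as np
RR = 0.001       # repairs per meter (the suggested upper-bound repair rate)
L_pipe = 1000.0  # m, hypothetical pipe length
p_fail = 1 - np.exp(-RR*L_pipe)
print "Failure probability for a 1 km pipe at RR = 1/km: " + str(round(p_fail, 3))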
##### Simulate hydraulics with pipe repair (cache results to a pickle file)
if os.path.isfile('demo_Net6_repair.pickle'):  # assumption: reuse cached results when present
    results_repair = pickle.load(open('demo_Net6_repair.pickle', 'rb'))
else:
    results_repair = sim.run_sim()
    pickle.dump(results_repair, open('demo_Net6_repair.pickle', 'wb'))
##### Compare results
# In[12]:
wn_nodes = [node_name for node_name, node in wn.nodes(wntr.network.Junction)]
pressure_at_24hr = results.node.loc[(wn_nodes, pd.Timedelta(hours = 24)), 'pressure']
wntr.network.draw_graph(wn, node_attribute=pressure_at_24hr, node_size=20, node_range = [0,90], title='Pressure at 24 hours, without repair', figsize=(12,8), dpi=100)
pressure_at_24hr = results_repair.node.loc[(wn_nodes, pd.Timedelta(hours = 24)), 'pressure']
wntr.network.draw_graph(wn, node_attribute=pressure_at_24hr, node_size=20, node_range = [0,90], title='Pressure at 24 hours, with repair', figsize=(12,8), dpi=100)
# Node pressure
plt.figure(figsize=(30,8), dpi=100)
plt.subplot(1,2,1)
for name, node in wn.nodes(wntr.network.Junction):
    pressure = results.node['pressure'][name]
    pressure.index = pressure.index.format()
    pressure.plot()
    plt.hold(True)
plt.ylim(ymin=0)
plt.ylabel('Node Pressure (m)')
plt.title('Without repair')
plt.subplot(1,2,2)
for name, node in wn.nodes(wntr.network.Junction):
    pressure = results_repair.node['pressure'][name]
    pressure.index = pressure.index.format()
    pressure.plot()
    plt.hold(True)
plt.ylim(ymin=0)
plt.ylabel('Node Pressure (m)')
plt.title('With repair')
# Plot chemical scenario
CHEM_at_5hr = results_CHEM.node.loc['quality', 5*3600, :]
wntr.network.draw_graph(wn, node_attribute=CHEM_at_5hr, node_size=20,
                        title='Chemical concentration, time = 5 hours')
CHEM_at_node = results_CHEM.node.loc['quality', :, '208']
plt.figure()
CHEM_at_node.plot(title='Chemical concentration, node 208')
# Plot age scenario (convert to hours)
AGE_at_5hr = results_AGE.node.loc['quality', 5*3600, :]/3600.0
wntr.network.draw_graph(wn, node_attribute=AGE_at_5hr, node_size=20,
title='Water age (hrs), time = 5 hours')
AGE_at_node = results_AGE.node.loc['quality', :, '208']/3600.0
plt.figure()
AGE_at_node.plot(title='Water age, node 208')
# Plot trace scenario
TRACE_at_5hr = results_TRACE.node.loc['quality', 5*3600, :]
wntr.network.draw_graph(wn, node_attribute=TRACE_at_5hr, node_size=20,
title='Trace percent, time = 5 hours')
TRACE_at_node = results_TRACE.node.loc['quality', :, '208']
plt.figure()
TRACE_at_node.plot(title='Trace percent, node 208')
# Calculate average water age (last 48 hours)
age = results_AGE.node.loc['quality',:,:]
age_last_48h = age.loc[age.index[-1]-48*3600:age.index[-1]]/3600
age_last_48h.index = age_last_48h.index/3600
age_last_48h.plot(legend=False)
plt.ylabel('Water age (h)')
plt.xlabel('Time (h)')
wntr.network.draw_graph(wn, node_attribute=age_last_48h.mean(),
title='Average water age (last 48 hours)', node_size=40)
print "Average water age (last 48 hours): " +str(age_last_48h.mean().mean()) + " hr"
print " Mean: " + str(np.mean(todini))
print " Max: " + str(np.max(todini))
print " Min: " + str(np.min(todini))
# Create a weighted graph for flowrate at time 36 hours
t = 36*3600
attr = results.link.loc[(slice(None), t), 'flowrate']
G_flowrate_36hrs = wn.get_weighted_graph_deep_copy(link_attribute=attr)
node_attr = results.node.loc[(slice(None), t), 'demand']
G_temp = wn.get_weighted_graph_deep_copy(node_attribute=node_attr)
# Compute betweenness-centrality time 36 hours
bet_cen = nx.betweenness_centrality(G_flowrate_36hrs)
bet_cen_trim = dict([(k,v) for k,v in bet_cen.iteritems() if v > 0.001])
wntr.network.draw_graph(wn, node_attribute=bet_cen,
title='Betweenness Centrality', node_size=40)
central_pt_dom = sum(max(bet_cen.values()) - np.array(bet_cen.values()))/G_flowrate_36hrs.number_of_nodes()
print "Central point dominance: " + str(central_pt_dom)
# Compute all paths at time 36, for node 185
[S, Shat, sp, dk] = wntr.metrics.entropy(G_flowrate_36hrs, sink=['185'])
attr = dict( (k,1) for u,v,k,d in G_flowrate_36hrs.edges(keys=True,data=True))
for k in dk.keys():
    u = k[0]
    v = k[1]
    link = G_flowrate_36hrs.edge[u][v].keys()[0]
    attr[link] = 2  # dk[k]
cmap = plt.cm.jet
cmaplist = [cmap(i) for i in range(cmap.N)] # extract all colors from the .jet map
cmaplist[0] = (.5,.5,.5,1.0) # force the first color entry to be grey
cmaplist[cmap.N-1] = (1,0,0,1) # force the last color entry to be red
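# Build a discrete colormap from the modified color list; this is the usual matplotlib
# idiom and a sketch only (the custom map can then be passed to a plotting call that
# highlights the path links).
from matplotlib.colors import LinearSegmentedColormap
custom_cmap = LinearSegmentedColormap.from_list('Custom cmap', cmaplist, cmap.N)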
G_flowrate_36hrs = wn.get_graph_deep_copy()
G_flowrate_36hrs.weight_graph(link_attribute=attr)
# Compute betweenness-centrality time 36 hours
bet_cen = nx.betweenness_centrality(G_flowrate_36hrs)
wntr.network.draw_graph(wn, node_attribute=bet_cen,
title='Betweenness Centrality', node_size=40)
central_pt_dom = G_flowrate_36hrs.central_point_dominance()
print "Central point dominance: " + str(central_pt_dom)
# Compute entropy at time 36, for node 185
[S, Shat] = wntr.metrics.entropy(G_flowrate_36hrs, sources=None, sinks=['185'])
# Plot all simple paths between the Lake/River and node 185
link_count = G_flowrate_36hrs.links_in_simple_paths(sources=['Lake', 'River'], sinks=['185'])
wntr.network.draw_graph(wn, link_attribute=link_count, link_width=1,
node_attribute = {'River': 1, 'Lake': 1, '185': 1},
node_size=30, title='Link count in paths')
# Calculate entropy for 1 day, all nodes
shat = []
G_flowrate_t = wn.get_graph_deep_copy()
for t in np.arange(0, 24*3600+1, 3600):
    attr = results.link.loc['flowrate', t, :]
    G_flowrate_t.weight_graph(link_attribute=attr)
    entropy = wntr.metrics.entropy(G_flowrate_t)
    shat.append(entropy[1])
plt.figure()
plt.plot(shat)
plt.ylabel('System Entropy')
plt.xlabel('Time, hr')
print "Entropy"
# Get a copy of the network graph (a NetworkX MultiDiGraph); call .to_undirected()
# to convert it to a MultiGraph if an undirected view is needed
G = wn.get_graph_deep_copy()  # .to_undirected()
# Graph the network
wntr.network.draw_graph(wn, title= wn.name)
# Print general topographic information (type, number of nodes,
# number of edges, average degree)
print nx.info(G)
# Set node and edge attribute and plot the graph.
junction_attr = wn.query_node_attribute('elevation',
node_type=wntr.network.Junction)
pipe_attr = wn.query_link_attribute('length', link_type=wntr.network.Pipe)
wntr.network.draw_graph(wn, node_attribute=junction_attr,
link_attribute=pipe_attr,
title='Node elevation and pipe length',
node_size=40, link_width=2)
# Compute link density (2m/n(n-1) where n is the number of nodes and m is the
# number of edges in G. The density is 0 for a graph without edges and 1 for
# a dense graph (a graph with the maximum number of edges). The density of
# multigraphs can be higher than 1)
print "Link density: " + str(nx.density(G))
# Compute number of self loops (a link that connects a node to itself)
print "Number of self loops: " + str(G.number_of_selfloops())
# Compute node degree (number of links per node)
node_degree = G.degree()
wntr.network.draw_graph(wn, node_attribute=node_degree,
                        title='Node degree', node_size=40)
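# Todini index (hydraulic resilience) over time, plotted and summarized below.
# The call follows the old wntr.metrics API used elsewhere in these demos; the
# 15 m pressure threshold is an assumed value for this sketch.
todini = wntr.metrics.todini(results.node, results.link, wn, 15)
plt.figure()
plt.plot(todini)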
plt.ylabel('Todini Index')
plt.xlabel('Time, hr')
print "Todini Index"
print " Mean: " + str(np.mean(todini))
print " Max: " + str(np.max(todini))
print " Min: " + str(np.min(todini))
import wntr
import numpy as np
import matplotlib.pyplot as plt
# Create a water network model
inp_file = 'networks/Net3.inp'
wn = wntr.network.WaterNetworkModel(inp_file)
# Compute population per node
pop = wntr.metrics.population(wn)
total_population = pop.sum()
print "Total population: " + str(total_population)
wntr.network.draw_graph(wn, node_attribute=pop, node_range = [0,400], node_size=40,
title='Population, Total = ' + str(total_population))
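# The population metric is demand-based: average node demand divided by a per-capita
# usage rate R. A quick sanity check on the magnitude (assumption: R corresponds to
# roughly 200 gallons per person per day):
R = 200 * 3.785e-3 / (24*3600.0)  # gallons/day converted to m3/s, about 8.8e-6
print "Assumed per-capita usage rate: " + str(R) + " m3/s"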
# Find population and nodes impacted by pressure less than 40 m
sim = wntr.sim.EpanetSimulator(wn)
results = sim.run_sim()
junctions = [name for name, node in wn.junctions()]
pop_impacted = wntr.metrics.population_impacted(pop, results.node['pressure',:,junctions], np.less, 40)
plt.figure()
pop_impacted.sum(axis=1).plot(title='Total population with pressure < 40 m')
nodes_impacted = wntr.metrics.query(results.node['pressure',:,junctions], np.less, 40)
wntr.network.draw_graph(wn, node_attribute=nodes_impacted.any(axis=0), node_size=40,
title='Nodes impacted')
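# The threshold query above boils down to a simple pandas comparison. A minimal
# sketch with made-up numbers (three junctions, two timesteps; not Net3 data):
import pandas as pd
pop_demo = pd.Series({'J1': 100, 'J2': 50, 'J3': 200})
pressure_demo = pd.DataFrame({'J1': [45, 30], 'J2': [38, 42], 'J3': [60, 25]})
low = pressure_demo < 40                          # nodes below 40 m at each timestep
pop_low = low.mul(pop_demo, axis=1).sum(axis=1)   # impacted population per timestep: 50, 300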
# Compute network cost
network_cost = wntr.metrics.cost(wn)
print "Network cost: $" + str(round(network_cost,2))
# Fraction delivered demand (FDD) for each node and time
plt.figure()
fdd.plot(ylim=(-0.05, 1.05), legend=False, title='FDD for each node and time')
# Fraction of nodes with reduced demand
fdd_fraction_impacted = 1 - fdd.sum(axis=1)/fdd.shape[1]
plt.figure()
fdd_fraction_impacted.plot(ylim=(-0.05, 1.05), title='Fraction of nodes not receiving adequate demand')
# Population impacted by reduced demand
fdd_pop_impacted = wntr.metrics.population_impacted(pop, fdd, np.less, 1)
plt.figure()
fdd_pop_impacted.plot(legend=False, title='Population not receiving adequate demand')
# Timeseries of fraction of population not meeting demand
plt.figure()
fdd_pop_impacted.sum(axis=1).plot(title='Total population not receiving adequate demand')
# FDV and FDD can also be computed with averaging over times and/or nodes; the
# branches below plot those variants (average_times and average_nodes are flags
# assumed to be set where fdv and fdd are calculated).
if average_times == True and average_nodes == False:
    wntr.network.draw_graph(wn, node_attribute=fdv, node_size=40,
                            node_range=[0,1], title='FDV averaged over all times')
    wntr.network.draw_graph(wn, node_attribute=fdd, node_size=40,
                            node_range=[0,1], title='FDD averaged over all times')
elif average_times == False and average_nodes == True:
    plt.figure()
    fdv.plot(ylim=(-0.05, 1.05), title='FDV averaged over all nodes')
    plt.figure()
    fdd.plot(ylim=(-0.05, 1.05), title='FDD averaged over all nodes')
# Create a water network model
inp_file = 'networks/Net3.inp'
wn = wntr.network.WaterNetworkModel(inp_file)
# Define a water quality scenario: inject a chemical ('CHEM') at node '121' as a
# SETPOINT source with strength 1000, from 2 to 15 hours into the simulation
WQscenario = wntr.scenario.Waterquality('CHEM', '121', 'SETPOINT', 1000, 2*3600, 15*3600)
# Simulate hydraulics and water quality for each scenario
sim = wntr.sim.EpanetSimulator(wn)
results_CHEM = sim.run_sim(WQscenario)
MC = wntr.metrics.mass_contaminant_consumed(results_CHEM.node)
VC = wntr.metrics.volume_contaminant_consumed(results_CHEM.node, 0.001)
EC = wntr.metrics.extent_contaminant(results_CHEM.node, results_CHEM.link, wn, 0.001)
wntr.network.draw_graph(wn, node_attribute=MC.sum(axis=0), node_range = [0,400], node_size=40,
title='Total mass consumed')
plt.figure()
EC.sum(axis=1).plot(title='Extent of contamination')
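# The population_impacted helper used earlier can also relate contamination to the
# people served. An illustrative (assumed) use, flagging nodes where any contaminant
# mass was consumed; this assumes MC shares the node columns used by pop.
pop_exposed = wntr.metrics.population_impacted(pop, MC, np.greater, 0)
plt.figure()
pop_exposed.sum(axis=1).plot(title='Population at nodes consuming contaminant')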