import os
import sys
import json
import time
import pickle
import logging
import datetime

import igraph
import numpy as np
import scipy.sparse as sp
# Load the pickled San Francisco road network graph and time a
# single-source shortest-path query weighted by edge length.
sf_graph_file = '../data/network_graph.pkl'
g = igraph.Graph.Read_Pickle(sf_graph_file)
print(g.summary())

t0 = time.time()
path_collection = g.get_shortest_paths(1, weights='sec_length', output='epath')
t1 = time.time()
print(t1 - t0)

sys.exit(0)

# Graph summary printed above:
# IGRAPH D--- 83335 149318 --
# + attr: n_x (v), n_y (v), node_index (v), node_osmid (v), edge_index (e), edge_osmid (e), end_node (e), sec_length (e), speed_limit (e), start_node (e)

# One-off export of the OSM-id -> igraph-vertex-id mapping:
# node_osmid_list = g.vs['node_osmid']
# node_osmid2graphid = {node_osmid_list[i]: i for i in range(len(node_osmid_list))}
# with open('../data/node_osmid2graphid.json', 'w') as outfile:
#     json.dump(node_osmid2graphid, outfile, indent=2)
# sys.exit(0)
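# --- Illustrative sketch (not part of the original script) ----------------
# Assuming the JSON mapping exported above, OSM node ids can be translated
# to igraph vertex ids before running a routing query. The function name,
# the default mapping path, and the OSM ids passed by a caller are
# placeholders for illustration only.
def example_route_by_osmid(graph, origin_osmid, destination_osmid,
                           mapping_path='../data/node_osmid2graphid.json'):
    """Return the edge path between two OSM node ids, if both are mapped."""
    with open(mapping_path) as f:
        node_osmid2graphid = json.load(f)
    # JSON keys are strings, so look the ids up as strings.
    source = node_osmid2graphid[str(origin_osmid)]
    target = node_osmid2graphid[str(destination_osmid)]
    return graph.get_shortest_paths(source, to=target,
                                    weights='sec_length', output='epath')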
def parse_obo_graph(path):
    """Build one directed igraph.Graph per GO namespace, reusing pickled copies when present."""
    stored_pickles_found = False

    g = {'biological_process': igraph.Graph(directed=True),
         'cellular_component': igraph.Graph(directed=True),
         'molecular_function': igraph.Graph(directed=True)}

    for ns in g:
        pickle_file_path = "{0}.{1}.graph".format(path, ns)
        if os.path.exists(pickle_file_path):
            print("Using stored ontology graph: {0}".format(pickle_file_path))
            g[ns] = igraph.Graph.Read_Pickle(fname=pickle_file_path)
            stored_pickles_found = True

    # key: GO:ID, value = {'ns': 'biological_process', 'idx': 25}
    terms = dict()

    if stored_pickles_found is True:
        print("Using stored terms data structure: {0}.terms".format(path))
        with open("{0}.terms".format(path), 'rb') as f:
            terms = pickle.load(f)

    # key: namespace, value: next vertex index to assign within that namespace
    next_idx = {'biological_process': 0,
                'cellular_component': 0,
                'molecular_function': 0}

    id = None
def load(path):
    """Load a pickled igraph.Graph from disk; pickle files must be opened in binary mode."""
    with open(path, 'rb') as f:
        graph = igraph.Graph.Read_Pickle(f)
    return graph
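# --- Illustrative sketch (not part of the original code) ------------------
# Round trip assumed by the loaders above: igraph graphs are written with
# Graph.write_pickle() and read back with Graph.Read_Pickle(). The file
# name and the tiny ring graph below are placeholders.
def pickle_roundtrip_demo(tmp_path='demo_graph.pkl'):
    demo = igraph.Graph.Ring(5, directed=True)   # small throwaway graph
    demo.write_pickle(tmp_path)                  # serialize to disk
    return load(tmp_path).summary()              # read back via load() above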
def main():
    absolute_path = os.path.dirname(os.path.abspath(__file__))
    logging.basicConfig(filename=absolute_path + '/sf_abm_mp.log', level=logging.DEBUG)
    logger = logging.getLogger('main')
    logger.info('{} \n\n'.format(datetime.datetime.now()))

    t_start = time.time()

    ### Read the initial graph
    global g
    g = igraph.Graph.Read_Pickle(absolute_path + '/../data_repo/data/sf/network_graph.pkl')
    logger.info('graph summary {}'.format(g.summary()))

    ### Free-flow time in seconds: edge length (m) divided by the speed limit (mph),
    ### with 2.23694 converting mph to m/s (1 m/s = 2.23694 mph).
    ### The free-flow time should still be calibrated rather than set equal to the
    ### time at the speed limit; see the 1.2 coefficient used for ['weight'] below.
    g.es['fft'] = np.array(g.es['sec_length'], dtype=float) / np.array(g.es['maxmph'], dtype=float) * 2.23694
    fft_array = np.array(g.es['fft'], dtype=float)
    capacity_array = np.array(g.es['capacity'], dtype=float)
    logger.info('max/min FFT in seconds: {}/{}'.format(max(g.es['fft']), min(g.es['fft'])))

    ### According to (Colak, 2015), for SF even at vol=0, t = 1.2*fft, maybe due to traffic lights; 1.2 is f_p - k_bay.
    g.es['weight'] = fft_array * 1.2

    for day in [1]:
        for hour in range(9, 10):
            logger.info('*************** DY{} HR{} ***************'.format(day, hour))
            t0 = time.time()
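# --- Illustrative sketch (not part of the original code) ------------------
# Numeric check of the free-flow-time formula used in main(): a 100 m edge
# with a 25 mph speed limit takes 100 * 2.23694 / 25 ≈ 8.95 s, and the
# calibrated weight is 1.2 times that (≈ 10.74 s). The edge values here are
# made up for illustration.
def free_flow_time(length_m, speed_mph, calibration=1.2):
    fft = length_m * 2.23694 / speed_mph   # seconds at the posted speed limit
    return fft, fft * calibration          # (free-flow time, calibrated weight)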