from itertools import combinations
from networkit import graph, centrality

# (repo, limit, file_to_id, id_to_file, togetherness, i and the start/finish
# timing helpers are defined earlier in the original script.)
for related_files in iter_files_per_commit(repo, limit):
    related_files_by_id = []
    for f in filter_files(related_files):
        try:
            related_files_by_id.append(file_to_id[f])
        except KeyError:
            related_files_by_id.append(i)
            file_to_id[f] = i
            id_to_file[i] = f
            i += 1
    # count how often each pair of files changes together
    for edge in combinations(related_files_by_id, 2):
        togetherness[edge] += 1
finish(s)

s = start("building networkit graph")
g = graph.Graph(weighted=True)
for _ in range(len(file_to_id)):
    g.addNode()
for e, t in togetherness.items():
    # files that change together often get a short (strong) edge
    g.addEdge(e[0], e[1], 1 / t)
finish(s)

s = start("computing betweenness")
# accurate, slow calculation
b = centrality.Betweenness(g, normalized=True)
# TODO - maybe allow toggling between the accurate and estimated methods;
# the estimate is faster but not as precise (roughly 10x faster in a benchmark test)
# b = centrality.EstimateBetweenness(g, 128, normalized=True, parallel=True)
b.run()
bb = b.ranking()
finish(s)
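# Hedged sketch of the exact-vs-estimated toggle the TODO above mentions,
# run on a small generated graph (a stand-in, not the commit graph built above).
import networkit as nk

demo_g = nk.generators.ErdosRenyiGenerator(200, 0.05).generate()
use_estimate = False  # flip to trade precision for speed
if use_estimate:
    # positional args: graph, number of samples, normalized, parallel
    bc = nk.centrality.EstimateBetweenness(demo_g, 128, True, True)
else:
    bc = nk.centrality.Betweenness(demo_g, normalized=True)
bc.run()
print(bc.ranking()[:10])  # (node, score) pairs, highest betweenness first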
"""
# check if the graph is weighted
if A.data.max() == A.data.min() == 1:
    is_weighted = False
else:
    is_weighted = True
# check if the graph is directed (A is symmetric for undirected graphs)
if (abs(A.T - A) > 1e-10).nnz == 0:
    is_directed = False
else:
    is_directed = True
num_nodes = A.shape[0]
nkG = nk.graph.Graph(num_nodes, weighted=is_weighted, directed=is_directed)
for x, y in zip(A.nonzero()[0], A.nonzero()[1]):
    if not is_directed and x < y:
        # undirected graph: skip the upper triangle so each edge is added once
        continue
    if is_weighted:
        nkG.addEdge(x, y, A[x, y])
    else:
        nkG.addEdge(x, y)
return nkG
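# Hedged usage sketch for the fragment above: its enclosing def is not shown,
# so the conversion is assumed to be wrapped in a function called scipy_to_nk(A)
# here purely for illustration. A symmetric, weighted adjacency matrix should
# yield an undirected, weighted networkit graph.
import numpy as np
from scipy.sparse import csr_matrix

A = csr_matrix(np.array([[0.0, 2.0, 0.0],
                         [2.0, 0.0, 1.0],
                         [0.0, 1.0, 0.0]]))
demo_nkG = scipy_to_nk(A)
print(demo_nkG.numberOfNodes(), demo_nkG.numberOfEdges())  # expect 3 and 2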
def nx2nk(nxG, weightAttr=None):
    """
    Convert a networkx.Graph to a NetworKit.Graph.
    :param weightAttr: the edge attribute which should be treated as the edge weight
    """
    if not have_nx:
        raise MissingDependencyError("networkx")

    # map networkx node ids to consecutive numerical node ids
    idmap = dict((id, u) for (id, u) in zip(nxG.nodes(), range(nxG.number_of_nodes())))
    z = max(idmap.values()) + 1
    # print("z = {0}".format(z))

    if weightAttr is not None:
        nkG = graph.Graph(z, weighted=True, directed=nxG.is_directed())
        for (u_, v_) in nxG.edges():
            u, v = idmap[u_], idmap[v_]
            w = nxG[u_][v_][weightAttr]
            nkG.addEdge(u, v, w)
    else:
        nkG = graph.Graph(z, directed=nxG.is_directed())
        for (u_, v_) in nxG.edges():
            u, v = idmap[u_], idmap[v_]
            # print(u_, v_, u, v)
            assert (u < z)
            assert (v < z)
            nkG.addEdge(u, v)

    assert (nkG.numberOfNodes() == nxG.number_of_nodes())
    assert (nkG.numberOfEdges() == nxG.number_of_edges())
    return nkG
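# Hedged usage sketch for nx2nk, assuming networkx is available in this
# environment; converts the karate club graph and checks that node and edge
# counts survive the conversion.
import networkx as nx

nxG = nx.karate_club_graph()
for u, v in nxG.edges():
    nxG[u][v]["weight"] = 1.0  # give every edge a weight attribute

nkG_plain = nx2nk(nxG)
nkG_weighted = nx2nk(nxG, weightAttr="weight")
assert nkG_weighted.numberOfNodes() == nxG.number_of_nodes()
assert nkG_weighted.numberOfEdges() == nxG.number_of_edges()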
def run(self, G):
    bfs = networkit.graph.BFS(G, G.randomNode(), storePaths=False)
    bfs.run()
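# Hedged note: the fragment above uses an older NetworKit API; in recent
# releases BFS lives in networkit.distance and random nodes come from
# networkit.graphtools. A minimal sketch under that assumption:
import networkit as nk

demo_G = nk.generators.ErdosRenyiGenerator(50, 0.1).generate()
source = nk.graphtools.randomNode(demo_G)
bfs = nk.distance.BFS(demo_G, source, storePaths=False)
bfs.run()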
reduces computation time and enhances the result.
:param G: the graph (may not contain self-loops)
:param k: k as in k-core
:param algo: community detection algorithm instance
:return: communities (as type Partition)
"""
coreDec = CoreDecomposition(G)
coreDec.run()
cores = coreDec.cores()
try:
    kCore = cores[k]
except IndexError:
    raise RuntimeError("There is no core for the specified k")
C = graph.Subgraph().fromNodes(G, kCore)  # FIXME: node indices are not preserved
# properties.overview(C)
return detectCommunities(C, algo, inspect)
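# Hedged sketch of the detectCommunities call the fragment above ends with,
# run on a small generated graph rather than on a k-core subgraph.
import networkit as nk

demo_G = nk.generators.ErdosRenyiGenerator(100, 0.1).generate()
communities = nk.community.detectCommunities(demo_G)  # returns a Partition
print(communities.numberOfSubsets())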