import networkx as net
import operator
from operator import itemgetter, attrgetter
import math
from itertools import combinations, permutations
import random
import numpy as np
import re
import codecs
import csv
import networkx.algorithms as algo
import math
import sys
import time
import pandas as pd
from pandas import Series, DataFrame
from scipy import stats
import matplotlib
matplotlib.use('pgf')
import matplotlib.pyplot as plt
# Ratios of heaviest contact links added to the graph at each step.
link_addition_ratio = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]

# Metric slots that are filled in per link-addition ratio.
_PER_RATIO_KEYS = [
    "graph", "numberOfNodes", "numberOfEdges", "durationThreshold",
    "density", "clustering_coefficient", "diameter_cc", "global_bet",
    "Brandes_ego_bet", "Brandes_ego_elapsed_time",
    "Brandes_xego_bet", "Brandes_xego_elapsed_time",
    "Proposed_ego_bet", "Proposed_ego_elapsed_time",
    "Proposed_xego_bet", "Proposed_xego_elapsed_time",
    "ego_global_pearson_corr", "xego_global_pearson_corr",
    "ego_global_spearman_corr", "xego_global_spearman_corr",
    "ego_node_coverage_in_connected_component",
    "ego_edge_coverage_in_connected_component",
    "xego_node_coverage_in_connected_component",
    "xego_edge_coverage_in_connected_component",
]

def _make_profile(file_name, num_nodes):
    """Build one dataset profile skeleton.

    Scalar statistics start as None (filled after loading the CSV);
    every per-ratio metric starts as an empty dict keyed by ratio.
    """
    profile = {
        "file_name": file_name,
        "num_nodes": num_nodes,
        "median": None,
        "mean": None,
        "std": None,
        "contact_weight_map": None,
    }
    for key in _PER_RATIO_KEYS:
        profile[key] = {}
    return profile

# The four contact traces: CSV file and number of participating nodes.
# (Previously this was four hand-copied 30-key dict literals.)
data_profile = {
    "Infocom05": _make_profile("data_infocom05.csv", 41),
    "Infocom06": _make_profile("data_infocom06.csv", 99),
    "Cambridge": _make_profile("data_cambridge.csv", 54),
    "Intel": _make_profile("data_intel.csv", 20),
}
def getContactDurationMap(dataFrame):
    """Accumulate contact durations per (recorder, recordee) pair.

    Each contact row contributes its 'Contact Time' plus 1 second
    (the +1 counts a zero-length contact as one second of contact).

    Returns (duration_map, median, mean, std) where the statistics
    are taken over the accumulated per-pair durations.
    """
    contactMap = {}
    # Zipping the columns avoids a per-row Series lookup (O(1) access
    # instead of repeated label-based indexing inside the loop).
    for recorder, recordee, contactTime in zip(dataFrame['Recorder'],
                                               dataFrame['Recordee'],
                                               dataFrame['Contact Time']):
        key = (recorder, recordee)
        if key in contactMap:
            contactMap[key] += contactTime + 1
        else:
            contactMap[key] = contactTime + 1
    # list() so numpy also accepts the values on Python 3, where
    # dict.values() is a view rather than a list.
    durations = list(contactMap.values())
    return contactMap, np.median(durations), np.mean(durations), np.std(durations)
# Load every dataset's CSV and record both the raw pair -> duration
# map and its summary statistics on the profile.
for profile in data_profile.values():
    frame = pd.read_csv(profile['file_name'])
    (profile['contact_weight_map'],
     profile['median'],
     profile['mean'],
     profile['std']) = getContactDurationMap(frame)
# Sanity check: print a few accumulated (pair, duration) samples plus
# the distribution statistics for each dataset.
for data_name, data in data_profile.items():
    print "###", data_name
    # Python 2: dict.items() returns a list, so slicing is valid here.
    print data['contact_weight_map'].items()[1:5]
    print "Mean: %5.0f" % data['mean']
    print "Median: %5.0f" % data['median']
    print "Standard Deviation: %5.0f" % data['std']
    print
### Cambridge [((6, 9), 2187), ((12, 1), 4943), ((7, 12), 40849), ((1, 6), 768)] Mean: 9961 Median: 2684 Standard Deviation: 26513 ### Infocom06 [((39, 70), 1327), ((43, 3), 6382), ((63, 76), 707), ((29, 44), 1366)] Mean: 3356 Median: 1253 Standard Deviation: 11578 ### Intel [((4, 7), 11018), ((1, 3), 19716), ((9, 1), 125), ((4, 8), 5037)] Mean: 12495 Median: 4936 Standard Deviation: 27158 ### Infocom05 [((21, 28), 369), ((4, 36), 1117), ((7, 25), 2571), ((33, 41), 2151)] Mean: 3348 Median: 1608 Standard Deviation: 9278
class ListTable(list):
    """A list of rows (e.g. [[1, 2, 3], [4, 5, 6]]) that renders
    itself as an HTML table in the IPython Notebook."""
    def _repr_html_(self):
        rendered_rows = []
        for row in self:
            cells = ''.join("<td>{0}</td>".format(col) for col in row)
            rendered_rows.append("<tr>" + cells + "</tr>")
        return "<table>" + ''.join(rendered_rows) + "</table>"
# Summary table of the four traces.  Static counts come from the
# published dataset descriptions; duration stats were computed above.
dataset_names = ['Infocom05', 'Infocom06', 'Cambridge', 'Intel']
table = ListTable()
table.append([''] + dataset_names)
table.append(['Number of Nodes'] +
             [data_profile[name]['num_nodes'] for name in dataset_names])
table.append(['Number of contacts between nodes', '22,459', '170,601', '4,228', '1,364'])
table.append(['Average number of contacts per node pair', '14.3', '20.2', '32.0', '18.9'])
table.append(['Mean of accumulated contact duration per node pair (sec.)'] +
             ["%5.0f" % data_profile[name]['mean'] for name in dataset_names])
table.append(['Median of accumulated contact duration per node pair (sec.)'] +
             ["%5.0f" % data_profile[name]['median'] for name in dataset_names])
table.append(['Data collection period (sec.)', '254,151 (2.9 days)', '337,419 (3.9 days)', '455,610 (5.3 days)', '359,191 (4.2 days)'])
table  # bare expression: notebook cell echo renders the HTML table
Infocom05 | Infocom06 | Cambridge | Intel | |
Number of Nodes | 41 | 99 | 54 | 20 |
Number of contacts between nodes | 22,459 | 170,601 | 4,228 | 1,364 |
Average number of contacts per node pair | 14.3 | 20.2 | 32.0 | 18.9 |
Mean of accumulated contact duration per node pair (sec.) | 3348 | 3356 | 9961 | 12495 |
Median of accumulated contact duration per node pair (sec.) | 1608 | 1253 | 2684 | 4936 |
Data collection period (sec.) | 254,151 (2.9 days) | 337,419 (3.9 days) | 455,610 (5.3 days) | 359,191 (4.2 days) |
def mean(values):
    """Arithmetic mean of a sequence, or None for an empty one.

    float() guards against Python 2 integer truncation when every
    element is an int (sum/len would silently floor-divide there).
    """
    if len(values) == 0:
        return None
    return float(sum(values)) / len(values)
def get_edgeList_by_threshold(n, map, median, threshold):
    """Select the heaviest `threshold` fraction of contact edges.

    `map` (name shadows the builtin but is kept for caller
    compatibility) maps (node, node) -> accumulated duration.
    `n` and `median` are unused; they are retained so existing call
    sites keep working.

    Returns (edge_list, duration): the chosen edges from heaviest to
    lightest, and the weight of the lightest selected edge (0 when
    nothing is selected).
    """
    # items() works on both Python 2 and 3 (iteritems was py2-only).
    # Reversing the ascending sort reproduces the original
    # pop()-from-the-end selection order exactly, ties included.
    ordered = sorted(map.items(), key=operator.itemgetter(1))[::-1]
    num = int(math.ceil(float(len(ordered)) * threshold))
    edgeList = [edge for edge, _ in ordered[:num]]
    duration = ordered[num - 1][1] if num > 0 else 0
    return edgeList, duration
# Build one thresholded graph per dataset and ratio, recording the
# duration cut-off and the resulting graph size.
for data in data_profile.values():
    for ratio in link_addition_ratio:
        edges, cutoff = get_edgeList_by_threshold(data['num_nodes'],
                                                  data['contact_weight_map'],
                                                  data['median'],
                                                  ratio)
        graph = net.Graph(edges)
        data['durationThreshold'][ratio] = cutoff
        data['graph'][ratio] = graph
        data['numberOfNodes'][ratio] = graph.number_of_nodes()
        data['numberOfEdges'][ratio] = graph.number_of_edges()
# Report, per dataset and in increasing ratio order, the duration
# cut-off and resulting graph size of each thresholded graph.
for data_name, data in data_profile.items():
    print "###", data_name
    for ratio in sorted(data['durationThreshold']):
        print "%s - %d - %d - %d" % (ratio,
                                     data['durationThreshold'][ratio],
                                     data['numberOfNodes'][ratio],
                                     data['numberOfEdges'][ratio])
    print
### Cambridge 0.1 - 19606 - 8 - 8 0.2 - 7786 - 12 - 15 0.3 - 4470 - 12 - 22 0.4 - 3246 - 12 - 29 0.5 - 2709 - 12 - 39 0.6 - 2187 - 12 - 48 0.7 - 1806 - 12 - 53 0.8 - 1566 - 12 - 59 0.9 - 1318 - 12 - 64 ### Infocom06 0.1 - 6381 - 94 - 576 0.2 - 3883 - 97 - 1102 0.3 - 2568 - 98 - 1605 0.4 - 1815 - 98 - 2108 0.5 - 1253 - 98 - 2559 0.6 - 862 - 98 - 2995 0.7 - 506 - 98 - 3370 0.8 - 251 - 98 - 3716 0.9 - 10 - 98 - 4046 ### Intel 0.1 - 24329 - 5 - 5 0.2 - 16725 - 9 - 9 0.3 - 10915 - 9 - 13 0.4 - 6572 - 9 - 16 0.5 - 5037 - 9 - 19 0.6 - 4122 - 9 - 25 0.7 - 2211 - 9 - 28 0.8 - 1200 - 9 - 31 0.9 - 9 - 9 - 35 ### Infocom05 0.1 - 5921 - 39 - 91 0.2 - 3818 - 40 - 191 0.3 - 2905 - 40 - 285 0.4 - 2167 - 40 - 363 0.5 - 1609 - 41 - 452 0.6 - 1214 - 41 - 531 0.7 - 846 - 41 - 605 0.8 - 468 - 41 - 672 0.9 - 141 - 41 - 740
# Structural metrics per dataset and ratio: density, average local
# clustering coefficient, and the diameter of one connected component.
for data in data_profile.values():
    for ratio in link_addition_ratio:
        data['density'][ratio] = net.density(data['graph'][ratio])
        data['clustering_coefficient'][ratio] = mean(algo.clustering(data['graph'][ratio]).values())
        # NOTE(review): [0] takes the *first* component returned by
        # connected_component_subgraphs, which is not guaranteed to be
        # the largest one -- confirm this is the intended component.
        data['diameter_cc'][ratio] = algo.diameter(list(algo.connected_component_subgraphs(data['graph'][ratio]))[0])
# Report the structural metrics computed above for each dataset/ratio.
for data_name, data in data_profile.items():
    print "###", data_name
    for ratio in sorted(data['density']):
        print "%s - density: %f, clustering_coefficient: %f, diameter_cc: %f" % (ratio,
            data['density'][ratio],
            data['clustering_coefficient'][ratio],
            data['diameter_cc'][ratio])
    print
### Cambridge 0.1 - density: 0.285714, clustering_coefficient: 0.458333, diameter_cc: 3.000000 0.2 - density: 0.227273, clustering_coefficient: 0.347222, diameter_cc: 5.000000 0.3 - density: 0.333333, clustering_coefficient: 0.491667, diameter_cc: 3.000000 0.4 - density: 0.439394, clustering_coefficient: 0.648810, diameter_cc: 3.000000 0.5 - density: 0.590909, clustering_coefficient: 0.669577, diameter_cc: 3.000000 0.6 - density: 0.727273, clustering_coefficient: 0.781854, diameter_cc: 2.000000 0.7 - density: 0.803030, clustering_coefficient: 0.833027, diameter_cc: 2.000000 0.8 - density: 0.893939, clustering_coefficient: 0.893248, diameter_cc: 2.000000 0.9 - density: 0.969697, clustering_coefficient: 0.968350, diameter_cc: 2.000000 ### Infocom06 0.1 - density: 0.131778, clustering_coefficient: 0.400435, diameter_cc: 7.000000 0.2 - density: 0.236684, clustering_coefficient: 0.483801, diameter_cc: 5.000000 0.3 - density: 0.337681, clustering_coefficient: 0.564143, diameter_cc: 4.000000 0.4 - density: 0.443509, clustering_coefficient: 0.635271, diameter_cc: 4.000000 0.5 - density: 0.538397, clustering_coefficient: 0.703793, diameter_cc: 3.000000 0.6 - density: 0.630128, clustering_coefficient: 0.781277, diameter_cc: 3.000000 0.7 - density: 0.709026, clustering_coefficient: 0.834991, diameter_cc: 3.000000 0.8 - density: 0.781822, clustering_coefficient: 0.879554, diameter_cc: 3.000000 0.9 - density: 0.851252, clustering_coefficient: 0.921151, diameter_cc: 2.000000 ### Intel 0.1 - density: 0.500000, clustering_coefficient: 0.466667, diameter_cc: 3.000000 0.2 - density: 0.250000, clustering_coefficient: 0.392593, diameter_cc: 3.000000 0.3 - density: 0.361111, clustering_coefficient: 0.411111, diameter_cc: 4.000000 0.4 - density: 0.444444, clustering_coefficient: 0.820635, diameter_cc: 2.000000 0.5 - density: 0.527778, clustering_coefficient: 0.736243, diameter_cc: 2.000000 0.6 - density: 0.694444, clustering_coefficient: 0.756349, diameter_cc: 2.000000 0.7 - density: 
0.777778, clustering_coefficient: 0.765608, diameter_cc: 2.000000 0.8 - density: 0.861111, clustering_coefficient: 0.862434, diameter_cc: 2.000000 0.9 - density: 0.972222, clustering_coefficient: 0.972222, diameter_cc: 2.000000 ### Infocom05 0.1 - density: 0.122807, clustering_coefficient: 0.367564, diameter_cc: 5.000000 0.2 - density: 0.244872, clustering_coefficient: 0.419765, diameter_cc: 4.000000 0.3 - density: 0.365385, clustering_coefficient: 0.480037, diameter_cc: 3.000000 0.4 - density: 0.465385, clustering_coefficient: 0.582573, diameter_cc: 3.000000 0.5 - density: 0.551220, clustering_coefficient: 0.663286, diameter_cc: 3.000000 0.6 - density: 0.647561, clustering_coefficient: 0.748057, diameter_cc: 3.000000 0.7 - density: 0.737805, clustering_coefficient: 0.827978, diameter_cc: 3.000000 0.8 - density: 0.819512, clustering_coefficient: 0.883453, diameter_cc: 2.000000 0.9 - density: 0.902439, clustering_coefficient: 0.946437, diameter_cc: 2.000000
def get_Global_Betweenness_List(g):
    """Exact (global) betweenness centrality for every node of g."""
    centrality = net.betweenness_centrality(g)
    return centrality
# Ground-truth betweenness for every thresholded graph.
for data in data_profile.values():
    for ratio in link_addition_ratio:
        data['global_bet'][ratio] = get_Global_Betweenness_List(data['graph'][ratio])
def brandes_betweenness(G):
    """Betweenness centrality via Brandes' algorithm (unweighted).

    Runs one BFS plus a dependency accumulation per source node, then
    rescales by 1/((n-1)(n-2)).
    """
    betweenness = dict.fromkeys(G, 0.0)
    for source in G:
        S, P, sigma = single_source_shortest_path_basic(G, source)
        betweenness = accumulate_basic(betweenness, S, P, sigma, source)
    return rescale(betweenness, len(G))
def single_source_shortest_path_basic(G, s):
    """BFS from s counting shortest paths (Brandes, phase 1).

    Returns (S, P, sigma): the visited nodes in non-decreasing
    distance order, the shortest-path predecessor lists, and the
    number of shortest s->v paths per node.
    """
    from collections import deque  # O(1) popleft; list.pop(0) was O(n)
    S = []
    P = {}
    for v in G:
        P[v] = []
    sigma = dict.fromkeys(G, 0.0)  # sigma[v] = #shortest s->v paths
    D = {}                         # BFS distance from s
    sigma[s] = 1.0
    D[s] = 0
    Q = deque([s])
    while Q:  # BFS to find shortest paths
        v = Q.popleft()
        S.append(v)
        Dv = D[v]
        sigmav = sigma[v]
        for w in G[v]:
            if w not in D:
                Q.append(w)
                D[w] = Dv + 1
            if D[w] == Dv + 1:  # w lies on a shortest path through v
                sigma[w] += sigmav
                P[w].append(v)  # record v as predecessor
    return S, P, sigma
def accumulate_basic(betweenness, S, P, sigma, s):
    """Back-propagate dependencies of source s (Brandes, phase 2).

    Consumes S (pops it empty) in reverse BFS order, adding each
    non-source node's dependency onto its running betweenness total.
    """
    delta = dict.fromkeys(S, 0)
    while S:
        w = S.pop()
        coeff = (1.0 + delta[w]) / sigma[w]
        for predecessor in P[w]:
            delta[predecessor] += sigma[predecessor] * coeff
        if w != s:
            betweenness[w] += delta[w]
    return betweenness
def rescale(betweenness, n):
    """Normalize accumulated betweenness by 1/((n-1)(n-2)).

    Graphs with n <= 2 nodes are returned untouched (every value is
    already zero there, so no normalization applies).
    """
    if n <= 2:
        return betweenness
    factor = 1.0 / ((n - 1) * (n - 2))
    for node in betweenness:
        betweenness[node] *= factor
    return betweenness
def get_xego_graph(g, center):
    """Extended-ego network of `center`: the ego, its 1- and 2-hop
    neighbors, and every edge incident to a first-hop neighbor.

    Returns (graph, first_neighbors, second_neighbors).
    (A later definition with index maps supersedes this one once it
    executes; both are live at different points of the script.)
    """
    g2 = net.Graph()
    g2.add_node(center)
    firstNeighbors = g.neighbors(center)
    secondNeighbors = []
    for neighbor in firstNeighbors:
        g2.add_edge(neighbor, center)
        for candidate in g.neighbors(neighbor):
            if candidate == center:
                continue
            # The edge is added either way; only bookkeeping differs.
            g2.add_edge(neighbor, candidate)
            if candidate not in firstNeighbors:
                secondNeighbors.append(candidate)
    secondNeighbors = list(set(secondNeighbors))
    return g2, firstNeighbors, secondNeighbors
def get_brandnes_ego_betweenness_and_elapsed_time(g):
    """Ego betweenness of every node via Brandes on each ego network.

    Only the betweenness computation itself is timed; ego-network
    extraction is excluded.  Returns (node -> value, total seconds).
    """
    elapsed_time = 0.0
    centrality_map = {}
    for node in g.nodes():
        egoNet = net.ego_graph(g, node)
        start_time = time.time()
        centrality_map[node] = brandes_betweenness(egoNet).get(node)
        elapsed_time += time.time() - start_time
    return centrality_map, elapsed_time
def get_brandnes_xego_betweenness_and_elapsed_time(g):
    """X-ego betweenness of every node via Brandes on each extended
    ego network; only the betweenness computation is timed.

    Returns (node -> value, total seconds).
    """
    elapsed_time = 0.0
    centrality_map = {}
    for node in g.nodes():
        xEgoNet = get_xego_graph(g, node)[0]
        start_time = time.time()
        centrality_map[node] = brandes_betweenness(xEgoNet).get(node)
        elapsed_time += time.time() - start_time
    return centrality_map, elapsed_time
# Baseline: Brandes on each (x-)ego network, per dataset and ratio.
for data in data_profile.values():
    for ratio in link_addition_ratio:
        data['Brandes_ego_bet'][ratio], data['Brandes_ego_elapsed_time'][ratio] = get_brandnes_ego_betweenness_and_elapsed_time(data['graph'][ratio])
        data['Brandes_xego_bet'][ratio], data['Brandes_xego_elapsed_time'][ratio] = get_brandnes_xego_betweenness_and_elapsed_time(data['graph'][ratio])
def proposed_ego_betweenness(G, center, fe, node2Index, index2Node):
    """Ego betweenness of `center` by pairwise dependency counting.

    `fe` is the list of first-hop neighbors; indices 1..len(fe) in
    index2Node are those neighbors (index 0 is the center).  For each
    unordered neighbor pair the dependency is 1/|N_i & N_j| when they
    are not adjacent (dependency1), else 0; the sum is rescaled by
    2/((n-1)(n-2)).

    (The original computed `nodes = G.nodes(); nodes.remove(center)`
    without ever using it -- dead code removed.)
    """
    lenOfFirstNeighbors = len(fe)
    # table[i][j] caches pair dependencies; kept structurally identical
    # to the x-ego variant, which reads earlier entries back.
    table = {}
    for i in index2Node.keys():
        table[i] = dict.fromkeys(index2Node.keys(), 0.0)
    betweenness = 0.0
    for i in range(1, lenOfFirstNeighbors + 1):
        N_i = G[index2Node[i]].keys()
        for j in range(i + 1, lenOfFirstNeighbors + 1):
            N_j = G[index2Node[j]].keys()
            table[i][j] = dependency1(index2Node[j], N_i, N_j)
            betweenness += table[i][j]
    return my_rescale(betweenness, len(G))
def proposed_xego_betweenness(G, center, firstNeighbors, secondNeighbors, node2Index, index2Node):
    """Betweenness of `center` in its extended ego network.

    Index convention (index2Node): 0 = center, 1..k = first-hop
    neighbors, k+1..m = second-hop neighbors.  Pair dependencies are
    computed first among first-hop pairs (dependency1), then between
    first- and second-hop nodes, then among second-hop pairs
    (dependency2, which reads earlier `table` entries) -- so the loop
    order matters and must not be rearranged.
    """
    nodes = G.nodes()
    nodes.remove(center)  # every node except the ego itself
    lenOfFirstAndSecondNeigbors = len(nodes)
    lenOfFirstNeighbors = len(firstNeighbors)
    # table[i][j] holds the dependency of pair (i, j) with i < j.
    table = dict.fromkeys(index2Node.keys(), 0.0)
    for i in index2Node.keys():
        delta = dict.fromkeys(index2Node.keys(), 0.0)
        table[i] = delta
    betweenness = 0.0
    for i in range(1, lenOfFirstNeighbors+1):
        N_i = G[index2Node[i]].keys()
        for j in range(i+1, lenOfFirstNeighbors+1):
            # first-hop / first-hop pair
            N_j = G[index2Node[j]].keys()
            table[i][j] = dependency1(index2Node[j], N_i, N_j)
            betweenness += table[i][j]
        for j in range(lenOfFirstNeighbors+1, lenOfFirstAndSecondNeigbors+1):
            # first-hop / second-hop pair, built from cached entries
            N_j = G[index2Node[j]].keys()
            table[i][j] = dependency2(i, N_j, node2Index, index2Node, table)
            betweenness += table[i][j]
    for i in range(lenOfFirstNeighbors+1, lenOfFirstAndSecondNeigbors+1):
        for j in range(i+1, lenOfFirstAndSecondNeigbors+1):
            # second-hop / second-hop pair
            N_j = G[index2Node[j]].keys()
            table[i][j] = dependency2(i, N_j, node2Index, index2Node, table)
            betweenness += table[i][j]
    betweenness = my_rescale(betweenness, len(G))
    return betweenness
def dependency1(jnode, Ni, Nj):
    """Pair dependency between two first-hop neighbors.

    Zero when they are directly linked (jnode appears in Ni),
    otherwise the reciprocal of their number of common neighbors.
    """
    return 0.0 if jnode in Ni else 1.0 / len(set(Ni) & set(Nj))
def dependency2(i, Nj, node2Index, index2Node, table):
    """Dependency between node i and a farther node j.

    Looks up the cached pair dependency between i and each neighbor of
    j; the table is upper-triangular, hence the smaller index selects
    the row.  Any zero factor short-circuits the whole dependency to
    zero; otherwise the harmonic mean of the factors is returned.
    """
    factors = []
    for neighbor in Nj:
        row, col = sorted((i, node2Index[neighbor]))
        factor = table[row][col]
        if factor == 0.0:
            return 0.0
        factors.append(factor)
    return stats.hmean(factors)
def my_rescale(betweenness, n):
    """Scale a summed pair dependency by 2/((n-1)(n-2)).

    For n <= 2 there is nothing to normalize and the value is
    returned unchanged.
    """
    if n > 2:
        betweenness *= 2.0 / ((n - 1) * (n - 2))
    return betweenness
def get_ego_graph(g, center):
    """Ego network of `center`: its neighbors plus edges among them.

    Also builds the index maps used by the proposed algorithms:
    center is index 0, first-hop neighbors are numbered 1..k in
    neighbor-list order.

    Returns (graph, first_neighbors, node2Index, index2Node).
    """
    g2 = net.Graph()
    g2.add_node(center)
    firstNeighbors = g.neighbors(center)
    node2Index = {center: 0}
    index2Node = {0: center}
    for position, neighbor in enumerate(firstNeighbors, start=1):
        node2Index[neighbor] = position
        index2Node[position] = neighbor
    for neighbor in firstNeighbors:
        g2.add_edge(neighbor, center)
        for candidate in g.neighbors(neighbor):
            if candidate != center and candidate in firstNeighbors:
                g2.add_edge(neighbor, candidate)
    return g2, firstNeighbors, node2Index, index2Node
def get_proposed_ego_betweenness_and_elapsed_time(g):
    """Proposed ego betweenness for every node of g.

    Only the betweenness computation is timed; ego-network extraction
    is excluded.  Returns (node -> value, total seconds).
    """
    elapsed_time = 0.0
    centrality_map = {}
    for node in g.nodes():
        egoNet, firstNeighbors, node2Index, index2Node = get_ego_graph(g, node)
        start_time = time.time()
        centrality_map[node] = proposed_ego_betweenness(egoNet, node, firstNeighbors, node2Index, index2Node)
        elapsed_time += time.time() - start_time
    return centrality_map, elapsed_time
def get_xego_graph(g, center):
    """Extended-ego network of `center`, with index maps.

    Index convention: 0 = center, 1..k = first-hop neighbors in
    neighbor-list order, k+1..m = second-hop neighbors in discovery
    order.  (This definition supersedes the earlier three-value
    version once executed.)

    Returns (graph, first_neighbors, second_neighbors, node2Index, index2Node).
    """
    g2 = net.Graph()
    g2.add_node(center)
    firstNeighbors = g.neighbors(center)
    node2Index = {center: 0}
    index2Node = {0: center}
    nextIndex = 1
    for neighbor in firstNeighbors:
        node2Index[neighbor] = nextIndex
        index2Node[nextIndex] = neighbor
        nextIndex += 1
    secondNeighbors = []
    for neighbor in firstNeighbors:
        g2.add_edge(neighbor, center)
        for candidate in g.neighbors(neighbor):
            if candidate == center:
                continue
            # Every first-hop incident edge is kept; only unseen
            # second-hop nodes get registered and indexed.
            g2.add_edge(neighbor, candidate)
            if candidate not in firstNeighbors and candidate not in secondNeighbors:
                secondNeighbors.append(candidate)
                node2Index[candidate] = nextIndex
                index2Node[nextIndex] = candidate
                nextIndex += 1
    return g2, firstNeighbors, secondNeighbors, node2Index, index2Node
def get_proposed_xego_betweenness_and_elapsed_time(g):
    """Proposed x-ego betweenness for every node of g.

    Only the betweenness computation is timed; x-ego-network
    extraction is excluded.  Returns (node -> value, total seconds).
    """
    elapsed_time = 0.0
    centrality_map = {}
    for node in g.nodes():
        xEgoNet, firstNeighbors, secondNeighbors, node2Index, index2Node = get_xego_graph(g, node)
        start_time = time.time()
        centrality_map[node] = proposed_xego_betweenness(xEgoNet, node, firstNeighbors, secondNeighbors, node2Index, index2Node)
        elapsed_time += time.time() - start_time
    return centrality_map, elapsed_time
# Proposed algorithms on each (x-)ego network, per dataset and ratio.
for data in data_profile.values():
    for ratio in link_addition_ratio:
        data['Proposed_ego_bet'][ratio], data['Proposed_ego_elapsed_time'][ratio] = get_proposed_ego_betweenness_and_elapsed_time(data['graph'][ratio])
        data['Proposed_xego_bet'][ratio], data['Proposed_xego_elapsed_time'][ratio] = get_proposed_xego_betweenness_and_elapsed_time(data['graph'][ratio])
for data_name, data in data_profile.items():
print "###", data_name
isEqual = True
for ratio in link_addition_ratio:
for node in data['graph'][ratio].nodes():
if data['Proposed_ego_bet'][ratio][node] - data['Brandes_ego_bet'][ratio][node] > 0.00001:
isEqual = False
print ratio, node, data['Brandes_ego_bet'][ratio][node], data['Proposed_ego_bet'][ratio][node]
break
if data['Proposed_xego_bet'][ratio][node] - data['Brandes_xego_bet'][ratio][node] > 0.00001:
isEqual = False
print ratio, node, data['Brandes_xego_bet'][ratio][node], data['Proposed_xego_bet'][ratio][node]
break
if isEqual:
print "All betweenness values are equals!"
else:
print "Fail!"
print
### Cambridge All betweenness values are equals! ### Infocom06 All betweenness values are equals! ### Intel All betweenness values are equals! ### Infocom05 All betweenness values are equals!
# Correlate approximate (ego / x-ego) betweenness with the global
# ground truth.  Values are explicitly aligned by node key: the
# original passed two different dicts' .values() to the correlation
# functions, relying on both iterating in the same order -- which the
# language does not guarantee for independently-built dicts.
for data in data_profile.values():
    for ratio in link_addition_ratio:
        nodes = sorted(data['global_bet'][ratio].keys())
        global_vals = [data['global_bet'][ratio][node] for node in nodes]
        ego_vals = [data['Proposed_ego_bet'][ratio][node] for node in nodes]
        xego_vals = [data['Proposed_xego_bet'][ratio][node] for node in nodes]
        data['ego_global_pearson_corr'][ratio] = stats.pearsonr(ego_vals, global_vals)[0]
        data['xego_global_pearson_corr'][ratio] = stats.pearsonr(xego_vals, global_vals)[0]
        data['ego_global_spearman_corr'][ratio] = stats.spearmanr(ego_vals, global_vals)[0]
        data['xego_global_spearman_corr'][ratio] = stats.spearmanr(xego_vals, global_vals)[0]
# Print the four correlation series per dataset and ratio.
for data_name, data in data_profile.items():
    print "###", data_name
    for ratio in link_addition_ratio:
        print "%f: Pearson %7.5f - %7.5f, Spearman %7.5f - %7.5f" % (ratio, data['ego_global_pearson_corr'][ratio], data['xego_global_pearson_corr'][ratio], data['ego_global_spearman_corr'][ratio], data['xego_global_spearman_corr'][ratio])
    print
### Cambridge 0.100000: Pearson 0.97435 - 1.00000, Spearman 0.98974 - 1.00000 0.200000: Pearson 0.89201 - 0.95593, Spearman 0.98297 - 0.99248 0.300000: Pearson 0.58546 - 0.96510, Spearman 0.86920 - 0.94849 0.400000: Pearson 0.50182 - 0.87949, Spearman 0.75177 - 0.92199 0.500000: Pearson 0.30942 - 0.98596, Spearman 0.58042 - 0.98601 0.600000: Pearson 0.91412 - 1.00000, Spearman 0.94921 - 1.00000 0.700000: Pearson 0.70612 - 1.00000, Spearman 0.75571 - 1.00000 0.800000: Pearson 0.81575 - 1.00000, Spearman 0.73665 - 0.99822 0.900000: Pearson 1.00000 - 1.00000, Spearman 1.00000 - 1.00000 ### Infocom06 0.100000: Pearson 0.21325 - 0.26149, Spearman 0.31790 - 0.45351 0.200000: Pearson 0.04869 - 0.58147, Spearman 0.12212 - 0.64588 0.300000: Pearson 0.34373 - 0.99405, Spearman 0.17271 - 0.98294 0.400000: Pearson 0.55874 - 0.99932, Spearman 0.15316 - 0.99629 0.500000: Pearson 0.77453 - 0.99969, Spearman 0.17749 - 0.99890 0.600000: Pearson 0.29430 - 0.98527, Spearman 0.39760 - 0.95441 0.700000: Pearson 0.24460 - 0.99232, Spearman 0.69822 - 0.95379 0.800000: Pearson -0.03118 - 0.99728, Spearman 0.78058 - 0.98448 0.900000: Pearson 0.64310 - 1.00000, Spearman 0.79998 - 1.00000 ### Intel 0.100000: Pearson 0.90685 - 1.00000, Spearman 0.87500 - 1.00000 0.200000: Pearson 0.95553 - 1.00000, Spearman 1.00000 - 1.00000 0.300000: Pearson 0.75035 - 0.99773, Spearman 0.96925 - 1.00000 0.400000: Pearson 0.95887 - 1.00000, Spearman 1.00000 - 1.00000 0.500000: Pearson 0.84194 - 1.00000, Spearman 0.88113 - 1.00000 0.600000: Pearson 0.40899 - 1.00000, Spearman 0.11611 - 1.00000 0.700000: Pearson 0.40201 - 1.00000, Spearman 0.62984 - 1.00000 0.800000: Pearson 0.79471 - 1.00000, Spearman 0.78971 - 1.00000 0.900000: Pearson 1.00000 - 1.00000, Spearman 1.00000 - 1.00000 ### Infocom05 0.100000: Pearson 0.24996 - 0.36821, Spearman 0.33113 - 0.49406 0.200000: Pearson 0.00931 - 0.13452, Spearman 0.15490 - 0.48194 0.300000: Pearson -0.10981 - 0.16741, Spearman 0.12814 - 0.70957 0.400000: Pearson 
-0.06346 - 0.61565, Spearman 0.29531 - 0.83902 0.500000: Pearson -0.08782 - 0.91538, Spearman 0.35836 - 0.92334 0.600000: Pearson -0.14270 - 0.92827, Spearman 0.57195 - 0.94756 0.700000: Pearson 0.04975 - 0.99998, Spearman 0.67840 - 0.99965 0.800000: Pearson 0.62972 - 1.00000, Spearman 0.75433 - 1.00000 0.900000: Pearson 0.95045 - 1.00000, Spearman 0.89834 - 1.00000
def getNumOfNodesEdgesInConnectedComponent(g):
    """For every node, the size of its connected component.

    Returns (node -> component node count, node -> component edge
    count).  A single pass over the components replaces the original
    per-node linear membership scan (which was O(nodes * components *
    component size)) and drops the Python-2-only xrange.
    """
    numNodeList = dict.fromkeys(g, 0)
    numEdgeList = dict.fromkeys(g, 0)
    for component in net.connected_component_subgraphs(g):
        nodeCount = component.number_of_nodes()
        edgeCount = component.number_of_edges()
        for node in component.nodes():
            numNodeList[node] = nodeCount
            numEdgeList[node] = edgeCount
    return numNodeList, numEdgeList
def get_ego_coverage(g, numNodeInConnectedComponent, numEdgeInConnectedComponent):
    """Fraction of each node's connected component covered by its ego
    network (node and edge coverage separately).

    Nodes whose component size is reported as 0 keep a 0.0 ratio.
    """
    numNodeRateList = dict.fromkeys(g, 0.0)
    numEdgeRateList = dict.fromkeys(g, 0.0)
    for node in g.nodes():
        egoNet = get_ego_graph(g, node)[0]
        componentNodes = numNodeInConnectedComponent[node]
        componentEdges = numEdgeInConnectedComponent[node]
        if componentNodes != 0:
            numNodeRateList[node] = float(egoNet.number_of_nodes()) / componentNodes
        if componentEdges != 0:
            numEdgeRateList[node] = float(egoNet.number_of_edges()) / componentEdges
    return numNodeRateList, numEdgeRateList
def get_xego_coverage(g, numNodeInConnectedComponent, numEdgeInConnectedComponent):
    """Fraction of each node's connected component covered by its
    extended ego network (node and edge coverage separately).

    Nodes whose component size is reported as 0 keep a 0.0 ratio.
    """
    numNodeRateList = dict.fromkeys(g, 0.0)
    numEdgeRateList = dict.fromkeys(g, 0.0)
    for node in g.nodes():
        xEgoNet = get_xego_graph(g, node)[0]
        componentNodes = numNodeInConnectedComponent[node]
        componentEdges = numEdgeInConnectedComponent[node]
        if componentNodes != 0:
            numNodeRateList[node] = float(xEgoNet.number_of_nodes()) / componentNodes
        if componentEdges != 0:
            numEdgeRateList[node] = float(xEgoNet.number_of_edges()) / componentEdges
    return numNodeRateList, numEdgeRateList
def getAverageFromDict(dic):
    """Arithmetic mean of a dict's values (raises on an empty dict,
    as the original did).

    sum(dic.values()) replaces the manual items() loop, and float()
    protects against Python 2 integer truncation on all-int values.
    """
    return float(sum(dic.values())) / len(dic)
# Average (x-)ego coverage of the surrounding connected component,
# per dataset and ratio.
for data in data_profile.values():
    for ratio in link_addition_ratio:
        numNodesCC, numEdgesCC = getNumOfNodesEdgesInConnectedComponent(data['graph'][ratio])
        numNodes_ego_coverage, numEdges_ego_coverage = get_ego_coverage(data['graph'][ratio], numNodesCC, numEdgesCC)
        numNodes_xego_coverage, numEdges_xego_coverage = get_xego_coverage(data['graph'][ratio], numNodesCC, numEdgesCC)
        data['ego_node_coverage_in_connected_component'][ratio] = getAverageFromDict(numNodes_ego_coverage)
        data['ego_edge_coverage_in_connected_component'][ratio] = getAverageFromDict(numEdges_ego_coverage)
        data['xego_node_coverage_in_connected_component'][ratio] = getAverageFromDict(numNodes_xego_coverage)
        data['xego_edge_coverage_in_connected_component'][ratio] = getAverageFromDict(numEdges_xego_coverage)
# Print the coverage averages.  The trailing comma on the first print
# keeps ego and x-ego values on one line (Python 2 print semantics).
for data_name, data in data_profile.items():
    print "###", data_name
    for ratio in link_addition_ratio:
        print ratio, data['ego_node_coverage_in_connected_component'][ratio], data['ego_edge_coverage_in_connected_component'][ratio],
        print data['xego_node_coverage_in_connected_component'][ratio], data['xego_edge_coverage_in_connected_component'][ratio]
    print
### Cambridge 0.1 0.666666666667 0.607142857143 0.916666666667 0.821428571429 0.2 0.483333333333 0.422619047619 0.683333333333 0.613095238095 0.3 0.6 0.488095238095 0.916666666667 0.77380952381 0.4 0.486111111111 0.382183908046 0.916666666667 0.709770114943 0.5 0.625 0.480769230769 0.986111111111 0.848290598291 0.6 0.75 0.625 1.0 0.940972222222 0.7 0.819444444444 0.718553459119 1.0 0.963836477987 0.8 0.902777777778 0.827683615819 1.0 0.991525423729 0.9 0.972222222222 0.947916666667 1.0 1.0 ### Infocom06 0.1 0.141014033499 0.0852171985816 0.629244001811 0.293439716312 0.2 0.244553087469 0.160383183341 0.870974598788 0.46741631897 0.3 0.344439816743 0.233492275415 0.941274468971 0.599173501176 0.4 0.449187838401 0.329812570189 0.955226988755 0.715215118305 0.5 0.543107038734 0.428260401464 0.961890878801 0.797058800073 0.6 0.633902540608 0.537893087118 0.980216576426 0.858658989472 0.7 0.711995002082 0.634824077999 0.986047480217 0.901087022346 0.8 0.784048313203 0.732801893632 0.999375260308 0.932006656269 0.9 0.8527696793 0.812225730628 1.0 0.95965276867 ### Intel 0.1 0.6 0.52 0.84 0.76 0.2 0.587301587302 0.527777777778 0.904761904762 0.805555555556 0.3 0.432098765432 0.350427350427 0.802469135802 0.709401709402 0.4 0.506172839506 0.430555555556 1.0 0.791666666667 0.5 0.58024691358 0.485380116959 1.0 0.836257309942 0.6 0.728395061728 0.622222222222 1.0 0.924444444444 0.7 0.802469135802 0.674603174603 1.0 0.960317460317 0.8 0.876543209877 0.792114695341 1.0 0.985663082437 0.9 0.975308641975 0.955555555556 1.0 1.0 ### Infocom05 0.1 0.145299145299 0.0893209354748 0.464825772518 0.272471118625 0.2 0.26375 0.151701570681 0.8775 0.479188481675 0.3 0.38125 0.23 0.9675 0.639824561404 0.4 0.47875 0.323966942149 0.98875 0.744077134986 0.5 0.562165377751 0.420300021584 0.984533016062 0.812162745521 0.6 0.656157049375 0.539203527628 0.990481856038 0.87267465895 0.7 0.744199881023 0.6531344487 0.998810232005 0.920096754687 0.8 0.823914336704 0.754573170732 1.0 0.954050522648 
0.9 0.904818560381 0.876598549769 1.0 0.975346077785
class SortedDisplayDict(dict):
    """dict whose str() lists entries in sorted key order."""
    def __str__(self):
        parts = ["%r: %r" % (key, self[key]) for key in sorted(self)]
        return "{" + ", ".join(parts) + "}"
    def ordered_keys(self):
        """Keys in sorted order."""
        return sorted(self.keys())
# Demonstrate SortedDisplayDict on one correlation series: its str()
# and ordered_keys() present the ratios in increasing order.
dic = SortedDisplayDict(data_profile['Infocom05']['xego_global_spearman_corr'])
print dic
print [dic[x] for x in dic.ordered_keys()]
{0.1: 0.4940567002323909, 0.2: 0.4819401444788442, 0.3: 0.70956848030018771, 0.4: 0.83902439024390252, 0.5: 0.9233449477351916, 0.6: 0.94756097560975605, 0.7: 0.99965156794425081, 0.8: 1.0, 0.9: 1.0} [0.4940567002323909, 0.4819401444788442, 0.70956848030018771, 0.83902439024390252, 0.9233449477351916, 0.94756097560975605, 0.99965156794425081, 1.0, 1.0]
%matplotlib inline
from matplotlib.transforms import Bbox
def full_extent(ax, pad=0.0):
    """Full extent of an axes in display coordinates, including tick
    labels, axis labels, and the title, expanded by `pad`.

    Used below to crop each subplot into its own PDF.
    """
    # Text extents are only defined after the figure has been drawn.
    ax.figure.canvas.draw()
    items = ax.get_xticklabels() + ax.get_yticklabels()
    items += [ax, ax.title, ax.xaxis.label, ax.yaxis.label]
    # The original appended [ax, ax.title] a second time; the Bbox
    # union is unaffected by duplicates, so they are dropped here.
    bbox = Bbox.union([item.get_window_extent() for item in items])
    return bbox.expanded(1.01 + pad, 1.01 + pad)
# --- Correlation figure: one subplot per dataset ---------------------
xticklabels = [r'$10\%$', r'$20\%$', r'$30\%$', r'$40\%$', r'$50\%$', r'$60\%$', r'$70\%$', r'$80\%$', r'$90\%$']
yticklabels = [r'$-0.6$', r'$-0.4$', r'$-0.2$', r'$0.0$', r'$0.2$', r'$0.4$', r'$0.6$', r'$0.8$', r'$1.0$', r'$1.2$']
ind = np.arange(9)   # one x position per link-addition ratio
barWidth = 0.35      # reused as a tick-centering offset below
fig, axes = plt.subplots(2, 2, figsize=(20, 14))
subfigures = {}
# Fixed dataset -> subplot placement (2x2 grid).
for data_name in data_profile.keys():
    if data_name == 'Infocom05':
        subfigures[data_name] = axes[0][0]
    if data_name == 'Infocom06':
        subfigures[data_name] = axes[0][1]
    if data_name == 'Cambridge':
        subfigures[data_name] = axes[1][0]
    if data_name == 'Intel':
        subfigures[data_name] = axes[1][1]
for data_name in data_profile.keys():
    subfigures[data_name].set_xticks(ind + barWidth/2.)
    subfigures[data_name].set_xticklabels(xticklabels, fontsize=21)
    subfigures[data_name].set_ylim([-0.6, 1.2])
    subfigures[data_name].set_yticklabels(yticklabels, fontsize=21)
    # Four series, all black with distinct line styles and markers
    # (print-friendly): {x-ego, ego} x {Spearman, Pearson}.
    corr_dic = SortedDisplayDict(data_profile[data_name]['xego_global_spearman_corr'])
    corr_value_list = [corr_dic[x] for x in corr_dic.ordered_keys()]
    subfigures[data_name].plot(ind + barWidth/2.,
                               corr_value_list,
                               color='k', linestyle='-', marker='s', markersize=8,
                               label='xego, global (spearman)')
    corr_dic = SortedDisplayDict(data_profile[data_name]['ego_global_spearman_corr'])
    corr_value_list = [corr_dic[x] for x in corr_dic.ordered_keys()]
    subfigures[data_name].plot(ind + barWidth/2.,
                               corr_value_list,
                               color='k', linestyle='-', marker='|', markersize=8,
                               label='ego, global (spearman)')
    corr_dic = SortedDisplayDict(data_profile[data_name]['xego_global_pearson_corr'])
    corr_value_list = [corr_dic[x] for x in corr_dic.ordered_keys()]
    subfigures[data_name].plot(ind + barWidth/2.,
                               corr_value_list,
                               color='k', linestyle='--', marker='s', markersize=8,
                               label='xego, global (pearson)')
    corr_dic = SortedDisplayDict(data_profile[data_name]['ego_global_pearson_corr'])
    corr_value_list = [corr_dic[x] for x in corr_dic.ordered_keys()]
    subfigures[data_name].plot(ind + barWidth/2.,
                               corr_value_list,
                               color='k', linestyle='--', marker='|', markersize=8,
                               label='ego, global (pearson)')
    subfigures[data_name].set_xlabel('Link Addition Ratio', fontsize=21)
    subfigures[data_name].set_ylabel('Correlation Coefficient', fontsize=21)
    subfigures[data_name].grid(True)
    #subfigures[data_name].set_title(data_name, fontsize=21)
    # Only one subplot carries the legend (shared across the figure).
    if data_name == 'Infocom05':
        subfigures[data_name].legend(loc = 4, fontsize=18)
    # Save each subplot as its own PDF, cropped to that subplot's
    # full extent (converted from display to inch coordinates).
    extent = full_extent(subfigures[data_name]).transformed(fig.dpi_scale_trans.inverted())
    fig.savefig(str(data_name) + '_correlation.pdf', format='pdf', bbox_inches=extent)
# ---------------------------------------------------------------------------
# Figure: clustering coefficient (line, left y axis) and diameter of the
# connected component (bars, right y axis), 2x2 panels, one per dataset.
# NOTE(review): indentation was lost in the paste; loop bodies reconstructed.
# ---------------------------------------------------------------------------
xticklabels = [r'$10\%$', r'$20\%$', r'$30\%$', r'$40\%$', r'$50\%$', r'$60\%$', r'$70\%$', r'$80\%$', r'$90\%$']
yticklabels = [r'$0.0$', r'$0.2$', r'$0.4$', r'$0.6$', r'$0.8$', r'$1.0$']
yticklabels2 = [r'$0$', r'$2$', r'$4$', r'$6$', r'$8$', r'$10$', r'$12$']
fig, axes = plt.subplots(2, 2, figsize=(20, 14))
subfigures = {}
for data_name in data_profile.keys():
    if data_name == 'Infocom05':
        subfigures[data_name] = axes[0][0]
    if data_name == 'Infocom06':
        subfigures[data_name] = axes[0][1]
    if data_name == 'Cambridge':
        subfigures[data_name] = axes[1][0]
    if data_name == 'Intel':
        subfigures[data_name] = axes[1][1]
for data_name in data_profile.keys():
    subfigures[data_name].set_xticks(ind + barWidth/2.)
    subfigures[data_name].set_xticklabels(xticklabels, fontsize=21)
    subfigures[data_name].set_ylim([0.0, 1.0])
    subfigures[data_name].set_yticklabels(yticklabels, fontsize=21)
    cluster_coef_dic = SortedDisplayDict(data_profile[data_name]['clustering_coefficient'])
    cluster_coef_value_list = [cluster_coef_dic[x] for x in cluster_coef_dic.ordered_keys()]
    subfigures[data_name].plot(ind + barWidth/2., cluster_coef_value_list,
                               color='k', linestyle='-', marker='s', markersize=8,
                               label='Clustering Coefficient')
    subfigures[data_name].set_xlabel('Link Addition Ratio', fontsize=21)
    subfigures[data_name].set_ylabel('Clustering Coefficient', fontsize=21)
    #subfigures[data_name].set_title(data_name, fontsize=21)
    subfigures[data_name].grid(True)
    if data_name == 'Infocom05':
        subfigures[data_name].legend(loc=2)
    # The bars live on a twin y axis; note this REPLACES the dict entry, so
    # subfigures[data_name] refers to the twin axis from here on.
    subfigures[data_name] = subfigures[data_name].twinx()
    subfigures[data_name].set_ylim([0, 12])
    subfigures[data_name].set_yticklabels(yticklabels2, fontsize=21)
    diameter_dic = SortedDisplayDict(data_profile[data_name]['diameter_cc'])
    diameter_list = [diameter_dic[x] for x in diameter_dic.ordered_keys()]
    subfigures[data_name].bar(ind, diameter_list, barWidth, color='k', alpha=0.3, label='Diameter of Connected Component')
    subfigures[data_name].set_ylabel('Diameter of Connected Component', fontsize=21)
    if data_name == 'Infocom05':
        subfigures[data_name].legend(loc=4, fontsize=18)
    subfigures[data_name].grid(True)
    # NOTE(review): this writes the WHOLE 2x2 figure once per dataset name;
    # the single-panel figures below overwrite these files -- confirm whether
    # the save was meant to sit after the loop instead.
    fig.savefig(str(data_name) + '_clustering_and_diameter.pdf', format='pdf', bbox_inches='tight')
# ---------------------------------------------------------------------------
# Single-panel clustering/diameter figure for Infocom05 (line on the left
# axis, diameter bars on a twin right axis).
# NOTE(review): indentation was lost in the paste; the `if` bodies below are
# reconstructed.
# ---------------------------------------------------------------------------
xticklabels = [r'$10\%$', r'$20\%$', r'$30\%$', r'$40\%$', r'$50\%$', r'$60\%$', r'$70\%$', r'$80\%$', r'$90\%$']
yticklabels = [r'$0.0$', r'$0.2$', r'$0.4$', r'$0.6$', r'$0.8$', r'$1.0$']
yticklabels2 = [r'$0$', r'$2$', r'$4$', r'$6$', r'$8$', r'$10$', r'$12$']
fig, axes = plt.subplots(1, 1, figsize=(8.719, 6.07))
subfigures = {}
data_name = 'Infocom05'
subfigures[data_name] = axes
subfigures[data_name].set_xticks(ind + barWidth/2.)
subfigures[data_name].set_xticklabels(xticklabels, fontsize=21)
subfigures[data_name].set_ylim([0.0, 1.0])
subfigures[data_name].set_yticklabels(yticklabels, fontsize=21)
cluster_coef_dic = SortedDisplayDict(data_profile[data_name]['clustering_coefficient'])
cluster_coef_value_list = [cluster_coef_dic[x] for x in cluster_coef_dic.ordered_keys()]
subfigures[data_name].plot(ind + barWidth/2., cluster_coef_value_list,
                           color='k', linestyle='-', marker='s', markersize=8,
                           label='Clustering Coefficient')
subfigures[data_name].set_xlabel('Link Addition Ratio', fontsize=21)
subfigures[data_name].set_ylabel('Clustering Coefficient', fontsize=21)
#subfigures[data_name].set_title(data_name, fontsize=21)
subfigures[data_name].grid(True)
if data_name == 'Infocom05':  # always true here; guard kept from the shared notebook cell
    subfigures[data_name].legend(loc=2, fontsize=18)
# Twin y axis for the diameter bars (replaces the dict entry).
subfigures[data_name] = subfigures[data_name].twinx()
subfigures[data_name].set_ylim([0, 12])
subfigures[data_name].set_yticklabels(yticklabels2, fontsize=21)
diameter_dic = SortedDisplayDict(data_profile[data_name]['diameter_cc'])
diameter_list = [diameter_dic[x] for x in diameter_dic.ordered_keys()]
subfigures[data_name].bar(ind, diameter_list, barWidth, color='k', alpha=0.3, label='Diameter of Connected Component')
subfigures[data_name].set_ylabel('Diameter of Connected Component', fontsize=21)
if data_name == 'Infocom05':
    subfigures[data_name].legend(loc=4, fontsize=18)
subfigures[data_name].grid(True)
fig.savefig(str(data_name) + '_clustering_and_diameter.pdf', format='pdf', bbox_inches='tight')
# Single-panel clustering/diameter figure for Infocom06: clustering
# coefficient as a line on the left y axis, diameter of the connected
# component as bars on a twin right y axis.
xticklabels = [r'$%d\%%$' % p for p in range(10, 100, 10)]
yticklabels = [r'$%.1f$' % (v / 10.) for v in range(0, 11, 2)]
yticklabels2 = [r'$%d$' % v for v in range(0, 13, 2)]
fig, axes = plt.subplots(1, 1, figsize=(8.719, 6.07))
subfigures = {}
data_name = 'Infocom06'
subfigures[data_name] = axes
xs = ind + barWidth / 2.
subfigures[data_name].set_xticks(xs)
subfigures[data_name].set_xticklabels(xticklabels, fontsize=21)
subfigures[data_name].set_ylim([0.0, 1.0])
subfigures[data_name].set_yticklabels(yticklabels, fontsize=21)
cluster_coef_dic = SortedDisplayDict(data_profile[data_name]['clustering_coefficient'])
cluster_coef_value_list = [cluster_coef_dic[key] for key in cluster_coef_dic.ordered_keys()]
subfigures[data_name].plot(xs, cluster_coef_value_list, color='k', linestyle='-',
                           marker='s', markersize=8, label='Clustering Coefficient')
subfigures[data_name].set_xlabel('Link Addition Ratio', fontsize=21)
subfigures[data_name].set_ylabel('Clustering Coefficient', fontsize=21)
subfigures[data_name].grid(True)
# Overlay the diameter bars on a twin y axis; the dict entry now points at it.
subfigures[data_name] = subfigures[data_name].twinx()
subfigures[data_name].set_ylim([0, 12])
subfigures[data_name].set_yticklabels(yticklabels2, fontsize=21)
diameter_dic = SortedDisplayDict(data_profile[data_name]['diameter_cc'])
diameter_list = [diameter_dic[key] for key in diameter_dic.ordered_keys()]
subfigures[data_name].bar(ind, diameter_list, barWidth, color='k', alpha=0.3,
                          label='Diameter of Connected Component')
subfigures[data_name].set_ylabel('Diameter of Connected Component', fontsize=21)
subfigures[data_name].grid(True)
fig.savefig(data_name + '_clustering_and_diameter.pdf', format='pdf', bbox_inches='tight')
# Single-panel clustering/diameter figure for the Cambridge dataset
# (same layout as the Infocom panels: line left axis, bars right axis).
xticklabels = [r'$%d\%%$' % p for p in range(10, 100, 10)]
yticklabels = [r'$%.1f$' % (v / 10.) for v in range(0, 11, 2)]
yticklabels2 = [r'$%d$' % v for v in range(0, 13, 2)]
fig, axes = plt.subplots(1, 1, figsize=(8.719, 6.07))
subfigures = {}
data_name = 'Cambridge'
subfigures[data_name] = axes
xs = ind + barWidth / 2.
subfigures[data_name].set_xticks(xs)
subfigures[data_name].set_xticklabels(xticklabels, fontsize=21)
subfigures[data_name].set_ylim([0.0, 1.0])
subfigures[data_name].set_yticklabels(yticklabels, fontsize=21)
cluster_coef_dic = SortedDisplayDict(data_profile[data_name]['clustering_coefficient'])
cluster_coef_value_list = [cluster_coef_dic[key] for key in cluster_coef_dic.ordered_keys()]
subfigures[data_name].plot(xs, cluster_coef_value_list, color='k', linestyle='-',
                           marker='s', markersize=8, label='Clustering Coefficient')
subfigures[data_name].set_xlabel('Link Addition Ratio', fontsize=21)
subfigures[data_name].set_ylabel('Clustering Coefficient', fontsize=21)
subfigures[data_name].grid(True)
# Diameter bars go on a twin y axis; the dict entry now points at it.
subfigures[data_name] = subfigures[data_name].twinx()
subfigures[data_name].set_ylim([0, 12])
subfigures[data_name].set_yticklabels(yticklabels2, fontsize=21)
diameter_dic = SortedDisplayDict(data_profile[data_name]['diameter_cc'])
diameter_list = [diameter_dic[key] for key in diameter_dic.ordered_keys()]
subfigures[data_name].bar(ind, diameter_list, barWidth, color='k', alpha=0.3,
                          label='Diameter of Connected Component')
subfigures[data_name].set_ylabel('Diameter of Connected Component', fontsize=21)
subfigures[data_name].grid(True)
fig.savefig(data_name + '_clustering_and_diameter.pdf', format='pdf', bbox_inches='tight')
# Single-panel clustering/diameter figure for the Intel dataset
# (line on the left y axis, diameter bars on a twin right y axis).
xticklabels = [r'$%d\%%$' % p for p in range(10, 100, 10)]
yticklabels = [r'$%.1f$' % (v / 10.) for v in range(0, 11, 2)]
yticklabels2 = [r'$%d$' % v for v in range(0, 13, 2)]
fig, axes = plt.subplots(1, 1, figsize=(8.719, 6.07))
subfigures = {}
data_name = 'Intel'
subfigures[data_name] = axes
xs = ind + barWidth / 2.
subfigures[data_name].set_xticks(xs)
subfigures[data_name].set_xticklabels(xticklabels, fontsize=21)
subfigures[data_name].set_ylim([0.0, 1.0])
subfigures[data_name].set_yticklabels(yticklabels, fontsize=21)
cluster_coef_dic = SortedDisplayDict(data_profile[data_name]['clustering_coefficient'])
cluster_coef_value_list = [cluster_coef_dic[key] for key in cluster_coef_dic.ordered_keys()]
subfigures[data_name].plot(xs, cluster_coef_value_list, color='k', linestyle='-',
                           marker='s', markersize=8, label='Clustering Coefficient')
subfigures[data_name].set_xlabel('Link Addition Ratio', fontsize=21)
subfigures[data_name].set_ylabel('Clustering Coefficient', fontsize=21)
subfigures[data_name].grid(True)
# Diameter bars go on a twin y axis; the dict entry now points at it.
subfigures[data_name] = subfigures[data_name].twinx()
subfigures[data_name].set_ylim([0, 12])
subfigures[data_name].set_yticklabels(yticklabels2, fontsize=21)
diameter_dic = SortedDisplayDict(data_profile[data_name]['diameter_cc'])
diameter_list = [diameter_dic[key] for key in diameter_dic.ordered_keys()]
subfigures[data_name].bar(ind, diameter_list, barWidth, color='k', alpha=0.3,
                          label='Diameter of Connected Component')
subfigures[data_name].set_ylabel('Diameter of Connected Component', fontsize=21)
subfigures[data_name].grid(True)
fig.savefig(data_name + '_clustering_and_diameter.pdf', format='pdf', bbox_inches='tight')
# ---------------------------------------------------------------------------
# Figure: mean vertex/edge coverage of ego and x-ego networks within the
# connected component, one panel per dataset; each panel saved via its extent.
# NOTE(review): indentation was lost in the paste; loop bodies reconstructed.
# ---------------------------------------------------------------------------
xticklabels = [r'$10\%$', r'$20\%$', r'$30\%$', r'$40\%$', r'$50\%$', r'$60\%$', r'$70\%$', r'$80\%$', r'$90\%$']
yticklabels = [r'$0.0$', r'$0.2$', r'$0.4$', r'$0.6$', r'$0.8$', r'$1.0$', r'$1.2$']
fig, axes = plt.subplots(2, 2, figsize=(20, 14))
subfigures = {}
for data_name in data_profile.keys():
    if data_name == 'Infocom05':
        subfigures[data_name] = axes[0][0]
    if data_name == 'Infocom06':
        subfigures[data_name] = axes[0][1]
    if data_name == 'Cambridge':
        subfigures[data_name] = axes[1][0]
    if data_name == 'Intel':
        subfigures[data_name] = axes[1][1]
for data_name in data_profile.keys():
    subfigures[data_name].set_xticks(ind + barWidth/2.)
    subfigures[data_name].set_xticklabels(xticklabels, fontsize=21)
    subfigures[data_name].set_ylim([0.0, 1.2])
    subfigures[data_name].set_yticklabels(yticklabels, fontsize=21)
    # Four coverage series: {x-ego, ego} x {vertex, edge}.
    coverage_dic = SortedDisplayDict(data_profile[data_name]['xego_node_coverage_in_connected_component'])
    coverage_list = [coverage_dic[x] for x in coverage_dic.ordered_keys()]
    subfigures[data_name].plot(ind+barWidth/2., coverage_list,
                               color='k', linestyle='-', marker='s', markersize=8,
                               label=r'Mean vertex coverage of x-ego networks')
    coverage_dic = SortedDisplayDict(data_profile[data_name]['xego_edge_coverage_in_connected_component'])
    coverage_list = [coverage_dic[x] for x in coverage_dic.ordered_keys()]
    subfigures[data_name].plot(ind+barWidth/2., coverage_list,
                               color='k', linestyle='-', marker='|', markersize=8,
                               label=r'Mean edge coverage of x-ego networks')
    coverage_dic = SortedDisplayDict(data_profile[data_name]['ego_node_coverage_in_connected_component'])
    coverage_list = [coverage_dic[x] for x in coverage_dic.ordered_keys()]
    subfigures[data_name].plot(ind+barWidth/2., coverage_list,
                               color='k', linestyle='--', marker='s', markersize=8,
                               label=r'Mean vertex coverage of ego networks')
    coverage_dic = SortedDisplayDict(data_profile[data_name]['ego_edge_coverage_in_connected_component'])
    coverage_list = [coverage_dic[x] for x in coverage_dic.ordered_keys()]
    subfigures[data_name].plot(ind+barWidth/2., coverage_list,
                               color='k', linestyle='--', marker='|', markersize=8,
                               label=r'Mean edge coverage of ego networks')
    subfigures[data_name].set_xlabel('Link Addition Ratio', fontsize=21)
    subfigures[data_name].set_ylabel('Coverage', fontsize=21)
    #subfigures[data_name].set_title(data_name, fontsize=21)
    if data_name == 'Infocom05':
        subfigures[data_name].legend(loc=4, fontsize=18)
    subfigures[data_name].grid(True)
    # Save each panel separately by clipping to its extent.
    extent = full_extent(subfigures[data_name]).transformed(fig.dpi_scale_trans.inverted())
    fig.savefig(str(data_name) + '_graph_info_2.pdf', format='pdf', bbox_inches=extent)
# ---------------------------------------------------------------------------
# Figure: mean execution time of Brandes vs. the proposed algorithm on x-ego
# networks, one panel per dataset; each panel saved via its extent.
# NOTE(review): indentation was lost in the paste; loop bodies reconstructed.
# ---------------------------------------------------------------------------
width = 0.35
xticklabels = [r'$10\%$', r'$20\%$', r'$30\%$', r'$40\%$', r'$50\%$', r'$60\%$', r'$70\%$', r'$80\%$', r'$90\%$']
fig, axes = plt.subplots(2, 2, figsize=(20, 14))
subfigures = {}
for data_name in data_profile.keys():
    if data_name == 'Infocom05':
        subfigures[data_name] = axes[0][0]
    if data_name == 'Infocom06':
        subfigures[data_name] = axes[0][1]
    if data_name == 'Cambridge':
        subfigures[data_name] = axes[1][0]
    if data_name == 'Intel':
        subfigures[data_name] = axes[1][1]
for data_name in data_profile.keys():
    subfigures[data_name].set_xticks(ind + width/2.)
    subfigures[data_name].set_xticklabels(xticklabels, fontsize=21)
    subfigures[data_name].tick_params(labelsize=21)
    time_dic = SortedDisplayDict(data_profile[data_name]['Brandes_xego_elapsed_time'])
    time_list = [time_dic[x] for x in time_dic.ordered_keys()]
    subfigures[data_name].plot(ind + width/2., time_list,
                               color='k', linestyle='--', marker='o', markersize=7, label='Brandes')
    time_dic = SortedDisplayDict(data_profile[data_name]['Proposed_xego_elapsed_time'])
    time_list = [time_dic[x] for x in time_dic.ordered_keys()]
    subfigures[data_name].plot(ind + width/2., time_list,
                               color='k', linestyle='-', marker='x', markersize=7, label='Proposed')
    subfigures[data_name].set_xlabel('Link Addition Ratio', fontsize=21)
    subfigures[data_name].set_ylabel('Mean Execution Time (sec.)', fontsize=21)
    #subfigures[data_name].set_title(data_name, fontsize=21)
    if data_name == 'Infocom05':
        subfigures[data_name].legend(loc=0, fontsize=18)
    subfigures[data_name].grid(True)
    extent = full_extent(subfigures[data_name]).transformed(fig.dpi_scale_trans.inverted())
    fig.savefig(str(data_name) + '_xEgo_elapsed_time.pdf', bbox_inches=extent)
# ---------------------------------------------------------------------------
# Figure: skip ratio of the proposed algorithm's two dependency checks, one
# panel per dataset.
# NOTE(review): indentation was lost in the paste; loop bodies reconstructed.
# NOTE(review): xEgo_execution_dependency1 / xEgo_execution_dependency2 are
# NOT defined anywhere above (the pasted traceback below confirms the
# NameError); they must be provided as 4-dataset x 9-ratio sequences before
# this cell runs.
# ---------------------------------------------------------------------------
width = 0.35
xticklabels = [r'$10\%$', r'$20\%$', r'$30\%$', r'$40\%$', r'$50\%$', r'$60\%$', r'$70\%$', r'$80\%$', r'$90\%$']
yticklabels = [r'$0\%$', r'$20\%$', r'$40\%$', r'$60\%$', r'$80\%$', r'$100\%$']
fig, axes = plt.subplots(2, 2, figsize=(20, 14))
subfigures = {}
for data_name in data_profile.keys():
    if data_name == 'Infocom05':
        subfigures[data_name] = axes[0][0]
    if data_name == 'Infocom06':
        subfigures[data_name] = axes[0][1]
    if data_name == 'Cambridge':
        subfigures[data_name] = axes[1][0]
    if data_name == 'Intel':
        subfigures[data_name] = axes[1][1]
for data_name in data_profile.keys():
    # Row index of this dataset in the dependency arrays.
    data_type = {'Infocom05': 0, 'Infocom06': 1, 'Cambridge': 2, 'Intel': 3}[data_name]
    subfigures[data_name].set_xticks(ind + width/2.)
    subfigures[data_name].set_xticklabels(xticklabels, fontsize=21)
    subfigures[data_name].tick_params(labelsize=21)
    subfigures[data_name].set_ylim([0.0, 1.1])
    subfigures[data_name].set_yticklabels(yticklabels, fontsize=21)
    subfigures[data_name].plot(ind + width/2., xEgo_execution_dependency2[data_type],
                               color='k', linestyle='-', marker='x', markersize=7, label='dependency2')
    subfigures[data_name].plot(ind + width/2., xEgo_execution_dependency1[data_type],
                               color='k', linestyle='--', marker='o', markersize=7, label='dependency1')
    subfigures[data_name].set_xlabel('Link Addition Ratio', fontsize=21)
    subfigures[data_name].set_ylabel('Skip Ratio', fontsize=21)
    #subfigures[data_name].set_title(data_name, fontsize=21)
    if data_name == 'Infocom05':
        subfigures[data_name].legend(loc=4, fontsize=18)
    subfigures[data_name].grid(True)
    extent = full_extent(subfigures[data_name]).transformed(fig.dpi_scale_trans.inverted())
    fig.savefig(str(data_name) + '_xEgo_skip_rate.pdf', bbox_inches=extent)
# NOTE(review): the cell above failed at runtime with
#   NameError: name 'xEgo_execution_dependency2' is not defined
# (raised at `subfigures[data_name].plot(ind + width/2., xEgo_execution_dependency2[data_type], ...)`).
# `xEgo_execution_dependency1` and `xEgo_execution_dependency2` must be
# defined before the skip-ratio figure is generated. The raw IPython
# traceback was pasted here verbatim; it has been converted into this
# comment so the file remains valid Python.
# ---------------------------------------------------------------------------
# Load the Spearman-correlation-over-time CSVs for step delta=1, one file per
# window size omega in {3600, 43200, 86400}. Each file holds three rows:
#   row 0: time ticks (ints)
#   row 1: x-ego vs. global correlation per tick (floats)
#   row 2: ego vs. global correlation per tick (floats)
# NOTE(review): indentation was lost in the paste; loop bodies reconstructed.
# ---------------------------------------------------------------------------
myfile = [None, None, None]
correlationTickList = [None, None, None]
correlationXEgoGlobal = [None, None, None]
correlationEgoGlobal = [None, None, None]
myfile[0] = open('1_cor_spearman_time-3600-1-undirectional.csv', 'r')
myfile[1] = open('1_cor_spearman_time-43200-1-undirectional.csv', 'r')
myfile[2] = open('1_cor_spearman_time-86400-1-undirectional.csv', 'r')
for index in range(0, 3):
    r = csv.reader(myfile[index])
    for i, row in enumerate(r):
        if i == 0:
            correlationTickList[index] = [int(num) for num in row]
        if i == 1:
            correlationXEgoGlobal[index] = [float(num) for num in row]
        if i == 2:
            correlationEgoGlobal[index] = [float(num) for num in row]
    myfile[index].close()
# The larger-window series start later and are shorter; left-pad them with
# zeros so all three align with the omega=3600 tick list.
pad = len(correlationTickList[0]) - len(correlationTickList[1])
correlationXEgoGlobal[1] = [0.0] * pad + correlationXEgoGlobal[1]
correlationEgoGlobal[1] = [0.0] * pad + correlationEgoGlobal[1]
pad = len(correlationTickList[0]) - len(correlationTickList[2])
correlationXEgoGlobal[2] = [0.0] * pad + correlationXEgoGlobal[2]
correlationEgoGlobal[2] = [0.0] * pad + correlationEgoGlobal[2]
# Scan each delta=1 x-ego series for its minimum non-zero correlation and
# count the ticks with correlation >= 0.8.
# NOTE(review): indentation was lost in the paste; loop bodies reconstructed.
# NOTE(review): min_val/ratio are overwritten per series and never used
# below -- presumably they were inspected interactively in the notebook.
min_val = 1.0
ratio = 0.0
for i in correlationXEgoGlobal[0]:
    if i != 0.0 and i < min_val:
        min_val = i
    if i >= 0.8:
        ratio = ratio + 1
min_val = 1.0
ratio = 0.0
for i in correlationXEgoGlobal[1]:
    if i != 0.0 and i < min_val:
        min_val = i
    if i >= 0.8:
        ratio = ratio + 1
min_val = 1.0
ratio = 0.0
for i in correlationXEgoGlobal[2]:
    if i != 0.0 and i < min_val:
        min_val = i
    if i >= 0.8:
        ratio = ratio + 1
# Three stacked panels (shared x axis): x-ego vs. global (solid) and ego vs.
# global (dotted) Spearman correlation over time, for windows omega = 86400
# (top), 43200 (middle), 3600 (bottom), all with step delta = 1. Green fill
# marks where the x-ego correlation dominates; red where it does not.
font = {'family': 'serif', 'color': 'black', 'weight': 'normal', 'size': 16}
ind = np.arange(len(correlationTickList[0]))
barWidth = 0.5
fig, (ax1, ax2, ax3) = plt.subplots(3, 1, sharex = True, figsize=(20, 14))
# Five x tick positions: first, quartiles, and last slot.
xind = [ind[0]+barWidth/2.,
ind[int((len(correlationTickList[0])-1)/4)]+barWidth/2.,
ind[int((len(correlationTickList[0])-1)/2)]+barWidth/2.,
ind[int((len(correlationTickList[0])-1)*3/4)]+barWidth/2.,
ind[len(correlationTickList[0])-1]+barWidth/2.]
#step: 1
# Tick labels are raw tick values shifted by 20732 (presumably the trace's
# start offset -- TODO confirm).
xtickLable = [correlationTickList[0][0]-20732,
correlationTickList[0][int((len(correlationTickList[0])-1)/4)]-20732,
correlationTickList[0][int((len(correlationTickList[0])-1)/2)]-20732,
correlationTickList[0][int((len(correlationTickList[0])-1)*3/4)]-20732,
correlationTickList[0][len(correlationTickList[0])-1]-20732]
# Top panel: omega = 86400.
ax1.tick_params(labelsize=14)
ax1.set_ylim(ymin=0.0, ymax=1.2)
#tick start - 107132
correlationXEgoGlobal2 = np.array(correlationXEgoGlobal[2])
correlationEgoGlobal2 = np.array(correlationEgoGlobal[2])
ax1.plot(ind+barWidth/2., correlationXEgoGlobal2, lw=1, ls="-", c="black")
ax1.plot(ind+barWidth/2., correlationEgoGlobal2, lw=1, ls=":", c="black")
ax1.fill_between(ind+barWidth/2.,
correlationEgoGlobal2, correlationXEgoGlobal2,
where=correlationXEgoGlobal2 >= correlationEgoGlobal2,
facecolor='green', alpha=0.2)
ax1.fill_between(ind+barWidth/2.,
correlationXEgoGlobal2, correlationEgoGlobal2,
where=correlationXEgoGlobal2 < correlationEgoGlobal2,
facecolor='red', alpha=0.2)
ax1.text(2000, 0.1, r'$\omega=86400,\,\delta=1$', fontdict=font, fontsize=18)
#ax1.annotate('107132', xy=(107132, 0.0), xycoords='data', xytext=(127132, 0.2), textcoords='offset points', arrowprops=dict(facecolor='black', shrink=0.05), horizontalalignment='right', verticalalignment='top')
# Middle panel: omega = 43200.
#tick start - 63932
ax2.set_ylim(ymin=0.0, ymax=1.2)
ax2.tick_params(labelsize=14)
correlationXEgoGlobal2 = np.array(correlationXEgoGlobal[1])
correlationEgoGlobal2 = np.array(correlationEgoGlobal[1])
ax2.plot(ind+barWidth/2., correlationXEgoGlobal2, lw=1, ls="-", c="black")
ax2.plot(ind+barWidth/2., correlationEgoGlobal2, lw=1, ls=":", c="black")
ax2.fill_between(ind+barWidth/2.,
correlationEgoGlobal2, correlationXEgoGlobal2,
where=correlationXEgoGlobal2 >= correlationEgoGlobal2,
facecolor='green', alpha=0.2)
ax2.fill_between(ind+barWidth/2., correlationXEgoGlobal2,
correlationEgoGlobal2, where=correlationXEgoGlobal2 < correlationEgoGlobal2,
facecolor='red', alpha=0.2)
ax2.text(2000, 0.1, r'$\omega=43200,\,\delta=1$', fontdict=font, fontsize=18)
#ax2.annotate('63932', xy=(63932, 0.0), xycoords='data', xytext=(63932, 0.2), textcoords='offset points', arrowprops=dict(facecolor='black', shrink=0.05), horizontalalignment='right', verticalalignment='top')
# Bottom panel: omega = 3600; this panel carries the shared axis labels.
ax3.set_ylim(ymin=0.0, ymax=1.2)
ax3.set_ylabel('Spearman Correlation', position=(0.1,1.7), fontsize=14)
ax3.set_xlabel('Time Slot', fontsize=14)
ax3.set_xticks(xind)
ax3.tick_params(labelsize=14)
ax3.set_xticklabels(xtickLable, fontsize=14)
correlationXEgoGlobal2 = np.array(correlationXEgoGlobal[0])
correlationEgoGlobal2 = np.array(correlationEgoGlobal[0])
ax3.plot(ind+barWidth/2., correlationXEgoGlobal2, lw=1, ls="-", c="black")
ax3.plot(ind+barWidth/2., correlationEgoGlobal2, lw=1, ls=":", c="black")
ax3.fill_between(ind+barWidth/2.,
correlationEgoGlobal2, correlationXEgoGlobal2,
where=correlationXEgoGlobal2 >= correlationEgoGlobal2,
facecolor='green', alpha=0.2)
ax3.fill_between(ind+barWidth/2.,
correlationXEgoGlobal2, correlationEgoGlobal2,
where=correlationXEgoGlobal2 < correlationEgoGlobal2,
facecolor='red', alpha=0.2)
ax3.text(2000, 0.1, r'$\omega=3600,\,\delta=1$', fontdict=font, fontsize=18)
# Save is disabled; re-enable to write the figure.
#plt.savefig('cor_spearman_time-step-1.pdf', bbox_inches='tight')
# ---------------------------------------------------------------------------
# Same loading as above, but for step delta=3600: one CSV per window size
# omega in {3600, 43200, 86400}; row 0 = ticks, row 1 = x-ego vs. global,
# row 2 = ego vs. global.
# NOTE(review): indentation was lost in the paste; loop bodies reconstructed.
# ---------------------------------------------------------------------------
myfile = [None, None, None]
correlationTickList = [None, None, None]
correlationXEgoGlobal = [None, None, None]
correlationEgoGlobal = [None, None, None]
myfile[0] = open('1_cor_spearman_time-3600-3600-undirectional.csv', 'r')
myfile[1] = open('1_cor_spearman_time-43200-3600-undirectional.csv', 'r')
myfile[2] = open('1_cor_spearman_time-86400-3600-undirectional.csv', 'r')
for index in range(0, 3):
    r = csv.reader(myfile[index])
    for i, row in enumerate(r):
        if i == 0:
            correlationTickList[index] = [int(num) for num in row]
        if i == 1:
            correlationXEgoGlobal[index] = [float(num) for num in row]
        if i == 2:
            correlationEgoGlobal[index] = [float(num) for num in row]
    myfile[index].close()
# Left-pad the shorter, larger-window series with zeros so all three align
# with the omega=3600 tick list.
pad = len(correlationTickList[0]) - len(correlationTickList[1])
correlationXEgoGlobal[1] = [0.0] * pad + correlationXEgoGlobal[1]
correlationEgoGlobal[1] = [0.0] * pad + correlationEgoGlobal[1]
pad = len(correlationTickList[0]) - len(correlationTickList[2])
correlationXEgoGlobal[2] = [0.0] * pad + correlationXEgoGlobal[2]
correlationEgoGlobal[2] = [0.0] * pad + correlationEgoGlobal[2]
# Scan each delta=3600 x-ego series for its minimum non-zero correlation and
# count the ticks with correlation >= 0.8.
# NOTE(review): indentation was lost in the paste; loop bodies reconstructed.
# NOTE(review): min_val/ratio are overwritten per series and never used
# below -- presumably they were inspected interactively in the notebook.
min_val = 1.0
ratio = 0.0
for i in correlationXEgoGlobal[0]:
    if i != 0.0 and i < min_val:
        min_val = i
    if i >= 0.8:
        ratio = ratio + 1
min_val = 1.0
ratio = 0.0
for i in correlationXEgoGlobal[1]:
    if i != 0.0 and i < min_val:
        min_val = i
    if i >= 0.8:
        ratio = ratio + 1
min_val = 1.0
ratio = 0.0
for i in correlationXEgoGlobal[2]:
    if i != 0.0 and i < min_val:
        min_val = i
    if i >= 0.8:
        ratio = ratio + 1
# Three stacked panels (shared x axis): x-ego vs. global (solid) and ego vs.
# global (dotted) Spearman correlation over time for step delta = 3600, with
# windows omega = 86400 (top), 43200 (middle), 3600 (bottom). Green fill
# marks where the x-ego correlation dominates; red where it does not.
font = {'family': 'serif', 'color': 'black', 'weight': 'normal', 'size': 16}
ind = np.arange(len(correlationTickList[0]))
barWidth=0.5
fig, (ax1, ax2, ax3) = plt.subplots(3, 1, sharex = True, figsize=(20, 14))
# Five x tick positions: first, quartiles, and last slot.
xind = [ind[0]+barWidth/2.,
ind[int((len(correlationTickList[0])-1)/4)]+barWidth/2.,
ind[int((len(correlationTickList[0])-1)/2)]+barWidth/2.,
ind[int((len(correlationTickList[0])-1)*3/4)]+barWidth/2.,
ind[len(correlationTickList[0])-1]+barWidth/2.]
#step: 3600
# First four labels are hard-coded tick values; the last is the final tick
# shifted by 24331 (presumably the trace's start offset -- TODO confirm).
xtickLable = [3600, 66237, 128875, 191513, correlationTickList[0][len(correlationTickList[0])-1]-24331]
# Top panel: omega = 86400.
ax1.set_ylim(ymin=0.0, ymax=1.2)
ax1.tick_params(labelsize=14)
#tick start - 107132
correlationXEgoGlobal2 = np.array(correlationXEgoGlobal[2])
correlationEgoGlobal2 = np.array(correlationEgoGlobal[2])
ax1.plot(ind+barWidth/2., correlationXEgoGlobal2, lw=1, ls="-", c="black")
ax1.plot(ind+barWidth/2., correlationEgoGlobal2, lw=1, ls=":", c="black")
ax1.fill_between(ind+barWidth/2.,
correlationEgoGlobal2, correlationXEgoGlobal2,
where=correlationXEgoGlobal2 >= correlationEgoGlobal2,
facecolor='green', alpha=0.2)
ax1.fill_between(ind+barWidth/2.,
correlationXEgoGlobal2, correlationEgoGlobal2,
where=correlationXEgoGlobal2 < correlationEgoGlobal2,
facecolor='red', alpha=0.2)
ax1.text(1, 0.1, r'$\omega=86400,\,\delta=3600$', fontdict=font, fontsize=18)
# Middle panel: omega = 43200.
#tick start - 63932
ax2.set_ylim(ymin=0.0, ymax=1.2)
ax2.tick_params(labelsize=14)
correlationXEgoGlobal2 = np.array(correlationXEgoGlobal[1])
correlationEgoGlobal2 = np.array(correlationEgoGlobal[1])
ax2.plot(ind+barWidth/2., correlationXEgoGlobal2, lw=1, ls="-", c="black")
ax2.plot(ind+barWidth/2., correlationEgoGlobal2, lw=1, ls=":", c="black")
ax2.fill_between(ind+barWidth/2.,
correlationEgoGlobal2, correlationXEgoGlobal2,
where=correlationXEgoGlobal2 >= correlationEgoGlobal2,
facecolor='green', alpha=0.2)
ax2.fill_between(ind+barWidth/2.,
correlationXEgoGlobal2, correlationEgoGlobal2,
where=correlationXEgoGlobal2 < correlationEgoGlobal2,
facecolor='red', alpha=0.2)
ax2.text(1, 0.1, r'$\omega=43200,\,\delta=3600$', fontdict=font, fontsize=18)
# Bottom panel: omega = 3600; this panel carries the shared axis labels.
ax3.set_ylim(ymin=0.0, ymax=1.2)
ax3.set_ylabel('Spearman Correlation', position=(0.1,1.7), fontsize=14)
ax3.set_xlabel('Time Slot', fontsize=14)
ax3.set_xticks(xind)
ax3.tick_params(labelsize=14)
ax3.set_xticklabels(xtickLable, fontsize=14)
correlationXEgoGlobal2 = np.array(correlationXEgoGlobal[0])
correlationEgoGlobal2 = np.array(correlationEgoGlobal[0])
ax3.plot(ind+barWidth/2., correlationXEgoGlobal2, lw=1, ls="-", c="black")
ax3.plot(ind+barWidth/2., correlationEgoGlobal2, lw=1, ls=":", c="black")
ax3.fill_between(ind+barWidth/2.,
correlationEgoGlobal2, correlationXEgoGlobal2,
where=correlationXEgoGlobal2 >= correlationEgoGlobal2,
facecolor='green', alpha=0.2)
ax3.fill_between(ind+barWidth/2.,
correlationXEgoGlobal2, correlationEgoGlobal2,
where=correlationXEgoGlobal2 < correlationEgoGlobal2,
facecolor='red', alpha=0.2)
ax3.text(1, 0.1, r'$\omega=3600,\,\delta=3600$', fontdict=font, fontsize=18)
# Save is disabled; re-enable to write the figure.
#plt.savefig('cor_spearman_time-step-3600.pdf', bbox_inches='tight')