-
Notifications
You must be signed in to change notification settings - Fork 2
Expand file tree
/
Copy path: analysis.py
More file actions
124 lines (101 loc) · 6.69 KB
/
analysis.py
File metadata and controls
124 lines (101 loc) · 6.69 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
import data
import os
import json
import mog.graph_io as GraphIO
import cache
def csv_header( modal ):
    """Return a CSV header line built from the column names in *modal*.

    Each entry is stringified and joined with commas; an empty list yields
    an empty string. (Replaces a quadratic ``+=`` loop with ``str.join``.)
    """
    return ','.join(str(m) for m in modal)
def csv_add_row( modal, data ):
    """Return one CSV row (prefixed with '\\n') for the columns in *modal*.

    Columns missing from *data* are emitted as empty cells. Fields containing
    a comma are quoted, as before; additionally (bug fix, per RFC 4180) any
    field containing a double quote is now quoted with embedded quotes
    doubled — previously such fields produced a corrupt row.

    NOTE(review): the parameter name ``data`` shadows the imported ``data``
    module; kept for interface compatibility since the module is not used here.
    """
    cells = []
    for m in modal:
        if m in data:
            s = str(data[m])
            if '"' in s:
                # RFC 4180: double embedded quotes, then wrap the field.
                s = '"' + s.replace('"', '""') + '"'
            elif ',' in s:
                s = '"' + s + '"'
            cells.append(s)
        else:
            cells.append('')
    return '\n' + ','.join(cells)
def generate_filter_summary(force_overwrite=False):
    """Write analysis/filter_summary.csv: one row per (dataset, datafile)
    with node/edge counts and per-filter-function process times.

    Skips the work entirely if the output file already exists, unless
    ``force_overwrite`` is true. Reads the per-filter JSON files written
    under docs/data/<dataset>/<datafile-stem>/<filter>.json.
    """
    out_path = "analysis/filter_summary.csv"
    if not force_overwrite and os.path.exists(out_path):
        return
    ffs = sorted(data.filter_function_names.keys())
    modal = ['dataset', 'datafile', 'nodes', 'edges'] + ffs
    # Accumulate rows in a list and join once (avoids quadratic string +=).
    rows = [csv_header(modal)]
    record = {}
    for ds0 in data.data_sets:
        record['dataset'] = ds0
        for ds1 in data.data_sets[ds0]:
            record['datafile'] = ds1
            record['nodes'] = 0
            record['edges'] = 0
            for ff in ffs:
                record[ff] = ''  # blank cell when this filter wasn't computed
                if ff in data.data_sets[ds0][ds1]:
                    json_path = ('docs/data/' + ds0 + "/" +
                                 os.path.splitext(ds1)[0] + "/" + ff + ".json")
                    with open(json_path) as json_file:
                        ff_data = json.load(json_file)
                    if 'num_of_nodes' in ff_data:
                        record['nodes'] = ff_data['num_of_nodes']
                    if 'num_of_edges' in ff_data:
                        record['edges'] = ff_data['num_of_edges']
                    # NOTE(review): assumes 'process_time' is always present
                    # in the filter JSON — will raise KeyError otherwise.
                    record[ff] = ff_data['process_time']
            rows.append(csv_add_row(modal, record))
    with open(out_path, 'w') as outfile:
        # csv_add_row prefixes each row with '\n', so plain concatenation works.
        outfile.write(''.join(rows))
def generate_graph_summary(force_overwrite=False):
    """Write analysis/graph_summary.csv: node/edge counts for every graph
    file under docs/data/<dataset>/<datafile>.

    Skips the work if the output already exists, unless ``force_overwrite``.
    """
    out_path = "analysis/graph_summary.csv"
    if not force_overwrite and os.path.exists(out_path):
        return
    # Accumulate lines in a list and join once (avoids quadratic string +=).
    lines = ['dataset,datafile,node_count,edge_count\n']
    for ds0 in data.data_sets:
        for ds1 in data.data_sets[ds0]:
            graph_data, graph = GraphIO.read_json_graph('docs/data/' + ds0 + "/" + ds1)
            lines.append(ds0 + ',"' + ds1 + '",' +
                         str(graph.number_of_nodes()) + ',' +
                         str(graph.number_of_edges()) + '\n')
    with open(out_path, 'w') as outfile:
        outfile.write(''.join(lines))
def generate_mog_profile(dataset, datafile):
    """Profile mapper-on-graph construction for one datafile.

    For every filter function and cover size, builds the MOG with each
    component method (ranked and unranked) and records node/edge counts and
    compute time to analysis/mog/<datafile-stem>.csv.

    Deduplicates the previous six copy-pasted call/row pairs into a single
    config-driven loop; behavior (rows, order, file contents) is unchanged.
    """
    rows = ['filter_func,cover_elem_count,component_method,connectivity_method,'
            'ranked,nodes,edges,compute_time\n']
    graph_data, graph = GraphIO.read_json_graph('docs/data/' + dataset + "/" + datafile)
    # The slower component methods are skipped for large graphs; this test is
    # loop-invariant, so hoist it out of the loops.
    small_enough = graph.number_of_nodes() < 50000
    configs = [('connected_components', 'false'), ('connected_components', 'true')]
    if small_enough:
        configs += [('modularity', 'false'), ('modularity', 'true'),
                    ('async_label_prop', 'false'), ('async_label_prop', 'true')]
    levels = [2, 3, 4, 6, 8, 10, 20, 30, 40]
    for ff in data.data_sets[dataset][datafile]:
        for coverN in levels:
            for method, ranked in configs:
                mog, mog_cf = cache.generate_mog(dataset, datafile, ff, coverN, 0,
                                                 method, 'connectivity', ranked)
                rows.append(','.join([ff, str(coverN), method, 'connectivity',
                                      ranked, str(mog.number_of_nodes()),
                                      str(mog.number_of_edges()),
                                      str(mog.info['compute_time'])]) + '\n')
    # splitext replaces the fragile datafile[:-5] (which assumed a 5-char
    # ".json" suffix) — consistent with the path handling elsewhere in the file.
    out_path = "analysis/mog/" + os.path.splitext(datafile)[0] + ".csv"
    with open(out_path, 'w') as outfile:
        outfile.write(''.join(rows))
def generate_mog_perf():
    """Write analysis/mog_perf.csv: unranked MOG build performance across the
    small/medium/large dataset groups at cover sizes 2 and 20.

    Deduplicates the previous three copy-pasted call/row pairs into a single
    loop over component methods; rows and their order are unchanged.
    """
    rows = ['datafile,df_nodes,filter_func,cover_elem_count,component_method,'
            'connectivity_method,ranked,nodes,edges,compute_time\n']
    for ds in ['small', 'medium', 'large']:
        for df in data.data_sets[ds]:
            graph_data, graph = GraphIO.read_json_graph('docs/data/' + ds + "/" + df)
            n_nodes = graph.number_of_nodes()  # loop-invariant, hoisted
            # The slower component methods are skipped for large graphs.
            methods = ['connected_components']
            if n_nodes < 50000:
                methods += ['modularity', 'async_label_prop']
            for ff in data.data_sets[ds][df]:
                for coverN in [2, 20]:
                    for method in methods:
                        mog, mog_cf = cache.generate_mog(ds, df, ff, coverN, 0,
                                                         method, 'connectivity',
                                                         'false')
                        rows.append('"' + df + '",' +
                                    ','.join([str(n_nodes), ff, str(coverN),
                                              method, 'connectivity', 'false',
                                              str(mog.number_of_nodes()),
                                              str(mog.number_of_edges()),
                                              str(mog.info['compute_time'])]) + '\n')
    with open("analysis/mog_perf.csv", 'w') as outfile:
        outfile.write(''.join(rows))
if __name__ == '__main__':
    # makedirs with exist_ok replaces the two racy exists()+mkdir pairs and
    # creates the "analysis" parent directory implicitly.
    os.makedirs("analysis/mog", exist_ok=True)
    data.scan_datasets()
    generate_filter_summary(True)
    generate_graph_summary(True)
    for ds in ['small', 'medium', 'large']:
        for df in data.data_sets[ds]:
            generate_mog_profile(ds, df)
    generate_mog_perf()