import matplotlib.pyplot as plt
import numpy as np
import json
import pandas as pd

BENCHMARKS_JSON = 'results.json'
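
# The JSON is expected to look roughly like the sketch below. This structure is
# inferred from the parsing code in get_benchmark_data() and store_csv(); the
# file looks like a pytest-benchmark export, but that is an assumption:
#
#   {"benchmarks": [
#       {"name": "test_qr[numpy]",          # illustrative name only
#        "param": "numpy",
#        "stats": {"ops": ..., "mean": ...},
#        "extra_info": {"description": "..."}},
#       ...
#   ]}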

# Hardware details shown in the graph title
HARDWARE = "AMD Ryzen 9 9900X 12-Core Processor 63032 MB (fp64 fp16)\n oneAPI 2025.1.3 Intel(R) OpenCL Graphics: Intel(R) Arc(TM) B580 Graphics, 11873 MB (fp64 fp16)"

# Annotate the bars with the speedup numbers
SHOW_NUMBERS = True

# Digits after the decimal point to round speedups to (0 disables rounding)
ROUND_NUMBERS = 1

# package list in graph order; arrayfire packages are added later
PKG_NAMES = [
    'numpy',
    'dpnp',
    'cupy'
]

# color used in graphs
PKG_COLOR = {
    "numpy": "tab:blue",
    "cupy": "tab:green",
    "dpnp": "tab:red",
    "afcpu": "tab:orange",
    "afopencl": "tab:orange",
    "afcuda": "tab:orange",
    "afoneapi": "tab:orange"
}

# labels displayed in the graph
PKG_LABELS = {
    "numpy": "numpy[cpu]",
    "dpnp": "dpnp[level_zero:gpu]",
    "cupy": "cupy",
    "afcpu": "afcpu",
    "afcuda": "afcuda",
    "afopencl": "afopencl[opencl:gpu]",
    "afoneapi": "afoneapi[opencl:gpu]"
}

# ArrayFire backends; main() produces one comparison graph per backend
AFBACKENDS = [
    'afcpu',
    'afcuda',
    'afopencl',
    'afoneapi'
]

# Tests to be shown in graphs
TESTS = [
    'qr',
    'neural_network',
    'gemm',
    'mandelbrot',
    'nbody',
    'pi',
    'black_scholes',
    'fft',
    'normal',
    'group_elementwise',

    # Other tests
    # 'svd',
    # 'cholesky',
    # 'det',
    # 'norm',
    # 'uniform',
    # 'inv'
]

def get_benchmark_data():
    """Read BENCHMARKS_JSON and return (results, descriptions).

    results maps test name -> {package: value of stats['ops']};
    descriptions maps test name -> the benchmark's description string.
    """
    results = {}
    descriptions = {}
    with open(BENCHMARKS_JSON) as f:
        js = json.load(f)
        for bench in js['benchmarks']:
            # Strip the leading "test_" prefix and the trailing "[param]" suffix
            test_name = bench["name"]
            test_name = test_name[test_name.find('_') + 1:test_name.find('[')]

            key = bench["param"]
            val = bench["stats"]["ops"]

            if len(bench["extra_info"]) != 0 and test_name not in descriptions:
                descriptions[test_name] = bench["extra_info"]["description"]

            if test_name not in results:
                results[test_name] = {key: val}
            else:
                results[test_name][key] = val

    return results, descriptions

def create_graph(test_name, test_results):
    """Save a simple bar chart of one test's results to img/<test_name>.png."""
    names = []
    values = []
    for name in test_results:
        names.append(name)
        values.append(test_results[name])

    plt.bar(names, values)
    plt.title(test_name)

    plt.savefig("img/" + test_name + ".png")
    plt.close()

def generate_individual_graphs():
    """Create one bar chart per test; not called from main()."""
    results, descriptions = get_benchmark_data()

    for test in results:
        create_graph(test, results[test])

def store_csv():
    """Store the timing results in summary.csv: one row per test, one column per
    package, values are mean runtimes in seconds."""
    data_dict = {}
    data_dict["Test(seconds)"] = []
    results = {}
    for pkg in PKG_LABELS.keys():
        data_dict[pkg] = []
        results[pkg] = {}

    with open(BENCHMARKS_JSON) as f:
        js = json.load(f)
        for bench in js['benchmarks']:
            test_name = bench["name"]
            test_name = test_name[test_name.find('_') + 1:test_name.find('[')]

            pkg = bench["param"]
            time = bench["stats"]["mean"]

            if test_name not in data_dict["Test(seconds)"]:
                data_dict["Test(seconds)"].append(test_name)

            results[pkg][test_name] = time

    # Missing package/test combinations are recorded as NaN
    for test in data_dict["Test(seconds)"]:
        for pkg in PKG_LABELS.keys():
            if test in results[pkg]:
                data_dict[pkg].append(results[pkg][test])
            else:
                data_dict[pkg].append(np.nan)

    df = pd.DataFrame(data_dict)
    df.to_csv("summary.csv")
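
# Note: summary.csv stores mean runtimes in seconds (stats["mean"]), while the
# comparison graphs below are built from throughput (stats["ops"]). To eyeball
# the summary afterwards, something like the following works (usage sketch only,
# not part of the pipeline):
#
#   df = pd.read_csv("summary.csv", index_col=0)
#   print(df)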

def generate_group_graph(test_list=None, show_numbers=False, filename="comparison"):
    """Plot a grouped horizontal bar chart of per-test speedups relative to numpy."""
    results, descriptions = get_benchmark_data()

    width = 1 / (1 + len(PKG_NAMES))
    multiplier = 0

    if test_list:
        tests = test_list
    else:
        tests = results.keys()

    tests_values = {}
    x = np.arange(len(tests))

    for name in PKG_NAMES:
        tests_values[name] = []

    # Speedup of each package over the numpy baseline (ops / numpy ops)
    max_val = 1
    for test in tests:
        for name in PKG_NAMES:
            base_value = results[test]["numpy"]
            if name in results[test]:
                val = results[test][name] / base_value

                if ROUND_NUMBERS:
                    val = round(val, ROUND_NUMBERS)

                if max_val < val:
                    max_val = val

                tests_values[name].append(val)
            else:
                tests_values[name].append(np.nan)

    fig, ax = plt.subplots(layout='constrained')

    for name in PKG_NAMES:
        offset = width * multiplier
        rects = ax.barh(x + offset, tests_values[name], width, label=PKG_LABELS[name], color=PKG_COLOR[name])

        if show_numbers:
            ax.bar_label(rects, padding=3, rotation=0)
        multiplier += 1

    # Tick labels: test name plus its description
    xlabels = []
    for test in tests:
        xlabels.append(test + "\n" + descriptions[test])

    ax.set_xlabel('Speedup')
    ax.set_xscale('log')
    ax.set_title(f'Runtime Comparison\n{HARDWARE}')
    ax.set_yticks(x + width, xlabels, rotation=0)
    # Leave headroom on the log axis so bar labels are not clipped
    xmin, xmax = ax.get_xlim()
    ax.set_xlim(xmin, xmax * 2)

    ax.legend(loc='lower right', ncols=len(PKG_NAMES))
    fig.set_figheight(8)
    fig.set_figwidth(13)
    fig.savefig(f"img/{filename}.png")
    plt.show()
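
# Worked example of the speedup values plotted above (illustrative numbers, not
# from an actual run): if numpy reaches 100 ops/s on a test and dpnp reaches
# 500 ops/s, the dpnp bar shows 500 / 100 = 5.0, while the numpy bar is always 1.0.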

def main():
    """Write summary.csv, then produce one comparison graph per ArrayFire backend."""
    store_csv()
    for backend in AFBACKENDS:
        try:
            filename = f"comparison_{backend}"
            if backend not in PKG_NAMES:
                # Slot the backend in right after numpy (index 1)
                PKG_NAMES.insert(1, backend)
            generate_group_graph(TESTS, SHOW_NUMBERS, filename)
        except Exception as e:
            print(e)
            print("No data for", backend)
        finally:
            # Remove the backend even if plotting failed, so it does not leak
            # into the next backend's graph
            if backend in PKG_NAMES:
                PKG_NAMES.remove(backend)

if __name__ == "__main__":
    main()
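
# Usage sketch (assumed workflow; exact commands may differ): run the benchmark
# suite with pytest-benchmark and export its JSON, e.g.
#   pytest --benchmark-json=results.json
# then run this script to produce summary.csv and the img/*.png graphs.
# The img/ directory must already exist, since matplotlib's savefig does not create it.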