-
Notifications
You must be signed in to change notification settings - Fork 4
Expand file tree
/
Copy pathRunInfer.py
More file actions
115 lines (80 loc) · 3.41 KB
/
RunInfer.py
File metadata and controls
115 lines (80 loc) · 3.41 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
'''
Created on Dec. 25, 2017
@author Andrew Habib
'''
import os
import shutil
import subprocess
import sys
import tempfile
from joblib import Parallel, delayed
from Util import prepare_tool
def run_infer_on_proj(proj, path, path_out_txt, path_out_json, path_infer):
    """Run Infer on every buggy file of one Defects4J project and save results.

    Parameters:
        proj: project name; also used as the output file name.
        path: root directory containing the checked-out D4J projects.
        path_out_txt: directory receiving the concatenated bugs.txt reports.
        path_out_json: directory receiving the merged report.json reports.
        path_infer: path to the Infer executable.

    Side effects: appends progress to ./inf_log, writes one file under each
    output directory, and creates/removes a temporary infer-out directory.
    """
    # 'with' guarantees the log handle is closed even if Infer or I/O fails.
    with open(os.path.join(os.getcwd(), 'inf_log'), 'a') as log:
        log.write("Running Infer on: " + proj + "\n\n")
        _, proj_cp, proj_javac_opts, proj_buggy_files, _ = prepare_tool(path, proj)
        infer_txt_results = []
        infer_json_results = []
        # Per-call scratch dir so parallel workers don't clobber each other;
        # mkdtemp(dir=cwd) returns an absolute path.
        tmp_out_dir = tempfile.mkdtemp(prefix='infer-out.', dir=os.getcwd())
        try:
            for buggy_f in proj_buggy_files:
                cmd = [path_infer, 'run', '-o', tmp_out_dir, '--', 'javac']
                if proj_javac_opts:
                    cmd += proj_javac_opts.split(' ')
                cmd += ['-cp', proj_cp, buggy_f]
                log.write(" ".join(cmd) + "\n\n")
                p = subprocess.Popen(cmd, universal_newlines=True,
                                     stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                (out, _) = p.communicate()
                log.write(out + "\n")
                log.write("*" * 24 + "\n\n")
                # Infer rewrites these files each run, so harvest them per input file.
                try:
                    with open(os.path.join(tmp_out_dir, 'bugs.txt'), 'r') as f:
                        infer_txt_results.append(f.read())
                except IOError:
                    pass  # no txt report produced for this file — best effort
                try:
                    with open(os.path.join(tmp_out_dir, 'report.json'), 'r') as f:
                        infer_json_results.append(f.read().strip("\n"))
                except IOError:
                    pass  # no json report produced for this file — best effort
        finally:
            # Always reclaim the scratch dir, even if a run above raised.
            shutil.rmtree(tmp_out_dir, ignore_errors=True)
        with open(os.path.join(path_out_txt, proj), 'w') as f:
            f.write("\n".join(infer_txt_results))
        with open(os.path.join(path_out_json, proj), 'w') as f:
            f.write(manual_merge_json(infer_json_results))
        log.write("#" * 212 + "\n\n")
def manual_merge_json(json_strings):
    """Merge several JSON-array strings into one JSON-array string.

    Each input string is expected to be a serialized JSON array
    ("[...]"); empty strings and "[]" are ignored. The merge is textual:
    the inner contents of every array are joined with commas inside one
    pair of brackets. Returns "" when nothing remains after filtering.

    Unlike the previous revision, the input list is NOT mutated.
    """
    parts = [s for s in json_strings if s != "" and s != '[]']
    if not parts:
        return ""
    if len(parts) == 1:
        return parts[0]
    # s[1:-1] drops each array's surrounding brackets; re-wrap once.
    return "[" + ",".join(s[1:-1] for s in parts) + "]"
if __name__ == '__main__':
    # CLI: RunInfer.py <infer-binary> <d4j-projects-dir> <n-jobs> [filter-file]
    # All relative arguments are resolved against the current directory.
    path_infer = os.path.join(os.getcwd(), sys.argv[1])
    path_d4j_projects = os.path.join(os.getcwd(), sys.argv[2])
    jobs = int(sys.argv[3])

    # exist_ok avoids the check-then-create race of isdir()+makedirs().
    path_out_txt = os.path.join(os.getcwd(), 'inf_output_txt')
    os.makedirs(path_out_txt, exist_ok=True)
    path_out_json = os.path.join(os.getcwd(), 'inf_output_json')
    os.makedirs(path_out_json, exist_ok=True)

    projects = sorted(os.listdir(path_d4j_projects))
    # Optional 4th argument: a file with one project name per line,
    # used to restrict the run to specific projects for debugging.
    if len(sys.argv) > 4:
        with open(sys.argv[4]) as f:
            wanted = set(f.read().splitlines())  # set: O(1) membership test
        projects = sorted(p for p in projects if p in wanted)

    Parallel(n_jobs=jobs)(
        delayed(run_infer_on_proj)(p, path_d4j_projects, path_out_txt,
                                   path_out_json, path_infer)
        for p in projects)