#! /usr/bin/python2
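#
# reduce-headers: attempt to remove extraneous #include files from source
# files, verifying each candidate removal by rebuilding on the host and/or
# a set of target configurations.  (See the usage text at the bottom of
# this file for the option summary.)
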
import os.path
import sys
import shlex
import re
import tempfile
import copy

from headerutils import *

requires = { }
provides = { }

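# These headers are never offered as removal candidates; try_to_remove
# skips any include that appears in this list.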
no_remove = [ "system.h", "coretypes.h", "config.h", "bconfig.h", "backend.h" ]

# These targets are the ones which provide "coverage".  Typically, if any
# target is going to fail compilation, it's one of these.  This was determined
# during the initial runs of reduce-headers... On a full set of target builds,
# every failure which occurred was triggered by one of these.
# This list is used during target-list construction simply to put any of these
# *first* in the candidate list, increasing the probability that a failure is
# found quickly.
target_priority = [
    "aarch64-linux-gnu",
    "arm-netbsdelf",
    "c6x-elf",
    "epiphany-elf",
    "i686-mingw32crt",
    "i686-pc-msdosdjgpp",
    "mipsel-elf",
    "powerpc-eabisimaltivec",
    "rs6000-ibm-aix5.1.0",
    "sh-superh-elf",
    "sparc64-elf"
]


target_dir = ""
build_dir = ""
ignore_list = list()
target_builds = list()

target_dict = { }
header_dict = { }
search_path = [ ".", "../include", "../libcpp/include" ]

remove_count = { }


# Given a header name, normalize it; i.e., cp/cp-tree.h could be in gcc, while
# the same header could be referenced from within the cp subdirectory as
# just cp-tree.h.
# For now, just assume basenames are unique.

def normalize_header (header):
  return os.path.basename (header)
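
# For example, both spellings of the same header normalize to one key:
#   normalize_header ("cp/cp-tree.h") -> "cp-tree.h"
#   normalize_header ("cp-tree.h")    -> "cp-tree.h"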


# Add a header file and its sub-includes to the global dictionary if they
# aren't already there.  Specify s_path since different build directories may
# append themselves on demand to the global list.
# Return the entry for the specified header, knowing all sub-entries are
# complete.

def get_header_info (header, s_path):
  global header_dict
  global empty_iinfo
  process_list = list ()
  location = ""
  bname = ""
  bname_iinfo = empty_iinfo
  for path in s_path:
    if os.path.exists (path + "/" + header):
      location = path + "/" + header
      break

  if location:
    bname = normalize_header (location)
    if header_dict.get (bname):
      bname_iinfo = header_dict[bname]
      loc2 = ii_path (bname_iinfo) + "/" + bname
      if loc2[:2] == "./":
        loc2 = loc2[2:]
      if location[:2] == "./":
        location = location[2:]
      if loc2 != location:
        # Don't use the cache if it isn't the right one.
        bname_iinfo = process_ii_macro (location)
      return bname_iinfo

    bname_iinfo = process_ii_macro (location)
    header_dict[bname] = bname_iinfo
    # Now descend into the include tree.
    for i in ii_include_list (bname_iinfo):
      get_header_info (i, s_path)
  else:
    # If the file isn't in the source directories, look in the build and
    # target directories.  If it is there, then aggregate all the versions.
    location = build_dir + "/gcc/" + header
    build_inc = target_inc = False
    if os.path.exists (location):
      build_inc = True
    for x in target_dict:
      location = target_dict[x] + "/gcc/" + header
      if os.path.exists (location):
        target_inc = True
        break

    if (build_inc or target_inc):
      bname = normalize_header (header)
      defines = set()
      consumes = set()
      incl = set()
      if build_inc:
        iinfo = process_ii_macro (build_dir + "/gcc/" + header)
        defines = set (ii_macro_define (iinfo))
        consumes = set (ii_macro_consume (iinfo))
        incl = set (ii_include_list (iinfo))

      if (target_inc):
        for x in target_dict:
          location = target_dict[x] + "/gcc/" + header
          if os.path.exists (location):
            iinfo = process_ii_macro (location)
            defines.update (ii_macro_define (iinfo))
            consumes.update (ii_macro_consume (iinfo))
            incl.update (ii_include_list (iinfo))

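      # Synthesize an entry in the tuple layout the ii_* accessors from
      # headerutils read from: the second field is the location, with
      # "build" marking a header found only in a build/target directory;
      # the include, consume, and define lists follow.  The empty list()
      # slots are fields this script never reads.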
      bname_iinfo = (header, "build", list(incl), list(), list(consumes), list(defines), list(), list())

      header_dict[bname] = bname_iinfo
      for i in incl:
        get_header_info (i, s_path)

  return bname_iinfo


# Return a list of all headers brought in by this header.
def all_headers (fname):
  global header_dict
  headers_stack = list()
  headers_list = list()
  if header_dict.get (fname) == None:
    return list ()
  for y in ii_include_list (header_dict[fname]):
    headers_stack.append (y)

  while headers_stack:
    h = headers_stack.pop ()
    hn = normalize_header (h)
    if hn not in headers_list:
      headers_list.append (hn)
      if header_dict.get(hn):
        for y in ii_include_list (header_dict[hn]):
          if normalize_header (y) not in headers_list:
            headers_stack.append (y)

  return headers_list
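
# For example, if a.h includes b.h, and b.h includes c.h, then
# all_headers ("a.h") returns ["b.h", "c.h"] (normalized, no duplicates,
# and not including a.h itself).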


# Search bld_dir for all target tuples, confirm that they have a build path
# with bld_dir/target-tuple/gcc, and build a dictionary of build paths
# indexed by target tuple.

def build_target_dict (bld_dir, just_these):
  global target_dict
  target_dict = { }
  error = False
  if os.path.exists (bld_dir):
    if just_these:
      ls = just_these
    else:
      ls = os.listdir(bld_dir)
    for t in ls:
      if t.find("-") != -1:
        target = t.strip()
        tpath = bld_dir + "/" + target
        if not os.path.exists (tpath + "/gcc"):
          print "Error: gcc build directory for target " + t + " does not exist: " + tpath + "/gcc"
          error = True
        else:
          target_dict[target] = tpath

  if error:
    target_dict = { }
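
# On success target_dict maps each tuple to its build path, e.g.
# (directory name illustrative):
#   { "aarch64-linux-gnu" : "bld/aarch64-linux-gnu" }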

def get_obj_name (src_file):
  # Slice rather than use replace() so only the file extension is rewritten.
  if src_file[-2:] == ".c":
    return src_file[:-2] + ".o"
  elif src_file[-3:] == ".cc":
    return src_file[:-3] + ".o"
  return ""
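
# e.g. get_obj_name ("config/i386/i386.c") -> "config/i386/i386.o" and
# get_obj_name ("tree.cc") -> "tree.o"; anything else yields "".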

def target_obj_exists (target, obj_name):
  global target_dict
  # Look in a subdir if src has a subdir, then check the gcc base directory.
  if target_dict.get(target):
    obj = target_dict[target] + "/gcc/" + obj_name
    if not os.path.exists (obj):
      obj = target_dict[target] + "/gcc/" + os.path.basename(obj_name)
    if os.path.exists (obj):
      return True
  return False
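
# e.g. for obj_name "config/i386/i386.o" this checks
# <target-build>/gcc/config/i386/i386.o first, then falls back to
# <target-build>/gcc/i386.o.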

# Given a src file, return a list of targets which may build this file.
def find_targets (src_file):
  global target_dict
  targ_list = list()
  obj_name = get_obj_name (src_file)
  if not obj_name:
    print "Error: " + src_file + " - Cannot determine object name."
    return list()

  # Put the high priority targets which tend to trigger failures first.
  for target in target_priority:
    if target_obj_exists (target, obj_name):
      targ_list.append ((target, target_dict[target]))

  for target in target_dict:
    if target not in target_priority and target_obj_exists (target, obj_name):
      targ_list.append ((target, target_dict[target]))

  return targ_list
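
# The result is a list of (target, build_path) pairs with the priority
# targets first, e.g. (path illustrative):
#   [("aarch64-linux-gnu", "bld/aarch64-linux-gnu"), ...]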


def try_to_remove (src_file, h_list, verbose):
  global target_dict
  global header_dict
  global build_dir

  # Build from scratch each time.
  header_dict = { }
  summary = ""
  rmcount = 0

  because = { }
  src_info = process_ii_macro_src (src_file)
  src_data = ii_src (src_info)
  if src_data:
    inclist = ii_include_list_non_cond (src_info)
    # Work is done if there are no includes to check.
    if not inclist:
      return src_file + ": No include files to attempt to remove"

    # Work on the include list in reverse.
    inclist.reverse()

    # Get the target list.
    targ_list = find_targets (src_file)

    spath = search_path
    if os.path.dirname (src_file):
      spath.append (os.path.dirname (src_file))

    hostbuild = True
    if src_file.find("config/") != -1:
      # config files don't usually build on the host.
      hostbuild = False
      obn = get_obj_name (os.path.basename (src_file))
      if obn and os.path.exists (build_dir + "/gcc/" + obn):
        hostbuild = True
      if not target_dict:
        summary = src_file + ": Target builds are required for config files.  None found."
        print summary
        return summary
      if not targ_list:
        summary = src_file + ": Cannot find any targets which build this file."
        print summary
        return summary

    if hostbuild:
      # Confirm it actually builds before we do anything.
      print "Confirming source file builds"
      res = get_make_output (build_dir + "/gcc", "all")
      if res[0] != 0:
        message = "Error: " + src_file + " does not build currently."
        summary = src_file + " does not build on host."
        print message
        print res[1]
        if verbose:
          verbose.write (message + "\n")
          verbose.write (res[1] + "\n")
        return summary

    src_requires = set (ii_macro_consume (src_info))
    for macro in src_requires:
      because[macro] = src_file
    header_seen = list ()

    os.rename (src_file, src_file + ".bak")
    src_orig = copy.deepcopy (src_data)
    src_tmp = copy.deepcopy (src_data)

    try:
      # Process the includes from bottom to top.  This is because later
      # includes are known to be needed, so any dependency from such a
      # header is a true dependency.
      for inc_file in inclist:
        inc_file_norm = normalize_header (inc_file)

        if inc_file in no_remove:
          continue
        if len (h_list) != 0 and inc_file_norm not in h_list:
          continue
        if inc_file_norm[0:3] == "gt-":
          continue
        if inc_file_norm[0:6] == "gtype-":
          continue
        if inc_file_norm.replace(".h",".c") == os.path.basename(src_file):
          continue

        lookfor = ii_src_line(src_info)[inc_file]
        src_tmp.remove (lookfor)
        message = "Trying " + src_file + " without " + inc_file
        print message
        if verbose:
          verbose.write (message + "\n")
        out = open(src_file, "w")
        for line in src_tmp:
          out.write (line)
        out.close()

        keep = False
        if hostbuild:
          res = get_make_output (build_dir + "/gcc", "all")
        else:
          res = (0, "")

        rc = res[0]
        message = "Passed host build"
        if (rc != 0):
          # Host build failed.
          message = "Compilation failed:\n"
          keep = True
        else:
          if targ_list:
            objfile = get_obj_name (src_file)
            t1 = targ_list[0]
            if objfile and os.path.exists(t1[1] + "/gcc/" + objfile):
              res = get_make_output_parallel (targ_list, objfile, 0)
            else:
              res = get_make_output_parallel (targ_list, "all-gcc", 0)
            rc = res[0]
            if rc != 0:
              message = "Compilation failed on TARGET : " + res[2]
              keep = True
            else:
              message = "Passed host and target builds"

        if keep:
          print message + "\n"

        if (rc != 0):
          if verbose:
            verbose.write (message + "\n")
            verbose.write (res[1])
            verbose.write ("\n")
          if os.path.exists (inc_file):
            ilog = open(inc_file + ".log", "a")
            ilog.write (message + " for " + src_file + ":\n\n")
            ilog.write ("============================================\n")
            ilog.write (res[1])
            ilog.write ("\n")
            ilog.close()
          if os.path.exists (src_file):
            ilog = open(src_file + ".log", "a")
            ilog.write (message + " for " + inc_file + ":\n\n")
            ilog.write ("============================================\n")
            ilog.write (res[1])
            ilog.write ("\n")
            ilog.close()

        # Given a sequence where:
        #   #include "tm.h"
        #   #include "target.h"   /* includes tm.h */
        #
        # target.h was required, and when attempting to remove tm.h we'd
        # see that all the macro definitions are "required" since they all
        # look like:
        #   #ifndef HAVE_blah
        #   #define HAVE_blah
        #   #endif
        #
        # When target.h was found to be required, tm.h will have been
        # tagged as included, so when we get this far we know we don't
        # have to check the macros for tm.h since it has already been
        # included.

        if inc_file_norm not in header_seen:
          iinfo = get_header_info (inc_file, spath)
          newlist = all_headers (inc_file_norm)
          if ii_path(iinfo) == "build" and not target_dict:
            keep = True
            text = message + " : Will not remove a build file without some targets."
            print text
            ilog = open(src_file + ".log", "a")
            ilog.write (text + "\n")
            ilog.write ("============================================\n")
            ilog.close()
            ilog = open("reduce-headers-kept.log", "a")
            ilog.write (src_file + " " + text + "\n")
            ilog.close()
        else:
          newlist = list()
        if not keep and inc_file_norm not in header_seen:
          # Now look for any macro requirements.
          for h in newlist:
            if not h in header_seen:
              if header_dict.get(h):
                defined = ii_macro_define (header_dict[h])
                for dep in defined:
                  if dep in src_requires and dep not in ignore_list:
                    keep = True
                    text = message + ", but must keep " + inc_file + " because it provides " + dep
                    if because.get(dep) != None:
                      text = text + "  Possibly required by " + because[dep]
                    print text
                    ilog = open(inc_file + ".log", "a")
                    ilog.write (because[dep] + ": Requires " + dep + " in " + src_file + "\n")
                    ilog.write ("============================================\n")
                    ilog.close()
                    ilog = open(src_file + ".log", "a")
                    ilog.write (text + "\n")
                    ilog.write ("============================================\n")
                    ilog.close()
                    ilog = open("reduce-headers-kept.log", "a")
                    ilog.write (src_file + " " + text + "\n")
                    ilog.close()
                    if verbose:
                      verbose.write (text + "\n")

        if keep:
          # Add everything these headers 'consume' to the src_requires
          # list, and mark the headers as seen.
          for h in newlist:
            if not h in header_seen:
              header_seen.append (h)
              if header_dict.get(h):
                consume = ii_macro_consume (header_dict[h])
                for dep in consume:
                  if dep not in src_requires:
                    src_requires.add (dep)
                    if because.get(dep) == None:
                      because[dep] = inc_file

          src_tmp = copy.deepcopy (src_data)
        else:
          print message + " --> removing " + inc_file + "\n"
          rmcount += 1
          if verbose:
            verbose.write (message + " --> removing " + inc_file + "\n")
          if remove_count.get(inc_file) == None:
            remove_count[inc_file] = 1
          else:
            remove_count[inc_file] += 1
          src_data = copy.deepcopy (src_tmp)
    except:
      print "Interruption: restoring original file"
      out = open(src_file, "w")
      for line in src_orig:
        out.write (line)
      out.close()
      raise

    # Copy the current version, since it is the "right" one now.
    out = open(src_file, "w")
    for line in src_data:
      out.write (line)
    out.close()

    # Try a final host bootstrap build to make sure everything is kosher.
    if hostbuild:
      res = get_make_output (build_dir, "all")
      rc = res[0]
      if (rc != 0):
        # Host build failed!  Return to the original version.
        print "Error: " + src_file + " failed to bootstrap at end!!! restoring."
        print "       Bad version at " + src_file + ".bad"
        os.rename (src_file, src_file + ".bad")
        out = open(src_file, "w")
        for line in src_orig:
          out.write (line)
        out.close()
        return src_file + ": failed to build after reduction.  Restored original"

    if src_data == src_orig:
      summary = src_file + ": No change."
    else:
      summary = src_file + ": Reduction performed, " + str(rmcount) + " includes removed."
  print summary
  return summary

only_h = list ()
ignore_cond = False

usage = False
src = list()
only_targs = list ()
for x in sys.argv[1:]:
  if x[0:2] == "-b":
    build_dir = x[2:]
  elif x[0:2] == "-f":
    fn = normalize_header (x[2:])
    if fn not in only_h:
      only_h.append (fn)
  elif x[0:2] == "-h":
    usage = True
  elif x[0:2] == "-d":
    ignore_cond = True
  elif x[0:2] == "-D":
    ignore_list.append(x[2:])
  elif x[0:2] == "-T":
    only_targs.append(x[2:])
  elif x[0:2] == "-t":
    target_dir = x[2:]
  elif x[0] == "-":
    print "Error: Unrecognized option " + x
    usage = True
  else:
    if not os.path.exists (x):
      print "Error: specified file " + x + " does not exist."
      usage = True
    else:
      src.append (x)

if target_dir:
  build_target_dict (target_dir, only_targs)

if build_dir == "" and target_dir == "":
  print "Error: Must specify a build directory, and/or a target directory."
  usage = True

if build_dir and not os.path.exists (build_dir):
  print "Error: specified build directory does not exist : " + build_dir
  usage = True

if target_dir and not os.path.exists (target_dir):
  print "Error: specified target directory does not exist : " + target_dir
  usage = True

if usage:
  print "Attempts to remove extraneous include files from source files."
  print " "
  print "Should be run from the main gcc source directory, and works on a target"
  print "directory, as we attempt to make the 'all' target."
  print " "
  print "By default, gcc-reorder-includes is run on each file before attempting"
  print "to remove includes.  This removes duplicates and puts some headers in a"
  print "canonical ordering."
  print " "
  print "The build directory should be ready to compile via make.  Time is saved"
  print "if the build is already complete, so that only changes need to be built."
  print " "
  print "Usage: [options] file1.c [file2.c] ... [filen.c]"
  print " -bdir  : the root build directory in which to attempt building .o files."
  print " -tdir  : the target build directory."
  print " -d     : Ignore conditional macro dependencies."
  print " "
  print " -Dmacro  : Ignore a specific macro for dependencies."
  print " -Ttarget : Only consider the named target in the target directory."
  print " -fheader : Specifies a specific .h file to be considered."
  print " "
  print " -D, -T, and -f can be specified multiple times and are aggregated."
  print " "
  print " The original file will be in filen.bak."
  print " "
  sys.exit (0)
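
# A typical invocation (directory names illustrative) looks like:
#   ./reduce-headers -bhost-bld -ttarget-bld -Taarch64-linux-gnu tree.c
# which tries to reduce the includes of tree.c, verifying against the host
# build in host-bld and the aarch64-linux-gnu build under target-bld.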

if only_h:
  print "Attempting to remove only these files:"
  for x in only_h:
    print x
  print " "

logfile = open("reduce-headers.log","w")

for x in src:
  msg = try_to_remove (x, only_h, logfile)
  ilog = open("reduce-headers.sum","a")
  ilog.write (msg + "\n")
  ilog.close()

ilog = open("reduce-headers.sum","a")
ilog.write ("===============================================================\n")
for x in remove_count:
  msg = x + ": Removed " + str(remove_count[x]) + " times."
  print msg
  logfile.write (msg + "\n")
  ilog.write (msg + "\n")