1 | /* Control flow functions for trees. |
2 | Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc. | |
3 | Contributed by Diego Novillo <dnovillo@redhat.com> | |
4 | ||
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify | |
8 | it under the terms of the GNU General Public License as published by | |
9 | the Free Software Foundation; either version 2, or (at your option) | |
10 | any later version. | |
11 | ||
12 | GCC is distributed in the hope that it will be useful, | |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 | GNU General Public License for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
18 | along with GCC; see the file COPYING. If not, write to | |
19 | the Free Software Foundation, 59 Temple Place - Suite 330, | |
20 | Boston, MA 02111-1307, USA. */ | |
21 | ||
22 | #include "config.h" | |
23 | #include "system.h" | |
24 | #include "coretypes.h" | |
25 | #include "tm.h" | |
26 | #include "tree.h" | |
27 | #include "rtl.h" | |
28 | #include "tm_p.h" | |
29 | #include "hard-reg-set.h" | |
30 | #include "basic-block.h" | |
31 | #include "output.h" | |
32 | #include "errors.h" | |
33 | #include "flags.h" | |
34 | #include "function.h" | |
35 | #include "expr.h" | |
36 | #include "ggc.h" | |
37 | #include "langhooks.h" | |
38 | #include "diagnostic.h" | |
39 | #include "tree-flow.h" | |
40 | #include "timevar.h" | |
41 | #include "tree-dump.h" | |
42 | #include "tree-pass.h" | |
43 | #include "toplev.h" | |
44 | #include "except.h" | |
45 | #include "cfgloop.h" | |
46 | ||
47 | /* This file contains functions for building the Control Flow Graph (CFG) | |
48 | for a function tree. */ | |
49 | ||
50 | /* Local declarations. */ | |
51 | ||
52 | /* Initial capacity for the basic block array. */ | |
53 | static const int initial_cfg_capacity = 20; | |
54 | ||
55 | /* Mapping of labels to their associated blocks. This can greatly speed up | |
56 | building of the CFG in code with lots of gotos. */ | |
57 | static GTY(()) varray_type label_to_block_map; | |
58 | ||
59 | /* CFG statistics. */ | |
60 | struct cfg_stats_d | |
61 | { | |
62 | long num_merged_labels; | |
63 | }; | |
64 | ||
65 | static struct cfg_stats_d cfg_stats; | |
66 | ||
67 | /* Nonzero if we found a computed goto while building basic blocks. */ | |
68 | static bool found_computed_goto; | |
69 | ||
70 | /* Basic blocks and flowgraphs. */ | |
71 | static basic_block create_bb (void *, void *, basic_block); | |
72 | static void create_block_annotation (basic_block); | |
73 | static void free_blocks_annotations (void); | |
74 | static void clear_blocks_annotations (void); | |
75 | static void make_blocks (tree); | |
76 | static void factor_computed_gotos (void); | |
77 | static tree tree_block_label (basic_block bb); | |
78 | ||
79 | /* Edges. */ | |
80 | static void make_edges (void); | |
81 | static void make_ctrl_stmt_edges (basic_block); | |
82 | static void make_exit_edges (basic_block); | |
83 | static void make_cond_expr_edges (basic_block); | |
84 | static void make_switch_expr_edges (basic_block); | |
85 | static void make_goto_expr_edges (basic_block); | |
86 | static edge tree_redirect_edge_and_branch (edge, basic_block); | |
87 | static edge tree_try_redirect_by_replacing_jump (edge, basic_block); | |
88 | static void split_critical_edges (void); | |
89 | ||
90 | /* Various helpers. */ | |
91 | static inline bool stmt_starts_bb_p (tree, tree); | |
92 | static int tree_verify_flow_info (void); | |
93 | static void tree_make_forwarder_block (edge); | |
94 | static bool thread_jumps (void); | |
95 | static bool tree_forwarder_block_p (basic_block); | |
96 | static void bsi_commit_edge_inserts_1 (edge e); | |
97 | static void tree_cfg2vcg (FILE *); | |
98 | ||
99 | /* Flowgraph optimization and cleanup. */ | |
100 | static void tree_merge_blocks (basic_block, basic_block); | |
101 | static bool tree_can_merge_blocks_p (basic_block, basic_block); | |
102 | static void remove_bb (basic_block); | |
103 | static void cleanup_dead_labels (void); | |
104 | static bool cleanup_control_flow (void); | |
105 | static bool cleanup_control_expr_graph (basic_block, block_stmt_iterator); | |
106 | static edge find_taken_edge_cond_expr (basic_block, tree); | |
107 | static edge find_taken_edge_switch_expr (basic_block, tree); | |
108 | static tree find_case_label_for_value (tree, tree); | |
109 | static bool phi_alternatives_equal (basic_block, edge, edge); | |
110 | ||
111 | ||
112 | /*--------------------------------------------------------------------------- | |
113 | Create basic blocks | |
114 | ---------------------------------------------------------------------------*/ | |
115 | ||
116 | /* Entry point to the CFG builder for trees. TP points to the list of | |
117 | statements to be added to the flowgraph. */ | |
118 | ||
119 | static void | |
120 | build_tree_cfg (tree *tp) | |
121 | { | |
122 | /* Register specific tree functions. */ | |
123 | tree_register_cfg_hooks (); | |
124 | ||
125 | /* Initialize rbi_pool. */ | |
126 | alloc_rbi_pool (); | |
127 | ||
128 | /* Initialize the basic block array. */ | |
129 | init_flow (); | |
130 | n_basic_blocks = 0; | |
131 | last_basic_block = 0; | |
132 | VARRAY_BB_INIT (basic_block_info, initial_cfg_capacity, "basic_block_info"); | |
133 | memset ((void *) &cfg_stats, 0, sizeof (cfg_stats)); | |
134 | ||
135 | /* Build a mapping of labels to their associated blocks. */ | |
136 | VARRAY_BB_INIT (label_to_block_map, initial_cfg_capacity, | |
137 | "label to block map"); | |
138 | ||
139 | ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR; | |
140 | EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR; | |
141 | ||
142 | found_computed_goto = 0; | |
143 | make_blocks (*tp); | |
144 | ||
145 | /* Computed gotos are hell to deal with, especially if there are | |
146 | lots of them with a large number of destinations. So we factor | |
147 | them to a common computed goto location before we build the | |
148 | edge list. After we convert back to normal form, we will un-factor | |
149 | the computed gotos since factoring introduces an unwanted jump. */ | |
150 | if (found_computed_goto) | |
151 | factor_computed_gotos (); | |
152 | ||
153 | /* Make sure there is always at least one block, even if it's empty. */ | |
154 | if (n_basic_blocks == 0) | |
155 | create_empty_bb (ENTRY_BLOCK_PTR); | |
156 | ||
157 | create_block_annotation (ENTRY_BLOCK_PTR); | |
158 | create_block_annotation (EXIT_BLOCK_PTR); | |
159 | ||
160 | /* Adjust the size of the array. */ | |
161 | VARRAY_GROW (basic_block_info, n_basic_blocks); | |
162 | ||
163 | /* Create the edges of the flowgraph. */ | |
164 | make_edges (); | |
165 | ||
166 | /* Debugging dumps. */ | |
167 | ||
168 | /* Write the flowgraph to a VCG file. */ | |
169 | { | |
170 | int local_dump_flags; | |
171 | FILE *dump_file = dump_begin (TDI_vcg, &local_dump_flags); | |
172 | if (dump_file) | |
173 | { | |
174 | tree_cfg2vcg (dump_file); | |
175 | dump_end (TDI_vcg, dump_file); | |
176 | } | |
177 | } | |
178 | ||
179 | /* Dump a textual representation of the flowgraph. */ | |
180 | if (dump_file) | |
181 | dump_tree_cfg (dump_file, dump_flags); | |
182 | } | |
183 | ||
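| /* Pass callback: build the CFG for the body of the current function. */ | |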
184 | static void | |
185 | execute_build_cfg (void) | |
186 | { | |
187 | build_tree_cfg (&DECL_SAVED_TREE (current_function_decl)); | |
188 | } | |
189 | ||
190 | struct tree_opt_pass pass_build_cfg = | |
191 | { | |
192 | "cfg", /* name */ | |
193 | NULL, /* gate */ | |
194 | execute_build_cfg, /* execute */ | |
195 | NULL, /* sub */ | |
196 | NULL, /* next */ | |
197 | 0, /* static_pass_number */ | |
198 | TV_TREE_CFG, /* tv_id */ | |
199 | PROP_gimple_leh, /* properties_required */ | |
200 | PROP_cfg, /* properties_provided */ | |
201 | 0, /* properties_destroyed */ | |
202 | 0, /* todo_flags_start */ | |
203 | TODO_verify_stmts /* todo_flags_finish */ | |
204 | }; | |
205 | ||
206 | /* Search the CFG for any computed gotos. If found, factor them to a | |
207 | common computed goto site. Also record the location of that site so | |
208 | that we can un-factor the gotos after we have converted back to | |
209 | normal form. */ | |
210 | ||
211 | static void | |
212 | factor_computed_gotos (void) | |
213 | { | |
214 | basic_block bb; | |
215 | tree factored_label_decl = NULL; | |
216 | tree var = NULL; | |
217 | tree factored_computed_goto_label = NULL; | |
218 | tree factored_computed_goto = NULL; | |
219 | ||
220 | /* We know there are one or more computed gotos in this function. | |
221 | Examine the last statement in each basic block to see if the block | |
222 | ends with a computed goto. */ | |
223 | ||
224 | FOR_EACH_BB (bb) | |
225 | { | |
226 | block_stmt_iterator bsi = bsi_last (bb); | |
227 | tree last; | |
228 | ||
229 | if (bsi_end_p (bsi)) | |
230 | continue; | |
231 | last = bsi_stmt (bsi); | |
232 | ||
233 | /* Ignore the computed goto we create when we factor the original | |
234 | computed gotos. */ | |
235 | if (last == factored_computed_goto) | |
236 | continue; | |
237 | ||
238 | /* If the last statement is a computed goto, factor it. */ | |
239 | if (computed_goto_p (last)) | |
240 | { | |
241 | tree assignment; | |
242 | ||
243 | /* The first time we find a computed goto we need to create | |
244 | the factored goto block and the variable each original | |
245 | computed goto will use for its goto destination. */ | |
246 | if (! factored_computed_goto) | |
247 | { | |
248 | basic_block new_bb = create_empty_bb (bb); | |
249 | block_stmt_iterator new_bsi = bsi_start (new_bb); | |
250 | ||
251 | /* Create the destination of the factored goto. Each original | |
252 | computed goto will put its desired destination into this | |
253 | variable and jump to the label we create immediately | |
254 | below. */ | |
255 | var = create_tmp_var (ptr_type_node, "gotovar"); | |
256 | ||
257 | /* Build a label for the new block which will contain the | |
258 | factored computed goto. */ | |
259 | factored_label_decl = create_artificial_label (); | |
260 | factored_computed_goto_label | |
261 | = build1 (LABEL_EXPR, void_type_node, factored_label_decl); | |
262 | bsi_insert_after (&new_bsi, factored_computed_goto_label, | |
263 | BSI_NEW_STMT); | |
264 | ||
265 | /* Build our new computed goto. */ | |
266 | factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var); | |
267 | bsi_insert_after (&new_bsi, factored_computed_goto, | |
268 | BSI_NEW_STMT); | |
269 | } | |
270 | ||
271 | /* Copy the original computed goto's destination into VAR. */ | |
272 | assignment = build (MODIFY_EXPR, ptr_type_node, | |
273 | var, GOTO_DESTINATION (last)); | |
274 | bsi_insert_before (&bsi, assignment, BSI_SAME_STMT); | |
275 | ||
276 | /* And re-vector the computed goto to the new destination. */ | |
277 | GOTO_DESTINATION (last) = factored_label_decl; | |
278 | } | |
279 | } | |
280 | } | |
281 | ||
282 | ||
283 | /* Create annotations for a single basic block. */ | |
284 | ||
285 | static void | |
286 | create_block_annotation (basic_block bb) | |
287 | { | |
288 | /* Verify that the tree_annotations field is clear. */ | |
289 | if (bb->tree_annotations) | |
290 | abort (); | |
291 | bb->tree_annotations = ggc_alloc_cleared (sizeof (struct bb_ann_d)); | |
292 | } | |
293 | ||
294 | ||
295 | /* Free the annotations for all the basic blocks. */ | |
296 | ||
297 | static void free_blocks_annotations (void) | |
298 | { | |
299 | clear_blocks_annotations (); | |
300 | } | |
301 | ||
302 | ||
303 | /* Clear the annotations for all the basic blocks. */ | |
304 | ||
305 | static void | |
306 | clear_blocks_annotations (void) | |
307 | { | |
308 | basic_block bb; | |
309 | ||
310 | FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb) | |
311 | bb->tree_annotations = NULL; | |
312 | } | |
313 | ||
314 | ||
315 | /* Build a flowgraph for the statement_list STMT_LIST. */ | |
316 | ||
317 | static void | |
318 | make_blocks (tree stmt_list) | |
319 | { | |
320 | tree_stmt_iterator i = tsi_start (stmt_list); | |
321 | tree stmt = NULL; | |
322 | bool start_new_block = true; | |
323 | bool first_stmt_of_list = true; | |
324 | basic_block bb = ENTRY_BLOCK_PTR; | |
325 | ||
326 | while (!tsi_end_p (i)) | |
327 | { | |
328 | tree prev_stmt; | |
329 | ||
330 | prev_stmt = stmt; | |
331 | stmt = tsi_stmt (i); | |
332 | ||
333 | /* If the statement starts a new basic block or if we have determined | |
334 | in a previous pass that we need to create a new block for STMT, do | |
335 | so now. */ | |
336 | if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt)) | |
337 | { | |
338 | if (!first_stmt_of_list) | |
339 | stmt_list = tsi_split_statement_list_before (&i); | |
340 | bb = create_basic_block (stmt_list, NULL, bb); | |
341 | start_new_block = false; | |
342 | } | |
343 | ||
344 | /* Now add STMT to BB and create the subgraphs for special statement | |
345 | codes. */ | |
346 | set_bb_for_stmt (stmt, bb); | |
347 | ||
348 | if (computed_goto_p (stmt)) | |
349 | found_computed_goto = true; | |
350 | ||
351 | /* If STMT is a basic block terminator, set START_NEW_BLOCK for the | |
352 | next iteration. */ | |
353 | if (stmt_ends_bb_p (stmt)) | |
354 | start_new_block = true; | |
355 | ||
356 | tsi_next (&i); | |
357 | first_stmt_of_list = false; | |
358 | } | |
359 | } | |
360 | ||
361 | ||
362 | /* Create and return a new empty basic block after bb AFTER. */ | |
363 | ||
364 | static basic_block | |
365 | create_bb (void *h, void *e, basic_block after) | |
366 | { | |
367 | basic_block bb; | |
368 | ||
369 | if (e) | |
370 | abort (); | |
371 | ||
372 | /* Create and initialize a new basic block. */ | |
373 | bb = alloc_block (); | |
374 | memset (bb, 0, sizeof (*bb)); | |
375 | ||
376 | bb->index = last_basic_block; | |
377 | bb->flags = BB_NEW; | |
378 | bb->stmt_list = h ? h : alloc_stmt_list (); | |
379 | ||
380 | /* Add the new block to the linked list of blocks. */ | |
381 | link_block (bb, after); | |
382 | ||
383 | /* Grow the basic block array if needed. */ | |
384 | if ((size_t) last_basic_block == VARRAY_SIZE (basic_block_info)) | |
385 | { | |
386 | size_t new_size = last_basic_block + (last_basic_block + 3) / 4; | |
387 | VARRAY_GROW (basic_block_info, new_size); | |
388 | } | |
389 | ||
390 | /* Add the newly created block to the array. */ | |
391 | BASIC_BLOCK (last_basic_block) = bb; | |
392 | ||
393 | create_block_annotation (bb); | |
394 | ||
395 | n_basic_blocks++; | |
396 | last_basic_block++; | |
397 | ||
398 | initialize_bb_rbi (bb); | |
399 | return bb; | |
400 | } | |
401 | ||
402 | ||
403 | /*--------------------------------------------------------------------------- | |
404 | Edge creation | |
405 | ---------------------------------------------------------------------------*/ | |
406 | ||
407 | /* Join all the blocks in the flowgraph. */ | |
408 | ||
409 | static void | |
410 | make_edges (void) | |
411 | { | |
412 | basic_block bb; | |
413 | edge e; | |
414 | ||
415 | /* Create an edge from entry to the first block with executable | |
416 | statements in it. */ | |
417 | make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU); | |
418 | ||
419 | /* Traverse basic block array placing edges. */ | |
420 | FOR_EACH_BB (bb) | |
421 | { | |
422 | tree first = first_stmt (bb); | |
423 | tree last = last_stmt (bb); | |
424 | ||
425 | if (first) | |
426 | { | |
427 | /* Edges for statements that always alter flow control. */ | |
428 | if (is_ctrl_stmt (last)) | |
429 | make_ctrl_stmt_edges (bb); | |
430 | ||
431 | /* Edges for statements that sometimes alter flow control. */ | |
432 | if (is_ctrl_altering_stmt (last)) | |
433 | make_exit_edges (bb); | |
434 | } | |
435 | ||
436 | /* Finally, if no edges were created above, this is a regular | |
437 | basic block that only needs a fallthru edge. */ | |
438 | if (bb->succ == NULL) | |
439 | make_edge (bb, bb->next_bb, EDGE_FALLTHRU); | |
440 | } | |
441 | ||
442 | /* If there is a fallthru edge to exit out of the last block, transform it | |
443 | to a return statement. */ | |
444 | for (e = EXIT_BLOCK_PTR->prev_bb->succ; e; e = e->succ_next) | |
445 | if (e->flags & EDGE_FALLTHRU) | |
446 | break; | |
447 | ||
448 | if (e && e->dest == EXIT_BLOCK_PTR) | |
449 | { | |
450 | block_stmt_iterator bsi; | |
451 | basic_block ret_bb = EXIT_BLOCK_PTR->prev_bb; | |
452 | tree x; | |
453 | ||
454 | /* If E->SRC ends with a call that has an abnormal edge (for EH or | |
455 | nonlocal goto), then we will need to split the edge to insert | |
456 | an explicit return statement. */ | |
457 | if (e != ret_bb->succ || e->succ_next) | |
458 | { | |
459 | ret_bb = split_edge (e); | |
460 | e = ret_bb->succ; | |
461 | } | |
462 | e->flags &= ~EDGE_FALLTHRU; | |
463 | ||
464 | x = build (RETURN_EXPR, void_type_node, NULL_TREE); | |
465 | bsi = bsi_last (ret_bb); | |
466 | bsi_insert_after (&bsi, x, BSI_NEW_STMT); | |
467 | } | |
468 | ||
469 | /* We do not care about fake edges, so remove any that the CFG | |
470 | builder inserted for completeness. */ | |
471 | remove_fake_edges (); | |
472 | ||
473 | /* To speed up statement iterator walks, we first purge dead labels. */ | |
474 | cleanup_dead_labels (); | |
475 | ||
476 | /* Clean up the graph and warn for unreachable code. */ | |
477 | cleanup_tree_cfg (); | |
478 | } | |
479 | ||
480 | ||
481 | /* Create edges for the control statement at basic block BB. */ | |
482 | ||
483 | static void | |
484 | make_ctrl_stmt_edges (basic_block bb) | |
485 | { | |
486 | tree last = last_stmt (bb); | |
487 | tree first = first_stmt (bb); | |
488 | ||
489 | #if defined ENABLE_CHECKING | |
490 | if (last == NULL_TREE) | |
491 | abort (); | |
492 | #endif | |
493 | ||
494 | if (TREE_CODE (first) == LABEL_EXPR | |
495 | && DECL_NONLOCAL (LABEL_EXPR_LABEL (first))) | |
496 | make_edge (ENTRY_BLOCK_PTR, bb, EDGE_ABNORMAL); | |
497 | ||
498 | switch (TREE_CODE (last)) | |
499 | { | |
500 | case GOTO_EXPR: | |
501 | make_goto_expr_edges (bb); | |
502 | break; | |
503 | ||
504 | case RETURN_EXPR: | |
505 | make_edge (bb, EXIT_BLOCK_PTR, 0); | |
506 | break; | |
507 | ||
508 | case COND_EXPR: | |
509 | make_cond_expr_edges (bb); | |
510 | break; | |
511 | ||
512 | case SWITCH_EXPR: | |
513 | make_switch_expr_edges (bb); | |
514 | break; | |
515 | ||
516 | case RESX_EXPR: | |
517 | make_eh_edges (last); | |
518 | /* Yet another NORETURN hack. */ | |
519 | if (bb->succ == NULL) | |
520 | make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE); | |
521 | break; | |
522 | ||
523 | default: | |
524 | abort (); | |
525 | } | |
526 | } | |
527 | ||
528 | ||
529 | /* Create exit edges for statements in block BB that alter the flow of | |
530 | control. Statements that alter the control flow are 'goto', 'return' | |
531 | and calls to non-returning functions. */ | |
532 | ||
533 | static void | |
534 | make_exit_edges (basic_block bb) | |
535 | { | |
536 | tree last = last_stmt (bb); | |
537 | ||
538 | if (last == NULL_TREE) | |
539 | abort (); | |
540 | ||
541 | switch (TREE_CODE (last)) | |
542 | { | |
543 | case CALL_EXPR: | |
544 | /* If this function receives a nonlocal goto, then we need to | |
545 | make edges from this call site to all the nonlocal goto | |
546 | handlers. */ | |
547 | if (TREE_SIDE_EFFECTS (last) | |
548 | && current_function_has_nonlocal_label) | |
549 | make_goto_expr_edges (bb); | |
550 | ||
551 | /* If this statement has reachable exception handlers, then | |
552 | create abnormal edges to them. */ | |
553 | make_eh_edges (last); | |
554 | ||
555 | /* Some calls are known not to return. For such calls we create | |
556 | a fake edge. | |
557 | ||
558 | We really need to revamp how we build edges so that it's not | |
559 | such a bloody pain to avoid creating edges for this case since | |
560 | all we do is remove these edges when we're done building the | |
561 | CFG. */ | |
562 | if (call_expr_flags (last) & (ECF_NORETURN | ECF_LONGJMP)) | |
563 | { | |
564 | make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE); | |
565 | return; | |
566 | } | |
567 | ||
568 | /* Don't forget the fall-thru edge. */ | |
569 | make_edge (bb, bb->next_bb, EDGE_FALLTHRU); | |
570 | break; | |
571 | ||
572 | case MODIFY_EXPR: | |
573 | /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the CALL_EXPR | |
574 | may have an abnormal edge. Search the RHS for this case and | |
575 | create any required edges. */ | |
576 | if (TREE_CODE (TREE_OPERAND (last, 1)) == CALL_EXPR | |
577 | && TREE_SIDE_EFFECTS (TREE_OPERAND (last, 1)) | |
578 | && current_function_has_nonlocal_label) | |
579 | make_goto_expr_edges (bb); | |
580 | ||
581 | make_eh_edges (last); | |
582 | make_edge (bb, bb->next_bb, EDGE_FALLTHRU); | |
583 | break; | |
584 | ||
585 | default: | |
586 | abort (); | |
587 | } | |
588 | } | |
589 | ||
590 | ||
591 | /* Create the edges for a COND_EXPR starting at block BB. | |
592 | At this point, both clauses must contain only simple gotos. */ | |
593 | ||
594 | static void | |
595 | make_cond_expr_edges (basic_block bb) | |
596 | { | |
597 | tree entry = last_stmt (bb); | |
598 | basic_block then_bb, else_bb; | |
599 | tree then_label, else_label; | |
600 | ||
601 | #if defined ENABLE_CHECKING | |
602 | if (entry == NULL_TREE || TREE_CODE (entry) != COND_EXPR) | |
603 | abort (); | |
604 | #endif | |
605 | ||
606 | /* Entry basic blocks for each component. */ | |
607 | then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry)); | |
608 | else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry)); | |
609 | then_bb = label_to_block (then_label); | |
610 | else_bb = label_to_block (else_label); | |
611 | ||
612 | make_edge (bb, then_bb, EDGE_TRUE_VALUE); | |
613 | make_edge (bb, else_bb, EDGE_FALSE_VALUE); | |
614 | } | |
615 | ||
616 | ||
617 | /* Create the edges for a SWITCH_EXPR starting at block BB. | |
618 | At this point, the switch body has been lowered and the | |
619 | SWITCH_LABELS filled in, so this is in effect a multi-way branch. */ | |
620 | ||
621 | static void | |
622 | make_switch_expr_edges (basic_block bb) | |
623 | { | |
624 | tree entry = last_stmt (bb); | |
625 | size_t i, n; | |
626 | tree vec; | |
627 | ||
628 | vec = SWITCH_LABELS (entry); | |
629 | n = TREE_VEC_LENGTH (vec); | |
630 | ||
631 | for (i = 0; i < n; ++i) | |
632 | { | |
633 | tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i)); | |
634 | basic_block label_bb = label_to_block (lab); | |
635 | make_edge (bb, label_bb, 0); | |
636 | } | |
637 | } | |
638 | ||
639 | ||
640 | /* Return the basic block holding label DEST. */ | |
641 | ||
642 | basic_block | |
643 | label_to_block (tree dest) | |
644 | { | |
645 | return VARRAY_BB (label_to_block_map, LABEL_DECL_UID (dest)); | |
646 | } | |
647 | ||
648 | ||
649 | /* Create edges for a goto statement at block BB. */ | |
650 | ||
651 | static void | |
652 | make_goto_expr_edges (basic_block bb) | |
653 | { | |
654 | tree goto_t, dest; | |
655 | basic_block target_bb; | |
656 | int for_call; | |
657 | block_stmt_iterator last = bsi_last (bb); | |
658 | ||
659 | goto_t = bsi_stmt (last); | |
660 | ||
661 | /* If the last statement is not a GOTO (i.e., it is a RETURN_EXPR, | |
662 | CALL_EXPR or MODIFY_EXPR), then the edge is an abnormal edge resulting | |
663 | from a nonlocal goto. */ | |
664 | if (TREE_CODE (goto_t) != GOTO_EXPR) | |
665 | { | |
666 | dest = error_mark_node; | |
667 | for_call = 1; | |
668 | } | |
669 | else | |
670 | { | |
671 | dest = GOTO_DESTINATION (goto_t); | |
672 | for_call = 0; | |
673 | ||
674 | /* A GOTO to a local label creates normal edges. */ | |
675 | if (simple_goto_p (goto_t)) | |
676 | { | |
677 | make_edge (bb, label_to_block (dest), EDGE_FALLTHRU); | |
678 | bsi_remove (&last); | |
679 | return; | |
680 | } | |
681 | ||
682 | /* Nothing more to do for nonlocal gotos. */ | |
683 | if (TREE_CODE (dest) == LABEL_DECL) | |
684 | return; | |
685 | ||
686 | /* Computed gotos remain. */ | |
687 | } | |
688 | ||
689 | /* Look for the block starting with the destination label. In the | |
690 | case of a computed goto, make an edge to any label block we find | |
691 | in the CFG. */ | |
692 | FOR_EACH_BB (target_bb) | |
693 | { | |
694 | block_stmt_iterator bsi; | |
695 | ||
696 | for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi)) | |
697 | { | |
698 | tree target = bsi_stmt (bsi); | |
699 | ||
700 | if (TREE_CODE (target) != LABEL_EXPR) | |
701 | break; | |
702 | ||
703 | if ( | |
704 | /* Computed GOTOs. Make an edge to every label block that has | |
705 | been marked as a potential target for a computed goto. */ | |
706 | (FORCED_LABEL (LABEL_EXPR_LABEL (target)) && for_call == 0) | |
707 | /* Nonlocal GOTO target. Make an edge to every label block | |
708 | that has been marked as a potential target for a nonlocal | |
709 | goto. */ | |
710 | || (DECL_NONLOCAL (LABEL_EXPR_LABEL (target)) && for_call == 1)) | |
711 | { | |
712 | make_edge (bb, target_bb, EDGE_ABNORMAL); | |
713 | break; | |
714 | } | |
715 | } | |
716 | } | |
717 | ||
718 | /* Degenerate case of computed goto with no labels. */ | |
719 | if (!for_call && !bb->succ) | |
720 | make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE); | |
721 | } | |
722 | ||
723 | ||
724 | /*--------------------------------------------------------------------------- | |
725 | Flowgraph analysis | |
726 | ---------------------------------------------------------------------------*/ | |
727 | ||
728 | /* Remove unreachable blocks and do other miscellaneous cleanup work. */ | |
729 | ||
730 | void | |
731 | cleanup_tree_cfg (void) | |
732 | { | |
733 | bool something_changed = true; | |
734 | ||
735 | timevar_push (TV_TREE_CLEANUP_CFG); | |
736 | ||
737 | /* These three transformations can cascade, so we iterate on them until | |
738 | nothing changes. */ | |
739 | while (something_changed) | |
740 | { | |
741 | something_changed = cleanup_control_flow (); | |
742 | something_changed |= thread_jumps (); | |
743 | something_changed |= delete_unreachable_blocks (); | |
744 | } | |
745 | ||
746 | /* Merging the blocks creates no new opportunities for the other | |
747 | optimizations, so do it here. */ | |
748 | merge_seq_blocks (); | |
749 | ||
750 | compact_blocks (); | |
751 | ||
752 | #ifdef ENABLE_CHECKING | |
753 | verify_flow_info (); | |
754 | #endif | |
755 | timevar_pop (TV_TREE_CLEANUP_CFG); | |
756 | } | |
757 | ||
758 | ||
759 | /* Clean up useless labels from the flow graph. */ | |
760 | ||
761 | static void | |
762 | cleanup_dead_labels (void) | |
763 | { | |
764 | basic_block bb; | |
765 | tree *label_for_bb = xcalloc (last_basic_block, sizeof (tree)); | |
766 | ||
767 | /* Find a suitable label for each block. We use the first user-defined | |
768 | label if there is one, or otherwise just the first label we see. */ | |
769 | FOR_EACH_BB (bb) | |
770 | { | |
771 | block_stmt_iterator i; | |
772 | ||
773 | for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i)) | |
774 | { | |
775 | tree label, stmt = bsi_stmt (i); | |
776 | ||
777 | if (TREE_CODE (stmt) != LABEL_EXPR) | |
778 | break; | |
779 | ||
780 | label = LABEL_EXPR_LABEL (stmt); | |
781 | ||
782 | /* If we have not yet seen a label for the current block, | |
783 | remember this one and see if there are more labels. */ | |
784 | if (! label_for_bb[bb->index]) | |
785 | { | |
786 | label_for_bb[bb->index] = label; | |
787 | continue; | |
788 | } | |
789 | ||
790 | /* If we did see a label for the current block already, but it | |
791 | is an artificially created label, replace it if the current | |
792 | label is a user-defined label. */ | |
793 | if (! DECL_ARTIFICIAL (label) | |
794 | && DECL_ARTIFICIAL (label_for_bb[bb->index])) | |
795 | { | |
796 | label_for_bb[bb->index] = label; | |
797 | break; | |
798 | } | |
799 | } | |
800 | } | |
801 | ||
802 | /* Now redirect all jumps/branches to the selected label for each block. */ | |
803 | FOR_EACH_BB (bb) | |
804 | { | |
805 | tree stmt = last_stmt (bb); | |
806 | if (!stmt) | |
807 | continue; | |
808 | ||
809 | switch (TREE_CODE (stmt)) | |
810 | { | |
811 | case COND_EXPR: | |
812 | { | |
813 | tree true_branch, false_branch; | |
814 | basic_block true_bb, false_bb; | |
815 | ||
816 | true_branch = COND_EXPR_THEN (stmt); | |
817 | false_branch = COND_EXPR_ELSE (stmt); | |
818 | true_bb = label_to_block (GOTO_DESTINATION (true_branch)); | |
819 | false_bb = label_to_block (GOTO_DESTINATION (false_branch)); | |
820 | ||
821 | GOTO_DESTINATION (true_branch) = label_for_bb[true_bb->index]; | |
822 | GOTO_DESTINATION (false_branch) = label_for_bb[false_bb->index]; | |
823 | ||
824 | break; | |
825 | } | |
826 | ||
827 | case SWITCH_EXPR: | |
828 | { | |
829 | size_t i; | |
830 | tree vec = SWITCH_LABELS (stmt); | |
831 | size_t n = TREE_VEC_LENGTH (vec); | |
832 | ||
833 | /* Replace all destination labels. */ | |
834 | for (i = 0; i < n; ++i) | |
835 | { | |
836 | tree label = CASE_LABEL (TREE_VEC_ELT (vec, i)); | |
837 | ||
838 | CASE_LABEL (TREE_VEC_ELT (vec, i)) = | |
839 | label_for_bb[label_to_block (label)->index]; | |
840 | } | |
841 | ||
842 | break; | |
843 | } | |
844 | ||
845 | default: | |
846 | break; | |
847 | } | |
848 | } | |
849 | ||
850 | /* Finally, purge dead labels. All user-defined labels and labels that | |
851 | can be the target of non-local gotos are preserved. */ | |
852 | FOR_EACH_BB (bb) | |
853 | { | |
854 | block_stmt_iterator i; | |
855 | tree label_for_this_bb = label_for_bb[bb->index]; | |
856 | ||
857 | if (! label_for_this_bb) | |
858 | continue; | |
859 | ||
860 | for (i = bsi_start (bb); !bsi_end_p (i); ) | |
861 | { | |
862 | tree label, stmt = bsi_stmt (i); | |
863 | ||
864 | if (TREE_CODE (stmt) != LABEL_EXPR) | |
865 | break; | |
866 | ||
867 | label = LABEL_EXPR_LABEL (stmt); | |
868 | ||
869 | if (label == label_for_this_bb | |
870 | || ! DECL_ARTIFICIAL (label) | |
871 | || DECL_NONLOCAL (label)) | |
872 | bsi_next (&i); | |
873 | else | |
874 | bsi_remove (&i); | |
875 | } | |
876 | } | |
877 | ||
878 | free (label_for_bb); | |
879 | } | |
880 | ||
881 | ||
882 | /* Checks whether we can merge block B into block A. */ | |
883 | ||
884 | static bool | |
885 | tree_can_merge_blocks_p (basic_block a, basic_block b) | |
886 | { | |
887 | tree stmt; | |
888 | block_stmt_iterator bsi; | |
889 | ||
890 | if (!a->succ | |
891 | || a->succ->succ_next) | |
892 | return false; | |
893 | ||
894 | if (a->succ->flags & EDGE_ABNORMAL) | |
895 | return false; | |
896 | ||
897 | if (a->succ->dest != b) | |
898 | return false; | |
899 | ||
900 | if (b == EXIT_BLOCK_PTR) | |
901 | return false; | |
902 | ||
903 | if (b->pred->pred_next) | |
904 | return false; | |
905 | ||
906 | /* If A ends with a statement that causes exceptions or something similar, | |
907 | we cannot merge the blocks. */ | |
908 | stmt = last_stmt (a); | |
909 | if (stmt && stmt_ends_bb_p (stmt)) | |
910 | return false; | |
911 | ||
912 | /* Do not allow a block with only a non-local label to be merged. */ | |
913 | if (stmt && TREE_CODE (stmt) == LABEL_EXPR | |
914 | && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))) | |
915 | return false; | |
916 | ||
917 | /* B must not contain any PHI nodes at its start. Most such degenerate | |
918 | PHI nodes should have been cleaned up by kill_redundant_phi_nodes. */ | |
919 | if (phi_nodes (b)) | |
920 | return false; | |
921 | ||
922 | /* Do not remove user labels. */ | |
923 | for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi)) | |
924 | { | |
925 | stmt = bsi_stmt (bsi); | |
926 | if (TREE_CODE (stmt) != LABEL_EXPR) | |
927 | break; | |
928 | if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt))) | |
929 | return false; | |
930 | } | |
931 | ||
932 | return true; | |
933 | } | |
934 | ||
935 | ||
936 | /* Merge block B into block A. */ | |
937 | ||
938 | static void | |
939 | tree_merge_blocks (basic_block a, basic_block b) | |
940 | { | |
941 | block_stmt_iterator bsi; | |
942 | tree_stmt_iterator last; | |
943 | ||
944 | if (dump_file) | |
945 | fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index); | |
946 | ||
947 | /* Ensure that B follows A. */ | |
948 | move_block_after (b, a); | |
949 | ||
950 | if (!(a->succ->flags & EDGE_FALLTHRU)) | |
951 | abort (); | |
952 | ||
953 | if (last_stmt (a) | |
954 | && stmt_ends_bb_p (last_stmt (a))) | |
955 | abort (); | |
956 | ||
957 | /* Remove labels from B and set bb_for_stmt to A for other statements. */ | |
958 | for (bsi = bsi_start (b); !bsi_end_p (bsi);) | |
959 | { | |
960 | if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR) | |
961 | bsi_remove (&bsi); | |
962 | else | |
963 | { | |
964 | set_bb_for_stmt (bsi_stmt (bsi), a); | |
965 | bsi_next (&bsi); | |
966 | } | |
967 | } | |
968 | ||
969 | /* Merge the chains. */ | |
970 | last = tsi_last (a->stmt_list); | |
971 | tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT); | |
972 | b->stmt_list = NULL; | |
973 | } | |
974 | ||
975 | ||
976 | /* Walk the function tree removing unnecessary statements. | |
977 | ||
978 | * Empty statement nodes are removed | |
979 | ||
980 | * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed | |
981 | ||
982 | * Unnecessary COND_EXPRs are removed | |
983 | ||
984 | * Some unnecessary BIND_EXPRs are removed | |
985 | ||
986 | Clearly more work could be done. The trick is doing the analysis | |
987 | and removal fast enough to be a net improvement in compile times. | |
988 | ||
989 | Note that when we remove a control structure such as a COND_EXPR, | |
990 | BIND_EXPR, or TRY block, we will need to repeat this optimization pass | |
991 | to ensure we eliminate all the useless code. */ | |
992 | ||
993 | struct rus_data | |
994 | { | |
995 | tree *last_goto; | |
996 | bool repeat; | |
997 | bool may_throw; | |
998 | bool may_branch; | |
999 | bool has_label; | |
1000 | }; | |
1001 | ||
1002 | static void remove_useless_stmts_1 (tree *, struct rus_data *); | |
1003 | ||
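| /* If STMT carries a location, warn that it will never be executed and return | |
| true. Otherwise walk its substatements looking for one that does; return | |
| true if a warning was issued. */ | |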
1004 | static bool | |
1005 | remove_useless_stmts_warn_notreached (tree stmt) | |
1006 | { | |
1007 | if (EXPR_LOCUS (stmt)) | |
1008 | { | |
1009 | warning ("%Hwill never be executed", EXPR_LOCUS (stmt)); | |
1010 | return true; | |
1011 | } | |
1012 | ||
1013 | switch (TREE_CODE (stmt)) | |
1014 | { | |
1015 | case STATEMENT_LIST: | |
1016 | { | |
1017 | tree_stmt_iterator i; | |
1018 | for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i)) | |
1019 | if (remove_useless_stmts_warn_notreached (tsi_stmt (i))) | |
1020 | return true; | |
1021 | } | |
1022 | break; | |
1023 | ||
1024 | case COND_EXPR: | |
1025 | if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt))) | |
1026 | return true; | |
1027 | if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt))) | |
1028 | return true; | |
1029 | if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt))) | |
1030 | return true; | |
1031 | break; | |
1032 | ||
1033 | case TRY_FINALLY_EXPR: | |
1034 | case TRY_CATCH_EXPR: | |
1035 | if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0))) | |
1036 | return true; | |
1037 | if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1))) | |
1038 | return true; | |
1039 | break; | |
1040 | ||
1041 | case CATCH_EXPR: | |
1042 | return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt)); | |
1043 | case EH_FILTER_EXPR: | |
1044 | return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt)); | |
1045 | case BIND_EXPR: | |
1046 | return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt)); | |
1047 | ||
1048 | default: | |
1049 | /* Not a live container. */ | |
1050 | break; | |
1051 | } | |
1052 | ||
1053 | return false; | |
1054 | } | |
1055 | ||
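| /* Helper for remove_useless_stmts_1. Handle the COND_EXPR pointed to by | |
| STMT_P: simplify its arms and remove the conditional when it is useless. */ | |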
1056 | static void | |
1057 | remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data) | |
1058 | { | |
1059 | tree then_clause, else_clause, cond; | |
1060 | bool save_has_label, then_has_label, else_has_label; | |
1061 | ||
1062 | save_has_label = data->has_label; | |
1063 | data->has_label = false; | |
1064 | data->last_goto = NULL; | |
1065 | ||
1066 | remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data); | |
1067 | ||
1068 | then_has_label = data->has_label; | |
1069 | data->has_label = false; | |
1070 | data->last_goto = NULL; | |
1071 | ||
1072 | remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data); | |
1073 | ||
1074 | else_has_label = data->has_label; | |
1075 | data->has_label = save_has_label | then_has_label | else_has_label; | |
1076 | ||
1077 | fold_stmt (stmt_p); | |
1078 | then_clause = COND_EXPR_THEN (*stmt_p); | |
1079 | else_clause = COND_EXPR_ELSE (*stmt_p); | |
1080 | cond = COND_EXPR_COND (*stmt_p); | |
1081 | ||
1082 | /* If neither arm does anything at all, we can remove the whole IF. */ | |
1083 | if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause)) | |
1084 | { | |
1085 | *stmt_p = build_empty_stmt (); | |
1086 | data->repeat = true; | |
1087 | } | |
1088 | ||
1089 | /* If there are no reachable statements in an arm, then we can | |
1090 | zap the entire conditional. */ | |
1091 | else if (integer_nonzerop (cond) && !else_has_label) | |
1092 | { | |
1093 | if (warn_notreached) | |
1094 | remove_useless_stmts_warn_notreached (else_clause); | |
1095 | *stmt_p = then_clause; | |
1096 | data->repeat = true; | |
1097 | } | |
1098 | else if (integer_zerop (cond) && !then_has_label) | |
1099 | { | |
1100 | if (warn_notreached) | |
1101 | remove_useless_stmts_warn_notreached (then_clause); | |
1102 | *stmt_p = else_clause; | |
1103 | data->repeat = true; | |
1104 | } | |
1105 | ||
1106 | /* Check a couple of simple things on then/else with single stmts. */ | |
1107 | else | |
1108 | { | |
1109 | tree then_stmt = expr_only (then_clause); | |
1110 | tree else_stmt = expr_only (else_clause); | |
1111 | ||
1112 | /* Notice branches to a common destination. */ | |
1113 | if (then_stmt && else_stmt | |
1114 | && TREE_CODE (then_stmt) == GOTO_EXPR | |
1115 | && TREE_CODE (else_stmt) == GOTO_EXPR | |
1116 | && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt))) | |
1117 | { | |
1118 | *stmt_p = then_stmt; | |
1119 | data->repeat = true; | |
1120 | } | |
1121 | ||
1122 | /* If the THEN/ELSE clause merely assigns a value to a variable or | |
1123 | parameter which is already known to contain that value, then | |
1124 | remove the useless THEN/ELSE clause. */ | |
1125 | else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL) | |
1126 | { | |
1127 | if (else_stmt | |
1128 | && TREE_CODE (else_stmt) == MODIFY_EXPR | |
1129 | && TREE_OPERAND (else_stmt, 0) == cond | |
1130 | && integer_zerop (TREE_OPERAND (else_stmt, 1))) | |
1131 | COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list (); | |
1132 | } | |
1133 | else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR) | |
1134 | && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL | |
1135 | || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL) | |
1136 | && TREE_CONSTANT (TREE_OPERAND (cond, 1))) | |
1137 | { | |
1138 | tree stmt = (TREE_CODE (cond) == EQ_EXPR | |
1139 | ? then_stmt : else_stmt); | |
1140 | tree *location = (TREE_CODE (cond) == EQ_EXPR | |
1141 | ? &COND_EXPR_THEN (*stmt_p) | |
1142 | : &COND_EXPR_ELSE (*stmt_p)); | |
1143 | ||
1144 | if (stmt | |
1145 | && TREE_CODE (stmt) == MODIFY_EXPR | |
1146 | && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0) | |
1147 | && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1)) | |
1148 | *location = alloc_stmt_list (); | |
1149 | } | |
1150 | } | |
1151 | ||
1152 | /* Protect GOTOs in the arm of COND_EXPRs from being removed. They | |
1153 | would be re-introduced during lowering. */ | |
1154 | data->last_goto = NULL; | |
1155 | } | |
1156 | ||
1157 | ||
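| /* Helper for remove_useless_stmts_1. Handle the TRY_FINALLY_EXPR pointed to | |
| by STMT_P, dropping the wrapper when the body or the cleanup is empty, or | |
| when the body can neither branch nor throw. */ | |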
1158 | static void | |
1159 | remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data) | |
1160 | { | |
1161 | bool save_may_branch, save_may_throw; | |
1162 | bool this_may_branch, this_may_throw; | |
1163 | ||
1164 | /* Collect may_branch and may_throw information for the body only. */ | |
1165 | save_may_branch = data->may_branch; | |
1166 | save_may_throw = data->may_throw; | |
1167 | data->may_branch = false; | |
1168 | data->may_throw = false; | |
1169 | data->last_goto = NULL; | |
1170 | ||
1171 | remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data); | |
1172 | ||
1173 | this_may_branch = data->may_branch; | |
1174 | this_may_throw = data->may_throw; | |
1175 | data->may_branch |= save_may_branch; | |
1176 | data->may_throw |= save_may_throw; | |
1177 | data->last_goto = NULL; | |
1178 | ||
1179 | remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data); | |
1180 | ||
1181 | /* If the body is empty, then we can emit the FINALLY block without | |
1182 | the enclosing TRY_FINALLY_EXPR. */ | |
1183 | if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0))) | |
1184 | { | |
1185 | *stmt_p = TREE_OPERAND (*stmt_p, 1); | |
1186 | data->repeat = true; | |
1187 | } | |
1188 | ||
1189 | /* If the handler is empty, then we can emit the TRY block without | |
1190 | the enclosing TRY_FINALLY_EXPR. */ | |
1191 | else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1))) | |
1192 | { | |
1193 | *stmt_p = TREE_OPERAND (*stmt_p, 0); | |
1194 | data->repeat = true; | |
1195 | } | |
1196 | ||
1197 | /* If the body neither throws nor branches, then we can safely | |
1198 | string the TRY and FINALLY blocks together. */ | |
1199 | else if (!this_may_branch && !this_may_throw) | |
1200 | { | |
1201 | tree stmt = *stmt_p; | |
1202 | *stmt_p = TREE_OPERAND (stmt, 0); | |
1203 | append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p); | |
1204 | data->repeat = true; | |
1205 | } | |
1206 | } | |
1207 | ||
1208 | ||
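| /* Helper for remove_useless_stmts_1. Handle the TRY_CATCH_EXPR pointed to | |
| by STMT_P, removing it when the body cannot throw and tracking whether any | |
| exception can propagate past the handler. */ | |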
1209 | static void | |
1210 | remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data) | |
1211 | { | |
1212 | bool save_may_throw, this_may_throw; | |
1213 | tree_stmt_iterator i; | |
1214 | tree stmt; | |
1215 | ||
1216 | /* Collect may_throw information for the body only. */ | |
1217 | save_may_throw = data->may_throw; | |
1218 | data->may_throw = false; | |
1219 | data->last_goto = NULL; | |
1220 | ||
1221 | remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data); | |
1222 | ||
1223 | this_may_throw = data->may_throw; | |
1224 | data->may_throw = save_may_throw; | |
1225 | ||
1226 | /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */ | |
1227 | if (!this_may_throw) | |
1228 | { | |
1229 | if (warn_notreached) | |
1230 | remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1)); | |
1231 | *stmt_p = TREE_OPERAND (*stmt_p, 0); | |
1232 | data->repeat = true; | |
1233 | return; | |
1234 | } | |
1235 | ||
1236 | /* Process the catch clause specially. We may be able to tell that | |
1237 | no exceptions propagate past this point. */ | |
1238 | ||
1239 | this_may_throw = true; | |
1240 | i = tsi_start (TREE_OPERAND (*stmt_p, 1)); | |
1241 | stmt = tsi_stmt (i); | |
1242 | data->last_goto = NULL; | |
1243 | ||
1244 | switch (TREE_CODE (stmt)) | |
1245 | { | |
1246 | case CATCH_EXPR: | |
1247 | for (; !tsi_end_p (i); tsi_next (&i)) | |
1248 | { | |
1249 | stmt = tsi_stmt (i); | |
1250 | /* If we catch all exceptions, then the body does not | |
1251 | propagate exceptions past this point. */ | |
1252 | if (CATCH_TYPES (stmt) == NULL) | |
1253 | this_may_throw = false; | |
1254 | data->last_goto = NULL; | |
1255 | remove_useless_stmts_1 (&CATCH_BODY (stmt), data); | |
1256 | } | |
1257 | break; | |
1258 | ||
1259 | case EH_FILTER_EXPR: | |
1260 | if (EH_FILTER_MUST_NOT_THROW (stmt)) | |
1261 | this_may_throw = false; | |
1262 | else if (EH_FILTER_TYPES (stmt) == NULL) | |
1263 | this_may_throw = false; | |
1264 | remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data); | |
1265 | break; | |
1266 | ||
1267 | default: | |
1268 | /* Otherwise this is a cleanup. */ | |
1269 | remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data); | |
1270 | ||
1271 | /* If the cleanup is empty, then we can emit the TRY block without | |
1272 | the enclosing TRY_CATCH_EXPR. */ | |
1273 | if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1))) | |
1274 | { | |
1275 | *stmt_p = TREE_OPERAND (*stmt_p, 0); | |
1276 | data->repeat = true; | |
1277 | } | |
1278 | break; | |
1279 | } | |
1280 | data->may_throw |= this_may_throw; | |
1281 | } | |
1282 | ||
1283 | ||
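| /* Helper for remove_useless_stmts_1. Handle the BIND_EXPR pointed to by | |
| STMT_P, removing the wrapper when it declares no variables and is not the | |
| outermost BIND_EXPR of a function. */ | |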
1284 | static void | |
1285 | remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data) | |
1286 | { | |
1287 | tree block; | |
1288 | ||
1289 | /* First remove anything underneath the BIND_EXPR. */ | |
1290 | remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data); | |
1291 | ||
1292 | /* If the BIND_EXPR has no variables, then we can pull everything | |
1293 | up one level and remove the BIND_EXPR, unless this is the toplevel | |
1294 | BIND_EXPR for the current function or an inlined function. | |
1295 | ||
1296 | When this situation occurs we will want to apply this | |
1297 | optimization again. */ | |
1298 | block = BIND_EXPR_BLOCK (*stmt_p); | |
1299 | if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE | |
1300 | && *stmt_p != DECL_SAVED_TREE (current_function_decl) | |
1301 | && (! block | |
1302 | || ! BLOCK_ABSTRACT_ORIGIN (block) | |
1303 | || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) | |
1304 | != FUNCTION_DECL))) | |
1305 | { | |
1306 | *stmt_p = BIND_EXPR_BODY (*stmt_p); | |
1307 | data->repeat = true; | |
1308 | } | |
1309 | } | |
1310 | ||
1311 | ||
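| /* Helper for remove_useless_stmts_1. Handle the GOTO_EXPR pointed to by | |
| STMT_P, recording it so that a goto to the label that immediately follows | |
| it can be deleted later. */ | |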
1312 | static void | |
1313 | remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data) | |
1314 | { | |
1315 | tree dest = GOTO_DESTINATION (*stmt_p); | |
1316 | ||
1317 | data->may_branch = true; | |
1318 | data->last_goto = NULL; | |
1319 | ||
1320 | /* Record the last goto expr, so that we can delete it if unnecessary. */ | |
1321 | if (TREE_CODE (dest) == LABEL_DECL) | |
1322 | data->last_goto = stmt_p; | |
1323 | } | |
1324 | ||
1325 | ||
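| /* Helper for remove_useless_stmts_1. Handle the LABEL_EXPR pointed to by | |
| STMT_P, deleting the preceding goto when it targets this label. */ | |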
1326 | static void | |
1327 | remove_useless_stmts_label (tree *stmt_p, struct rus_data *data) | |
1328 | { | |
1329 | tree label = LABEL_EXPR_LABEL (*stmt_p); | |
1330 | ||
1331 | data->has_label = true; | |
1332 | ||
1333 | /* We do want to jump across non-local label receiver code. */ | |
1334 | if (DECL_NONLOCAL (label)) | |
1335 | data->last_goto = NULL; | |
1336 | ||
1337 | else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label) | |
1338 | { | |
1339 | *data->last_goto = build_empty_stmt (); | |
1340 | data->repeat = true; | |
1341 | } | |
1342 | ||
1343 | /* ??? Add something here to delete unused labels. */ | |
1344 | } | |
1345 | ||
1346 | ||
1347 | /* If CALL invokes a "const" or "pure" function, then clear TREE_SIDE_EFFECTS | |
1348 | on the call. This allows us to eliminate redundant or useless | |
1349 | calls to "const" functions. | |
1350 | ||
1351 | The gimplifier already does the same operation, but we may notice functions | |
1352 | being const and pure only after their calls have been gimplified, so we need | |
1353 | to update the flag here. */ | |
1354 | ||
1355 | static void | |
1356 | update_call_expr_flags (tree call) | |
1357 | { | |
1358 | tree decl = get_callee_fndecl (call); | |
1359 | if (!decl) | |
1360 | return; | |
1361 | if (call_expr_flags (call) & (ECF_CONST | ECF_PURE)) | |
1362 | TREE_SIDE_EFFECTS (call) = 0; | |
1363 | if (TREE_NOTHROW (decl)) | |
1364 | TREE_NOTHROW (call) = 1; | |
1365 | } | |
1366 | ||
1367 | ||
1368 | /* T is a CALL_EXPR. Set the current_function_calls_* flags. */ | |
1369 | ||
1370 | void | |
1371 | notice_special_calls (tree t) | |
1372 | { | |
1373 | int flags = call_expr_flags (t); | |
1374 | ||
1375 | if (flags & ECF_MAY_BE_ALLOCA) | |
1376 | current_function_calls_alloca = true; | |
1377 | if (flags & ECF_RETURNS_TWICE) | |
1378 | current_function_calls_setjmp = true; | |
1379 | } | |
1380 | ||
1381 | ||
1382 | /* Clear flags set by notice_special_calls. Used by dead code removal | |
1383 | to update the flags. */ | |
1384 | ||
1385 | void | |
1386 | clear_special_calls (void) | |
1387 | { | |
1388 | current_function_calls_alloca = false; | |
1389 | current_function_calls_setjmp = false; | |
1390 | } | |
1391 | ||
1392 | ||
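| /* Walk the statement pointed to by TP and dispatch to the handlers above, | |
| removing empty statements and flattening nested statement lists. */ | |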
1393 | static void | |
1394 | remove_useless_stmts_1 (tree *tp, struct rus_data *data) | |
1395 | { | |
1396 | tree t = *tp; | |
1397 | ||
1398 | switch (TREE_CODE (t)) | |
1399 | { | |
1400 | case COND_EXPR: | |
1401 | remove_useless_stmts_cond (tp, data); | |
1402 | break; | |
1403 | ||
1404 | case TRY_FINALLY_EXPR: | |
1405 | remove_useless_stmts_tf (tp, data); | |
1406 | break; | |
1407 | ||
1408 | case TRY_CATCH_EXPR: | |
1409 | remove_useless_stmts_tc (tp, data); | |
1410 | break; | |
1411 | ||
1412 | case BIND_EXPR: | |
1413 | remove_useless_stmts_bind (tp, data); | |
1414 | break; | |
1415 | ||
1416 | case GOTO_EXPR: | |
1417 | remove_useless_stmts_goto (tp, data); | |
1418 | break; | |
1419 | ||
1420 | case LABEL_EXPR: | |
1421 | remove_useless_stmts_label (tp, data); | |
1422 | break; | |
1423 | ||
1424 | case RETURN_EXPR: | |
1425 | fold_stmt (tp); | |
1426 | data->last_goto = NULL; | |
1427 | data->may_branch = true; | |
1428 | break; | |
1429 | ||
1430 | case CALL_EXPR: | |
1431 | fold_stmt (tp); | |
1432 | data->last_goto = NULL; | |
1433 | notice_special_calls (t); | |
1434 | update_call_expr_flags (t); | |
1435 | if (tree_could_throw_p (t)) | |
1436 | data->may_throw = true; | |
1437 | break; | |
1438 | ||
1439 | case MODIFY_EXPR: | |
1440 | data->last_goto = NULL; | |
1441 | fold_stmt (tp); | |
1442 | if (TREE_CODE (TREE_OPERAND (t, 1)) == CALL_EXPR) | |
1443 | { | |
1444 | update_call_expr_flags (TREE_OPERAND (t, 1)); | |
1445 | notice_special_calls (TREE_OPERAND (t, 1)); | |
1446 | } | |
1447 | if (tree_could_throw_p (t)) | |
1448 | data->may_throw = true; | |
1449 | break; | |
1450 | ||
1451 | case STATEMENT_LIST: | |
1452 | { | |
1453 | tree_stmt_iterator i = tsi_start (t); | |
1454 | while (!tsi_end_p (i)) | |
1455 | { | |
1456 | t = tsi_stmt (i); | |
1457 | if (IS_EMPTY_STMT (t)) | |
1458 | { | |
1459 | tsi_delink (&i); | |
1460 | continue; | |
1461 | } | |
1462 | ||
1463 | remove_useless_stmts_1 (tsi_stmt_ptr (i), data); | |
1464 | ||
1465 | t = tsi_stmt (i); | |
1466 | if (TREE_CODE (t) == STATEMENT_LIST) | |
1467 | { | |
1468 | tsi_link_before (&i, t, TSI_SAME_STMT); | |
1469 | tsi_delink (&i); | |
1470 | } | |
1471 | else | |
1472 | tsi_next (&i); | |
1473 | } | |
1474 | } | |
1475 | break; | |
1476 | case SWITCH_EXPR: | |
1477 | fold_stmt (tp); | |
1478 | data->last_goto = NULL; | |
1479 | break; | |
1480 | ||
1481 | default: | |
1482 | data->last_goto = NULL; | |
1483 | break; | |
1484 | } | |
1485 | } | |
1486 | ||
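| /* Entry point for the pass: repeatedly walk the body of the current function | |
| removing useless statements until no further changes are made. */ | |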
1487 | static void | |
1488 | remove_useless_stmts (void) | |
1489 | { | |
1490 | struct rus_data data; | |
1491 | ||
1492 | clear_special_calls (); | |
1493 | ||
1494 | do | |
1495 | { | |
1496 | memset (&data, 0, sizeof (data)); | |
1497 | remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data); | |
1498 | } | |
1499 | while (data.repeat); | |
1500 | } | |
1501 | ||
1502 | ||
1503 | struct tree_opt_pass pass_remove_useless_stmts = | |
1504 | { | |
1505 | "useless", /* name */ | |
1506 | NULL, /* gate */ | |
1507 | remove_useless_stmts, /* execute */ | |
1508 | NULL, /* sub */ | |
1509 | NULL, /* next */ | |
1510 | 0, /* static_pass_number */ | |
1511 | 0, /* tv_id */ | |
1512 | PROP_gimple_any, /* properties_required */ | |
1513 | 0, /* properties_provided */ | |
1514 | 0, /* properties_destroyed */ | |
1515 | 0, /* todo_flags_start */ | |
1516 | TODO_dump_func /* todo_flags_finish */ | |
1517 | }; | |
1518 | ||
1519 | ||
1520 | /* Remove obviously useless statements in basic block BB. */ | |
1521 | ||
1522 | static void | |
1523 | cfg_remove_useless_stmts_bb (basic_block bb) | |
1524 | { | |
1525 | block_stmt_iterator bsi; | |
1526 | tree stmt = NULL_TREE; | |
1527 | tree cond, var = NULL_TREE, val = NULL_TREE; | |
1528 | struct var_ann_d *ann; | |
1529 | ||
1530 | /* Check whether we arrive here from a conditional jump, and if so, get the | |
1531 | condition. */ | |
1532 | if (!bb->pred | |
1533 | || bb->pred->pred_next | |
1534 | || !(bb->pred->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))) | |
1535 | return; | |
1536 | ||
1537 | cond = COND_EXPR_COND (last_stmt (bb->pred->src)); | |
1538 | ||
1539 | if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL) | |
1540 | { | |
1541 | var = cond; | |
1542 | val = (bb->pred->flags & EDGE_FALSE_VALUE | |
1543 | ? boolean_false_node : boolean_true_node); | |
1544 | } | |
1545 | else if (TREE_CODE (cond) == TRUTH_NOT_EXPR | |
1546 | && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL | |
1547 | || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)) | |
1548 | { | |
1549 | var = TREE_OPERAND (cond, 0); | |
1550 | val = (bb->pred->flags & EDGE_FALSE_VALUE | |
1551 | ? boolean_true_node : boolean_false_node); | |
1552 | } | |
1553 | else | |
1554 | { | |
1555 | if (bb->pred->flags & EDGE_FALSE_VALUE) | |
1556 | cond = invert_truthvalue (cond); | |
1557 | if (TREE_CODE (cond) == EQ_EXPR | |
1558 | && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL | |
1559 | || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL) | |
1560 | && (TREE_CODE (TREE_OPERAND (cond, 1)) == VAR_DECL | |
1561 | || TREE_CODE (TREE_OPERAND (cond, 1)) == PARM_DECL | |
1562 | || TREE_CONSTANT (TREE_OPERAND (cond, 1)))) | |
1563 | { | |
1564 | var = TREE_OPERAND (cond, 0); | |
1565 | val = TREE_OPERAND (cond, 1); | |
1566 | } | |
1567 | else | |
1568 | return; | |
1569 | } | |
1570 | ||
1571 | /* Only work for normal local variables. */ | |
1572 | ann = var_ann (var); | |
1573 | if (!ann | |
1574 | || ann->may_aliases | |
1575 | || TREE_ADDRESSABLE (var)) | |
1576 | return; | |
1577 | ||
1578 | if (! TREE_CONSTANT (val)) | |
1579 | { | |
1580 | ann = var_ann (val); | |
1581 | if (!ann | |
1582 | || ann->may_aliases | |
1583 | || TREE_ADDRESSABLE (val)) | |
1584 | return; | |
1585 | } | |
1586 | ||
1587 | /* Ignore floating point variables, since comparisons behave oddly for | |
1588 | them. */ | |
1589 | if (FLOAT_TYPE_P (TREE_TYPE (var))) | |
1590 | return; | |
1591 | ||
1592 | for (bsi = bsi_start (bb); !bsi_end_p (bsi);) | |
1593 | { | |
1594 | stmt = bsi_stmt (bsi); | |
1595 | ||
1596 | /* If this statement merely assigns to VAR the value it is already | |
1597 | known to contain, then the statement is redundant and can be | |
1598 | removed. */ | |
1599 | if (TREE_CODE (stmt) == MODIFY_EXPR | |
1600 | && TREE_OPERAND (stmt, 0) == var | |
1601 | && operand_equal_p (val, TREE_OPERAND (stmt, 1), 0)) | |
1602 | { | |
1603 | bsi_remove (&bsi); | |
1604 | continue; | |
1605 | } | |
1606 | ||
1607 | /* Invalidate the var if we encounter something that could modify it. */ | |
1608 | if (TREE_CODE (stmt) == ASM_EXPR | |
1609 | || TREE_CODE (stmt) == VA_ARG_EXPR | |
1610 | || (TREE_CODE (stmt) == MODIFY_EXPR | |
1611 | && (TREE_OPERAND (stmt, 0) == var | |
1612 | || TREE_OPERAND (stmt, 0) == val | |
1613 | || TREE_CODE (TREE_OPERAND (stmt, 1)) == VA_ARG_EXPR))) | |
1614 | return; | |
1615 | ||
1616 | bsi_next (&bsi); | |
1617 | } | |
1618 | } | |
1619 | ||
1620 | ||
1621 | /* A CFG-aware version of remove_useless_stmts. */ | |
1622 | ||
1623 | void | |
1624 | cfg_remove_useless_stmts (void) | |
1625 | { | |
1626 | basic_block bb; | |
1627 | ||
1628 | #ifdef ENABLE_CHECKING | |
1629 | verify_flow_info (); | |
1630 | #endif | |
1631 | ||
1632 | FOR_EACH_BB (bb) | |
1633 | { | |
1634 | cfg_remove_useless_stmts_bb (bb); | |
1635 | } | |
1636 | } | |
1637 | ||
1638 | ||
1639 | /* Remove PHI nodes associated with basic block BB and all edges out of BB. */ | |
1640 | ||
1641 | static void | |
1642 | remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb) | |
1643 | { | |
1644 | tree phi; | |
1645 | ||
1646 | /* Since this block is no longer reachable, we can just delete all | |
1647 | of its PHI nodes. */ | |
1648 | phi = phi_nodes (bb); | |
1649 | while (phi) | |
1650 | { | |
1651 | tree next = TREE_CHAIN (phi); | |
1652 | remove_phi_node (phi, NULL_TREE, bb); | |
1653 | phi = next; | |
1654 | } | |
1655 | ||
1656 | /* Remove edges to BB's successors. */ | |
1657 | while (bb->succ != NULL) | |
1658 | ssa_remove_edge (bb->succ); | |
1659 | } | |
1660 | ||
1661 | ||
1662 | /* Remove statements of basic block BB. */ | |
1663 | ||
1664 | static void | |
1665 | remove_bb (basic_block bb) | |
1666 | { | |
1667 | block_stmt_iterator i; | |
1668 | location_t *loc = NULL; | |
1669 | ||
1670 | if (dump_file) | |
1671 | { | |
1672 | fprintf (dump_file, "Removing basic block %d\n", bb->index); | |
1673 | if (dump_flags & TDF_DETAILS) | |
1674 | { | |
1675 | dump_bb (bb, dump_file, 0); | |
1676 | fprintf (dump_file, "\n"); | |
1677 | } | |
1678 | } | |
1679 | ||
1680 | /* Remove all the instructions in the block. */ | |
1681 | for (i = bsi_start (bb); !bsi_end_p (i); bsi_remove (&i)) | |
1682 | { | |
1683 | tree stmt = bsi_stmt (i); | |
1684 | ||
1685 | set_bb_for_stmt (stmt, NULL); | |
1686 | ||
1687 | /* Don't warn for removed gotos. Gotos are often removed due to | |
1688 | jump threading, thus resulting in bogus warnings. Not great, | |
1689 | since this way we lose warnings for gotos in the original | |
1690 | program that are indeed unreachable. */ | |
1691 | if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_LOCUS (stmt) && !loc) | |
1692 | loc = EXPR_LOCUS (stmt); | |
1693 | } | |
1694 | ||
1695 | /* If requested, give a warning that the first statement in the | |
1696 | block is unreachable. The loop above walks the statements forwards | |
1697 | and records only the first location it finds, so LOC refers to the | |
1698 | earliest statement in the block that has location information. */ | |
1699 | if (warn_notreached && loc) | |
1700 | warning ("%Hwill never be executed", loc); | |
1701 | ||
1702 | remove_phi_nodes_and_edges_for_unreachable_block (bb); | |
1703 | } | |
1704 | ||
1705 | ||
1706 | /* Examine BB to determine if it is a forwarding block (a block which only | |
1707 | transfers control to a new destination). If BB is a forwarding block, | |
1708 | then return the edge leading to the ultimate destination. */ | |
1709 | ||
1710 | edge | |
1711 | tree_block_forwards_to (basic_block bb) | |
1712 | { | |
1713 | block_stmt_iterator bsi; | |
1714 | bb_ann_t ann = bb_ann (bb); | |
1715 | tree stmt; | |
1716 | ||
1717 | /* If this block is not forwardable, then avoid useless work. */ | |
1718 | if (! ann->forwardable) | |
1719 | return NULL; | |
1720 | ||
1721 | /* Set this block to not be forwardable. This prevents infinite loops since | |
1722 | any block currently under examination is considered non-forwardable. */ | |
1723 | ann->forwardable = 0; | |
1724 | ||
1725 | /* No forwarding is possible if this block is a special block (ENTRY/EXIT), | |
1726 | this block has more than one successor, this block's single successor | |
1727 | is the exit block or is reached via an abnormal edge, this block has | |
1728 | phi nodes, or this block's single successor has phi nodes. */ | |
1729 | if (bb == EXIT_BLOCK_PTR | |
1730 | || bb == ENTRY_BLOCK_PTR | |
1731 | || !bb->succ | |
1732 | || bb->succ->succ_next | |
1733 | || bb->succ->dest == EXIT_BLOCK_PTR | |
1734 | || (bb->succ->flags & EDGE_ABNORMAL) != 0 | |
1735 | || phi_nodes (bb) | |
1736 | || phi_nodes (bb->succ->dest)) | |
1737 | return NULL; | |
1738 | ||
1739 | /* Walk past any labels at the start of this block. */ | |
1740 | for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi)) | |
1741 | { | |
1742 | stmt = bsi_stmt (bsi); | |
1743 | if (TREE_CODE (stmt) != LABEL_EXPR) | |
1744 | break; | |
1745 | } | |
1746 | ||
1747 | /* If we reached the end of this block we may be able to optimize this | |
1748 | case. */ | |
1749 | if (bsi_end_p (bsi)) | |
1750 | { | |
1751 | edge dest; | |
1752 | ||
1753 | /* Recursive call to pick up chains of forwarding blocks. */ | |
1754 | dest = tree_block_forwards_to (bb->succ->dest); | |
1755 | ||
1756 | /* If none found, we forward to bb->succ at minimum. */ | |
1757 | if (!dest) | |
1758 | dest = bb->succ; | |
1759 | ||
1760 | ann->forwardable = 1; | |
1761 | return dest; | |
1762 | } | |
1763 | ||
1764 | /* No forwarding possible. */ | |
1765 | return NULL; | |
1766 | } | |
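| ||
| /* Example (illustrative sketch only): a pass that wants to thread a | |
| jump past a chain of empty forwarding blocks can ask where a | |
| successor ultimately leads: | |
| ||
| edge fwd = tree_block_forwards_to (e->dest); | |
| if (fwd) | |
| ... redirect E towards fwd->dest rather than e->dest ... | |
| ||
| Here E stands for some outgoing edge under consideration; the actual | |
| redirection is left to the caller. */ | |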
1767 | ||
1768 | ||
1769 | /* Try to remove superfluous control structures. */ | |
1770 | ||
1771 | static bool | |
1772 | cleanup_control_flow (void) | |
1773 | { | |
1774 | basic_block bb; | |
1775 | block_stmt_iterator bsi; | |
1776 | bool retval = false; | |
1777 | tree stmt; | |
1778 | ||
1779 | FOR_EACH_BB (bb) | |
1780 | { | |
1781 | bsi = bsi_last (bb); | |
1782 | ||
1783 | if (bsi_end_p (bsi)) | |
1784 | continue; | |
1785 | ||
1786 | stmt = bsi_stmt (bsi); | |
1787 | if (TREE_CODE (stmt) == COND_EXPR | |
1788 | || TREE_CODE (stmt) == SWITCH_EXPR) | |
1789 | retval |= cleanup_control_expr_graph (bb, bsi); | |
1790 | } | |
1791 | return retval; | |
1792 | } | |
1793 | ||
1794 | ||
1795 | /* Disconnect an unreachable block in the control expression starting | |
1796 | at block BB. */ | |
1797 | ||
1798 | static bool | |
1799 | cleanup_control_expr_graph (basic_block bb, block_stmt_iterator bsi) | |
1800 | { | |
1801 | edge taken_edge; | |
1802 | bool retval = false; | |
1803 | tree expr = bsi_stmt (bsi), val; | |
1804 | ||
1805 | if (bb->succ->succ_next) | |
1806 | { | |
1807 | edge e, next; | |
1808 | ||
1809 | switch (TREE_CODE (expr)) | |
1810 | { | |
1811 | case COND_EXPR: | |
1812 | val = COND_EXPR_COND (expr); | |
1813 | break; | |
1814 | ||
1815 | case SWITCH_EXPR: | |
1816 | val = SWITCH_COND (expr); | |
1817 | if (TREE_CODE (val) != INTEGER_CST) | |
1818 | return false; | |
1819 | break; | |
1820 | ||
1821 | default: | |
1822 | abort (); | |
1823 | } | |
1824 | ||
1825 | taken_edge = find_taken_edge (bb, val); | |
1826 | if (!taken_edge) | |
1827 | return false; | |
1828 | ||
1829 | /* Remove all the edges except the one that is always executed. */ | |
1830 | for (e = bb->succ; e; e = next) | |
1831 | { | |
1832 | next = e->succ_next; | |
1833 | if (e != taken_edge) | |
1834 | { | |
1835 | taken_edge->probability += e->probability; | |
1836 | taken_edge->count += e->count; | |
1837 | ssa_remove_edge (e); | |
1838 | retval = true; | |
1839 | } | |
1840 | } | |
1841 | if (taken_edge->probability > REG_BR_PROB_BASE) | |
1842 | taken_edge->probability = REG_BR_PROB_BASE; | |
1843 | } | |
1844 | else | |
1845 | taken_edge = bb->succ; | |
1846 | ||
1847 | bsi_remove (&bsi); | |
1848 | taken_edge->flags = EDGE_FALLTHRU; | |
1849 | ||
1850 | /* We removed some paths from the cfg. */ | |
1851 | if (dom_computed[CDI_DOMINATORS] >= DOM_CONS_OK) | |
1852 | dom_computed[CDI_DOMINATORS] = DOM_CONS_OK; | |
1853 | ||
1854 | return retval; | |
1855 | } | |
1856 | ||
1857 | ||
1858 | /* Given a control block BB and a constant value VAL, return the edge that | |
1859 | will be taken out of the block. If VAL does not match a unique edge, | |
1860 | NULL is returned. */ | |
1861 | ||
1862 | edge | |
1863 | find_taken_edge (basic_block bb, tree val) | |
1864 | { | |
1865 | tree stmt; | |
1866 | ||
1867 | stmt = last_stmt (bb); | |
1868 | ||
1869 | #if defined ENABLE_CHECKING | |
1870 | if (stmt == NULL_TREE || !is_ctrl_stmt (stmt)) | |
1871 | abort (); | |
1872 | #endif | |
1873 | ||
1874 | /* If VAL is not a constant, we can't determine which edge might | |
1875 | be taken. */ | |
1876 | if (val == NULL || !really_constant_p (val)) | |
1877 | return NULL; | |
1878 | ||
1879 | if (TREE_CODE (stmt) == COND_EXPR) | |
1880 | return find_taken_edge_cond_expr (bb, val); | |
1881 | ||
1882 | if (TREE_CODE (stmt) == SWITCH_EXPR) | |
1883 | return find_taken_edge_switch_expr (bb, val); | |
1884 | ||
1885 | return bb->succ; | |
1886 | } | |
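| ||
| /* Example (illustrative sketch): when the predicate of the COND_EXPR | |
| ending BB is known to be zero, the statically taken successor can be | |
| queried directly: | |
| ||
| edge e = find_taken_edge (bb, integer_zero_node); | |
| if (e) | |
| ... only e->dest is reachable for that predicate value ... | |
| ||
| A NULL result means the value does not select a unique edge. */ | |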
1887 | ||
1888 | ||
1889 | /* Given a constant value VAL and the entry block BB to a COND_EXPR | |
1890 | statement, determine which of the two edges will be taken out of the | |
1891 | block. Return NULL if either edge may be taken. */ | |
1892 | ||
1893 | static edge | |
1894 | find_taken_edge_cond_expr (basic_block bb, tree val) | |
1895 | { | |
1896 | edge true_edge, false_edge; | |
1897 | ||
1898 | extract_true_false_edges_from_block (bb, &true_edge, &false_edge); | |
1899 | ||
1900 | /* If both edges of the branch lead to the same basic block, it doesn't | |
1901 | matter which edge is taken. */ | |
1902 | if (true_edge->dest == false_edge->dest) | |
1903 | return true_edge; | |
1904 | ||
1905 | /* Otherwise, try to determine which branch of the if() will be taken. | |
1906 | If VAL is a constant but it can't be reduced to a 0 or a 1, then | |
1907 | we don't really know which edge will be taken at runtime. This | |
1908 | may happen when comparing addresses (e.g., if (&var1 == 4)). */ | |
1909 | if (integer_nonzerop (val)) | |
1910 | return true_edge; | |
1911 | else if (integer_zerop (val)) | |
1912 | return false_edge; | |
1913 | else | |
1914 | return NULL; | |
1915 | } | |
1916 | ||
1917 | ||
1918 | /* Given a constant value VAL and the entry block BB to a SWITCH_EXPR | |
1919 | statement, determine which edge will be taken out of the block. Return | |
1920 | NULL if any edge may be taken. */ | |
1921 | ||
1922 | static edge | |
1923 | find_taken_edge_switch_expr (basic_block bb, tree val) | |
1924 | { | |
1925 | tree switch_expr, taken_case; | |
1926 | basic_block dest_bb; | |
1927 | edge e; | |
1928 | ||
1929 | if (TREE_CODE (val) != INTEGER_CST) | |
1930 | return NULL; | |
1931 | ||
1932 | switch_expr = last_stmt (bb); | |
1933 | taken_case = find_case_label_for_value (switch_expr, val); | |
1934 | dest_bb = label_to_block (CASE_LABEL (taken_case)); | |
1935 | ||
1936 | e = find_edge (bb, dest_bb); | |
1937 | if (!e) | |
1938 | abort (); | |
1939 | return e; | |
1940 | } | |
1941 | ||
1942 | ||
1943 | /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL. */ | |
1944 | ||
1945 | static tree | |
1946 | find_case_label_for_value (tree switch_expr, tree val) | |
1947 | { | |
1948 | tree vec = SWITCH_LABELS (switch_expr); | |
1949 | size_t i, n = TREE_VEC_LENGTH (vec); | |
1950 | tree default_case = NULL; | |
1951 | ||
1952 | for (i = 0; i < n; ++i) | |
1953 | { | |
1954 | tree t = TREE_VEC_ELT (vec, i); | |
1955 | ||
1956 | if (CASE_LOW (t) == NULL) | |
1957 | default_case = t; | |
1958 | else if (CASE_HIGH (t) == NULL) | |
1959 | { | |
1960 | /* A `normal' case label. */ | |
1961 | if (simple_cst_equal (CASE_LOW (t), val) == 1) | |
1962 | return t; | |
1963 | } | |
1964 | else | |
1965 | { | |
1966 | /* A case range. We can only handle integer ranges. */ | |
1967 | if (tree_int_cst_compare (CASE_LOW (t), val) <= 0 | |
1968 | && tree_int_cst_compare (CASE_HIGH (t), val) >= 0) | |
1969 | return t; | |
1970 | } | |
1971 | } | |
1972 | ||
1973 | if (!default_case) | |
1974 | abort (); | |
1975 | ||
1976 | return default_case; | |
1977 | } | |
1978 | ||
1979 | ||
1980 | /* If all the PHI nodes in DEST have alternatives for E1 and E2 and | |
1981 | those alternatives are equal in each of the PHI nodes, then return | |
1982 | true, else return false. */ | |
1983 | ||
1984 | static bool | |
1985 | phi_alternatives_equal (basic_block dest, edge e1, edge e2) | |
1986 | { | |
1987 | tree phi, val1, val2; | |
1988 | int n1, n2; | |
1989 | ||
1990 | for (phi = phi_nodes (dest); phi; phi = TREE_CHAIN (phi)) | |
1991 | { | |
1992 | n1 = phi_arg_from_edge (phi, e1); | |
1993 | n2 = phi_arg_from_edge (phi, e2); | |
1994 | ||
1995 | #ifdef ENABLE_CHECKING | |
1996 | if (n1 < 0 || n2 < 0) | |
1997 | abort (); | |
1998 | #endif | |
1999 | ||
2000 | val1 = PHI_ARG_DEF (phi, n1); | |
2001 | val2 = PHI_ARG_DEF (phi, n2); | |
2002 | ||
2003 | if (!operand_equal_p (val1, val2, 0)) | |
2004 | return false; | |
2005 | } | |
2006 | ||
2007 | return true; | |
2008 | } | |
2009 | ||
2010 | ||
2011 | /* Computing the Dominance Frontier: | |
2012 | ||
2013 | As described in Morgan, section 3.5, this may be done simply by | |
2014 | walking the dominator tree bottom-up, computing the frontier for | |
2015 | the children before the parent. When considering a block B, | |
2016 | there are two cases: | |
2017 | ||
2018 | (1) A flow graph edge leaving B that does not lead to a child | |
2019 | of B in the dominator tree must be a block that is either equal | |
2020 | to B or not dominated by B. Such blocks belong in the frontier | |
2021 | of B. | |
2022 | ||
2023 | (2) Consider a block X in the frontier of one of the children C | |
2024 | of B. If X is not equal to B and is not dominated by B, it | |
2025 | is in the frontier of B. */ | |
2026 | ||
2027 | static void | |
2028 | compute_dominance_frontiers_1 (bitmap *frontiers, basic_block bb, sbitmap done) | |
2029 | { | |
2030 | edge e; | |
2031 | basic_block c; | |
2032 | ||
2033 | SET_BIT (done, bb->index); | |
2034 | ||
2035 | /* Do the frontier of the children first. Not all children in the | |
2036 | dominator tree (blocks dominated by this one) are children in the | |
2037 | CFG, so check all blocks. */ | |
2038 | for (c = first_dom_son (CDI_DOMINATORS, bb); | |
2039 | c; | |
2040 | c = next_dom_son (CDI_DOMINATORS, c)) | |
2041 | { | |
2042 | if (! TEST_BIT (done, c->index)) | |
2043 | compute_dominance_frontiers_1 (frontiers, c, done); | |
2044 | } | |
2045 | ||
2046 | /* Find blocks conforming to rule (1) above. */ | |
2047 | for (e = bb->succ; e; e = e->succ_next) | |
2048 | { | |
2049 | if (e->dest == EXIT_BLOCK_PTR) | |
2050 | continue; | |
2051 | if (get_immediate_dominator (CDI_DOMINATORS, e->dest) != bb) | |
2052 | bitmap_set_bit (frontiers[bb->index], e->dest->index); | |
2053 | } | |
2054 | ||
2055 | /* Find blocks conforming to rule (2). */ | |
2056 | for (c = first_dom_son (CDI_DOMINATORS, bb); | |
2057 | c; | |
2058 | c = next_dom_son (CDI_DOMINATORS, c)) | |
2059 | { | |
2060 | int x; | |
2061 | ||
2062 | EXECUTE_IF_SET_IN_BITMAP (frontiers[c->index], 0, x, | |
2063 | { | |
2064 | if (get_immediate_dominator (CDI_DOMINATORS, BASIC_BLOCK (x)) != bb) | |
2065 | bitmap_set_bit (frontiers[bb->index], x); | |
2066 | }); | |
2067 | } | |
2068 | } | |
2069 | ||
2070 | ||
2071 | void | |
2072 | compute_dominance_frontiers (bitmap *frontiers) | |
2073 | { | |
2074 | sbitmap done = sbitmap_alloc (last_basic_block); | |
2075 | ||
2076 | timevar_push (TV_DOM_FRONTIERS); | |
2077 | ||
2078 | sbitmap_zero (done); | |
2079 | ||
2080 | compute_dominance_frontiers_1 (frontiers, ENTRY_BLOCK_PTR->succ->dest, done); | |
2081 | ||
2082 | sbitmap_free (done); | |
2083 | ||
2084 | timevar_pop (TV_DOM_FRONTIERS); | |
2085 | } | |
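| ||
| /* Example (illustrative sketch): a typical caller allocates one bitmap | |
| per basic block, indexed by block number, before computing the | |
| frontiers (BITMAP_XMALLOC is assumed to be the usual bitmap | |
| allocator from bitmap.h): | |
| ||
| bitmap *dfs = (bitmap *) xmalloc (last_basic_block * sizeof (bitmap)); | |
| basic_block bb; | |
| FOR_EACH_BB (bb) | |
| dfs[bb->index] = BITMAP_XMALLOC (); | |
| compute_dominance_frontiers (dfs); | |
| ||
| The dominance frontier of BB is then found in dfs[bb->index]. */ | |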
2086 | ||
2087 | ||
2088 | ||
2089 | /*--------------------------------------------------------------------------- | |
2090 | Debugging functions | |
2091 | ---------------------------------------------------------------------------*/ | |
2092 | ||
2093 | /* Dump tree-specific information of block BB to file OUTF. */ | |
2094 | ||
2095 | void | |
2096 | tree_dump_bb (basic_block bb, FILE *outf, int indent) | |
2097 | { | |
2098 | dump_generic_bb (outf, bb, indent, TDF_VOPS); | |
2099 | } | |
2100 | ||
2101 | ||
2102 | /* Dump a basic block on stderr. */ | |
2103 | ||
2104 | void | |
2105 | debug_tree_bb (basic_block bb) | |
2106 | { | |
2107 | dump_bb (bb, stderr, 0); | |
2108 | } | |
2109 | ||
2110 | ||
2111 | /* Dump basic block with index N on stderr. */ | |
2112 | ||
2113 | basic_block | |
2114 | debug_tree_bb_n (int n) | |
2115 | { | |
2116 | debug_tree_bb (BASIC_BLOCK (n)); | |
2117 | return BASIC_BLOCK (n); | |
2118 | } | |
2119 | ||
2120 | ||
2121 | /* Dump the CFG on stderr. | |
2122 | ||
2123 | FLAGS are the same used by the tree dumping functions | |
2124 | (see TDF_* in tree.h). */ | |
2125 | ||
2126 | void | |
2127 | debug_tree_cfg (int flags) | |
2128 | { | |
2129 | dump_tree_cfg (stderr, flags); | |
2130 | } | |
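| ||
| /* Example (illustrative sketch): from a debugger session the CFG of | |
| the current function can be printed on stderr with | |
| ||
| (gdb) call debug_tree_cfg (TDF_DETAILS | TDF_STATS) | |
| ||
| which emits the brief block/edge summary followed by the CFG | |
| statistics printed by dump_cfg_stats. */ | |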
2131 | ||
2132 | ||
2133 | /* Dump the program showing basic block boundaries on the given FILE. | |
2134 | ||
2135 | FLAGS are the same used by the tree dumping functions (see TDF_* in | |
2136 | tree.h). */ | |
2137 | ||
2138 | void | |
2139 | dump_tree_cfg (FILE *file, int flags) | |
2140 | { | |
2141 | if (flags & TDF_DETAILS) | |
2142 | { | |
2143 | const char *funcname | |
2144 | = (*lang_hooks.decl_printable_name) (current_function_decl, 2); | |
2145 | ||
2146 | fputc ('\n', file); | |
2147 | fprintf (file, ";; Function %s\n\n", funcname); | |
2148 | fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n", | |
2149 | n_basic_blocks, n_edges, last_basic_block); | |
2150 | ||
2151 | brief_dump_cfg (file); | |
2152 | fprintf (file, "\n"); | |
2153 | } | |
2154 | ||
2155 | if (flags & TDF_STATS) | |
2156 | dump_cfg_stats (file); | |
2157 | ||
2158 | dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS); | |
2159 | } | |
2160 | ||
2161 | ||
2162 | /* Dump CFG statistics on FILE. */ | |
2163 | ||
2164 | void | |
2165 | dump_cfg_stats (FILE *file) | |
2166 | { | |
2167 | static long max_num_merged_labels = 0; | |
2168 | unsigned long size, total = 0; | |
2169 | long n_edges; | |
2170 | basic_block bb; | |
2171 | const char * const fmt_str = "%-30s%-13s%12s\n"; | |
2172 | const char * const fmt_str_1 = "%-30s%13lu%11lu%c\n"; | |
2173 | const char * const fmt_str_3 = "%-43s%11lu%c\n"; | |
2174 | const char *funcname | |
2175 | = (*lang_hooks.decl_printable_name) (current_function_decl, 2); | |
2176 | ||
2177 | ||
2178 | fprintf (file, "\nCFG Statistics for %s\n\n", funcname); | |
2179 | ||
2180 | fprintf (file, "---------------------------------------------------------\n"); | |
2181 | fprintf (file, fmt_str, "", " Number of ", "Memory"); | |
2182 | fprintf (file, fmt_str, "", " instances ", "used "); | |
2183 | fprintf (file, "---------------------------------------------------------\n"); | |
2184 | ||
2185 | size = n_basic_blocks * sizeof (struct basic_block_def); | |
2186 | total += size; | |
2187 | fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks, SCALE (size), | |
2188 | LABEL (size)); | |
2189 | ||
2190 | n_edges = 0; | |
2191 | FOR_EACH_BB (bb) | |
2192 | { | |
2193 | edge e; | |
2194 | for (e = bb->succ; e; e = e->succ_next) | |
2195 | n_edges++; | |
2196 | } | |
2197 | size = n_edges * sizeof (struct edge_def); | |
2198 | total += size; | |
2199 | fprintf (file, fmt_str_1, "Edges", n_edges, SCALE (size), LABEL (size)); | |
2200 | ||
2201 | size = n_basic_blocks * sizeof (struct bb_ann_d); | |
2202 | total += size; | |
2203 | fprintf (file, fmt_str_1, "Basic block annotations", n_basic_blocks, | |
2204 | SCALE (size), LABEL (size)); | |
2205 | ||
2206 | fprintf (file, "---------------------------------------------------------\n"); | |
2207 | fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total), | |
2208 | LABEL (total)); | |
2209 | fprintf (file, "---------------------------------------------------------\n"); | |
2210 | fprintf (file, "\n"); | |
2211 | ||
2212 | if (cfg_stats.num_merged_labels > max_num_merged_labels) | |
2213 | max_num_merged_labels = cfg_stats.num_merged_labels; | |
2214 | ||
2215 | fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n", | |
2216 | cfg_stats.num_merged_labels, max_num_merged_labels); | |
2217 | ||
2218 | fprintf (file, "\n"); | |
2219 | } | |
2220 | ||
2221 | ||
2222 | /* Dump CFG statistics on stderr. Keep extern so that it's always | |
2223 | linked in the final executable. */ | |
2224 | ||
2225 | void | |
2226 | debug_cfg_stats (void) | |
2227 | { | |
2228 | dump_cfg_stats (stderr); | |
2229 | } | |
2230 | ||
2231 | ||
2232 | /* Dump the flowgraph to a .vcg FILE. */ | |
2233 | ||
2234 | static void | |
2235 | tree_cfg2vcg (FILE *file) | |
2236 | { | |
2237 | edge e; | |
2238 | basic_block bb; | |
2239 | const char *funcname | |
2240 | = (*lang_hooks.decl_printable_name) (current_function_decl, 2); | |
2241 | ||
2242 | /* Write the file header. */ | |
2243 | fprintf (file, "graph: { title: \"%s\"\n", funcname); | |
2244 | fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n"); | |
2245 | fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n"); | |
2246 | ||
2247 | /* Write blocks and edges. */ | |
2248 | for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next) | |
2249 | { | |
2250 | fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"", | |
2251 | e->dest->index); | |
2252 | ||
2253 | if (e->flags & EDGE_FAKE) | |
2254 | fprintf (file, " linestyle: dotted priority: 10"); | |
2255 | else | |
2256 | fprintf (file, " linestyle: solid priority: 100"); | |
2257 | ||
2258 | fprintf (file, " }\n"); | |
2259 | } | |
2260 | fputc ('\n', file); | |
2261 | ||
2262 | FOR_EACH_BB (bb) | |
2263 | { | |
2264 | enum tree_code head_code, end_code; | |
2265 | const char *head_name, *end_name; | |
2266 | int head_line = 0; | |
2267 | int end_line = 0; | |
2268 | tree first = first_stmt (bb); | |
2269 | tree last = last_stmt (bb); | |
2270 | ||
2271 | if (first) | |
2272 | { | |
2273 | head_code = TREE_CODE (first); | |
2274 | head_name = tree_code_name[head_code]; | |
2275 | head_line = get_lineno (first); | |
2276 | } | |
2277 | else | |
2278 | head_name = "no-statement"; | |
2279 | ||
2280 | if (last) | |
2281 | { | |
2282 | end_code = TREE_CODE (last); | |
2283 | end_name = tree_code_name[end_code]; | |
2284 | end_line = get_lineno (last); | |
2285 | } | |
2286 | else | |
2287 | end_name = "no-statement"; | |
2288 | ||
2289 | fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n", | |
2290 | bb->index, bb->index, head_name, head_line, end_name, | |
2291 | end_line); | |
2292 | ||
2293 | for (e = bb->succ; e; e = e->succ_next) | |
2294 | { | |
2295 | if (e->dest == EXIT_BLOCK_PTR) | |
2296 | fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index); | |
2297 | else | |
2298 | fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index); | |
2299 | ||
2300 | if (e->flags & EDGE_FAKE) | |
2301 | fprintf (file, " priority: 10 linestyle: dotted"); | |
2302 | else | |
2303 | fprintf (file, " priority: 100 linestyle: solid"); | |
2304 | ||
2305 | fprintf (file, " }\n"); | |
2306 | } | |
2307 | ||
2308 | if (bb->next_bb != EXIT_BLOCK_PTR) | |
2309 | fputc ('\n', file); | |
2310 | } | |
2311 | ||
2312 | fputs ("}\n\n", file); | |
2313 | } | |
2314 | ||
2315 | ||
2316 | ||
2317 | /*--------------------------------------------------------------------------- | |
2318 | Miscellaneous helpers | |
2319 | ---------------------------------------------------------------------------*/ | |
2320 | ||
2321 | /* Return true if T represents a stmt that always transfers control. */ | |
2322 | ||
2323 | bool | |
2324 | is_ctrl_stmt (tree t) | |
2325 | { | |
2326 | return (TREE_CODE (t) == COND_EXPR | |
2327 | || TREE_CODE (t) == SWITCH_EXPR | |
2328 | || TREE_CODE (t) == GOTO_EXPR | |
2329 | || TREE_CODE (t) == RETURN_EXPR | |
2330 | || TREE_CODE (t) == RESX_EXPR); | |
2331 | } | |
2332 | ||
2333 | ||
2334 | /* Return true if T is a statement that may alter the flow of control | |
2335 | (e.g., a call to a non-returning function). */ | |
2336 | ||
2337 | bool | |
2338 | is_ctrl_altering_stmt (tree t) | |
2339 | { | |
2340 | tree call = t; | |
2341 | ||
2342 | #if defined ENABLE_CHECKING | |
2343 | if (t == NULL) | |
2344 | abort (); | |
2345 | #endif | |
2346 | ||
2347 | switch (TREE_CODE (t)) | |
2348 | { | |
2349 | case MODIFY_EXPR: | |
2350 | /* A MODIFY_EXPR with a rhs of a call has the characteristics | |
2351 | of the call. */ | |
2352 | call = TREE_OPERAND (t, 1); | |
2353 | if (TREE_CODE (call) != CALL_EXPR) | |
2354 | break; | |
2355 | /* FALLTHRU */ | |
2356 | ||
2357 | case CALL_EXPR: | |
2358 | /* A non-pure/const CALL_EXPR alters flow control if the current | |
2359 | function has nonlocal labels. */ | |
2360 | if (TREE_SIDE_EFFECTS (t) | |
2361 | && current_function_has_nonlocal_label) | |
2362 | return true; | |
2363 | ||
2364 | /* A CALL_EXPR also alters control flow if it does not return. */ | |
2365 | if (call_expr_flags (call) & (ECF_NORETURN | ECF_LONGJMP)) | |
2366 | return true; | |
2367 | break; | |
2368 | ||
2369 | default: | |
2370 | return false; | |
2371 | } | |
2372 | ||
2373 | /* If a statement can throw, it alters control flow. */ | |
2374 | return tree_can_throw_internal (t); | |
2375 | } | |
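| ||
| /* Example (illustrative sketch): a call such as | |
| ||
| abort (); | |
| ||
| is not a control statement, yet is_ctrl_altering_stmt returns true | |
| for it because abort is declared noreturn (ECF_NORETURN), so the | |
| call must end its basic block (see stmt_ends_bb_p below). */ | |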
2376 | ||
2377 | ||
2378 | /* Return true if T is a computed goto. */ | |
2379 | ||
2380 | bool | |
2381 | computed_goto_p (tree t) | |
2382 | { | |
2383 | return (TREE_CODE (t) == GOTO_EXPR | |
2384 | && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL); | |
2385 | } | |
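| ||
| /* Example (illustrative sketch): with the GNU C labels-as-values | |
| extension, | |
| ||
| void *p = &&lab; | |
| goto *p; | |
| ||
| the goto becomes a GOTO_EXPR whose GOTO_DESTINATION is the value P | |
| rather than a LABEL_DECL, so computed_goto_p returns true. */ | |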
2386 | ||
2387 | ||
2388 | /* Checks whether EXPR is a simple local goto. */ | |
2389 | ||
2390 | bool | |
2391 | simple_goto_p (tree expr) | |
2392 | { | |
2393 | return (TREE_CODE (expr) == GOTO_EXPR | |
2394 | && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL | |
2395 | && (decl_function_context (GOTO_DESTINATION (expr)) | |
2396 | == current_function_decl)); | |
2397 | } | |
2398 | ||
2399 | ||
2400 | /* Return true if T should start a new basic block. PREV_T is the | |
2401 | statement preceding T. It is used when T is a label or a case label. | |
2402 | Labels should only start a new basic block if their previous statement | |
2403 | wasn't a label. Otherwise, a sequence of labels would generate | |
2404 | unnecessary basic blocks that only contain a single label. */ | |
2405 | ||
2406 | static inline bool | |
2407 | stmt_starts_bb_p (tree t, tree prev_t) | |
2408 | { | |
2409 | enum tree_code code; | |
2410 | ||
2411 | if (t == NULL_TREE) | |
2412 | return false; | |
2413 | ||
2414 | /* LABEL_EXPRs start a new basic block only if the preceding | |
2415 | statement wasn't a label of the same type. This prevents the | |
2416 | creation of consecutive blocks that have nothing but a single | |
2417 | label. */ | |
2418 | code = TREE_CODE (t); | |
2419 | if (code == LABEL_EXPR) | |
2420 | { | |
2421 | /* Nonlocal and computed GOTO targets always start a new block. */ | |
2422 | if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t)) | |
2423 | || FORCED_LABEL (LABEL_EXPR_LABEL (t))) | |
2425 | return true; | |
2426 | ||
2427 | if (prev_t && TREE_CODE (prev_t) == code) | |
2428 | { | |
2429 | if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t))) | |
2430 | return true; | |
2431 | ||
2432 | cfg_stats.num_merged_labels++; | |
2433 | return false; | |
2434 | } | |
2435 | else | |
2436 | return true; | |
2437 | } | |
2438 | ||
2439 | return false; | |
2440 | } | |
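| ||
| /* Example (illustrative sketch): in a GIMPLE sequence such as | |
| ||
| L1: | |
| L2: | |
| x = 1; | |
| ||
| only L1 starts a new basic block; L2 is folded into the same block | |
| and accounted for in cfg_stats.num_merged_labels. */ | |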
2441 | ||
2442 | ||
2443 | /* Return true if T should end a basic block. */ | |
2444 | ||
2445 | bool | |
2446 | stmt_ends_bb_p (tree t) | |
2447 | { | |
2448 | return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t); | |
2449 | } | |
2450 | ||
2451 | ||
2452 | /* Add gotos that used to be represented implicitly in the CFG. */ | |
2453 | ||
2454 | void | |
2455 | disband_implicit_edges (void) | |
2456 | { | |
2457 | basic_block bb; | |
2458 | block_stmt_iterator last; | |
2459 | edge e; | |
2460 | tree stmt, label, forward; | |
2461 | ||
2462 | FOR_EACH_BB (bb) | |
2463 | { | |
2464 | last = bsi_last (bb); | |
2465 | stmt = last_stmt (bb); | |
2466 | ||
2467 | if (stmt && TREE_CODE (stmt) == COND_EXPR) | |
2468 | { | |
2469 | /* Remove superfluous gotos from COND_EXPR branches. This was moved | |
2470 | here from cfg_remove_useless_stmts because doing it there violates | |
2471 | the invariants of the tree/CFG correspondence; it fits better here, | |
2472 | where we walk the outgoing edges anyway. */ | |
2473 | for (e = bb->succ; e; e = e->succ_next) | |
2474 | { | |
2475 | if (e->dest != bb->next_bb) | |
2476 | continue; | |
2477 | ||
2478 | if (e->flags & EDGE_TRUE_VALUE) | |
2479 | COND_EXPR_THEN (stmt) = build_empty_stmt (); | |
2480 | else if (e->flags & EDGE_FALSE_VALUE) | |
2481 | COND_EXPR_ELSE (stmt) = build_empty_stmt (); | |
2482 | else | |
2483 | abort (); | |
2484 | e->flags |= EDGE_FALLTHRU; | |
2485 | } | |
2486 | ||
2487 | continue; | |
2488 | } | |
2489 | ||
2490 | if (stmt && TREE_CODE (stmt) == RETURN_EXPR) | |
2491 | { | |
2492 | /* Remove the RETURN_EXPR if we may fall through to the exit | |
2493 | instead. */ | |
2494 | if (!bb->succ | |
2495 | || bb->succ->succ_next | |
2496 | || bb->succ->dest != EXIT_BLOCK_PTR) | |
2497 | abort (); | |
2498 | ||
2499 | if (bb->next_bb == EXIT_BLOCK_PTR | |
2500 | && !TREE_OPERAND (stmt, 0)) | |
2501 | { | |
2502 | bsi_remove (&last); | |
2503 | bb->succ->flags |= EDGE_FALLTHRU; | |
2504 | } | |
2505 | continue; | |
2506 | } | |
2507 | ||
2508 | /* There can be no fallthru edge if the last statement is a control | |
2509 | one. */ | |
2510 | if (stmt && is_ctrl_stmt (stmt)) | |
2511 | continue; | |
2512 | ||
2513 | /* Find a fallthru edge and emit the goto if necessary. */ | |
2514 | for (e = bb->succ; e; e = e->succ_next) | |
2515 | if (e->flags & EDGE_FALLTHRU) | |
2516 | break; | |
2517 | ||
2518 | if (!e | |
2519 | || e->dest == bb->next_bb) | |
2520 | continue; | |
2521 | ||
2522 | if (e->dest == EXIT_BLOCK_PTR) | |
2523 | abort (); | |
2524 | ||
2525 | label = tree_block_label (e->dest); | |
2526 | ||
2527 | /* If this is a goto to a goto, jump to the final destination. | |
2528 | Handles unfactoring of the computed jumps. | |
2529 | ??? Why bother putting this back together when rtl is just | |
2530 | about to take it apart again? */ | |
2531 | forward = last_and_only_stmt (e->dest); | |
2532 | if (forward | |
2533 | && TREE_CODE (forward) == GOTO_EXPR) | |
2534 | label = GOTO_DESTINATION (forward); | |
2535 | ||
2536 | bsi_insert_after (&last, | |
2537 | build1 (GOTO_EXPR, void_type_node, label), | |
2538 | BSI_NEW_STMT); | |
2539 | e->flags &= ~EDGE_FALLTHRU; | |
2540 | } | |
2541 | } | |
2542 | ||
2543 | ||
2544 | /* Remove all the blocks and edges that make up the flowgraph. */ | |
2545 | ||
2546 | void | |
2547 | delete_tree_cfg (void) | |
2548 | { | |
2549 | if (n_basic_blocks > 0) | |
2550 | free_blocks_annotations (); | |
2551 | ||
2552 | free_basic_block_vars (); | |
2553 | basic_block_info = NULL; | |
2554 | label_to_block_map = NULL; | |
2555 | free_rbi_pool (); | |
2556 | } | |
2557 | ||
2558 | ||
2559 | /* Return the first statement in basic block BB. */ | |
2560 | ||
2561 | tree | |
2562 | first_stmt (basic_block bb) | |
2563 | { | |
2564 | block_stmt_iterator i = bsi_start (bb); | |
2565 | return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE; | |
2566 | } | |
2567 | ||
2568 | ||
2569 | /* Return the last statement in basic block BB. */ | |
2570 | ||
2571 | tree | |
2572 | last_stmt (basic_block bb) | |
2573 | { | |
2574 | block_stmt_iterator b = bsi_last (bb); | |
2575 | return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE; | |
2576 | } | |
2577 | ||
2578 | ||
2579 | /* Return a pointer to the last statement in block BB. */ | |
2580 | ||
2581 | tree * | |
2582 | last_stmt_ptr (basic_block bb) | |
2583 | { | |
2584 | block_stmt_iterator last = bsi_last (bb); | |
2585 | return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL; | |
2586 | } | |
2587 | ||
2588 | ||
2589 | /* Return the last statement of an otherwise empty block. Return NULL | |
2590 | if the block is totally empty, or if it contains more than one | |
2591 | statement. */ | |
2592 | ||
2593 | tree | |
2594 | last_and_only_stmt (basic_block bb) | |
2595 | { | |
2596 | block_stmt_iterator i = bsi_last (bb); | |
2597 | tree last, prev; | |
2598 | ||
2599 | if (bsi_end_p (i)) | |
2600 | return NULL_TREE; | |
2601 | ||
2602 | last = bsi_stmt (i); | |
2603 | bsi_prev (&i); | |
2604 | if (bsi_end_p (i)) | |
2605 | return last; | |
2606 | ||
2607 | /* Empty statements should no longer appear in the instruction stream. | |
2608 | Everything that might have appeared before should be deleted by | |
2609 | remove_useless_stmts, and the optimizers should just bsi_remove | |
2610 | instead of smashing with build_empty_stmt. | |
2611 | ||
2612 | Thus the only thing that should appear here in a block containing | |
2613 | one executable statement is a label. */ | |
2614 | prev = bsi_stmt (i); | |
2615 | if (TREE_CODE (prev) == LABEL_EXPR) | |
2616 | return last; | |
2617 | else | |
2618 | return NULL_TREE; | |
2619 | } | |
2620 | ||
2621 | ||
2622 | /* Mark BB as the basic block holding statement T. */ | |
2623 | ||
2624 | void | |
2625 | set_bb_for_stmt (tree t, basic_block bb) | |
2626 | { | |
2627 | if (TREE_CODE (t) == STATEMENT_LIST) | |
2628 | { | |
2629 | tree_stmt_iterator i; | |
2630 | for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i)) | |
2631 | set_bb_for_stmt (tsi_stmt (i), bb); | |
2632 | } | |
2633 | else | |
2634 | { | |
2635 | stmt_ann_t ann = get_stmt_ann (t); | |
2636 | ann->bb = bb; | |
2637 | ||
2638 | /* If the statement is a label, add the label to block-to-labels map | |
2639 | so that we can speed up edge creation for GOTO_EXPRs. */ | |
2640 | if (TREE_CODE (t) == LABEL_EXPR) | |
2641 | { | |
2642 | int uid; | |
2643 | ||
2644 | t = LABEL_EXPR_LABEL (t); | |
2645 | uid = LABEL_DECL_UID (t); | |
2646 | if (uid == -1) | |
2647 | { | |
2648 | LABEL_DECL_UID (t) = uid = cfun->last_label_uid++; | |
2649 | if (VARRAY_SIZE (label_to_block_map) <= (unsigned) uid) | |
2650 | VARRAY_GROW (label_to_block_map, 3 * uid / 2); | |
2651 | } | |
2652 | else | |
2653 | { | |
2654 | #ifdef ENABLE_CHECKING | |
2655 | /* We're moving an existing label. Make sure that we've | |
2656 | removed it from the old block. */ | |
2657 | if (bb && VARRAY_BB (label_to_block_map, uid)) | |
2658 | abort (); | |
2659 | #endif | |
2660 | } | |
2661 | VARRAY_BB (label_to_block_map, uid) = bb; | |
2662 | } | |
2663 | } | |
2664 | } | |
2665 | ||
2666 | ||
2667 | /* Insert statement (or statement list) T before the statement | |
2668 | pointed-to by iterator I. M specifies how to update iterator I | |
2669 | after insertion (see enum bsi_iterator_update). */ | |
2670 | ||
2671 | void | |
2672 | bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m) | |
2673 | { | |
2674 | set_bb_for_stmt (t, i->bb); | |
2675 | modify_stmt (t); | |
2676 | tsi_link_before (&i->tsi, t, m); | |
2677 | } | |
2678 | ||
2679 | ||
2680 | /* Insert statement (or statement list) T after the statement | |
2681 | pointed-to by iterator I. M specifies how to update iterator I | |
2682 | after insertion (see enum bsi_iterator_update). */ | |
2683 | ||
2684 | void | |
2685 | bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m) | |
2686 | { | |
2687 | set_bb_for_stmt (t, i->bb); | |
2688 | modify_stmt (t); | |
2689 | tsi_link_after (&i->tsi, t, m); | |
2690 | } | |
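| ||
| /* Example (illustrative sketch): to append a statement at the end of a | |
| non-empty block BB (assuming BB does not already end in a control | |
| statement), position an iterator on the last statement and link | |
| after it: | |
| ||
| block_stmt_iterator bsi = bsi_last (bb); | |
| bsi_insert_after (&bsi, build1 (GOTO_EXPR, void_type_node, label), | |
| BSI_NEW_STMT); | |
| ||
| With BSI_NEW_STMT the iterator is left pointing at the newly inserted | |
| statement; LABEL stands for whatever LABEL_DECL the caller wants to | |
| jump to. */ | |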
2691 | ||
2692 | ||
2693 | /* Remove the statement pointed to by iterator I. The iterator is updated | |
2694 | to the next statement. */ | |
2695 | ||
2696 | void | |
2697 | bsi_remove (block_stmt_iterator *i) | |
2698 | { | |
2699 | tree t = bsi_stmt (*i); | |
2700 | set_bb_for_stmt (t, NULL); | |
2701 | modify_stmt (t); | |
2702 | tsi_delink (&i->tsi); | |
2703 | } | |
2704 | ||
2705 | ||
2706 | /* Move the statement at FROM so it comes right after the statement at TO. */ | |
2707 | ||
2708 | void | |
2709 | bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to) | |
2710 | { | |
2711 | tree stmt = bsi_stmt (*from); | |
2712 | bsi_remove (from); | |
2713 | bsi_insert_after (to, stmt, BSI_SAME_STMT); | |
2714 | } | |
2715 | ||
2716 | ||
2717 | /* Move the statement at FROM so it comes right before the statement at TO. */ | |
2718 | ||
2719 | void | |
2720 | bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to) | |
2721 | { | |
2722 | tree stmt = bsi_stmt (*from); | |
2723 | bsi_remove (from); | |
2724 | bsi_insert_before (to, stmt, BSI_SAME_STMT); | |
2725 | } | |
2726 | ||
2727 | ||
2728 | /* Move the statement at FROM to the end of basic block BB. */ | |
2729 | ||
2730 | void | |
2731 | bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb) | |
2732 | { | |
2733 | block_stmt_iterator last = bsi_last (bb); | |
2734 | ||
2735 | /* Have to check bsi_end_p because it could be an empty block. */ | |
2736 | if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last))) | |
2737 | bsi_move_before (from, &last); | |
2738 | else | |
2739 | bsi_move_after (from, &last); | |
2740 | } | |
2741 | ||
2742 | ||
2743 | /* Replace the contents of the statement pointed to by iterator BSI | |
2744 | with STMT. If PRESERVE_EH_INFO is true, the exception handling | |
2745 | information of the original statement is preserved. */ | |
2746 | ||
2747 | void | |
2748 | bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool preserve_eh_info) | |
2749 | { | |
2750 | int eh_region; | |
2751 | tree orig_stmt = bsi_stmt (*bsi); | |
2752 | ||
2753 | SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt)); | |
2754 | set_bb_for_stmt (stmt, bsi->bb); | |
2755 | ||
2756 | /* Preserve EH region information from the original statement, if | |
2757 | requested by the caller. */ | |
2758 | if (preserve_eh_info) | |
2759 | { | |
2760 | eh_region = lookup_stmt_eh_region (orig_stmt); | |
2761 | if (eh_region >= 0) | |
2762 | add_stmt_to_eh_region (stmt, eh_region); | |
2763 | } | |
2764 | ||
2765 | *bsi_stmt_ptr (*bsi) = stmt; | |
2766 | modify_stmt (stmt); | |
2767 | } | |
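| ||
| /* Example (illustrative sketch): replacing a statement with a folded | |
| equivalent while keeping its EH region: | |
| ||
| bsi_replace (&bsi, new_stmt, true); | |
| ||
| Passing false leaves the new statement outside the original EH | |
| region, which is only appropriate when it cannot throw. NEW_STMT is | |
| a placeholder for whatever statement the caller built. */ | |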
2768 | ||
2769 | ||
2770 | /* Find a suitable location for inserting a statement on edge E and | |
2771 | return it in *BSI. Every attempt is made to find a location within | |
2772 | an existing basic block, but sometimes that isn't possible; in that | |
2773 | case the edge is split and the location comes from the new block. | |
2774 | ||
2775 | In all cases, the returned *BSI points to the correct location. The | |
2776 | return value is true if insertion should be done after the location, | |
2777 | or false if it should be done before the location. */ | |
2778 | ||
2779 | static bool | |
2780 | tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi) | |
2781 | { | |
2782 | basic_block dest, src; | |
2783 | tree tmp; | |
2784 | ||
2785 | dest = e->dest; | |
2786 | restart: | |
2787 | ||
2788 | /* If the destination block has a single predecessor and no PHI nodes, | |
2789 | insert there. Except for the exit block. | |
2790 | ||
2791 | The requirement for no PHI nodes could be relaxed. Basically we | |
2792 | would have to examine the PHIs to prove that none of them used | |
2793 | the value set by the statement we want to insert on E. That | |
2794 | hardly seems worth the effort. */ | |
2795 | if (dest->pred->pred_next == NULL | |
2796 | && ! phi_nodes (dest) | |
2797 | && dest != EXIT_BLOCK_PTR) | |
2798 | { | |
2799 | *bsi = bsi_start (dest); | |
2800 | if (bsi_end_p (*bsi)) | |
2801 | return true; | |
2802 | ||
2803 | /* Make sure we insert after any leading labels. */ | |
2804 | tmp = bsi_stmt (*bsi); | |
2805 | while (TREE_CODE (tmp) == LABEL_EXPR) | |
2806 | { | |
2807 | bsi_next (bsi); | |
2808 | if (bsi_end_p (*bsi)) | |
2809 | break; | |
2810 | tmp = bsi_stmt (*bsi); | |
2811 | } | |
2812 | ||
2813 | if (bsi_end_p (*bsi)) | |
2814 | { | |
2815 | *bsi = bsi_last (dest); | |
2816 | return true; | |
2817 | } | |
2818 | else | |
2819 | return false; | |
2820 | } | |
2821 | ||
2822 | /* If the source has one successor, the edge is not abnormal and | |
2823 | the last statement does not end a basic block, insert there. | |
2824 | Except for the entry block. */ | |
2825 | src = e->src; | |
2826 | if ((e->flags & EDGE_ABNORMAL) == 0 | |
2827 | && src->succ->succ_next == NULL | |
2828 | && src != ENTRY_BLOCK_PTR) | |
2829 | { | |
2830 | *bsi = bsi_last (src); | |
2831 | if (bsi_end_p (*bsi)) | |
2832 | return true; | |
2833 | ||
2834 | tmp = bsi_stmt (*bsi); | |
2835 | if (!stmt_ends_bb_p (tmp)) | |
2836 | return true; | |
2837 | } | |
2838 | ||
2839 | /* Otherwise, create a new basic block, and split this edge. */ | |
2840 | dest = split_edge (e); | |
2841 | e = dest->pred; | |
2842 | goto restart; | |
2843 | } | |
2844 | ||
2845 | ||
2846 | /* This routine will commit all pending edge insertions, creating any new | |
2847 | basic blocks which are necessary. | |
2848 | ||
2849 | If specified, NEW_BLOCKS returns a count of the number of new basic | |
2850 | blocks which were created. */ | |
2851 | ||
2852 | void | |
2853 | bsi_commit_edge_inserts (int *new_blocks) | |
2854 | { | |
2855 | basic_block bb; | |
2856 | edge e; | |
2857 | int blocks; | |
2858 | ||
2859 | blocks = n_basic_blocks; | |
2860 | ||
2861 | bsi_commit_edge_inserts_1 (ENTRY_BLOCK_PTR->succ); | |
2862 | ||
2863 | FOR_EACH_BB (bb) | |
2864 | for (e = bb->succ; e; e = e->succ_next) | |
2865 | bsi_commit_edge_inserts_1 (e); | |
2866 | ||
2867 | if (new_blocks) | |
2868 | *new_blocks = n_basic_blocks - blocks; | |
2869 | } | |
2870 | ||
2871 | ||
2872 | /* Commit insertions pending at edge E. */ | |
2873 | ||
2874 | static void | |
2875 | bsi_commit_edge_inserts_1 (edge e) | |
2876 | { | |
2877 | if (PENDING_STMT (e)) | |
2878 | { | |
2879 | block_stmt_iterator bsi; | |
2880 | tree stmt = PENDING_STMT (e); | |
2881 | ||
2882 | PENDING_STMT (e) = NULL_TREE; | |
2883 | ||
2884 | if (tree_find_edge_insert_loc (e, &bsi)) | |
2885 | bsi_insert_after (&bsi, stmt, BSI_NEW_STMT); | |
2886 | else | |
2887 | bsi_insert_before (&bsi, stmt, BSI_NEW_STMT); | |
2888 | } | |
2889 | } | |
2890 | ||
2891 | ||
2892 | /* Add STMT to the pending list of edge E. No actual insertion is | |
2893 | made until a call to bsi_commit_edge_inserts () is made. */ | |
2894 | ||
2895 | void | |
2896 | bsi_insert_on_edge (edge e, tree stmt) | |
2897 | { | |
2898 | append_to_statement_list (stmt, &PENDING_STMT (e)); | |
2899 | } | |
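| ||
| /* Example (illustrative sketch): statements queued on edges are only | |
| linked into the IL when the pending insertions are committed: | |
| ||
| bsi_insert_on_edge (e, stmt); | |
| ... | |
| bsi_commit_edge_inserts (NULL); | |
| ||
| Passing NULL for NEW_BLOCKS discards the count of blocks created by | |
| any edge splitting that was needed. */ | |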
2900 | ||
2901 | ||
2902 | /* Specialized edge insertion for SSA-PRE. FIXME: This should | |
2903 | probably disappear. The only reason it's here is because PRE needs | |
2904 | the call to tree_find_edge_insert_loc(). */ | |
2905 | ||
2906 | void pre_insert_on_edge (edge e, tree stmt); | |
2907 | ||
2908 | void | |
2909 | pre_insert_on_edge (edge e, tree stmt) | |
2910 | { | |
2911 | block_stmt_iterator bsi; | |
2912 | ||
2913 | if (PENDING_STMT (e)) | |
2914 | abort (); | |
2915 | ||
2916 | if (tree_find_edge_insert_loc (e, &bsi)) | |
2917 | bsi_insert_after (&bsi, stmt, BSI_NEW_STMT); | |
2918 | else | |
2919 | bsi_insert_before (&bsi, stmt, BSI_NEW_STMT); | |
2920 | } | |
2921 | ||
2922 | ||
2923 | /*--------------------------------------------------------------------------- | |
2924 | Tree specific functions for CFG manipulation | |
2925 | ---------------------------------------------------------------------------*/ | |
2926 | ||
2927 | /* Split a (typically critical) edge EDGE_IN. Return the new block. | |
2928 | Abort on abnormal edges. */ | |
2929 | ||
2930 | static basic_block | |
2931 | tree_split_edge (edge edge_in) | |
2932 | { | |
2933 | basic_block new_bb, after_bb, dest, src; | |
2934 | edge new_edge, e; | |
2935 | tree phi; | |
2936 | int i, num_elem; | |
2937 | ||
2938 | /* Abnormal edges cannot be split. */ | |
2939 | if (edge_in->flags & EDGE_ABNORMAL) | |
2940 | abort (); | |
2941 | ||
2942 | src = edge_in->src; | |
2943 | dest = edge_in->dest; | |
2944 | ||
2945 | /* Place the new block in the block list. Try to keep the new block | |
2946 | near its "logical" location. This is of most help to humans looking | |
2947 | at debugging dumps. */ | |
2948 | for (e = dest->pred; e; e = e->pred_next) | |
2949 | if (e->src->next_bb == dest) | |
2950 | break; | |
2951 | if (!e) | |
2952 | after_bb = dest->prev_bb; | |
2953 | else | |
2954 | after_bb = edge_in->src; | |
2955 | ||
2956 | new_bb = create_empty_bb (after_bb); | |
2957 | new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU); | |
2958 | ||
2959 | /* Find all the PHI arguments on the original edge, and change them to | |
2960 | the new edge. Do it before redirection, so that the argument does not | |
2961 | get removed. */ | |
2962 | for (phi = phi_nodes (dest); phi; phi = TREE_CHAIN (phi)) | |
2963 | { | |
2964 | num_elem = PHI_NUM_ARGS (phi); | |
2965 | for (i = 0; i < num_elem; i++) | |
2966 | if (PHI_ARG_EDGE (phi, i) == edge_in) | |
2967 | { | |
2968 | PHI_ARG_EDGE (phi, i) = new_edge; | |
2969 | break; | |
2970 | } | |
2971 | } | |
2972 | ||
2973 | if (!redirect_edge_and_branch (edge_in, new_bb)) | |
2974 | abort (); | |
2975 | ||
2976 | if (PENDING_STMT (edge_in)) | |
2977 | abort (); | |
2978 | ||
2979 | return new_bb; | |
2980 | } | |
2981 | ||
2982 | ||
2983 | /* Return true when BB has label LABEL in it. */ | |
2984 | ||
2985 | static bool | |
2986 | has_label_p (basic_block bb, tree label) | |
2987 | { | |
2988 | block_stmt_iterator bsi; | |
2989 | ||
2990 | for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi)) | |
2991 | { | |
2992 | tree stmt = bsi_stmt (bsi); | |
2993 | ||
2994 | if (TREE_CODE (stmt) != LABEL_EXPR) | |
2995 | return false; | |
2996 | if (LABEL_EXPR_LABEL (stmt) == label) | |
2997 | return true; | |
2998 | } | |
2999 | return false; | |
3000 | } | |
3001 | ||
3002 | ||
3003 | /* Callback for walk_tree. Check that all elements whose address is | |
3004 | taken are properly marked as addressable. */ | |
3005 | ||
3006 | static tree | |
3007 | verify_expr (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, | |
3008 | void *data ATTRIBUTE_UNUSED) | |
3009 | { | |
3010 | tree t = *tp, x; | |
3011 | ||
3012 | if (TYPE_P (t)) | |
3013 | *walk_subtrees = 0; | |
3014 | ||
3015 | switch (TREE_CODE (t)) | |
3016 | { | |
3017 | case SSA_NAME: | |
3018 | if (SSA_NAME_IN_FREE_LIST (t)) | |
3019 | { | |
3020 | error ("SSA name in freelist but still referenced"); | |
3021 | return *tp; | |
3022 | } | |
3023 | break; | |
3024 | ||
3025 | case MODIFY_EXPR: | |
3026 | x = TREE_OPERAND (t, 0); | |
3027 | if (TREE_CODE (x) == BIT_FIELD_REF | |
3028 | && is_gimple_reg (TREE_OPERAND (x, 0))) | |
3029 | { | |
3030 | error ("GIMPLE register modified with BIT_FIELD_REF"); | |
3031 | return *tp; | |
3032 | } | |
3033 | break; | |
3034 | ||
3035 | case ADDR_EXPR: | |
3036 | x = TREE_OPERAND (t, 0); | |
3037 | while (TREE_CODE (x) == ARRAY_REF | |
3038 | || TREE_CODE (x) == COMPONENT_REF | |
3039 | || TREE_CODE (x) == REALPART_EXPR | |
3040 | || TREE_CODE (x) == IMAGPART_EXPR) | |
3041 | x = TREE_OPERAND (x, 0); | |
3042 | if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL) | |
3043 | return NULL; | |
3044 | if (!TREE_ADDRESSABLE (x)) | |
3045 | { | |
3046 | error ("address taken, but ADDRESSABLE bit not set"); | |
3047 | return x; | |
3048 | } | |
3049 | break; | |
3050 | ||
3051 | case COND_EXPR: | |
3052 | x = TREE_OPERAND (t, 0); | |
3053 | if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE) | |
3054 | { | |
3055 | error ("non-boolean used in condition"); | |
3056 | return x; | |
3057 | } | |
3058 | break; | |
3059 | ||
3060 | case NOP_EXPR: | |
3061 | case CONVERT_EXPR: | |
3062 | case FIX_TRUNC_EXPR: | |
3063 | case FIX_CEIL_EXPR: | |
3064 | case FIX_FLOOR_EXPR: | |
3065 | case FIX_ROUND_EXPR: | |
3066 | case FLOAT_EXPR: | |
3067 | case NEGATE_EXPR: | |
3068 | case ABS_EXPR: | |
3069 | case BIT_NOT_EXPR: | |
3070 | case NON_LVALUE_EXPR: | |
3071 | case TRUTH_NOT_EXPR: | |
3072 | x = TREE_OPERAND (t, 0); | |
3073 | /* We check for constants explicitly since they are not considered | |
3074 | gimple invariants if they overflowed. */ | |
3075 | if (TREE_CODE_CLASS (TREE_CODE (x)) != 'c' | |
3076 | && !is_gimple_val (x)) | |
3077 | { | |
3078 | error ("Invalid operand to unary operator"); | |
3079 | return x; | |
3080 | } | |
3081 | break; | |
3082 | ||
3083 | case REALPART_EXPR: | |
3084 | case IMAGPART_EXPR: | |
3085 | break; | |
3086 | ||
3087 | case LT_EXPR: | |
3088 | case LE_EXPR: | |
3089 | case GT_EXPR: | |
3090 | case GE_EXPR: | |
3091 | case EQ_EXPR: | |
3092 | case NE_EXPR: | |
3093 | case UNORDERED_EXPR: | |
3094 | case ORDERED_EXPR: | |
3095 | case UNLT_EXPR: | |
3096 | case UNLE_EXPR: | |
3097 | case UNGT_EXPR: | |
3098 | case UNGE_EXPR: | |
3099 | case UNEQ_EXPR: | |
3100 | case PLUS_EXPR: | |
3101 | case MINUS_EXPR: | |
3102 | case MULT_EXPR: | |
3103 | case TRUNC_DIV_EXPR: | |
3104 | case CEIL_DIV_EXPR: | |
3105 | case FLOOR_DIV_EXPR: | |
3106 | case ROUND_DIV_EXPR: | |
3107 | case TRUNC_MOD_EXPR: | |
3108 | case CEIL_MOD_EXPR: | |
3109 | case FLOOR_MOD_EXPR: | |
3110 | case ROUND_MOD_EXPR: | |
3111 | case RDIV_EXPR: | |
3112 | case EXACT_DIV_EXPR: | |
3113 | case MIN_EXPR: | |
3114 | case MAX_EXPR: | |
3115 | case LSHIFT_EXPR: | |
3116 | case RSHIFT_EXPR: | |
3117 | case LROTATE_EXPR: | |
3118 | case RROTATE_EXPR: | |
3119 | case BIT_IOR_EXPR: | |
3120 | case BIT_XOR_EXPR: | |
3121 | case BIT_AND_EXPR: | |
3122 | x = TREE_OPERAND (t, 0); | |
3123 | /* We check for constants explicitly since they are not considered | |
3124 | gimple invariants if they overflowed. */ | |
3125 | if (TREE_CODE_CLASS (TREE_CODE (x)) != 'c' | |
3126 | && !is_gimple_val (x)) | |
3127 | { | |
3128 | error ("Invalid operand to binary operator"); | |
3129 | return x; | |
3130 | } | |
3131 | x = TREE_OPERAND (t, 1); | |
3132 | /* We check for constants explicitly since they are not considered | |
3133 | gimple invariants if they overflowed. */ | |
3134 | if (TREE_CODE_CLASS (TREE_CODE (x)) != 'c' | |
3135 | && !is_gimple_val (x)) | |
3136 | { | |
3137 | error ("Invalid operand to binary operator"); | |
3138 | return x; | |
3139 | } | |
3140 | break; | |
3141 | ||
3142 | default: | |
3143 | break; | |
3144 | } | |
3145 | return NULL; | |
3146 | } | |
3147 | ||
3148 | ||
3149 | /* Verify STMT, return true if STMT is not in GIMPLE form. | |
3150 | TODO: Implement type checking. */ | |
3151 | ||
3152 | static bool | |
3153 | verify_stmt (tree stmt) | |
3154 | { | |
3155 | tree addr; | |
3156 | ||
3157 | if (!is_gimple_stmt (stmt)) | |
3158 | { | |
3159 | error ("Is not a valid GIMPLE statement."); | |
3160 | debug_generic_stmt (stmt); | |
3161 | return true; | |
3162 | } | |
3163 | ||
3164 | addr = walk_tree (&stmt, verify_expr, NULL, NULL); | |
3165 | if (addr) | |
3166 | { | |
3167 | debug_generic_stmt (addr); | |
3168 | return true; | |
3169 | } | |
3170 | ||
3171 | return false; | |
3172 | } | |
3173 | ||
3174 | ||
3175 | /* Return true when T can be shared. */ | |
3176 | ||
3177 | static bool | |
3178 | tree_node_can_be_shared (tree t) | |
3179 | { | |
3180 | if (TYPE_P (t) || DECL_P (t) | |
3181 | /* We check for constants explicitly since they are not considered | |
3182 | gimple invariants if they overflowed. */ | |
3183 | || TREE_CODE_CLASS (TREE_CODE (t)) == 'c' | |
3184 | || is_gimple_min_invariant (t) | |
3185 | || TREE_CODE (t) == SSA_NAME) | |
3186 | return true; | |
3187 | ||
3188 | while ((TREE_CODE (t) == ARRAY_REF | |
3189 | /* We check for constants explicitly since they are not considered | |
3190 | gimple invariants if they overflowed. */ | |
3191 | && (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t, 1))) == 'c' | |
3192 | || is_gimple_min_invariant (TREE_OPERAND (t, 1)))) | |
3193 | || (TREE_CODE (t) == COMPONENT_REF | |
3194 | || TREE_CODE (t) == REALPART_EXPR | |
3195 | || TREE_CODE (t) == IMAGPART_EXPR)) | |
3196 | t = TREE_OPERAND (t, 0); | |
3197 | ||
3198 | if (DECL_P (t)) | |
3199 | return true; | |
3200 | ||
3201 | return false; | |
3202 | } | |
3203 | ||
3204 | ||
3205 | /* Called via walk_tree. Verify tree sharing. */ | |
3206 | ||
3207 | static tree | |
3208 | verify_node_sharing (tree * tp, int *walk_subtrees, void *data) | |
3209 | { | |
3210 | htab_t htab = (htab_t) data; | |
3211 | void **slot; | |
3212 | ||
3213 | if (tree_node_can_be_shared (*tp)) | |
3214 | { | |
3215 | *walk_subtrees = false; | |
3216 | return NULL; | |
3217 | } | |
3218 | ||
3219 | slot = htab_find_slot (htab, *tp, INSERT); | |
3220 | if (*slot) | |
3221 | return *slot; | |
3222 | *slot = *tp; | |
3223 | ||
3224 | return NULL; | |
3225 | } | |
3226 | ||
3227 | ||
3228 | /* Verify the GIMPLE statement chain. */ | |
3229 | ||
3230 | void | |
3231 | verify_stmts (void) | |
3232 | { | |
3233 | basic_block bb; | |
3234 | block_stmt_iterator bsi; | |
3235 | bool err = false; | |
3236 | htab_t htab; | |
3237 | tree addr; | |
3238 | ||
3239 | timevar_push (TV_TREE_STMT_VERIFY); | |
3240 | htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL); | |
3241 | ||
3242 | FOR_EACH_BB (bb) | |
3243 | { | |
3244 | tree phi; | |
3245 | int i; | |
3246 | ||
3247 | for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi)) | |
3248 | { | |
3249 | int phi_num_args = PHI_NUM_ARGS (phi); | |
3250 | ||
3251 | for (i = 0; i < phi_num_args; i++) | |
3252 | { | |
3253 | tree t = PHI_ARG_DEF (phi, i); | |
3254 | tree addr; | |
3255 | ||
3256 | /* Addressable variables do have SSA_NAMEs but they | |
3257 | are not considered gimple values. */ | |
3258 | if (TREE_CODE (t) != SSA_NAME | |
3259 | && TREE_CODE (t) != FUNCTION_DECL | |
3260 | && !is_gimple_val (t)) | |
3261 | { | |
3262 | error ("PHI def is not a GIMPLE value"); | |
3263 | debug_generic_stmt (phi); | |
3264 | debug_generic_stmt (t); | |
3265 | err |= true; | |
3266 | } | |
3267 | ||
3268 | addr = walk_tree (&t, verify_expr, NULL, NULL); | |
3269 | if (addr) | |
3270 | { | |
3271 | debug_generic_stmt (addr); | |
3272 | err |= true; | |
3273 | } | |
3274 | ||
3275 | addr = walk_tree (&t, verify_node_sharing, htab, NULL); | |
3276 | if (addr) | |
3277 | { | |
3278 | error ("Incorrect sharing of tree nodes"); | |
3279 | debug_generic_stmt (phi); | |
3280 | debug_generic_stmt (addr); | |
3281 | err |= true; | |
3282 | } | |
3283 | } | |
3284 | } | |
3285 | ||
3286 | for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi)) | |
3287 | { | |
3288 | tree stmt = bsi_stmt (bsi); | |
3289 | err |= verify_stmt (stmt); | |
3290 | addr = walk_tree (&stmt, verify_node_sharing, htab, NULL); | |
3291 | if (addr) | |
3292 | { | |
3293 | error ("Incorrect sharing of tree nodes"); | |
3294 | debug_generic_stmt (stmt); | |
3295 | debug_generic_stmt (addr); | |
3296 | err |= true; | |
3297 | } | |
3298 | } | |
3299 | } | |
3300 | ||
3301 | if (err) | |
3302 | internal_error ("verify_stmts failed."); | |
3303 | ||
3304 | htab_delete (htab); | |
3305 | timevar_pop (TV_TREE_STMT_VERIFY); | |
3306 | } | |
3307 | ||
3308 | ||
3309 | /* Verifies that the flow information is OK. */ | |
3310 | ||
3311 | static int | |
3312 | tree_verify_flow_info (void) | |
3313 | { | |
3314 | int err = 0; | |
3315 | basic_block bb; | |
3316 | block_stmt_iterator bsi; | |
3317 | tree stmt; | |
3318 | edge e; | |
3319 | ||
3320 | if (ENTRY_BLOCK_PTR->stmt_list) | |
3321 | { | |
3322 | error ("ENTRY_BLOCK has a statement list associated with it\n"); | |
3323 | err = 1; | |
3324 | } | |
3325 | ||
3326 | if (EXIT_BLOCK_PTR->stmt_list) | |
3327 | { | |
3328 | error ("EXIT_BLOCK has a statement list associated with it\n"); | |
3329 | err = 1; | |
3330 | } | |
3331 | ||
3332 | for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next) | |
3333 | if (e->flags & EDGE_FALLTHRU) | |
3334 | { | |
3335 | error ("Fallthru to exit from bb %d\n", e->src->index); | |
3336 | err = 1; | |
3337 | } | |
3338 | ||
3339 | FOR_EACH_BB (bb) | |
3340 | { | |
3341 | bool found_ctrl_stmt = false; | |
3342 | ||
3343 | /* Skip labels on the start of basic block. */ | |
3344 | for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi)) | |
3345 | { | |
3346 | if (TREE_CODE (bsi_stmt (bsi)) != LABEL_EXPR) | |
3347 | break; | |
3348 | ||
3349 | if (label_to_block (LABEL_EXPR_LABEL (bsi_stmt (bsi))) != bb) | |
3350 | { | |
3351 | error ("Label %s to block does not match in bb %d\n", | |
3352 | IDENTIFIER_POINTER (DECL_NAME (bsi_stmt (bsi))), | |
3353 | bb->index); | |
3354 | err = 1; | |
3355 | } | |
3356 | ||
3357 | if (decl_function_context (LABEL_EXPR_LABEL (bsi_stmt (bsi))) | |
3358 | != current_function_decl) | |
3359 | { | |
3360 | error ("Label %s has incorrect context in bb %d\n", | |
3361 | IDENTIFIER_POINTER (DECL_NAME (bsi_stmt (bsi))), | |
3362 | bb->index); | |
3363 | err = 1; | |
3364 | } | |
3365 | } | |
3366 | ||
3367 | /* Verify that body of basic block BB is free of control flow. */ | |
3368 | for (; !bsi_end_p (bsi); bsi_next (&bsi)) | |
3369 | { | |
3370 | tree stmt = bsi_stmt (bsi); | |
3371 | ||
3372 | if (found_ctrl_stmt) | |
3373 | { | |
3374 | error ("Control flow in the middle of basic block %d\n", | |
3375 | bb->index); | |
3376 | err = 1; | |
3377 | } | |
3378 | ||
3379 | if (stmt_ends_bb_p (stmt)) | |
3380 | found_ctrl_stmt = true; | |
3381 | ||
3382 | if (TREE_CODE (stmt) == LABEL_EXPR) | |
3383 | { | |
3384 | error ("Label %s in the middle of basic block %d\n", | |
3385 | IDENTIFIER_POINTER (DECL_NAME (stmt)), | |
3386 | bb->index); | |
3387 | err = 1; | |
3388 | } | |
3389 | } | |
3390 | bsi = bsi_last (bb); | |
3391 | if (bsi_end_p (bsi)) | |
3392 | continue; | |
3393 | ||
3394 | stmt = bsi_stmt (bsi); | |
3395 | ||
3396 | if (is_ctrl_stmt (stmt)) | |
3397 | { | |
3398 | for (e = bb->succ; e; e = e->succ_next) | |
3399 | if (e->flags & EDGE_FALLTHRU) | |
3400 | { | |
3401 | error ("Fallthru edge after a control statement in bb %d \n", | |
3402 | bb->index); | |
3403 | err = 1; | |
3404 | } | |
3405 | } | |
3406 | ||
3407 | switch (TREE_CODE (stmt)) | |
3408 | { | |
3409 | case COND_EXPR: | |
3410 | { | |
3411 | edge true_edge; | |
3412 | edge false_edge; | |
3413 | if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR | |
3414 | || TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR) | |
3415 | { | |
3416 | error ("Structured COND_EXPR at the end of bb %d\n", bb->index); | |
3417 | err = 1; | |
3418 | } | |
3419 | ||
3420 | extract_true_false_edges_from_block (bb, &true_edge, &false_edge); | |
3421 | ||
3422 | if (!true_edge || !false_edge | |
3423 | || !(true_edge->flags & EDGE_TRUE_VALUE) | |
3424 | || !(false_edge->flags & EDGE_FALSE_VALUE) | |
3425 | || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL)) | |
3426 | || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL)) | |
3427 | || bb->succ->succ_next->succ_next) | |
3428 | { | |
3429 | error ("Wrong outgoing edge flags at end of bb %d\n", | |
3430 | bb->index); | |
3431 | err = 1; | |
3432 | } | |
3433 | ||
3434 | if (!has_label_p (true_edge->dest, | |
3435 | GOTO_DESTINATION (COND_EXPR_THEN (stmt)))) | |
3436 | { | |
3437 | error ("`then' label does not match edge at end of bb %d\n", | |
3438 | bb->index); | |
3439 | err = 1; | |
3440 | } | |
3441 | ||
3442 | if (!has_label_p (false_edge->dest, | |
3443 | GOTO_DESTINATION (COND_EXPR_ELSE (stmt)))) | |
3444 | { | |
3445 | error ("`else' label does not match edge at end of bb %d\n", | |
3446 | bb->index); | |
3447 | err = 1; | |
3448 | } | |
3449 | } | |
3450 | break; | |
3451 | ||
3452 | case GOTO_EXPR: | |
3453 | if (simple_goto_p (stmt)) | |
3454 | { | |
3455 | error ("Explicit goto at end of bb %d\n", bb->index); | |
3456 | err = 1; | |
3457 | } | |
3458 | else | |
3459 | { | |
3460 | /* FIXME. We should double check that the labels in the | |
3461 | destination blocks have their address taken. */ | |
3462 | for (e = bb->succ; e; e = e->succ_next) | |
3463 | if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE | |
3464 | | EDGE_FALSE_VALUE)) | |
3465 | || !(e->flags & EDGE_ABNORMAL)) | |
3466 | { | |
3467 | error ("Wrong outgoing edge flags at end of bb %d\n", | |
3468 | bb->index); | |
3469 | err = 1; | |
3470 | } | |
3471 | } | |
3472 | break; | |
3473 | ||
3474 | case RETURN_EXPR: | |
3475 | if (!bb->succ || bb->succ->succ_next | |
3476 | || (bb->succ->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL | |
3477 | | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))) | |
3478 | { | |
3479 | error ("Wrong outgoing edge flags at end of bb %d\n", bb->index); | |
3480 | err = 1; | |
3481 | } | |
3482 | if (bb->succ && bb->succ->dest != EXIT_BLOCK_PTR) | |
3483 | { | |
3484 | error ("Return edge does not point to exit in bb %d\n", | |
3485 | bb->index); | |
3486 | err = 1; | |
3487 | } | |
3488 | break; | |
3489 | ||
3490 | case SWITCH_EXPR: | |
3491 | { | |
3492 | edge e; | |
3493 | size_t i, n; | |
3494 | tree vec; | |
3495 | ||
3496 | vec = SWITCH_LABELS (stmt); | |
3497 | n = TREE_VEC_LENGTH (vec); | |
3498 | ||
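| /* The aux field of each destination block is used as scratch space: | |
| 1 marks a block that is the target of some case label, 2 marks one | |
| that has also been reached by an outgoing edge.  The field is | |
| cleared again before leaving this case. */ | |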
3499 | /* Mark all the destination basic blocks. */ | |
3500 | for (i = 0; i < n; ++i) | |
3501 | { | |
3502 | tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i)); | |
3503 | basic_block label_bb = label_to_block (lab); | |
3504 | ||
3505 | if (label_bb->aux && label_bb->aux != (void *)1) | |
3506 | abort (); | |
3507 | label_bb->aux = (void *)1; | |
3508 | } | |
3509 | ||
3510 | for (e = bb->succ; e; e = e->succ_next) | |
3511 | { | |
3512 | if (!e->dest->aux) | |
3513 | { | |
3514 | error ("Extra outgoing edge %d->%d\n", | |
3515 | bb->index, e->dest->index); | |
3516 | err = 1; | |
3517 | } | |
3518 | e->dest->aux = (void *)2; | |
3519 | if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL | |
3520 | | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))) | |
3521 | { | |
3522 | error ("Wrong outgoing edge flags at end of bb %d\n", | |
3523 | bb->index); | |
3524 | err = 1; | |
3525 | } | |
3526 | } | |
3527 | ||
3528 | /* Check that we have all of them. */ | |
3529 | for (i = 0; i < n; ++i) | |
3530 | { | |
3531 | tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i)); | |
3532 | basic_block label_bb = label_to_block (lab); | |
3533 | ||
3534 | if (label_bb->aux != (void *)2) | |
3535 | { | |
3536 | error ("Missing edge %i->%i\n", | |
3537 | bb->index, label_bb->index); | |
3538 | err = 1; | |
3539 | } | |
3540 | } | |
3541 | ||
3542 | for (e = bb->succ; e; e = e->succ_next) | |
3543 | e->dest->aux = (void *)0; | |
3544 | } | |
| break; | |
3545 | ||
3546 | default: ; | |
3547 | } | |
3548 | } | |
3549 | ||
3550 | if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY) | |
3551 | verify_dominators (CDI_DOMINATORS); | |
3552 | ||
3553 | return err; | |
3554 | } | |
3555 | ||
3556 | ||
3557 | /* Update the PHI nodes of BB after creating a forwarder block joined | |
3558 | to it by edge FALLTHRU. */ | |
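| /* The forwarder block DUMMY (the source of FALLTHRU) keeps the original | |
| PHI nodes.  For each of them a new PHI node is created in BB: the | |
| FALLTHRU edge receives the renamed result of the corresponding PHI in | |
| DUMMY, while every other (redirected) incoming edge of BB receives the | |
| argument that was stored on it in PENDING_STMT. */ | |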
3559 | ||
3560 | static void | |
3561 | tree_make_forwarder_block (edge fallthru) | |
3562 | { | |
3563 | edge e; | |
3564 | basic_block dummy, bb; | |
3565 | tree phi, new_phi, var; | |
3566 | ||
3567 | dummy = fallthru->src; | |
3568 | bb = fallthru->dest; | |
3569 | ||
3570 | if (!bb->pred->pred_next) | |
3571 | return; | |
3572 | ||
3573 | /* If we redirected a branch we must create new phi nodes at the | |
3574 | start of BB. */ | |
3575 | for (phi = phi_nodes (dummy); phi; phi = TREE_CHAIN (phi)) | |
3576 | { | |
3577 | var = PHI_RESULT (phi); | |
3578 | new_phi = create_phi_node (var, bb); | |
3579 | SSA_NAME_DEF_STMT (var) = new_phi; | |
3580 | PHI_RESULT (phi) = make_ssa_name (SSA_NAME_VAR (var), phi); | |
3581 | add_phi_arg (&new_phi, PHI_RESULT (phi), fallthru); | |
3582 | } | |
3583 | ||
3584 | /* Ensure that the PHI node chains are in the same order. */ | |
3585 | set_phi_nodes (bb, nreverse (phi_nodes (bb))); | |
3586 | ||
3587 | /* Add the arguments we have stored on edges. */ | |
3588 | for (e = bb->pred; e; e = e->pred_next) | |
3589 | { | |
3590 | if (e == fallthru) | |
3591 | continue; | |
3592 | ||
3593 | for (phi = phi_nodes (bb), var = PENDING_STMT (e); | |
3594 | phi; | |
3595 | phi = TREE_CHAIN (phi), var = TREE_CHAIN (var)) | |
3596 | add_phi_arg (&phi, TREE_VALUE (var), e); | |
3597 | ||
3598 | PENDING_STMT (e) = NULL; | |
3599 | } | |
3600 | } | |
3601 | ||
3602 | ||
3603 | /* Return true if basic block BB does nothing except pass control | |
3604 | flow to another block, and we can safely insert a label at | |
3605 | the start of the successor block. */ | |
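| /* A block qualifies only if its result has not already been cached as | |
| negative, it has a single normal outgoing edge that does not reach | |
| EXIT, it is not a successor of ENTRY, it has no PHI nodes, and its | |
| body contains nothing but non-nonlocal labels.  A negative verdict is | |
| cached in bb_ann (bb)->forwardable so later queries return quickly. */ | |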
3606 | ||
3607 | static bool | |
3608 | tree_forwarder_block_p (basic_block bb) | |
3609 | { | |
3610 | block_stmt_iterator bsi; | |
3611 | edge e; | |
3612 | ||
3613 | /* If we have already determined that this block is not forwardable, | |
3614 | then no further checks are necessary. */ | |
3615 | if (! bb_ann (bb)->forwardable) | |
3616 | return false; | |
3617 | ||
3618 | /* BB must have a single outgoing normal edge. Otherwise it can not be | |
3619 | a forwarder block. */ | |
3620 | if (!bb->succ | |
3621 | || bb->succ->succ_next | |
3622 | || bb->succ->dest == EXIT_BLOCK_PTR | |
3623 | || (bb->succ->flags & EDGE_ABNORMAL) | |
3624 | || bb == ENTRY_BLOCK_PTR) | |
3625 | { | |
3626 | bb_ann (bb)->forwardable = 0; | |
3627 | return false; | |
3628 | } | |
3629 | ||
3630 | /* Successors of the entry block are not forwarders. */ | |
3631 | for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next) | |
3632 | if (e->dest == bb) | |
3633 | { | |
3634 | bb_ann (bb)->forwardable = 0; | |
3635 | return false; | |
3636 | } | |
3637 | ||
3638 | /* BB can not have any PHI nodes. This could potentially be relaxed | |
3639 | early in compilation if we re-rewrote the variables appearing in | |
3640 | any PHI nodes in forwarder blocks. */ | |
3641 | if (phi_nodes (bb)) | |
3642 | { | |
3643 | bb_ann (bb)->forwardable = 0; | |
3644 | return false; | |
3645 | } | |
3646 | ||
3647 | /* Now walk through the statements. We can ignore labels, anything else | |
3648 | means this is not a forwarder block. */ | |
3649 | for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi)) | |
3650 | { | |
3651 | tree stmt = bsi_stmt (bsi); | |
3652 | ||
3653 | switch (TREE_CODE (stmt)) | |
3654 | { | |
3655 | case LABEL_EXPR: | |
3656 | if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))) | |
3657 | return false; | |
3658 | break; | |
3659 | ||
3660 | default: | |
3661 | bb_ann (bb)->forwardable = 0; | |
3662 | return false; | |
3663 | } | |
3664 | } | |
3665 | ||
3666 | return true; | |
3667 | } | |
3668 | ||
3669 | ||
3670 | /* Thread jumps across forwarder blocks (blocks containing only labels). | |
3671 | ||
3672 | This code should _not_ thread over obviously equivalent conditions | |
3673 | as that requires nontrivial updates to the SSA graph. */ | |
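| /* For illustration: given a chain A -> F1 -> F2 -> D in which F1 and F2 | |
| are forwarder blocks, the edge A -> F1 is redirected so that A jumps | |
| straight to D, and the PHI arguments in D are copied from the edge | |
| F2 -> D.  If an edge A -> D already exists with different PHI | |
| alternatives, the jump is threaded to the last forwarder block | |
| instead, since that block has no PHI nodes to update. */ | |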
3674 | ||
3675 | static bool | |
3676 | thread_jumps (void) | |
3677 | { | |
3678 | edge e, next, last, old; | |
3679 | basic_block bb, dest, tmp; | |
3680 | tree phi; | |
3681 | int arg; | |
3682 | bool retval = false; | |
3683 | ||
3684 | FOR_EACH_BB (bb) | |
3685 | bb_ann (bb)->forwardable = 1; | |
3686 | ||
3687 | FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb) | |
3688 | { | |
3689 | /* Don't waste time on unreachable blocks. */ | |
3690 | if (!bb->pred) | |
3691 | continue; | |
3692 | ||
3693 | /* Nor on forwarders. */ | |
3694 | if (tree_forwarder_block_p (bb)) | |
3695 | continue; | |
3696 | ||
3697 | /* This block is now part of a forwarding path, mark it as not | |
3698 | forwardable so that we can detect loops. This bit will be | |
3699 | reset below. */ | |
3700 | bb_ann (bb)->forwardable = 0; | |
3701 | ||
3702 | /* Examine each of our block's successors to see if it is | |
3703 | forwardable. */ | |
3704 | for (e = bb->succ; e; e = next) | |
3705 | { | |
3706 | next = e->succ_next; | |
3707 | ||
3708 | /* If the edge is abnormal or its destination is not | |
3709 | forwardable, then there's nothing to do. */ | |
3710 | if ((e->flags & EDGE_ABNORMAL) | |
3711 | || !tree_forwarder_block_p (e->dest)) | |
3712 | continue; | |
3713 | ||
3714 | /* Now walk through as many forwarder blocks as possible to | |
3715 | find the ultimate destination we want to thread our jump | |
3716 | to. */ | |
3717 | last = e->dest->succ; | |
3718 | bb_ann (e->dest)->forwardable = 0; | |
3719 | for (dest = e->dest->succ->dest; | |
3720 | tree_forwarder_block_p (dest); | |
3721 | last = dest->succ, | |
3722 | dest = dest->succ->dest) | |
3723 | { | |
3724 | /* An infinite loop was detected.  We redirect the edge anyway, so | |
3725 | that the loop is shrunk into a single basic block. */ | |
3726 | if (!bb_ann (dest)->forwardable) | |
3727 | break; | |
3728 | ||
3729 | if (dest->succ->dest == EXIT_BLOCK_PTR) | |
3730 | break; | |
3731 | ||
3732 | bb_ann (dest)->forwardable = 0; | |
3733 | } | |
3734 | ||
3735 | /* Reset the forwardable marks to 1. */ | |
3736 | for (tmp = e->dest; | |
3737 | tmp != dest; | |
3738 | tmp = tmp->succ->dest) | |
3739 | bb_ann (tmp)->forwardable = 1; | |
3740 | ||
3741 | if (dest == e->dest) | |
3742 | continue; | |
3743 | ||
3744 | old = find_edge (bb, dest); | |
3745 | if (old) | |
3746 | { | |
3747 | /* If there already is an edge, check whether the values | |
3748 | in phi nodes differ. */ | |
3749 | if (!phi_alternatives_equal (dest, last, old)) | |
3750 | { | |
3751 | /* The previous block is a forwarder.  Redirect our jump | |
3752 | to that target instead since we know it has no PHI | |
3753 | nodes that will need updating. */ | |
3754 | dest = last->src; | |
3755 | ||
3756 | /* That might mean that no forwarding at all is possible. */ | |
3757 | if (dest == e->dest) | |
3758 | continue; | |
3759 | ||
3760 | old = find_edge (bb, dest); | |
3761 | } | |
3762 | } | |
3763 | ||
3764 | /* Perform the redirection. */ | |
3765 | retval = true; | |
3766 | e = redirect_edge_and_branch (e, dest); | |
3767 | ||
3768 | /* TODO -- updating dominators in this case is simple. */ | |
3769 | free_dominance_info (CDI_DOMINATORS); | |
3770 | ||
3771 | if (!old) | |
3772 | { | |
3773 | /* Update PHI nodes. We know that the new argument should | |
3774 | have the same value as the argument associated with LAST. | |
3775 | Otherwise we would have changed our target block above. */ | |
3776 | for (phi = phi_nodes (dest); phi; phi = TREE_CHAIN (phi)) | |
3777 | { | |
3778 | arg = phi_arg_from_edge (phi, last); | |
3779 | if (arg < 0) | |
3780 | abort (); | |
3781 | add_phi_arg (&phi, PHI_ARG_DEF (phi, arg), e); | |
3782 | } | |
3783 | } | |
3784 | } | |
3785 | ||
3786 | /* Reset the forwardable bit on our block since it's no longer in | |
3787 | a forwarding chain path. */ | |
3788 | bb_ann (bb)->forwardable = 1; | |
3789 | } | |
3790 | ||
3791 | return retval; | |
3792 | } | |
3793 | ||
3794 | ||
3795 | /* Return a non-special label at the head of basic block BB. | |
3796 | Create one if it doesn't exist. */ | |
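| /* The first non-nonlocal label found is moved to the start of the block | |
| (if it is not already there) and returned.  If no suitable label | |
| exists, an artificial one is created and inserted before the first | |
| statement. */ | |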
3797 | ||
3798 | static tree | |
3799 | tree_block_label (basic_block bb) | |
3800 | { | |
3801 | block_stmt_iterator i, s = bsi_start (bb); | |
3802 | bool first = true; | |
3803 | tree label, stmt; | |
3804 | ||
3805 | for (i = s; !bsi_end_p (i); first = false, bsi_next (&i)) | |
3806 | { | |
3807 | stmt = bsi_stmt (i); | |
3808 | if (TREE_CODE (stmt) != LABEL_EXPR) | |
3809 | break; | |
3810 | label = LABEL_EXPR_LABEL (stmt); | |
3811 | if (!DECL_NONLOCAL (label)) | |
3812 | { | |
3813 | if (!first) | |
3814 | bsi_move_before (&i, &s); | |
3815 | return label; | |
3816 | } | |
3817 | } | |
3818 | ||
3819 | label = create_artificial_label (); | |
3820 | stmt = build1 (LABEL_EXPR, void_type_node, label); | |
3821 | bsi_insert_before (&s, stmt, BSI_NEW_STMT); | |
3822 | return label; | |
3823 | } | |
3824 | ||
3825 | ||
3826 | /* Attempt to perform edge redirection by replacing a possibly complex | |
3827 | jump instruction by a goto or by removing the jump completely. | |
3828 | This can apply only if all edges now point to the same block. The | |
3829 | parameters and return values are equivalent to | |
3830 | redirect_edge_and_branch. */ | |
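| /* The redirection succeeds only when every other outgoing edge of E's | |
| source already points to TARGET and the block ends in a COND_EXPR or | |
| SWITCH_EXPR; that statement is removed and E is turned into a plain | |
| fallthru edge to TARGET. */ | |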
3831 | ||
3832 | static edge | |
3833 | tree_try_redirect_by_replacing_jump (edge e, basic_block target) | |
3834 | { | |
3835 | basic_block src = e->src; | |
3836 | edge tmp; | |
3837 | block_stmt_iterator b; | |
3838 | tree stmt; | |
3839 | ||
3840 | /* Verify that all targets will be TARGET. */ | |
3841 | for (tmp = src->succ; tmp; tmp = tmp->succ_next) | |
3842 | if (tmp->dest != target && tmp != e) | |
3843 | break; | |
3844 | ||
3845 | if (tmp) | |
3846 | return NULL; | |
3847 | ||
3848 | b = bsi_last (src); | |
3849 | if (bsi_end_p (b)) | |
3850 | return NULL; | |
3851 | stmt = bsi_stmt (b); | |
3852 | ||
3853 | if (TREE_CODE (stmt) == COND_EXPR | |
3854 | || TREE_CODE (stmt) == SWITCH_EXPR) | |
3855 | { | |
3856 | bsi_remove (&b); | |
3857 | e = ssa_redirect_edge (e, target); | |
3858 | e->flags = EDGE_FALLTHRU; | |
3859 | return e; | |
3860 | } | |
3861 | ||
3862 | return NULL; | |
3863 | } | |
3864 | ||
3865 | ||
3866 | /* Redirect E to DEST. Return NULL on failure. Otherwise, return the | |
3867 | edge representing the redirected branch. */ | |
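| /* The last statement of E's source block determines how the branch is | |
| rewritten: for a COND_EXPR the GOTO destination of the corresponding | |
| arm is changed; for a SWITCH_EXPR every CASE_LABEL that led to the | |
| old destination is retargeted; a RETURN_EXPR is removed and E becomes | |
| a fallthru edge; in all other cases E must already be a fallthru | |
| edge.  Abnormal call and EH edges are never redirected. */ | |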
3868 | ||
3869 | static edge | |
3870 | tree_redirect_edge_and_branch (edge e, basic_block dest) | |
3871 | { | |
3872 | basic_block bb = e->src; | |
3873 | block_stmt_iterator bsi; | |
3874 | edge ret; | |
3875 | tree label, stmt; | |
3876 | ||
3877 | if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)) | |
3878 | return NULL; | |
3879 | ||
3880 | if (e->src != ENTRY_BLOCK_PTR | |
3881 | && (ret = tree_try_redirect_by_replacing_jump (e, dest))) | |
3882 | return ret; | |
3883 | ||
3884 | if (e->dest == dest) | |
3885 | return NULL; | |
3886 | ||
3887 | label = tree_block_label (dest); | |
3888 | ||
3889 | bsi = bsi_last (bb); | |
3890 | stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi); | |
3891 | ||
3892 | switch (stmt ? TREE_CODE (stmt) : ERROR_MARK) | |
3893 | { | |
3894 | case COND_EXPR: | |
3895 | stmt = (e->flags & EDGE_TRUE_VALUE | |
3896 | ? COND_EXPR_THEN (stmt) | |
3897 | : COND_EXPR_ELSE (stmt)); | |
3898 | GOTO_DESTINATION (stmt) = label; | |
3899 | break; | |
3900 | ||
3901 | case GOTO_EXPR: | |
3902 | /* No non-abnormal edges should lead from a non-simple goto, and | |
3903 | simple ones should be represented implicitly. */ | |
3904 | abort (); | |
3905 | ||
3906 | case SWITCH_EXPR: | |
3907 | { | |
3908 | tree vec = SWITCH_LABELS (stmt); | |
3909 | size_t i, n = TREE_VEC_LENGTH (vec); | |
3910 | ||
3911 | for (i = 0; i < n; ++i) | |
3912 | { | |
3913 | tree elt = TREE_VEC_ELT (vec, i); | |
3914 | if (label_to_block (CASE_LABEL (elt)) == e->dest) | |
3915 | CASE_LABEL (elt) = label; | |
3916 | } | |
3917 | } | |
3918 | break; | |
3919 | ||
3920 | case RETURN_EXPR: | |
3921 | bsi_remove (&bsi); | |
3922 | e->flags |= EDGE_FALLTHRU; | |
3923 | break; | |
3924 | ||
3925 | default: | |
3926 | /* Otherwise it must be a fallthru edge, and we don't need to | |
3927 | do anything besides redirecting it. */ | |
3928 | if (!(e->flags & EDGE_FALLTHRU)) | |
3929 | abort (); | |
3930 | break; | |
3931 | } | |
3932 | ||
3933 | /* Update/insert PHI nodes as necessary. */ | |
3934 | ||
3935 | /* Now update the edges in the CFG. */ | |
3936 | e = ssa_redirect_edge (e, dest); | |
3937 | ||
3938 | return e; | |
3939 | } | |
3940 | ||
3941 | ||
3942 | /* Simple wrapper, as we can always redirect fallthru edges. */ | |
3943 | ||
3944 | static basic_block | |
3945 | tree_redirect_edge_and_branch_force (edge e, basic_block dest) | |
3946 | { | |
3947 | e = tree_redirect_edge_and_branch (e, dest); | |
3948 | if (!e) | |
3949 | abort (); | |
3950 | ||
3951 | return NULL; | |
3952 | } | |
3953 | ||
3954 | ||
3955 | /* Splits basic block BB after statement STMT (but at least after the | |
3956 | labels). If STMT is NULL, BB is split just after the labels. */ | |
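| /* Labels always stay in BB; every statement after STMT is moved to the | |
| new block, and BB's outgoing edges are transferred to the new block. */ | |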
3957 | ||
3958 | static basic_block | |
3959 | tree_split_block (basic_block bb, void *stmt) | |
3960 | { | |
3961 | block_stmt_iterator bsi, bsi_tgt; | |
3962 | tree act; | |
3963 | basic_block new_bb; | |
3964 | edge e; | |
3965 | ||
3966 | new_bb = create_empty_bb (bb); | |
3967 | ||
3968 | /* Redirect the outgoing edges. */ | |
3969 | new_bb->succ = bb->succ; | |
3970 | bb->succ = NULL; | |
3971 | for (e = new_bb->succ; e; e = e->succ_next) | |
3972 | e->src = new_bb; | |
3973 | ||
3974 | if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR) | |
3975 | stmt = NULL; | |
3976 | ||
3977 | /* Move everything from BSI to the new basic block. */ | |
3978 | for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi)) | |
3979 | { | |
3980 | act = bsi_stmt (bsi); | |
3981 | if (TREE_CODE (act) == LABEL_EXPR) | |
3982 | continue; | |
3983 | ||
3984 | if (!stmt) | |
3985 | break; | |
3986 | ||
3987 | if (stmt == act) | |
3988 | { | |
3989 | bsi_next (&bsi); | |
3990 | break; | |
3991 | } | |
3992 | } | |
3993 | ||
3994 | bsi_tgt = bsi_start (new_bb); | |
3995 | while (!bsi_end_p (bsi)) | |
3996 | { | |
3997 | act = bsi_stmt (bsi); | |
3998 | bsi_remove (&bsi); | |
3999 | bsi_insert_after (&bsi_tgt, act, BSI_NEW_STMT); | |
4000 | } | |
4001 | ||
4002 | return new_bb; | |
4003 | } | |
4004 | ||
4005 | ||
4006 | /* Moves basic block BB after block AFTER. */ | |
4007 | ||
4008 | static bool | |
4009 | tree_move_block_after (basic_block bb, basic_block after) | |
4010 | { | |
4011 | if (bb->prev_bb == after) | |
4012 | return true; | |
4013 | ||
4014 | unlink_block (bb); | |
4015 | link_block (bb, after); | |
4016 | ||
4017 | return true; | |
4018 | } | |
4019 | ||
4020 | ||
4021 | /* Return true if basic block BB can be duplicated. */ | |
4022 | ||
4023 | static bool | |
4024 | tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED) | |
4025 | { | |
4026 | return true; | |
4027 | } | |
4028 | ||
4029 | ||
4030 | /* Create a duplicate of the basic block BB. NOTE: This does not | |
4031 | preserve SSA form. */ | |
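| /* Only the statements are copied; labels are skipped because their | |
| LABEL_DECLs cannot be shared between blocks, and no edges are created | |
| for the new block, which is placed just before the exit block. */ | |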
4032 | ||
4033 | static basic_block | |
4034 | tree_duplicate_bb (basic_block bb) | |
4035 | { | |
4036 | basic_block new_bb; | |
4037 | block_stmt_iterator bsi, bsi_tgt; | |
4038 | ||
4039 | new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb); | |
4040 | bsi_tgt = bsi_start (new_bb); | |
4041 | for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi)) | |
4042 | { | |
4043 | tree stmt = bsi_stmt (bsi); | |
4044 | ||
4045 | if (TREE_CODE (stmt) == LABEL_EXPR) | |
4046 | continue; | |
4047 | ||
4048 | bsi_insert_after (&bsi_tgt, unshare_expr (stmt), BSI_NEW_STMT); | |
4049 | } | |
4050 | ||
4051 | return new_bb; | |
4052 | } | |
4053 | ||
4054 | ||
4055 | /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h). */ | |
4056 | ||
4057 | void | |
4058 | dump_function_to_file (tree fn, FILE *file, int flags) | |
4059 | { | |
4060 | tree arg, vars, var; | |
4061 | bool ignore_topmost_bind = false, any_var = false; | |
4062 | basic_block bb; | |
4063 | tree chain; | |
4064 | ||
4065 | fprintf (file, "%s (", (*lang_hooks.decl_printable_name) (fn, 2)); | |
4066 | ||
4067 | arg = DECL_ARGUMENTS (fn); | |
4068 | while (arg) | |
4069 | { | |
4070 | print_generic_expr (file, arg, dump_flags); | |
4071 | if (TREE_CHAIN (arg)) | |
4072 | fprintf (file, ", "); | |
4073 | arg = TREE_CHAIN (arg); | |
4074 | } | |
4075 | fprintf (file, ")\n"); | |
4076 | ||
4077 | if (flags & TDF_RAW) | |
4078 | { | |
4079 | dump_node (fn, TDF_SLIM | flags, file); | |
4080 | return; | |
4081 | } | |
4082 | ||
4083 | /* When GIMPLE is lowered, the variables are no longer available in | |
4084 | BIND_EXPRs, so display them separately. */ | |
4085 | if (cfun && cfun->unexpanded_var_list) | |
4086 | { | |
4087 | ignore_topmost_bind = true; | |
4088 | ||
4089 | fprintf (file, "{\n"); | |
4090 | for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars)) | |
4091 | { | |
4092 | var = TREE_VALUE (vars); | |
4093 | ||
4094 | print_generic_decl (file, var, flags); | |
4095 | fprintf (file, "\n"); | |
4096 | ||
4097 | any_var = true; | |
4098 | } | |
4099 | } | |
4100 | ||
4101 | if (basic_block_info) | |
4102 | { | |
4103 | /* Make a CFG based dump. */ | |
4104 | if (!ignore_topmost_bind) | |
4105 | fprintf (file, "{\n"); | |
4106 | ||
4107 | if (any_var && n_basic_blocks) | |
4108 | fprintf (file, "\n"); | |
4109 | ||
4110 | FOR_EACH_BB (bb) | |
4111 | dump_generic_bb (file, bb, 2, flags); | |
4112 | ||
4113 | fprintf (file, "}\n"); | |
4114 | } | |
4115 | else | |
4116 | { | |
4117 | int indent; | |
4118 | ||
4119 | /* Make a tree based dump. */ | |
4120 | chain = DECL_SAVED_TREE (fn); | |
4121 | ||
4122 | if (TREE_CODE (chain) == BIND_EXPR) | |
4123 | { | |
4124 | if (ignore_topmost_bind) | |
4125 | { | |
4126 | chain = BIND_EXPR_BODY (chain); | |
4127 | indent = 2; | |
4128 | } | |
4129 | else | |
4130 | indent = 0; | |
4131 | } | |
4132 | else | |
4133 | { | |
4134 | if (!ignore_topmost_bind) | |
4135 | fprintf (file, "{\n"); | |
4136 | indent = 2; | |
4137 | } | |
4138 | ||
4139 | if (any_var) | |
4140 | fprintf (file, "\n"); | |
4141 | ||
4142 | print_generic_stmt_indented (file, chain, flags, indent); | |
4143 | if (ignore_topmost_bind) | |
4144 | fprintf (file, "}\n"); | |
4145 | } | |
4146 | ||
4147 | fprintf (file, "\n\n"); | |
4148 | } | |
4149 | ||
4150 | ||
4151 | /* Pretty printing of the loop intermediate representation. */ | |
4152 | static void print_loop (FILE *, struct loop *, int); | |
4153 | static void print_pred_bbs (FILE *, edge); | |
4154 | static void print_succ_bbs (FILE *, edge); | |
4155 | ||
4156 | ||
4157 | /* Print the predecessor block indices of the edge list starting at E on FILE. */ | |
4158 | ||
4159 | static void | |
4160 | print_pred_bbs (FILE *file, edge e) | |
4161 | { | |
4162 | if (e == NULL) | |
4163 | return; | |
4164 | ||
4165 | else if (e->pred_next == NULL) | |
4166 | fprintf (file, "bb_%d", e->src->index); | |
4167 | ||
4168 | else | |
4169 | { | |
4170 | fprintf (file, "bb_%d, ", e->src->index); | |
4171 | print_pred_bbs (file, e->pred_next); | |
4172 | } | |
4173 | } | |
4174 | ||
4175 | ||
4176 | /* Print the successor block indices of the edge list starting at E on FILE. */ | |
4177 | ||
4178 | static void | |
4179 | print_succ_bbs (FILE *file, edge e) | |
4180 | { | |
4181 | if (e == NULL) | |
4182 | return; | |
4183 | else if (e->succ_next == NULL) | |
4184 | fprintf (file, "bb_%d", e->dest->index); | |
4185 | else | |
4186 | { | |
4187 | fprintf (file, "bb_%d, ", e->dest->index); | |
4188 | print_succ_bbs (file, e->succ_next); | |
4189 | } | |
4190 | } | |
4191 | ||
4192 | ||
4193 | /* Pretty print LOOP on FILE, indented INDENT spaces. */ | |
4194 | ||
4195 | static void | |
4196 | print_loop (FILE *file, struct loop *loop, int indent) | |
4197 | { | |
4198 | char *s_indent; | |
4199 | basic_block bb; | |
4200 | ||
4201 | if (loop == NULL) | |
4202 | return; | |
4203 | ||
4204 | s_indent = (char *) alloca ((size_t) indent + 1); | |
4205 | memset ((void *) s_indent, ' ', (size_t) indent); | |
4206 | s_indent[indent] = '\0'; | |
4207 | ||
4208 | /* Print the loop's header. */ | |
4209 | fprintf (file, "%sloop_%d\n", s_indent, loop->num); | |
4210 | ||
4211 | /* Print the loop's body. */ | |
4212 | fprintf (file, "%s{\n", s_indent); | |
4213 | FOR_EACH_BB (bb) | |
4214 | if (bb->loop_father == loop) | |
4215 | { | |
4216 | /* Print the basic_block's header. */ | |
4217 | fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index); | |
4218 | print_pred_bbs (file, bb->pred); | |
4219 | fprintf (file, "}, succs = {"); | |
4220 | print_succ_bbs (file, bb->succ); | |
4221 | fprintf (file, "})\n"); | |
4222 | ||
4223 | /* Print the basic_block's body. */ | |
4224 | fprintf (file, "%s {\n", s_indent); | |
4225 | tree_dump_bb (bb, file, indent + 4); | |
4226 | fprintf (file, "%s }\n", s_indent); | |
4227 | } | |
4228 | ||
4229 | print_loop (file, loop->inner, indent + 2); | |
4230 | fprintf (file, "%s}\n", s_indent); | |
4231 | print_loop (file, loop->next, indent); | |
4232 | } | |
4233 | ||
4234 | ||
4235 | /* Starting from the first basic block of the program, pretty print the | |
4236 | enclosing loop structure on FILE. */ | |
4237 | ||
4238 | void | |
4239 | print_loop_ir (FILE *file) | |
4240 | { | |
4241 | basic_block bb; | |
4242 | ||
4243 | bb = BASIC_BLOCK (0); | |
4244 | if (bb && bb->loop_father) | |
4245 | print_loop (file, bb->loop_father, 0); | |
4246 | } | |
4247 | ||
4248 | ||
4249 | /* Debug the loop structure at the tree level. */ | |
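| /* This is typically invoked by hand from a debugger session, e.g. | |
| "call debug_loop_ir ()" from gdb, rather than from compiler code. */ | |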
4250 | ||
4251 | void | |
4252 | debug_loop_ir (void) | |
4253 | { | |
4254 | print_loop_ir (stderr); | |
4255 | } | |
4256 | ||
4257 | ||
4258 | /* Return true if BB ends with a call, possibly followed by some | |
4259 | instructions that must stay with the call.  Return false | |
4260 | otherwise. */ | |
4261 | ||
4262 | static bool | |
4263 | tree_block_ends_with_call_p (basic_block bb) | |
4264 | { | |
4265 | block_stmt_iterator bsi = bsi_last (bb); | |
4266 | tree t = tsi_stmt (bsi.tsi); | |
4267 | ||
4268 | if (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0)) | |
4269 | t = TREE_OPERAND (t, 0); | |
4270 | ||
4271 | if (TREE_CODE (t) == MODIFY_EXPR) | |
4272 | t = TREE_OPERAND (t, 1); | |
4273 | ||
4274 | return TREE_CODE (t) == CALL_EXPR; | |
4275 | } | |
4276 | ||
4277 | ||
4278 | /* Return true if BB ends with a conditional branch.  Return false | |
4279 | otherwise. */ | |
4280 | ||
4281 | static bool | |
4282 | tree_block_ends_with_condjump_p (basic_block bb) | |
4283 | { | |
4284 | tree stmt = tsi_stmt (bsi_last (bb).tsi); | |
4285 | return (TREE_CODE (stmt) == COND_EXPR); | |
4286 | } | |
4287 | ||
4288 | ||
4289 | /* Return true if we need to add fake edge to exit at statement T. | |
4290 | Helper function for tree_flow_call_edges_add. */ | |
4291 | ||
4292 | static bool | |
4293 | need_fake_edge_p (tree t) | |
4294 | { | |
4295 | if (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0)) | |
4296 | t = TREE_OPERAND (t, 0); | |
4297 | ||
4298 | if (TREE_CODE (t) == MODIFY_EXPR) | |
4299 | t = TREE_OPERAND (t, 1); | |
4300 | ||
4301 | /* NORETURN and LONGJMP calls already have an edge to exit. | |
4302 | CONST, PURE and ALWAYS_RETURN calls do not need one. | |
4303 | We don't currently check for CONST and PURE here, although | |
4304 | it would be a good idea, because those attributes are | |
4305 | figured out from the RTL in mark_constant_function, and | |
4306 | the counter incrementation code from -fprofile-arcs | |
4307 | leads to different results from -fbranch-probabilities. */ | |
4308 | if (TREE_CODE (t) == CALL_EXPR | |
4309 | && !(call_expr_flags (t) & | |
4310 | (ECF_NORETURN | ECF_LONGJMP | ECF_ALWAYS_RETURN))) | |
4311 | return true; | |
4312 | ||
4313 | if (TREE_CODE (t) == ASM_EXPR | |
4314 | && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t))) | |
4315 | return true; | |
4316 | ||
4317 | return false; | |
4318 | } | |
4319 | ||
4320 | ||
4321 | /* Add fake edges to the function exit for any non-constant and | |
4322 | non-noreturn calls and for volatile inline assembly, in the bitmap of | |
4323 | blocks specified by BLOCKS, or in the whole CFG if BLOCKS is zero.  Return | |
4324 | the number of blocks that were split. | |
4325 | ||
4326 | The goal is to expose cases in which entering a basic block does | |
4327 | not imply that all subsequent instructions must be executed. */ | |
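| /* The work is done in two steps: first, if the last basic block may need | |
| a fake edge, an empty statement is inserted on its existing edge to | |
| EXIT so that the fallthru and fake edges are not folded together; | |
| second, every selected block is scanned backwards and split after each | |
| statement that needs a fake edge, and a fake edge to EXIT is added | |
| from the block that now ends with that statement. */ | |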
4328 | ||
4329 | static int | |
4330 | tree_flow_call_edges_add (sbitmap blocks) | |
4331 | { | |
4332 | int i; | |
4333 | int blocks_split = 0; | |
4334 | int last_bb = last_basic_block; | |
4335 | bool check_last_block = false; | |
4336 | ||
4337 | if (n_basic_blocks == 0) | |
4338 | return 0; | |
4339 | ||
4340 | if (! blocks) | |
4341 | check_last_block = true; | |
4342 | else | |
4343 | check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index); | |
4344 | ||
4345 | /* In the last basic block, before epilogue generation, there will be | |
4346 | a fallthru edge to EXIT. Special care is required if the last insn | |
4347 | of the last basic block is a call because make_edge folds duplicate | |
4348 | edges, which would result in the fallthru edge also being marked | |
4349 | fake, which would result in the fallthru edge being removed by | |
4350 | remove_fake_edges, which would result in an invalid CFG. | |
4351 | ||
4352 | Moreover, we can't elide the outgoing fake edge, since the block | |
4353 | profiler needs to take this into account in order to solve the minimal | |
4354 | spanning tree in the case that the call doesn't return. | |
4355 | ||
4356 | Handle this by adding a dummy instruction in a new last basic block. */ | |
4357 | if (check_last_block) | |
4358 | { | |
4359 | basic_block bb = EXIT_BLOCK_PTR->prev_bb; | |
4360 | block_stmt_iterator bsi = bsi_last (bb); | |
4361 | tree t = NULL_TREE; | |
4362 | if (!bsi_end_p (bsi)) | |
4363 | t = bsi_stmt (bsi); | |
4364 | ||
4365 | if (need_fake_edge_p (t)) | |
4366 | { | |
4367 | edge e; | |
4368 | ||
4369 | for (e = bb->succ; e; e = e->succ_next) | |
4370 | if (e->dest == EXIT_BLOCK_PTR) | |
4371 | { | |
4372 | bsi_insert_on_edge (e, build_empty_stmt ()); | |
4373 | bsi_commit_edge_inserts ((int *)NULL); | |
4374 | break; | |
4375 | } | |
4376 | } | |
4377 | } | |
4378 | ||
4379 | /* Now add fake edges to the function exit for any non-constant | |
4380 | calls since there is no way that we can determine if they will | |
4381 | return or not... */ | |
4382 | for (i = 0; i < last_bb; i++) | |
4383 | { | |
4384 | basic_block bb = BASIC_BLOCK (i); | |
4385 | block_stmt_iterator bsi; | |
4386 | tree stmt, last_stmt; | |
4387 | ||
4388 | if (!bb) | |
4389 | continue; | |
4390 | ||
4391 | if (blocks && !TEST_BIT (blocks, i)) | |
4392 | continue; | |
4393 | ||
4394 | bsi = bsi_last (bb); | |
4395 | if (!bsi_end_p (bsi)) | |
4396 | { | |
4397 | last_stmt = bsi_stmt (bsi); | |
4398 | do | |
4399 | { | |
4400 | stmt = bsi_stmt (bsi); | |
4401 | if (need_fake_edge_p (stmt)) | |
4402 | { | |
4403 | edge e; | |
4404 | /* The handling above of the final block before the | |
4405 | epilogue should be enough to verify that there is | |
4406 | no edge to the exit block in CFG already. | |
4407 | Calling make_edge in such case would cause us to | |
4408 | mark that edge as fake and remove it later. */ | |
4409 | #ifdef ENABLE_CHECKING | |
4410 | if (stmt == last_stmt) | |
4411 | for (e = bb->succ; e; e = e->succ_next) | |
4412 | if (e->dest == EXIT_BLOCK_PTR) | |
4413 | abort (); | |
4414 | #endif | |
4415 | ||
4416 | /* Note that the following may create a new basic block | |
4417 | and renumber the existing basic blocks. */ | |
4418 | if (stmt != last_stmt) | |
4419 | { | |
4420 | e = split_block (bb, stmt); | |
4421 | if (e) | |
4422 | blocks_split++; | |
4423 | } | |
4424 | make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE); | |
4425 | } | |
4426 | bsi_prev (&bsi); | |
4427 | } | |
4428 | while (!bsi_end_p (bsi)); | |
4429 | } | |
4430 | } | |
4431 | ||
4432 | if (blocks_split) | |
4433 | verify_flow_info (); | |
4434 | ||
4435 | return blocks_split; | |
4436 | } | |
4437 | ||
4438 | ||
4439 | struct cfg_hooks tree_cfg_hooks = { | |
4440 | "tree", | |
4441 | tree_verify_flow_info, | |
4442 | tree_dump_bb, /* dump_bb */ | |
4443 | create_bb, /* create_basic_block */ | |
4444 | tree_redirect_edge_and_branch,/* redirect_edge_and_branch */ | |
4445 | tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */ | |
4446 | remove_bb, /* delete_basic_block */ | |
4447 | tree_split_block, /* split_block */ | |
4448 | tree_move_block_after, /* move_block_after */ | |
4449 | tree_can_merge_blocks_p, /* can_merge_blocks_p */ | |
4450 | tree_merge_blocks, /* merge_blocks */ | |
4451 | tree_predict_edge, /* predict_edge */ | |
4452 | tree_predicted_by_p, /* predicted_by_p */ | |
4453 | tree_can_duplicate_bb_p, /* can_duplicate_block_p */ | |
4454 | tree_duplicate_bb, /* duplicate_block */ | |
4455 | tree_split_edge, /* split_edge */ | |
4456 | tree_make_forwarder_block, /* make_forwarder_block */ | |
4457 | NULL, /* tidy_fallthru_edge */ | |
4458 | tree_block_ends_with_call_p, /* block_ends_with_call_p */ | |
4459 | tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */ | |
4460 | tree_flow_call_edges_add /* flow_call_edges_add */ | |
4461 | }; | |
4462 | ||
4463 | ||
4464 | /* Split all critical edges. */ | |
4465 | ||
4466 | static void | |
4467 | split_critical_edges (void) | |
4468 | { | |
4469 | basic_block bb; | |
4470 | edge e; | |
4471 | ||
4472 | FOR_ALL_BB (bb) | |
4473 | { | |
4474 | for (e = bb->succ; e ; e = e->succ_next) | |
4475 | if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL)) | |
4476 | { | |
4477 | split_edge (e); | |
4478 | } | |
4479 | } | |
4480 | } | |
4481 | ||
4482 | struct tree_opt_pass pass_split_crit_edges = | |
4483 | { | |
4484 | NULL, /* name */ | |
4485 | NULL, /* gate */ | |
4486 | split_critical_edges, /* execute */ | |
4487 | NULL, /* sub */ | |
4488 | NULL, /* next */ | |
4489 | 0, /* static_pass_number */ | |
4490 | TV_TREE_SPLIT_EDGES, /* tv_id */ | |
4491 | PROP_cfg, /* properties_required */ | |
4492 | PROP_no_crit_edges, /* properties_provided */ | |
4493 | 0, /* properties_destroyed */ | |
4494 | 0, /* todo_flags_start */ | |
4495 | 0, /* todo_flags_finish */ | |
4496 | }; | |
4497 | \f | |
4498 | /* Emit return warnings. */ | |
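| /* Three diagnostics are produced here: a -Wmissing-noreturn hint when a | |
| function never reaches EXIT but is not marked noreturn, a warning when | |
| a noreturn function does return, and a -Wreturn-type warning when | |
| control reaches the end of a non-void function via a plain "return;". */ | |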
4499 | ||
4500 | static void | |
4501 | execute_warn_function_return (void) | |
4502 | { | |
4503 | location_t *locus; | |
4504 | tree last; | |
4505 | edge e; | |
4506 | ||
4507 | if (warn_missing_noreturn | |
4508 | && !TREE_THIS_VOLATILE (cfun->decl) | |
4509 | && EXIT_BLOCK_PTR->pred == NULL | |
4510 | && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl)) | |
4511 | warning ("%Jfunction might be possible candidate for attribute `noreturn'", | |
4512 | cfun->decl); | |
4513 | ||
4514 | /* If we have a path to EXIT, then we do return. */ | |
4515 | if (TREE_THIS_VOLATILE (cfun->decl) | |
4516 | && EXIT_BLOCK_PTR->pred != NULL) | |
4517 | { | |
4518 | locus = NULL; | |
4519 | for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next) | |
4520 | { | |
4521 | last = last_stmt (e->src); | |
4522 | if (TREE_CODE (last) == RETURN_EXPR | |
4523 | && (locus = EXPR_LOCUS (last)) != NULL) | |
4524 | break; | |
4525 | } | |
4526 | if (!locus) | |
4527 | locus = &cfun->function_end_locus; | |
4528 | warning ("%H`noreturn' function does return", locus); | |
4529 | } | |
4530 | ||
4531 | /* If we see "return;" in some basic block, then we do reach the end | |
4532 | without returning a value. */ | |
4533 | else if (warn_return_type | |
4534 | && EXIT_BLOCK_PTR->pred != NULL | |
4535 | && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl)))) | |
4536 | { | |
4537 | for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next) | |
4538 | { | |
4539 | tree last = last_stmt (e->src); | |
4540 | if (TREE_CODE (last) == RETURN_EXPR | |
4541 | && TREE_OPERAND (last, 0) == NULL) | |
4542 | { | |
4543 | locus = EXPR_LOCUS (last); | |
4544 | if (!locus) | |
4545 | locus = &cfun->function_end_locus; | |
4546 | warning ("%Hcontrol reaches end of non-void function", locus); | |
4547 | break; | |
4548 | } | |
4549 | } | |
4550 | } | |
4551 | } | |
4552 | ||
4553 | ||
4554 | /* Given a basic block B which ends with a conditional and has | |
4555 | precisely two successors, determine which of the edges is taken if | |
4556 | the conditional is true and which is taken if the conditional is | |
4557 | false. Set TRUE_EDGE and FALSE_EDGE appropriately. */ | |
4558 | ||
4559 | void | |
4560 | extract_true_false_edges_from_block (basic_block b, | |
4561 | edge *true_edge, | |
4562 | edge *false_edge) | |
4563 | { | |
4564 | edge e = b->succ; | |
4565 | ||
4566 | if (e->flags & EDGE_TRUE_VALUE) | |
4567 | { | |
4568 | *true_edge = e; | |
4569 | *false_edge = e->succ_next; | |
4570 | } | |
4571 | else | |
4572 | { | |
4573 | *false_edge = e; | |
4574 | *true_edge = e->succ_next; | |
4575 | } | |
4576 | } | |
4577 | ||
4578 | struct tree_opt_pass pass_warn_function_return = | |
4579 | { | |
4580 | NULL, /* name */ | |
4581 | NULL, /* gate */ | |
4582 | execute_warn_function_return, /* execute */ | |
4583 | NULL, /* sub */ | |
4584 | NULL, /* next */ | |
4585 | 0, /* static_pass_number */ | |
4586 | 0, /* tv_id */ | |
4587 | PROP_ssa, /* properties_required */ | |
4588 | 0, /* properties_provided */ | |
4589 | 0, /* properties_destroyed */ | |
4590 | 0, /* todo_flags_start */ | |
4591 | 0 /* todo_flags_finish */ | |
4592 | }; | |
4593 | ||
4594 | #include "gt-tree-cfg.h" |